core/ptr/const_ptr.rs

use super::*;
use crate::cmp::Ordering::{Equal, Greater, Less};
use crate::intrinsics::const_eval_select;
use crate::mem::{self, SizedTypeProperties};
use crate::slice::{self, SliceIndex};

impl<T: ?Sized> *const T {
    /// Returns `true` if the pointer is null.
    ///
    /// Note that unsized types have many possible null pointers, as only the
    /// raw data pointer is considered, not their length, vtable, etc.
    /// Therefore, two pointers that are null may still not compare equal to
    /// each other.
    ///
    /// # Panics during const evaluation
    ///
    /// If this method is used during const evaluation, and `self` is a pointer
    /// that is offset beyond the bounds of the memory it initially pointed to,
    /// then there might not be enough information to determine whether the
    /// pointer is null. This is because the absolute address in memory is not
    /// known at compile time. If the nullness of the pointer cannot be
    /// determined, this method will panic.
    ///
    /// In-bounds pointers are never null, so the method will never panic for
    /// such pointers.
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "Follow the rabbit";
    /// let ptr: *const u8 = s.as_ptr();
    /// assert!(!ptr.is_null());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[rustc_diagnostic_item = "ptr_const_is_null"]
    #[inline]
    #[rustc_allow_const_fn_unstable(const_eval_select)]
    pub const fn is_null(self) -> bool {
        // Compare via a cast to a thin pointer, so fat pointers only
        // consider their "data" part for null-ness.
        let ptr = self as *const u8;
        const_eval_select!(
            @capture { ptr: *const u8 } -> bool:
            // This use of `const_raw_ptr_comparison` has been explicitly blessed by t-lang.
            if const #[rustc_allow_const_fn_unstable(const_raw_ptr_comparison)] {
                match (ptr).guaranteed_eq(null_mut()) {
                    Some(res) => res,
                    // To remain maximally conservative, we stop execution when we don't
                    // know whether the pointer is null or not.
                    // We can *not* return `false` here, as that would be unsound in `NonNull::new`!
                    None => panic!("null-ness of this pointer cannot be determined in const context"),
                }
            } else {
                ptr.addr() == 0
            }
        )
    }

    /// Casts to a pointer of another type.
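    ///
    /// # Examples
    ///
    /// A small sketch of the cast; the address is unchanged, only the pointee
    /// type differs:
    ///
    /// ```
    /// let v = [1u16, 2, 3];
    /// let ptr: *const u16 = v.as_ptr();
    /// let byte_ptr: *const u8 = ptr.cast::<u8>();
    /// assert_eq!(ptr as usize, byte_ptr as usize);
    /// ```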
    #[stable(feature = "ptr_cast", since = "1.38.0")]
    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
    #[rustc_diagnostic_item = "const_ptr_cast"]
    #[inline(always)]
    pub const fn cast<U>(self) -> *const U {
        self as _
    }

    /// Uses the address value in a new pointer of another type.
    ///
    /// This operation will ignore the address part of its `meta` operand and discard existing
    /// metadata of `self`. For pointers to a sized type (thin pointers), this has the same effect
    /// as a simple cast. For pointers to an unsized type (fat pointers), this recombines the address
    /// with new metadata such as a slice length or `dyn`-vtable.
    ///
    /// The resulting pointer will have provenance of `self`. This operation is semantically the
    /// same as creating a new pointer with the data pointer value of `self` but the metadata of
    /// `meta`, being fat or thin depending on the `meta` operand.
    ///
    /// # Examples
    ///
    /// This function is primarily useful for enabling pointer arithmetic on potentially fat
    /// pointers. The pointer is cast to a sized pointee to utilize offset operations and then
    /// recombined with its own original metadata.
    ///
    /// ```
    /// #![feature(set_ptr_value)]
    /// # use core::fmt::Debug;
    /// let arr: [i32; 3] = [1, 2, 3];
    /// let mut ptr = arr.as_ptr() as *const dyn Debug;
    /// let thin = ptr as *const u8;
    /// unsafe {
    ///     ptr = thin.add(8).with_metadata_of(ptr);
    ///     # assert_eq!(*(ptr as *const i32), 3);
    ///     println!("{:?}", &*ptr); // will print "3"
    /// }
    /// ```
    ///
    /// # *Incorrect* usage
    ///
    /// The provenance from pointers is *not* combined. The result must only be used to refer to the
    /// address allowed by `self`.
    ///
    /// ```rust,no_run
    /// #![feature(set_ptr_value)]
    /// let x = 0u32;
    /// let y = 1u32;
    ///
    /// let x = (&x) as *const u32;
    /// let y = (&y) as *const u32;
    ///
    /// let offset = (x as usize - y as usize) / 4;
    /// let bad = x.wrapping_add(offset).with_metadata_of(y);
    ///
    /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`.
    /// println!("{:?}", unsafe { &*bad });
    /// ```
    #[unstable(feature = "set_ptr_value", issue = "75091")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline]
    pub const fn with_metadata_of<U>(self, meta: *const U) -> *const U
    where
        U: ?Sized,
    {
        from_raw_parts::<U>(self as *const (), metadata(meta))
    }

    /// Changes constness without changing the type.
    ///
    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
    /// refactored.
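    ///
    /// # Examples
    ///
    /// A brief sketch; writing through the result is only sound here because
    /// the pointer was originally derived from a mutable place:
    ///
    /// ```
    /// let mut x = 5i32;
    /// let ptr: *mut i32 = &mut x;
    /// let const_ptr: *const i32 = ptr.cast_const();
    /// // Round-trip back to a mutable pointer.
    /// unsafe { *const_ptr.cast_mut() = 6 };
    /// assert_eq!(x, 6);
    /// ```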
    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
    #[rustc_diagnostic_item = "ptr_cast_mut"]
    #[inline(always)]
    pub const fn cast_mut(self) -> *mut T {
        self as _
    }

    /// Gets the "address" portion of the pointer.
    ///
    /// This is similar to `self as usize`, except that the [provenance][crate::ptr#provenance] of
    /// the pointer is discarded and not [exposed][crate::ptr#exposed-provenance]. This means that
    /// casting the returned address back to a pointer yields a [pointer without
    /// provenance][without_provenance], which is undefined behavior to dereference. To properly
    /// restore the lost information and obtain a dereferenceable pointer, use
    /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
    ///
    /// If using those APIs is not possible because there is no way to preserve a pointer with the
    /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts
    /// or [`expose_provenance`][pointer::expose_provenance] and [`with_exposed_provenance`][with_exposed_provenance]
    /// instead. However, note that this makes your code less portable and less amenable to tools
    /// that check for compliance with the Rust memory model.
    ///
    /// On most platforms this will produce a value with the same bytes as the original
    /// pointer, because all the bytes are dedicated to describing the address.
    /// Platforms which need to store additional information in the pointer may
    /// perform a change of representation to produce a value containing only the address
    /// portion of the pointer. What that means is up to the platform to define.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
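    ///
    /// # Examples
    ///
    /// A small sketch of the address/provenance round trip:
    ///
    /// ```
    /// let x = 42u8;
    /// let ptr: *const u8 = &x;
    /// let addr = ptr.addr();
    /// // The bare address is not dereferenceable; re-attach the provenance
    /// // of the original pointer to get a usable pointer back.
    /// let ptr2 = ptr.with_addr(addr);
    /// assert_eq!(unsafe { *ptr2 }, 42);
    /// ```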
    #[must_use]
    #[inline(always)]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn addr(self) -> usize {
        // A pointer-to-integer transmute currently has exactly the right semantics: it returns the
        // address without exposing the provenance. Note that this is *not* a stable guarantee about
        // transmute semantics, it relies on sysroot crates having special status.
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        unsafe { mem::transmute(self.cast::<()>()) }
    }

    /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
    /// [`with_exposed_provenance`] and returns the "address" portion.
    ///
    /// This is equivalent to `self as usize`, which semantically discards provenance information.
    /// Furthermore, this (like the `as` cast) has the implicit side-effect of marking the
    /// provenance as 'exposed', so on platforms that support it you can later call
    /// [`with_exposed_provenance`] to reconstitute the original pointer including its provenance.
    ///
    /// Due to its inherent ambiguity, [`with_exposed_provenance`] may not be supported by tools
    /// that help you to stay conformant with the Rust memory model. It is recommended to use
    /// [Strict Provenance][crate::ptr#strict-provenance] APIs such as [`with_addr`][pointer::with_addr]
    /// wherever possible, in which case [`addr`][pointer::addr] should be used instead of `expose_provenance`.
    ///
    /// On most platforms this will produce a value with the same bytes as the original pointer,
    /// because all the bytes are dedicated to describing the address. Platforms which need to store
    /// additional information in the pointer may not support this operation, since the 'expose'
    /// side-effect which is required for [`with_exposed_provenance`] to work is typically not
    /// available.
    ///
    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
    ///
    /// [`with_exposed_provenance`]: with_exposed_provenance
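    ///
    /// # Examples
    ///
    /// A round-trip sketch through an exposed address:
    ///
    /// ```
    /// use std::ptr;
    ///
    /// let x = 3u8;
    /// let ptr: *const u8 = &x;
    /// let addr = ptr.expose_provenance();
    /// // Reconstitute a pointer from the exposed address.
    /// let ptr2: *const u8 = ptr::with_exposed_provenance(addr);
    /// assert_eq!(unsafe { *ptr2 }, 3);
    /// ```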
    #[inline(always)]
    #[stable(feature = "exposed_provenance", since = "1.84.0")]
    pub fn expose_provenance(self) -> usize {
        self.cast::<()>() as usize
    }

    /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
    /// `self`.
    ///
    /// This is similar to an `addr as *const T` cast, but copies
    /// the *provenance* of `self` to the new pointer.
    /// This avoids the inherent ambiguity of the unary cast.
    ///
    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
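    ///
    /// # Examples
    ///
    /// A sketch of the common tagged-pointer round trip: stash a tag in the
    /// alignment-guaranteed low bits, then strip it again without ever losing
    /// the pointer's provenance:
    ///
    /// ```
    /// let x = 7u32;
    /// let ptr: *const u32 = &x;
    /// let tagged = ptr.map_addr(|a| a | 0b1);
    /// let untagged = tagged.with_addr(tagged.addr() & !0b1);
    /// assert_eq!(unsafe { *untagged }, 7);
    /// ```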
    #[must_use]
    #[inline]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn with_addr(self, addr: usize) -> Self {
        // This should probably be an intrinsic to avoid doing any sort of arithmetic, but
        // meanwhile, we can implement it with `wrapping_offset`, which preserves the pointer's
        // provenance.
        let self_addr = self.addr() as isize;
        let dest_addr = addr as isize;
        let offset = dest_addr.wrapping_sub(self_addr);
        self.wrapping_byte_offset(offset)
    }

    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
    /// [provenance][crate::ptr#provenance] of `self`.
    ///
    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
    #[must_use]
    #[inline]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
        self.with_addr(f(self.addr()))
    }

    /// Decomposes a (possibly wide) pointer into its data pointer and metadata components.
    ///
    /// The pointer can be later reconstructed with [`from_raw_parts`].
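    ///
    /// # Examples
    ///
    /// A short sketch splitting a slice pointer into its parts and rebuilding
    /// it (unstable `ptr_metadata` APIs):
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    /// let arr = [1u8, 2, 3];
    /// let ptr: *const [u8] = &arr;
    /// let (data, len) = ptr.to_raw_parts();
    /// assert_eq!(len, 3);
    /// let rebuilt: *const [u8] = std::ptr::from_raw_parts(data, len);
    /// assert_eq!(unsafe { &*rebuilt }, &arr[..]);
    /// ```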
    #[unstable(feature = "ptr_metadata", issue = "81513")]
    #[inline]
    pub const fn to_raw_parts(self) -> (*const (), <T as super::Pointee>::Metadata) {
        (self.cast(), metadata(self))
    }

    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
    /// must be used instead.
    ///
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null
    ///
    /// # Examples
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_ref() {
    ///         assert_eq!(val_back, &10);
    ///     }
    /// }
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
    /// dereference the pointer directly.
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     let val_back = &*ptr;
    ///     assert_eq!(val_back, &10);
    /// }
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[inline]
    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
        // SAFETY: the caller must guarantee that `self` is valid
        // for a reference if it isn't null.
        if self.is_null() { None } else { unsafe { Some(&*self) } }
    }

    /// Returns a shared reference to the value behind the pointer.
    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_ref`] must be used instead.
    /// If the pointer may be null, but the value is known to have been initialized, [`as_ref`] must be used instead.
    ///
    /// [`as_ref`]: #method.as_ref
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_ref_unchecked)]
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     assert_eq!(ptr.as_ref_unchecked(), &10);
    /// }
    /// ```
    // FIXME: mention it in the docs for `as_ref` and `as_uninit_ref` once stabilized.
    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
    #[inline]
    #[must_use]
    pub const unsafe fn as_ref_unchecked<'a>(self) -> &'a T {
        // SAFETY: the caller must guarantee that `self` is valid for a reference
        unsafe { &*self }
    }

    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// the value to be initialized.
    ///
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_uninit_ref() {
    ///         assert_eq!(val_back.assume_init(), 10);
    ///     }
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
    where
        T: Sized,
    {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
    }

    /// Adds a signed offset to a pointer.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space. Note that "range" here refers to a half-open range as usual in Rust,
    ///   i.e., `self..result` for non-negative offsets and `result..self` for negative offsets.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_offset`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_offset`]: #method.wrapping_offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(*ptr.offset(1) as char, '2');
    ///     assert_eq!(*ptr.offset(2) as char, '3');
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
            // We can use const_eval_select here because this is only for UB checks.
            const_eval_select!(
                @capture { this: *const (), count: isize, size: usize } -> bool:
                if const {
                    true
                } else {
                    // `size` is the size of a Rust type, so we know that
                    // `size <= isize::MAX` and thus `as` cast here is not lossy.
                    let Some(byte_offset) = count.checked_mul(size as isize) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
                    !overflow
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset requires the address calculation to not overflow",
            (
                this: *const () = self as *const (),
                count: isize = count,
                size: usize = size_of::<T>(),
            ) => runtime_offset_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }

    /// Adds a signed offset in bytes to a pointer.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [offset][pointer::offset] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
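    ///
    /// # Examples
    ///
    /// A byte-wise sketch of stepping through a `u16` array:
    ///
    /// ```
    /// let arr = [1u16, 2];
    /// let ptr: *const u16 = arr.as_ptr();
    /// unsafe {
    ///     // Two bytes forward is one `u16` element.
    ///     assert_eq!(*ptr.byte_offset(2), 2);
    /// }
    /// ```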
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset(self, count: isize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
    }

    /// Adds a signed offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to
    /// (this is called "[Provenance](ptr/index.html#provenance)").
    /// The pointer must not be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying within the
    /// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
    /// words, leaving the allocated object and then re-entering it later is permitted.
    ///
    /// [`offset`]: #method.offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// let mut out = String::new();
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// assert_eq!(out.as_str(), "1, 3, 5, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
        unsafe { intrinsics::arith_offset(self, count) }
    }

    /// Adds a signed offset in bytes to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
    /// for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
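    ///
    /// # Examples
    ///
    /// A sketch of going out of bounds and back, which wrapping arithmetic
    /// permits as long as only in-bounds pointers are dereferenced:
    ///
    /// ```
    /// let arr = [1u16, 2];
    /// let ptr: *const u16 = arr.as_ptr();
    /// // +6 bytes is past the end of the 4-byte array; -4 bytes returns
    /// // to the second element before the pointer is read.
    /// let roundabout = ptr.wrapping_byte_offset(6).wrapping_byte_offset(-4);
    /// assert_eq!(unsafe { *roundabout }, 2);
    /// ```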
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
    }

    /// Masks out bits of the pointer according to a mask.
    ///
    /// This is convenience for `ptr.map_addr(|a| a & mask)`.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    ///
    /// ## Examples
    ///
    /// ```
    /// #![feature(ptr_mask)]
    /// let v = 17_u32;
    /// let ptr: *const u32 = &v;
    ///
    /// // `u32` is 4-byte aligned,
    /// // which means that the lower 2 bits are always 0.
    /// let tag_mask = 0b11;
    /// let ptr_mask = !tag_mask;
    ///
    /// // We can store something in these lower bits
    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
    ///
    /// // Get the "tag" back
    /// let tag = tagged_ptr.addr() & tag_mask;
    /// assert_eq!(tag, 0b10);
    ///
    /// // Note that `tagged_ptr` is unaligned; it's UB to read from it.
    /// // To get the original pointer back, `mask` can be used:
    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
    /// assert_eq!(unsafe { *masked_ptr }, 17);
    /// ```
    #[unstable(feature = "ptr_mask", issue = "98290")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline(always)]
    pub fn mask(self, mask: usize) -> *const T {
        intrinsics::ptr_mask(self.cast::<()>(), mask).with_metadata_of(self)
    }

    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of T: the distance in bytes divided by `size_of::<T>()`.
    ///
    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
    /// except that it has a lot more opportunities for UB, in exchange for the compiler
    /// better understanding what you are doing.
    ///
    /// The primary motivation of this method is for computing the `len` of an array/slice
    /// of `T` that you are currently representing as a "start" and "end" pointer
    /// (and "end" is "one past the end" of the array).
    /// In that case, `end.offset_from(start)` gets you the length of the array.
    ///
    /// All of the following safety requirements are trivially satisfied for this use case.
    ///
    /// [`offset`]: #method.offset
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * `self` and `origin` must either
    ///
    ///   * point to the same address, or
    ///   * both be [derived from][crate::ptr#provenance] a pointer to the same [allocated object], and the memory range between
    ///     the two pointers must be in bounds of that object. (See below for an example.)
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
    /// implied by the in-bounds requirement, and the fact that no allocated object can be larger
    /// than `isize::MAX` bytes.
    ///
    /// The requirement for pointers to be derived from the same allocated object is primarily
    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
    /// objects is not known at compile-time. However, the requirement also exists at
    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
    /// origin as isize) / size_of::<T>()`.
    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
    ///
    /// [`add`]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    ///
    /// *Incorrect* usage:
    ///
    /// ```rust,no_run
    /// let ptr1 = Box::into_raw(Box::new(0u8)) as *const u8;
    /// let ptr2 = Box::into_raw(Box::new(1u8)) as *const u8;
    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
    /// let ptr2_other = (ptr1 as *const u8).wrapping_offset(diff).wrapping_offset(1);
    /// assert_eq!(ptr2 as usize + 1, ptr2_other as usize);
    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
    /// // computing their offset is undefined behavior, even though
    /// // they point to addresses that are in-bounds of the same object!
    /// unsafe {
    ///     let one = ptr2_other.offset_from(ptr2); // Undefined Behavior! ⚠️
    /// }
    /// ```
    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        let pointee_size = size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from`.
        unsafe { intrinsics::ptr_offset_from(self, origin) }
    }

    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset_from`][pointer::offset_from] on it. See that method for
    /// documentation and safety requirements.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
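    ///
    /// # Examples
    ///
    /// A small sketch with a `u16` array (two bytes per element):
    ///
    /// ```
    /// let a = [0u16; 4];
    /// let ptr1: *const u16 = &a[1];
    /// let ptr2: *const u16 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.byte_offset_from(ptr1), 4);
    ///     assert_eq!(ptr1.byte_offset_from(ptr2), -4);
    /// }
    /// ```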
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
        // SAFETY: the caller must uphold the safety contract for `offset_from`.
        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
    }

    /// Calculates the distance between two pointers within the same allocation, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of T: the distance in bytes is divided by `size_of::<T>()`.
    ///
    /// This computes the same value that [`offset_from`](#method.offset_from)
    /// would compute, but with the added precondition that the offset is
    /// guaranteed to be non-negative. This method is equivalent to
    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
    /// but it provides slightly more information to the optimizer, which can
    /// sometimes allow it to optimize slightly better with some backends.
    ///
    /// This method can be thought of as recovering the `count` that was passed
    /// to [`add`](#method.add) (or, with the parameters in the other order,
    /// to [`sub`](#method.sub)). The following are all equivalent, assuming
    /// that their safety preconditions are met:
    /// ```rust
    /// # unsafe fn blah(ptr: *const i32, origin: *const i32, count: usize) -> bool { unsafe {
    /// ptr.offset_from_unsigned(origin) == count
    /// # &&
    /// origin.add(count) == ptr
    /// # &&
    /// ptr.sub(count) == origin
    /// # } }
    /// ```
    ///
    /// # Safety
    ///
    /// - The distance between the pointers must be non-negative (`self >= origin`)
    ///
    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
    ///   apply to this method as well; see it for the full details.
    ///
    /// Importantly, despite the return type of this method being able to represent
    /// a larger offset, it's still *not permitted* to pass pointers which differ
    /// by more than `isize::MAX` *bytes*. As such, the result of this method will
    /// always be less than or equal to `isize::MAX as usize`.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
    ///     assert_eq!(ptr1.add(2), ptr2);
    ///     assert_eq!(ptr2.sub(2), ptr1);
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
    /// }
    ///
    /// // This would be incorrect, as the pointers are not correctly ordered:
    /// // ptr1.offset_from_unsigned(ptr2)
    /// ```
    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize
    where
        T: Sized,
    {
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_ptr_ge(this: *const (), origin: *const ()) -> bool {
            const_eval_select!(
                @capture { this: *const (), origin: *const () } -> bool:
                if const {
                    true
                } else {
                    this >= origin
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset_from_unsigned requires `self >= origin`",
            (
                this: *const () = self as *const (),
                origin: *const () = origin as *const (),
            ) => runtime_ptr_ge(this, origin)
        );

        let pointee_size = size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from_unsigned`.
        unsafe { intrinsics::ptr_offset_from_unsigned(self, origin) }
    }

    /// Calculates the distance between two pointers within the same allocation, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it.
    /// See that method for documentation and safety requirements.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
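    ///
    /// # Examples
    ///
    /// A small sketch with a `u32` array (four bytes per element):
    ///
    /// ```
    /// let a = [0u32; 3];
    /// let ptr1: *const u32 = &a[0];
    /// let ptr2: *const u32 = &a[2];
    /// unsafe {
    ///     assert_eq!(ptr2.byte_offset_from_unsigned(ptr1), 8);
    /// }
    /// ```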
    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: *const U) -> usize {
        // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
        unsafe { self.cast::<u8>().offset_from_unsigned(origin.cast::<u8>()) }
    }

    /// Returns whether two pointers are guaranteed to be equal.
    ///
    /// At runtime this function behaves like `Some(self == other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine equality of two pointers, so this function may
    /// spuriously return `None` for pointers whose equality later turns out to be known.
    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version, and unsafe code must not rely on the result of this function for soundness. It is
    /// suggested to only use this function for performance optimizations where spurious `None`
    /// return values do not affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
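    ///
    /// # Examples
    ///
    /// A sketch of the intended usage pattern: treat `None` as "unknown" and
    /// fall back to a conservative slow path. The assertion below only checks
    /// the runtime behavior, where the comparison result is always known:
    ///
    /// ```
    /// #![feature(const_raw_ptr_comparison)]
    /// let x = 0u8;
    /// let ptr: *const u8 = &x;
    /// assert_eq!(ptr.guaranteed_eq(ptr), Some(true));
    /// ```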
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_eq(self, other: *const T) -> Option<bool>
    where
        T: Sized,
    {
        match intrinsics::ptr_guaranteed_cmp(self, other) {
            2 => None,
            other => Some(other == 1),
        }
    }

    /// Returns whether two pointers are guaranteed to be unequal.
    ///
    /// At runtime this function behaves like `Some(self != other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine inequality of two pointers, so this function may
    /// spuriously return `None` for pointers whose inequality later turns out to be known.
    /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version, and unsafe code must not rely on the result of this function for soundness. It is
    /// suggested to only use this function for performance optimizations where spurious `None`
    /// return values do not affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_ne(self, other: *const T) -> Option<bool>
    where
        T: Sized,
    {
        match self.guaranteed_eq(other) {
            None => None,
            Some(eq) => Some(!eq),
        }
    }

    /// Adds an unsigned offset to a pointer.
    ///
    /// This can only move the pointer forward (or not move it). If you need to move forward or
    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
    /// which takes a signed offset.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_add`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_add`]: #method.wrapping_add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(*ptr.add(1), b'2');
    ///     assert_eq!(*ptr.add(2), b'3');
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
                    byte_offset <= (isize::MAX as usize) && !overflow
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::add requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_add_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }

    /// Adds an unsigned offset in bytes to a pointer.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [add][pointer::add] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
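    ///
    /// # Examples
    ///
    /// A small sketch (one `u16` element is two bytes):
    ///
    /// ```
    /// let arr = [5u16, 6];
    /// let ptr: *const u16 = arr.as_ptr();
    /// unsafe {
    ///     assert_eq!(*ptr.byte_add(2), 6);
    /// }
    /// ```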
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_add(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `add`.
        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
    }

    /// Subtracts an unsigned offset from a pointer.
    ///
    /// This can only move the pointer backward (or not move it). If you need to move forward or
    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
    /// which takes a signed offset.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_sub`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_sub`]: #method.wrapping_sub
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: *const u8 = s.as_ptr().add(3);
    ///     assert_eq!(*end.sub(1), b'3');
    ///     assert_eq!(*end.sub(2), b'2');
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::sub requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_sub_nowrap(this, count, size)
        );

        if T::IS_ZST {
            // Pointer arithmetic does nothing when the pointee is a ZST.
            self
        } else {
            // SAFETY: the caller must uphold the safety contract for `offset`.
            // Because the pointee is *not* a ZST, that means that `count` is
            // at most `isize::MAX`, and thus the negation cannot overflow.
            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
        }
    }

    /// Subtracts an unsigned offset in bytes from a pointer.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [sub][pointer::sub] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
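    ///
    /// # Examples
    ///
    /// A small sketch stepping back from a one-past-the-end pointer:
    ///
    /// ```
    /// let arr = [5u16, 6];
    /// unsafe {
    ///     let end: *const u16 = arr.as_ptr().add(2);
    ///     assert_eq!(*end.byte_sub(2), 6);
    ///     assert_eq!(*end.byte_sub(4), 5);
    /// }
    /// ```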
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_sub(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `sub`.
        unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
    }

    /// Adds an unsigned offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`add`], this method basically delays the requirement of staying within the
    /// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocated object and then re-entering it later is permitted.
    ///
    /// [`add`]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_add(6);
    ///
    /// let mut out = String::new();
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_add(step);
    /// }
    /// assert_eq!(out, "1, 3, 5, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_add(self, count: usize) -> Self
    where
        T: Sized,
    {
        self.wrapping_offset(count as isize)
    }

    /// Adds an unsigned offset in bytes to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
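    ///
    /// # Examples
    ///
    /// A small sketch; the intermediate pointer may go out of bounds as long
    /// as only in-bounds pointers are dereferenced:
    ///
    /// ```
    /// let arr = [1u16, 2];
    /// let ptr: *const u16 = arr.as_ptr();
    /// // +6 bytes overshoots the 4-byte array; -4 bytes lands on `arr[1]`.
    /// let p = ptr.wrapping_byte_add(6).wrapping_byte_sub(4);
    /// assert_eq!(unsafe { *p }, 2);
    /// ```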
1171    #[must_use]
1172    #[inline(always)]
1173    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1174    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1175    pub const fn wrapping_byte_add(self, count: usize) -> Self {
1176        self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
1177    }
1178
1179    /// Subtracts an unsigned offset from a pointer using wrapping arithmetic.
1180    ///
1181    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1182    /// offset of `3 * size_of::<T>()` bytes.
1183    ///
1184    /// # Safety
1185    ///
1186    /// This operation itself is always safe, but using the resulting pointer is not.
1187    ///
1188    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
1189    /// be used to read or write other allocated objects.
1190    ///
1191    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
1192    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1193    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1194    /// `x` and `y` point into the same allocated object.
1195    ///
1196    /// Compared to [`sub`], this method basically delays the requirement of staying within the
1197    /// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object
1198    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
1199    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
1200    /// can be optimized better and is thus preferable in performance-sensitive code.
1201    ///
1202    /// The delayed check only considers the value of the pointer that was dereferenced, not the
1203    /// intermediate values used during the computation of the final result. For example,
1204    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1205    /// allocated object and then re-entering it later is permitted.
1206    ///
1207    /// [`sub`]: #method.sub
1208    /// [allocated object]: crate::ptr#allocated-object
1209    ///
1210    /// # Examples
1211    ///
1212    /// ```
1213    /// # use std::fmt::Write;
1214    /// // Iterate using a raw pointer in increments of two elements (backwards)
1215    /// let data = [1u8, 2, 3, 4, 5];
1216    /// let mut ptr: *const u8 = data.as_ptr();
1217    /// let start_rounded_down = ptr.wrapping_sub(2);
1218    /// ptr = ptr.wrapping_add(4);
1219    /// let step = 2;
1220    /// let mut out = String::new();
1221    /// while ptr != start_rounded_down {
1222    ///     unsafe {
1223    ///         write!(&mut out, "{}, ", *ptr)?;
1224    ///     }
1225    ///     ptr = ptr.wrapping_sub(step);
1226    /// }
1227    /// assert_eq!(out, "5, 3, 1, ");
1228    /// # std::fmt::Result::Ok(())
1229    /// ```
1230    #[stable(feature = "pointer_methods", since = "1.26.0")]
1231    #[must_use = "returns a new pointer rather than modifying its argument"]
1232    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1233    #[inline(always)]
1234    pub const fn wrapping_sub(self, count: usize) -> Self
1235    where
1236        T: Sized,
1237    {
1238        self.wrapping_offset((count as isize).wrapping_neg())
1239    }
1240
1241    /// Subtracts an unsigned offset in bytes from a pointer using wrapping arithmetic.
1242    ///
1243    /// `count` is in units of bytes.
1244    ///
1245    /// This is purely a convenience for casting to a `u8` pointer and
1246    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
1247    ///
1248    /// For non-`Sized` pointees this operation changes only the data pointer,
1249    /// leaving the metadata untouched.
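    ///
    /// # Examples
    ///
    /// A minimal sketch mirroring [`wrapping_byte_add`][pointer::wrapping_byte_add]:
    /// stepping back `size_of::<u16>()` bytes returns to the previous element.
    ///
    /// ```
    /// let arr = [1u16, 2, 3];
    /// let ptr: *const u16 = arr.as_ptr().wrapping_add(1);
    /// // Subtracting two bytes undoes one element-sized step.
    /// assert_eq!(ptr.wrapping_byte_sub(2), arr.as_ptr());
    /// ```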
1250    #[must_use]
1251    #[inline(always)]
1252    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1253    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1254    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
1255        self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
1256    }
1257
1258    /// Reads the value from `self` without moving it. This leaves the
1259    /// memory in `self` unchanged.
1260    ///
1261    /// See [`ptr::read`] for safety concerns and examples.
1262    ///
1263    /// [`ptr::read`]: crate::ptr::read()
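    ///
    /// # Examples
    ///
    /// A minimal sketch; the linked [`ptr::read`] documentation covers the
    /// full safety contract.
    ///
    /// ```
    /// let x = 12;
    /// let ptr = &x as *const i32;
    /// // SAFETY: `ptr` comes from a reference, so it is valid, aligned,
    /// // and points to an initialized value.
    /// assert_eq!(unsafe { ptr.read() }, 12);
    /// ```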
1264    #[stable(feature = "pointer_methods", since = "1.26.0")]
1265    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
1266    #[inline]
1267    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1268    pub const unsafe fn read(self) -> T
1269    where
1270        T: Sized,
1271    {
1272        // SAFETY: the caller must uphold the safety contract for `read`.
1273        unsafe { read(self) }
1274    }
1275
1276    /// Performs a volatile read of the value from `self` without moving it. This
1277    /// leaves the memory in `self` unchanged.
1278    ///
1279    /// Volatile operations are intended to act on I/O memory, and are guaranteed
1280    /// to not be elided or reordered by the compiler across other volatile
1281    /// operations.
1282    ///
1283    /// See [`ptr::read_volatile`] for safety concerns and examples.
1284    ///
1285    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
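    ///
    /// # Examples
    ///
    /// A minimal sketch; volatile reads of ordinary memory are permitted,
    /// they are simply never elided or reordered.
    ///
    /// ```
    /// let x = 42u8;
    /// let ptr = &x as *const u8;
    /// // SAFETY: `ptr` comes from a reference, so it is valid and aligned.
    /// assert_eq!(unsafe { ptr.read_volatile() }, 42);
    /// ```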
1286    #[stable(feature = "pointer_methods", since = "1.26.0")]
1287    #[inline]
1288    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1289    pub unsafe fn read_volatile(self) -> T
1290    where
1291        T: Sized,
1292    {
1293        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
1294        unsafe { read_volatile(self) }
1295    }
1296
1297    /// Reads the value from `self` without moving it. This leaves the
1298    /// memory in `self` unchanged.
1299    ///
1300    /// Unlike `read`, the pointer may be unaligned.
1301    ///
1302    /// See [`ptr::read_unaligned`] for safety concerns and examples.
1303    ///
1304    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
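    ///
    /// # Examples
    ///
    /// A minimal sketch reading a `u32` from an arbitrary byte position:
    ///
    /// ```
    /// let bytes = [1u8, 2, 3, 4, 5];
    /// // One byte into the buffer, the pointer is in general not aligned for `u32`.
    /// let ptr = bytes.as_ptr().wrapping_add(1).cast::<u32>();
    /// // SAFETY: the four bytes read are initialized and in bounds; alignment
    /// // is not required by `read_unaligned`.
    /// assert_eq!(unsafe { ptr.read_unaligned() }, u32::from_ne_bytes([2, 3, 4, 5]));
    /// ```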
1305    #[stable(feature = "pointer_methods", since = "1.26.0")]
1306    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
1307    #[inline]
1308    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1309    pub const unsafe fn read_unaligned(self) -> T
1310    where
1311        T: Sized,
1312    {
1313        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
1314        unsafe { read_unaligned(self) }
1315    }
1316
1317    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1318    /// and destination may overlap.
1319    ///
1320    /// NOTE: this has the *same* argument order as [`ptr::copy`].
1321    ///
1322    /// See [`ptr::copy`] for safety concerns and examples.
1323    ///
1324    /// [`ptr::copy`]: crate::ptr::copy()
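    ///
    /// # Examples
    ///
    /// A minimal sketch; see [`ptr::copy`] for the full safety contract.
    ///
    /// ```
    /// let src = [1, 2, 3];
    /// let mut dst = [0; 3];
    /// // SAFETY: both pointers are valid for three `i32`s and properly
    /// // aligned; `copy_to` would even tolerate overlapping buffers.
    /// unsafe { src.as_ptr().copy_to(dst.as_mut_ptr(), 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```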
1325    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1326    #[stable(feature = "pointer_methods", since = "1.26.0")]
1327    #[inline]
1328    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1329    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
1330    where
1331        T: Sized,
1332    {
1333        // SAFETY: the caller must uphold the safety contract for `copy`.
1334        unsafe { copy(self, dest, count) }
1335    }
1336
1337    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1338    /// and destination may *not* overlap.
1339    ///
1340    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
1341    ///
1342    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1343    ///
1344    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
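    ///
    /// # Examples
    ///
    /// A minimal sketch; the two arrays are distinct allocations, so the
    /// buffers cannot overlap, as this method requires.
    ///
    /// ```
    /// let src = [1, 2, 3];
    /// let mut dst = [0; 3];
    /// // SAFETY: both pointers are valid for three `i32`s, properly aligned,
    /// // and the buffers do not overlap.
    /// unsafe { src.as_ptr().copy_to_nonoverlapping(dst.as_mut_ptr(), 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```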
1345    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1346    #[stable(feature = "pointer_methods", since = "1.26.0")]
1347    #[inline]
1348    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1349    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
1350    where
1351        T: Sized,
1352    {
1353        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1354        unsafe { copy_nonoverlapping(self, dest, count) }
1355    }
1356
1357    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1358    /// `align`.
1359    ///
1360    /// If it is not possible to align the pointer, the implementation returns
1361    /// `usize::MAX`.
1362    ///
1363    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
1364    /// used with the `wrapping_add` method.
1365    ///
1366    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1367    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1368    /// the returned offset is correct in all terms other than alignment.
1369    ///
1370    /// # Panics
1371    ///
1372    /// The function panics if `align` is not a power-of-two.
1373    ///
1374    /// # Examples
1375    ///
1376    /// Accessing adjacent `u8` as `u16`
1377    ///
1378    /// ```
1379    /// # unsafe {
1380    /// let x = [5_u8, 6, 7, 8, 9];
1381    /// let ptr = x.as_ptr();
1382    /// let offset = ptr.align_offset(align_of::<u16>());
1383    ///
1384    /// if offset < x.len() - 1 {
1385    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
1386    ///     assert!(*u16_ptr == u16::from_ne_bytes([5, 6]) || *u16_ptr == u16::from_ne_bytes([6, 7]));
1387    /// } else {
1388    ///     // while the pointer can be aligned via `offset`, it would point
1389    ///     // outside the allocation
1390    /// }
1391    /// # }
1392    /// ```
1393    #[must_use]
1394    #[inline]
1395    #[stable(feature = "align_offset", since = "1.36.0")]
1396    pub fn align_offset(self, align: usize) -> usize
1397    where
1398        T: Sized,
1399    {
1400        if !align.is_power_of_two() {
1401            panic!("align_offset: align is not a power-of-two");
1402        }
1403
1404        // SAFETY: `align` has been checked to be a power of 2 above
1405        let ret = unsafe { align_offset(self, align) };
1406
1407        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
1408        #[cfg(miri)]
1409        if ret != usize::MAX {
1410            intrinsics::miri_promise_symbolic_alignment(self.wrapping_add(ret).cast(), align);
1411        }
1412
1413        ret
1414    }
1415
1416    /// Returns whether the pointer is properly aligned for `T`.
1417    ///
1418    /// # Examples
1419    ///
1420    /// ```
1421    /// // On some platforms, the alignment of i32 is less than 4.
1422    /// #[repr(align(4))]
1423    /// struct AlignedI32(i32);
1424    ///
1425    /// let data = AlignedI32(42);
1426    /// let ptr = &data as *const AlignedI32;
1427    ///
1428    /// assert!(ptr.is_aligned());
1429    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
1430    /// ```
1431    #[must_use]
1432    #[inline]
1433    #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
1434    pub fn is_aligned(self) -> bool
1435    where
1436        T: Sized,
1437    {
1438        self.is_aligned_to(align_of::<T>())
1439    }
1440
1441    /// Returns whether the pointer is aligned to `align`.
1442    ///
1443    /// For non-`Sized` pointees this operation considers only the data pointer,
1444    /// ignoring the metadata.
1445    ///
1446    /// # Panics
1447    ///
1448    /// The function panics if `align` is not a power-of-two (this includes 0).
1449    ///
1450    /// # Examples
1451    ///
1452    /// ```
1453    /// #![feature(pointer_is_aligned_to)]
1454    ///
1455    /// // On some platforms, the alignment of i32 is less than 4.
1456    /// #[repr(align(4))]
1457    /// struct AlignedI32(i32);
1458    ///
1459    /// let data = AlignedI32(42);
1460    /// let ptr = &data as *const AlignedI32;
1461    ///
1462    /// assert!(ptr.is_aligned_to(1));
1463    /// assert!(ptr.is_aligned_to(2));
1464    /// assert!(ptr.is_aligned_to(4));
1465    ///
1466    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1467    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1468    ///
1469    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1470    /// ```
1471    #[must_use]
1472    #[inline]
1473    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1474    pub fn is_aligned_to(self, align: usize) -> bool {
1475        if !align.is_power_of_two() {
1476            panic!("is_aligned_to: align is not a power-of-two");
1477        }
1478
1479        self.addr() & (align - 1) == 0
1480    }
1481}
1482
1483impl<T> *const [T] {
1484    /// Returns the length of a raw slice.
1485    ///
1486    /// The returned value is the number of **elements**, not the number of bytes.
1487    ///
1488    /// This function is safe, even when the raw slice cannot be cast to a slice
1489    /// reference because the pointer is null or unaligned.
1490    ///
1491    /// # Examples
1492    ///
1493    /// ```rust
1494    /// use std::ptr;
1495    ///
1496    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
1497    /// assert_eq!(slice.len(), 3);
1498    /// ```
1499    #[inline]
1500    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
1501    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
1502    pub const fn len(self) -> usize {
1503        metadata(self)
1504    }
1505
1506    /// Returns `true` if the raw slice has a length of 0.
1507    ///
1508    /// # Examples
1509    ///
1510    /// ```
1511    /// use std::ptr;
1512    ///
1513    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
1514    /// assert!(!slice.is_empty());
1515    /// ```
1516    #[inline(always)]
1517    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
1518    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
1519    pub const fn is_empty(self) -> bool {
1520        self.len() == 0
1521    }
1522
1523    /// Returns a raw pointer to the slice's buffer.
1524    ///
1525    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
1526    ///
1527    /// # Examples
1528    ///
1529    /// ```rust
1530    /// #![feature(slice_ptr_get)]
1531    /// use std::ptr;
1532    ///
1533    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
1534    /// assert_eq!(slice.as_ptr(), ptr::null());
1535    /// ```
1536    #[inline]
1537    #[unstable(feature = "slice_ptr_get", issue = "74265")]
1538    pub const fn as_ptr(self) -> *const T {
1539        self as *const T
1540    }
1541
1542    /// Gets a raw pointer to the underlying array.
1543    ///
1544    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
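    ///
    /// # Examples
    ///
    /// A minimal sketch (the `slice_as_array` feature gate matches the
    /// tracking issue above):
    ///
    /// ```
    /// #![feature(slice_as_array)]
    ///
    /// let slice: *const [i32] = &[1, 2, 3];
    /// // `N` must match the slice length exactly.
    /// assert!(slice.as_array::<3>().is_some());
    /// assert!(slice.as_array::<2>().is_none());
    /// ```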
1545    #[unstable(feature = "slice_as_array", issue = "133508")]
1546    #[inline]
1547    #[must_use]
1548    pub const fn as_array<const N: usize>(self) -> Option<*const [T; N]> {
1549        if self.len() == N {
1550            let me = self.as_ptr() as *const [T; N];
1551            Some(me)
1552        } else {
1553            None
1554        }
1555    }
1556
1557    /// Returns a raw pointer to an element or subslice, without doing bounds
1558    /// checking.
1559    ///
1560    /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
1561    /// is *[undefined behavior]* even if the resulting pointer is not used.
1562    ///
1563    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1564    ///
1565    /// # Examples
1566    ///
1567    /// ```
1568    /// #![feature(slice_ptr_get)]
1569    ///
1570    /// let x = &[1, 2, 4] as *const [i32];
1571    ///
1572    /// unsafe {
1573    ///     assert_eq!(x.get_unchecked(1), x.as_ptr().add(1));
1574    /// }
1575    /// ```
1576    #[unstable(feature = "slice_ptr_get", issue = "74265")]
1577    #[inline]
1578    pub unsafe fn get_unchecked<I>(self, index: I) -> *const I::Output
1579    where
1580        I: SliceIndex<[T]>,
1581    {
1582        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
1583        unsafe { index.get_unchecked(self) }
1584    }
1585
1586    /// Returns `None` if the pointer is null, or else returns a shared slice to
1587    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
1588    /// the value to be initialized.
1589    ///
1590    /// [`as_ref`]: #method.as_ref
1591    ///
1592    /// # Safety
1593    ///
1594    /// When calling this method, you have to ensure that *either* the pointer is null *or*
1595    /// all of the following is true:
1596    ///
1597    /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
1598    ///   and it must be properly aligned. This means in particular:
1599    ///
1600    ///     * The entire memory range of this slice must be contained within a single [allocated object]!
1601    ///       Slices can never span across multiple allocated objects.
1602    ///
1603    ///     * The pointer must be aligned even for zero-length slices. One
1604    ///       reason for this is that enum layout optimizations may rely on references
1605    ///       (including slices of any length) being aligned and non-null to distinguish
1606    ///       them from other data. You can obtain a pointer that is usable as `data`
1607    ///       for zero-length slices using [`NonNull::dangling()`].
1608    ///
1609    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1610    ///   See the safety documentation of [`pointer::offset`].
1611    ///
1612    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1613    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1614    ///   In particular, while this reference exists, the memory the pointer points to must
1615    ///   not get mutated (except inside `UnsafeCell`).
1616    ///
1617    /// This applies even if the result of this method is unused!
1618    ///
1619    /// See also [`slice::from_raw_parts`][].
1620    ///
1621    /// [valid]: crate::ptr#safety
1622    /// [allocated object]: crate::ptr#allocated-object
1623    ///
1624    /// # Panics during const evaluation
1625    ///
1626    /// This method will panic during const evaluation if the pointer cannot be
1627    /// determined to be null or not. See [`is_null`] for more information.
1628    ///
1629    /// [`is_null`]: #method.is_null
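    ///
    /// # Examples
    ///
    /// A minimal sketch starting from a live shared reference, which
    /// trivially satisfies every condition above:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let data = [1u8, 2, 3];
    /// let ptr: *const [u8] = &data;
    /// // SAFETY: `ptr` comes from a shared reference: it is valid for reads,
    /// // properly aligned, and the memory is not mutated while the returned
    /// // slice is alive.
    /// let slice = unsafe { ptr.as_uninit_slice() }.unwrap();
    /// assert_eq!(slice.len(), 3);
    /// ```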
1630    #[inline]
1631    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1632    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
1633        if self.is_null() {
1634            None
1635        } else {
1636            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
1637            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
1638        }
1639    }
1640}
1641
1642impl<T, const N: usize> *const [T; N] {
1643    /// Returns a raw pointer to the array's buffer.
1644    ///
1645    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
1646    ///
1647    /// # Examples
1648    ///
1649    /// ```rust
1650    /// #![feature(array_ptr_get)]
1651    /// use std::ptr;
1652    ///
1653    /// let arr: *const [i8; 3] = ptr::null();
1654    /// assert_eq!(arr.as_ptr(), ptr::null());
1655    /// ```
1656    #[inline]
1657    #[unstable(feature = "array_ptr_get", issue = "119834")]
1658    pub const fn as_ptr(self) -> *const T {
1659        self as *const T
1660    }
1661
1662    /// Returns a raw pointer to a slice containing the entire array.
1663    ///
1664    /// # Examples
1665    ///
1666    /// ```
1667    /// #![feature(array_ptr_get)]
1668    ///
1669    /// let arr: *const [i32; 3] = &[1, 2, 4] as *const [i32; 3];
1670    /// let slice: *const [i32] = arr.as_slice();
1671    /// assert_eq!(slice.len(), 3);
1672    /// ```
1673    #[inline]
1674    #[unstable(feature = "array_ptr_get", issue = "119834")]
1675    pub const fn as_slice(self) -> *const [T] {
1676        self
1677    }
1678}
1679
1680/// Pointer equality is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
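    ///
    /// # Examples
    ///
    /// A quick illustration:
    ///
    /// ```
    /// let a = [1u8, 2];
    /// let ptr = a.as_ptr();
    /// assert_eq!(ptr, ptr);
    /// // One byte further is a different address, hence a different pointer.
    /// assert_ne!(ptr, ptr.wrapping_add(1));
    /// ```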
1681#[stable(feature = "rust1", since = "1.0.0")]
1682impl<T: ?Sized> PartialEq for *const T {
1683    #[inline]
1684    #[allow(ambiguous_wide_pointer_comparisons)]
1685    fn eq(&self, other: &*const T) -> bool {
1686        *self == *other
1687    }
1688}
1689
1690/// Pointer equality is an equivalence relation.
1691#[stable(feature = "rust1", since = "1.0.0")]
1692impl<T: ?Sized> Eq for *const T {}
1693
1694    /// Pointer comparison is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
1695#[stable(feature = "rust1", since = "1.0.0")]
1696impl<T: ?Sized> Ord for *const T {
1697    #[inline]
1698    #[allow(ambiguous_wide_pointer_comparisons)]
1699    fn cmp(&self, other: &*const T) -> Ordering {
1700        if self < other {
1701            Less
1702        } else if self == other {
1703            Equal
1704        } else {
1705            Greater
1706        }
1707    }
1708}
1709
1710    /// Pointer comparison is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
1711#[stable(feature = "rust1", since = "1.0.0")]
1712impl<T: ?Sized> PartialOrd for *const T {
1713    #[inline]
1714    #[allow(ambiguous_wide_pointer_comparisons)]
1715    fn partial_cmp(&self, other: &*const T) -> Option<Ordering> {
1716        Some(self.cmp(other))
1717    }
1718
1719    #[inline]
1720    #[allow(ambiguous_wide_pointer_comparisons)]
1721    fn lt(&self, other: &*const T) -> bool {
1722        *self < *other
1723    }
1724
1725    #[inline]
1726    #[allow(ambiguous_wide_pointer_comparisons)]
1727    fn le(&self, other: &*const T) -> bool {
1728        *self <= *other
1729    }
1730
1731    #[inline]
1732    #[allow(ambiguous_wide_pointer_comparisons)]
1733    fn gt(&self, other: &*const T) -> bool {
1734        *self > *other
1735    }
1736
1737    #[inline]
1738    #[allow(ambiguous_wide_pointer_comparisons)]
1739    fn ge(&self, other: &*const T) -> bool {
1740        *self >= *other
1741    }
1742}
1743
1744#[stable(feature = "raw_ptr_default", since = "1.88.0")]
1745impl<T: ?Sized + Thin> Default for *const T {
1746    /// Returns the default value of [`null()`][crate::ptr::null].
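    ///
    /// # Examples
    ///
    /// A quick check that the default is indeed null:
    ///
    /// ```
    /// let ptr: *const u8 = Default::default();
    /// assert!(ptr.is_null());
    /// ```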
1747    fn default() -> Self {
1748        crate::ptr::null()
1749    }
1750}