// core/ptr/mut_ptr.rs — methods on raw `*mut T` pointers.

1use super::*;
2use crate::cmp::Ordering::{Equal, Greater, Less};
3use crate::intrinsics::const_eval_select;
4use crate::mem::{self, SizedTypeProperties};
5use crate::slice::{self, SliceIndex};
6
7impl<T: ?Sized> *mut T {
8    /// Returns `true` if the pointer is null.
9    ///
10    /// Note that unsized types have many possible null pointers, as only the
11    /// raw data pointer is considered, not their length, vtable, etc.
12    /// Therefore, two pointers that are null may still not compare equal to
13    /// each other.
14    ///
15    /// # Panics during const evaluation
16    ///
17    /// If this method is used during const evaluation, and `self` is a pointer
18    /// that is offset beyond the bounds of the memory it initially pointed to,
19    /// then there might not be enough information to determine whether the
20    /// pointer is null. This is because the absolute address in memory is not
21    /// known at compile time. If the nullness of the pointer cannot be
22    /// determined, this method will panic.
23    ///
24    /// In-bounds pointers are never null, so the method will never panic for
25    /// such pointers.
26    ///
27    /// # Examples
28    ///
29    /// ```
30    /// let mut s = [1, 2, 3];
31    /// let ptr: *mut u32 = s.as_mut_ptr();
32    /// assert!(!ptr.is_null());
33    /// ```
34    #[stable(feature = "rust1", since = "1.0.0")]
35    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
36    #[rustc_diagnostic_item = "ptr_is_null"]
37    #[inline]
38    pub const fn is_null(self) -> bool {
39        self.cast_const().is_null()
40    }
41
42    /// Casts to a pointer of another type.
43    #[stable(feature = "ptr_cast", since = "1.38.0")]
44    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
45    #[rustc_diagnostic_item = "ptr_cast"]
46    #[inline(always)]
47    pub const fn cast<U>(self) -> *mut U {
48        self as _
49    }
50
    /// Uses the address value in a new pointer of another type.
    ///
    /// This operation will ignore the address part of its `meta` operand and discard existing
    /// metadata of `self`. For pointers to a sized types (thin pointers), this has the same effect
    /// as a simple cast. For pointers to an unsized type (fat pointers) this recombines the address
    /// with new metadata such as slice lengths or `dyn`-vtable.
    ///
    /// The resulting pointer will have provenance of `self`. This operation is semantically the
    /// same as creating a new pointer with the data pointer value of `self` but the metadata of
    /// `meta`, being fat or thin depending on the `meta` operand.
    ///
    /// # Examples
    ///
    /// This function is primarily useful for enabling pointer arithmetic on potentially fat
    /// pointers. The pointer is cast to a sized pointee to utilize offset operations and then
    /// recombined with its own original metadata.
    ///
    /// ```
    /// #![feature(set_ptr_value)]
    /// # use core::fmt::Debug;
    /// let mut arr: [i32; 3] = [1, 2, 3];
    /// let mut ptr = arr.as_mut_ptr() as *mut dyn Debug;
    /// let thin = ptr as *mut u8;
    /// unsafe {
    ///     ptr = thin.add(8).with_metadata_of(ptr);
    ///     # assert_eq!(*(ptr as *mut i32), 3);
    ///     println!("{:?}", &*ptr); // will print "3"
    /// }
    /// ```
    ///
    /// # *Incorrect* usage
    ///
    /// The provenance from pointers is *not* combined. The result must only be used to refer to the
    /// address allowed by `self`.
    ///
    /// ```rust,no_run
    /// #![feature(set_ptr_value)]
    /// let mut x = 0u32;
    /// let mut y = 1u32;
    ///
    /// let x = (&mut x) as *mut u32;
    /// let y = (&mut y) as *mut u32;
    ///
    /// let offset = (x as usize - y as usize) / 4;
    /// let bad = x.wrapping_add(offset).with_metadata_of(y);
    ///
    /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`.
    /// println!("{:?}", unsafe { &*bad });
    /// ```
    #[unstable(feature = "set_ptr_value", issue = "75091")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline]
    pub const fn with_metadata_of<U>(self, meta: *const U) -> *mut U
    where
        U: ?Sized,
    {
        // Thin-cast `self` to strip its own metadata, then rebuild a (possibly fat)
        // pointer from `self`'s data address and `meta`'s metadata.
        from_raw_parts_mut::<U>(self as *mut (), metadata(meta))
    }
108
109    /// Changes constness without changing the type.
110    ///
111    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
112    /// refactored.
113    ///
114    /// While not strictly required (`*mut T` coerces to `*const T`), this is provided for symmetry
115    /// with [`cast_mut`] on `*const T` and may have documentation value if used instead of implicit
116    /// coercion.
117    ///
118    /// [`cast_mut`]: pointer::cast_mut
119    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
120    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
121    #[rustc_diagnostic_item = "ptr_cast_const"]
122    #[inline(always)]
123    pub const fn cast_const(self) -> *const T {
124        self as _
125    }
126
    /// Gets the "address" portion of the pointer.
    ///
    /// This is similar to `self as usize`, except that the [provenance][crate::ptr#provenance] of
    /// the pointer is discarded and not [exposed][crate::ptr#exposed-provenance]. This means that
    /// casting the returned address back to a pointer yields a [pointer without
    /// provenance][without_provenance_mut], which is undefined behavior to dereference. To properly
    /// restore the lost information and obtain a dereferenceable pointer, use
    /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
    ///
    /// If using those APIs is not possible because there is no way to preserve a pointer with the
    /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts
    /// or [`expose_provenance`][pointer::expose_provenance] and [`with_exposed_provenance`][with_exposed_provenance]
    /// instead. However, note that this makes your code less portable and less amenable to tools
    /// that check for compliance with the Rust memory model.
    ///
    /// On most platforms this will produce a value with the same bytes as the original
    /// pointer, because all the bytes are dedicated to describing the address.
    /// Platforms which need to store additional information in the pointer may
    /// perform a change of representation to produce a value containing only the address
    /// portion of the pointer. What that means is up to the platform to define.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn addr(self) -> usize {
        // A pointer-to-integer transmute currently has exactly the right semantics: it returns the
        // address without exposing the provenance. Note that this is *not* a stable guarantee about
        // transmute semantics, it relies on sysroot crates having special status.
        // The `cast::<()>()` first discards any metadata so that only a thin data
        // pointer (usize-sized) is transmuted.
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        unsafe { mem::transmute(self.cast::<()>()) }
    }
160
161    /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
162    /// [`with_exposed_provenance_mut`] and returns the "address" portion.
163    ///
164    /// This is equivalent to `self as usize`, which semantically discards provenance information.
165    /// Furthermore, this (like the `as` cast) has the implicit side-effect of marking the
166    /// provenance as 'exposed', so on platforms that support it you can later call
167    /// [`with_exposed_provenance_mut`] to reconstitute the original pointer including its provenance.
168    ///
169    /// Due to its inherent ambiguity, [`with_exposed_provenance_mut`] may not be supported by tools
170    /// that help you to stay conformant with the Rust memory model. It is recommended to use
171    /// [Strict Provenance][crate::ptr#strict-provenance] APIs such as [`with_addr`][pointer::with_addr]
172    /// wherever possible, in which case [`addr`][pointer::addr] should be used instead of `expose_provenance`.
173    ///
174    /// On most platforms this will produce a value with the same bytes as the original pointer,
175    /// because all the bytes are dedicated to describing the address. Platforms which need to store
176    /// additional information in the pointer may not support this operation, since the 'expose'
177    /// side-effect which is required for [`with_exposed_provenance_mut`] to work is typically not
178    /// available.
179    ///
180    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
181    ///
182    /// [`with_exposed_provenance_mut`]: with_exposed_provenance_mut
183    #[inline(always)]
184    #[stable(feature = "exposed_provenance", since = "1.84.0")]
185    pub fn expose_provenance(self) -> usize {
186        self.cast::<()>() as usize
187    }
188
189    /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
190    /// `self`.
191    ///
192    /// This is similar to a `addr as *mut T` cast, but copies
193    /// the *provenance* of `self` to the new pointer.
194    /// This avoids the inherent ambiguity of the unary cast.
195    ///
196    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
197    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
198    ///
199    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
200    #[must_use]
201    #[inline]
202    #[stable(feature = "strict_provenance", since = "1.84.0")]
203    pub fn with_addr(self, addr: usize) -> Self {
204        // This should probably be an intrinsic to avoid doing any sort of arithmetic, but
205        // meanwhile, we can implement it with `wrapping_offset`, which preserves the pointer's
206        // provenance.
207        let self_addr = self.addr() as isize;
208        let dest_addr = addr as isize;
209        let offset = dest_addr.wrapping_sub(self_addr);
210        self.wrapping_byte_offset(offset)
211    }
212
213    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the original
214    /// pointer's [provenance][crate::ptr#provenance].
215    ///
216    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
217    ///
218    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
219    #[must_use]
220    #[inline]
221    #[stable(feature = "strict_provenance", since = "1.84.0")]
222    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
223        self.with_addr(f(self.addr()))
224    }
225
    /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
    ///
    /// The pointer can be later reconstructed with [`from_raw_parts_mut`].
    #[unstable(feature = "ptr_metadata", issue = "81513")]
    #[inline]
    pub const fn to_raw_parts(self) -> (*mut (), <T as super::Pointee>::Metadata) {
        // `cast` (here to `*mut ()`) drops the metadata component, while
        // `metadata` extracts it; together they split the pointer losslessly.
        (self.cast(), super::metadata(self))
    }
234
235    /// Returns `None` if the pointer is null, or else returns a shared reference to
236    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
237    /// must be used instead.
238    ///
239    /// For the mutable counterpart see [`as_mut`].
240    ///
241    /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
242    /// [`as_mut`]: #method.as_mut
243    ///
244    /// # Safety
245    ///
246    /// When calling this method, you have to ensure that *either* the pointer is null *or*
247    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
248    ///
249    /// # Panics during const evaluation
250    ///
251    /// This method will panic during const evaluation if the pointer cannot be
252    /// determined to be null or not. See [`is_null`] for more information.
253    ///
254    /// [`is_null`]: #method.is_null-1
255    ///
256    /// # Examples
257    ///
258    /// ```
259    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
260    ///
261    /// unsafe {
262    ///     if let Some(val_back) = ptr.as_ref() {
263    ///         println!("We got back the value: {val_back}!");
264    ///     }
265    /// }
266    /// ```
267    ///
268    /// # Null-unchecked version
269    ///
270    /// If you are sure the pointer can never be null and are looking for some kind of
271    /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
272    /// dereference the pointer directly.
273    ///
274    /// ```
275    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
276    ///
277    /// unsafe {
278    ///     let val_back = &*ptr;
279    ///     println!("We got back the value: {val_back}!");
280    /// }
281    /// ```
282    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
283    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
284    #[inline]
285    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
286        // SAFETY: the caller must guarantee that `self` is valid for a
287        // reference if it isn't null.
288        if self.is_null() { None } else { unsafe { Some(&*self) } }
289    }
290
    /// Returns a shared reference to the value behind the pointer.
    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_ref`] must be used instead.
    /// If the pointer may be null, but the value is known to have been initialized, [`as_ref`] must be used instead.
    ///
    /// For the mutable counterpart see [`as_mut_unchecked`].
    ///
    /// [`as_ref`]: #method.as_ref
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    /// [`as_mut_unchecked`]: #method.as_mut_unchecked
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_ref_unchecked)]
    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
    ///
    /// unsafe {
    ///     println!("We got back the value: {}!", ptr.as_ref_unchecked());
    /// }
    /// ```
    // FIXME: mention it in the docs for `as_ref` and `as_uninit_ref` once stabilized.
    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
    #[inline]
    #[must_use]
    pub const unsafe fn as_ref_unchecked<'a>(self) -> &'a T {
        // SAFETY: the caller must guarantee that `self` is valid for a reference
        // (non-null, aligned, and pointing to an initialized `T`); no null check
        // is performed here, unlike in `as_ref`.
        unsafe { &*self }
    }
323
    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// that the value has to be initialized.
    ///
    /// For the mutable counterpart see [`as_uninit_mut`].
    ///
    /// [`as_ref`]: pointer#method.as_ref-1
    /// [`as_uninit_mut`]: #method.as_uninit_mut
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    /// Note that because the created reference is to `MaybeUninit<T>`, the
    /// source pointer can point to uninitialized memory.
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null-1
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_uninit_ref() {
    ///         println!("We got back the value: {}!", val_back.assume_init());
    ///     }
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
    where
        T: Sized,
    {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference. The cast to `MaybeUninit<T>` is what
        // permits the pointee to be uninitialized: no initialization is asserted.
        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
    }
370
    /// Adds a signed offset to a pointer.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_offset`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_offset`]: #method.wrapping_offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(2, *ptr.offset(1));
    ///     assert_eq!(3, *ptr.offset(2));
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset(self, count: isize) -> *mut T
    where
        T: Sized,
    {
        // Helper for the UB check below: returns `true` when the byte-offset
        // computation cannot overflow the address space.
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
            // We can use const_eval_select here because this is only for UB checks.
            const_eval_select!(
                @capture { this: *const (), count: isize, size: usize } -> bool:
                if const {
                    // The address-based overflow check can only be performed at runtime;
                    // during const evaluation the check trivially passes.
                    true
                } else {
                    // `size` is the size of a Rust type, so we know that
                    // `size <= isize::MAX` and thus `as` cast here is not lossy.
                    let Some(byte_offset) = count.checked_mul(size as isize) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
                    !overflow
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset requires the address calculation to not overflow",
            (
                this: *const () = self as *const (),
                count: isize = count,
                size: usize = size_of::<T>(),
            ) => runtime_offset_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        // The obtained pointer is valid for writes since the caller must
        // guarantee that it points to the same allocated object as `self`.
        unsafe { intrinsics::offset(self, count) }
    }
455
456    /// Adds a signed offset in bytes to a pointer.
457    ///
458    /// `count` is in units of **bytes**.
459    ///
460    /// This is purely a convenience for casting to a `u8` pointer and
461    /// using [offset][pointer::offset] on it. See that method for documentation
462    /// and safety requirements.
463    ///
464    /// For non-`Sized` pointees this operation changes only the data pointer,
465    /// leaving the metadata untouched.
466    #[must_use]
467    #[inline(always)]
468    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
469    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
470    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
471    pub const unsafe fn byte_offset(self, count: isize) -> Self {
472        // SAFETY: the caller must uphold the safety contract for `offset`.
473        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
474    }
475
    /// Adds a signed offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to
    /// (this is called "[Provenance](ptr/index.html#provenance)").
    /// The pointer must not be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying within the
    /// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
    /// words, leaving the allocated object and then re-entering it later is permitted.
    ///
    /// [`offset`]: #method.offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// // Iterate using a raw pointer in increments of two elements
    /// let mut data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *mut u8 = data.as_mut_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         *ptr = 0;
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_offset(self, count: isize) -> *mut T
    where
        T: Sized,
    {
        // `arith_offset` performs the offset without the in-bounds requirement
        // that the `offset` intrinsic imposes; the result merely keeps `self`'s
        // provenance.
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
        unsafe { intrinsics::arith_offset(self, count) as *mut T }
    }
536
537    /// Adds a signed offset in bytes to a pointer using wrapping arithmetic.
538    ///
539    /// `count` is in units of **bytes**.
540    ///
541    /// This is purely a convenience for casting to a `u8` pointer and
542    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
543    /// for documentation.
544    ///
545    /// For non-`Sized` pointees this operation changes only the data pointer,
546    /// leaving the metadata untouched.
547    #[must_use]
548    #[inline(always)]
549    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
550    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
551    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
552        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
553    }
554
    /// Masks out bits of the pointer according to a mask.
    ///
    /// This is convenience for `ptr.map_addr(|a| a & mask)`.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    ///
    /// ## Examples
    ///
    /// ```
    /// #![feature(ptr_mask)]
    /// let mut v = 17_u32;
    /// let ptr: *mut u32 = &mut v;
    ///
    /// // `u32` is 4 bytes aligned,
    /// // which means that lower 2 bits are always 0.
    /// let tag_mask = 0b11;
    /// let ptr_mask = !tag_mask;
    ///
    /// // We can store something in these lower bits
    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
    ///
    /// // Get the "tag" back
    /// let tag = tagged_ptr.addr() & tag_mask;
    /// assert_eq!(tag, 0b10);
    ///
    /// // Note that `tagged_ptr` is unaligned, it's UB to read from/write to it.
    /// // To get original pointer `mask` can be used:
    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
    /// assert_eq!(unsafe { *masked_ptr }, 17);
    ///
    /// unsafe { *masked_ptr = 0 };
    /// assert_eq!(v, 0);
    /// ```
    #[unstable(feature = "ptr_mask", issue = "98290")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline(always)]
    pub fn mask(self, mask: usize) -> *mut T {
        // `ptr_mask` operates on thin `*const ()` pointers, so strip the metadata
        // first, restore mutability with `cast_mut`, and reattach the metadata last.
        intrinsics::ptr_mask(self.cast::<()>(), mask).cast_mut().with_metadata_of(self)
    }
595
    /// Returns `None` if the pointer is null, or else returns a unique reference to
    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_mut`]
    /// must be used instead.
    ///
    /// For the shared counterpart see [`as_ref`].
    ///
    /// [`as_uninit_mut`]: #method.as_uninit_mut
    /// [`as_ref`]: pointer#method.as_ref-1
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either*
    /// the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null-1
    ///
    /// # Examples
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { ptr.as_mut().unwrap() };
    /// *first_value = 4;
    /// # assert_eq!(s, [4, 2, 3]);
    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_mut_unchecked` that returns the `&mut T` instead of `Option<&mut T>`, know that
    /// you can dereference the pointer directly.
    ///
    /// ```
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { &mut *ptr };
    /// *first_value = 4;
    /// # assert_eq!(s, [4, 2, 3]);
    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[inline]
    pub const unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
        // SAFETY: the caller must guarantee that `self` is valid for
        // a mutable reference if it isn't null.
        if self.is_null() { None } else { unsafe { Some(&mut *self) } }
    }
651
    /// Returns a unique reference to the value behind the pointer.
    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_mut`] must be used instead.
    /// If the pointer may be null, but the value is known to have been initialized, [`as_mut`] must be used instead.
    ///
    /// For the shared counterpart see [`as_ref_unchecked`].
    ///
    /// [`as_mut`]: #method.as_mut
    /// [`as_uninit_mut`]: #method.as_uninit_mut
    /// [`as_ref_unchecked`]: #method.as_ref_unchecked
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_ref_unchecked)]
    /// let mut s = [1, 2, 3];
    /// let ptr: *mut u32 = s.as_mut_ptr();
    /// let first_value = unsafe { ptr.as_mut_unchecked() };
    /// *first_value = 4;
    /// # assert_eq!(s, [4, 2, 3]);
    /// println!("{s:?}"); // It'll print: "[4, 2, 3]".
    /// ```
    // FIXME: mention it in the docs for `as_mut` and `as_uninit_mut` once stabilized.
    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
    #[inline]
    #[must_use]
    pub const unsafe fn as_mut_unchecked<'a>(self) -> &'a mut T {
        // SAFETY: the caller must guarantee that `self` is valid for a reference;
        // no null check is performed here, unlike in `as_mut`.
        unsafe { &mut *self }
    }
686
    /// Returns `None` if the pointer is null, or else returns a unique reference to
    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
    /// that the value has to be initialized.
    ///
    /// For the shared counterpart see [`as_uninit_ref`].
    ///
    /// [`as_mut`]: #method.as_mut
    /// [`as_uninit_ref`]: pointer#method.as_uninit_ref-1
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null-1
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_mut<'a>(self) -> Option<&'a mut MaybeUninit<T>>
    where
        T: Sized,
    {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference. The cast to `MaybeUninit<T>` is what
        // permits the pointee to be uninitialized: no initialization is asserted.
        if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit<T>) }) }
    }
717
718    /// Returns whether two pointers are guaranteed to be equal.
719    ///
720    /// At runtime this function behaves like `Some(self == other)`.
721    /// However, in some contexts (e.g., compile-time evaluation),
722    /// it is not always possible to determine equality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their equality known.
724    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
725    ///
726    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
727    /// version and unsafe code must not
728    /// rely on the result of this function for soundness. It is suggested to only use this function
729    /// for performance optimizations where spurious `None` return values by this function do not
730    /// affect the outcome, but just the performance.
731    /// The consequences of using this method to make runtime and compile-time code behave
732    /// differently have not been explored. This method should not be used to introduce such
733    /// differences, and it should also not be stabilized before we have a better understanding
734    /// of this issue.
735    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
736    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
737    #[inline]
738    pub const fn guaranteed_eq(self, other: *mut T) -> Option<bool>
739    where
740        T: Sized,
741    {
742        (self as *const T).guaranteed_eq(other as _)
743    }
744
745    /// Returns whether two pointers are guaranteed to be inequal.
746    ///
747    /// At runtime this function behaves like `Some(self != other)`.
748    /// However, in some contexts (e.g., compile-time evaluation),
749    /// it is not always possible to determine inequality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their inequality known.
751    /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
752    ///
753    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
754    /// version and unsafe code must not
755    /// rely on the result of this function for soundness. It is suggested to only use this function
756    /// for performance optimizations where spurious `None` return values by this function do not
757    /// affect the outcome, but just the performance.
758    /// The consequences of using this method to make runtime and compile-time code behave
759    /// differently have not been explored. This method should not be used to introduce such
760    /// differences, and it should also not be stabilized before we have a better understanding
761    /// of this issue.
762    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
763    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
764    #[inline]
765    pub const fn guaranteed_ne(self, other: *mut T) -> Option<bool>
766    where
767        T: Sized,
768    {
769        (self as *const T).guaranteed_ne(other as _)
770    }
771
772    /// Calculates the distance between two pointers within the same allocation. The returned value is in
773    /// units of T: the distance in bytes divided by `size_of::<T>()`.
774    ///
775    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
776    /// except that it has a lot more opportunities for UB, in exchange for the compiler
777    /// better understanding what you are doing.
778    ///
779    /// The primary motivation of this method is for computing the `len` of an array/slice
780    /// of `T` that you are currently representing as a "start" and "end" pointer
781    /// (and "end" is "one past the end" of the array).
782    /// In that case, `end.offset_from(start)` gets you the length of the array.
783    ///
784    /// All of the following safety requirements are trivially satisfied for this usecase.
785    ///
786    /// [`offset`]: pointer#method.offset-1
787    ///
788    /// # Safety
789    ///
790    /// If any of the following conditions are violated, the result is Undefined Behavior:
791    ///
792    /// * `self` and `origin` must either
793    ///
794    ///   * point to the same address, or
795    ///   * both be [derived from][crate::ptr#provenance] a pointer to the same [allocated object], and the memory range between
796    ///     the two pointers must be in bounds of that object. (See below for an example.)
797    ///
798    /// * The distance between the pointers, in bytes, must be an exact multiple
799    ///   of the size of `T`.
800    ///
801    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
802    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
803    /// implied by the in-bounds requirement, and the fact that no allocated object can be larger
804    /// than `isize::MAX` bytes.
805    ///
806    /// The requirement for pointers to be derived from the same allocated object is primarily
807    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
808    /// objects is not known at compile-time. However, the requirement also exists at
809    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
810    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
811    /// origin as isize) / size_of::<T>()`.
812    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
813    ///
814    /// [`add`]: #method.add
815    /// [allocated object]: crate::ptr#allocated-object
816    ///
817    /// # Panics
818    ///
819    /// This function panics if `T` is a Zero-Sized Type ("ZST").
820    ///
821    /// # Examples
822    ///
823    /// Basic usage:
824    ///
825    /// ```
826    /// let mut a = [0; 5];
827    /// let ptr1: *mut i32 = &mut a[1];
828    /// let ptr2: *mut i32 = &mut a[3];
829    /// unsafe {
830    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
831    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
832    ///     assert_eq!(ptr1.offset(2), ptr2);
833    ///     assert_eq!(ptr2.offset(-2), ptr1);
834    /// }
835    /// ```
836    ///
837    /// *Incorrect* usage:
838    ///
839    /// ```rust,no_run
840    /// let ptr1 = Box::into_raw(Box::new(0u8));
841    /// let ptr2 = Box::into_raw(Box::new(1u8));
842    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
843    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
844    /// let ptr2_other = (ptr1 as *mut u8).wrapping_offset(diff).wrapping_offset(1);
845    /// assert_eq!(ptr2 as usize, ptr2_other as usize);
846    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
847    /// // computing their offset is undefined behavior, even though
848    /// // they point to addresses that are in-bounds of the same object!
849    /// unsafe {
850    ///     let one = ptr2_other.offset_from(ptr2); // Undefined Behavior! ⚠️
851    /// }
852    /// ```
853    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
854    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
855    #[inline(always)]
856    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
857    pub const unsafe fn offset_from(self, origin: *const T) -> isize
858    where
859        T: Sized,
860    {
861        // SAFETY: the caller must uphold the safety contract for `offset_from`.
862        unsafe { (self as *const T).offset_from(origin) }
863    }
864
865    /// Calculates the distance between two pointers within the same allocation. The returned value is in
866    /// units of **bytes**.
867    ///
868    /// This is purely a convenience for casting to a `u8` pointer and
869    /// using [`offset_from`][pointer::offset_from] on it. See that method for
870    /// documentation and safety requirements.
871    ///
872    /// For non-`Sized` pointees this operation considers only the data pointers,
873    /// ignoring the metadata.
874    #[inline(always)]
875    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
876    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
877    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
878    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
879        // SAFETY: the caller must uphold the safety contract for `offset_from`.
880        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
881    }
882
883    /// Calculates the distance between two pointers within the same allocation, *where it's known that
884    /// `self` is equal to or greater than `origin`*. The returned value is in
885    /// units of T: the distance in bytes is divided by `size_of::<T>()`.
886    ///
887    /// This computes the same value that [`offset_from`](#method.offset_from)
888    /// would compute, but with the added precondition that the offset is
889    /// guaranteed to be non-negative.  This method is equivalent to
890    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
891    /// but it provides slightly more information to the optimizer, which can
892    /// sometimes allow it to optimize slightly better with some backends.
893    ///
894    /// This method can be thought of as recovering the `count` that was passed
895    /// to [`add`](#method.add) (or, with the parameters in the other order,
896    /// to [`sub`](#method.sub)).  The following are all equivalent, assuming
897    /// that their safety preconditions are met:
898    /// ```rust
899    /// # unsafe fn blah(ptr: *mut i32, origin: *mut i32, count: usize) -> bool { unsafe {
900    /// ptr.offset_from_unsigned(origin) == count
901    /// # &&
902    /// origin.add(count) == ptr
903    /// # &&
904    /// ptr.sub(count) == origin
905    /// # } }
906    /// ```
907    ///
908    /// # Safety
909    ///
910    /// - The distance between the pointers must be non-negative (`self >= origin`)
911    ///
912    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
913    ///   apply to this method as well; see it for the full details.
914    ///
915    /// Importantly, despite the return type of this method being able to represent
916    /// a larger offset, it's still *not permitted* to pass pointers which differ
917    /// by more than `isize::MAX` *bytes*.  As such, the result of this method will
918    /// always be less than or equal to `isize::MAX as usize`.
919    ///
920    /// # Panics
921    ///
922    /// This function panics if `T` is a Zero-Sized Type ("ZST").
923    ///
924    /// # Examples
925    ///
926    /// ```
927    /// let mut a = [0; 5];
928    /// let p: *mut i32 = a.as_mut_ptr();
929    /// unsafe {
930    ///     let ptr1: *mut i32 = p.add(1);
931    ///     let ptr2: *mut i32 = p.add(3);
932    ///
933    ///     assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
934    ///     assert_eq!(ptr1.add(2), ptr2);
935    ///     assert_eq!(ptr2.sub(2), ptr1);
936    ///     assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
937    /// }
938    ///
939    /// // This would be incorrect, as the pointers are not correctly ordered:
940    /// // ptr1.offset_from(ptr2)
941    /// ```
942    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
943    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
944    #[inline]
945    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
946    pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize
947    where
948        T: Sized,
949    {
950        // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
951        unsafe { (self as *const T).offset_from_unsigned(origin) }
952    }
953
954    /// Calculates the distance between two pointers within the same allocation, *where it's known that
955    /// `self` is equal to or greater than `origin`*. The returned value is in
956    /// units of **bytes**.
957    ///
958    /// This is purely a convenience for casting to a `u8` pointer and
959    /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it.
960    /// See that method for documentation and safety requirements.
961    ///
962    /// For non-`Sized` pointees this operation considers only the data pointers,
963    /// ignoring the metadata.
964    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
965    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
966    #[inline]
967    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
968    pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: *mut U) -> usize {
969        // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`.
970        unsafe { (self as *const T).byte_offset_from_unsigned(origin) }
971    }
972
973    /// Adds an unsigned offset to a pointer.
974    ///
975    /// This can only move the pointer forward (or not move it). If you need to move forward or
976    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
977    /// which takes a signed offset.
978    ///
979    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
980    /// offset of `3 * size_of::<T>()` bytes.
981    ///
982    /// # Safety
983    ///
984    /// If any of the following conditions are violated, the result is Undefined Behavior:
985    ///
986    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
987    ///   "wrapping around"), must fit in an `isize`.
988    ///
989    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
990    ///   [allocated object], and the entire memory range between `self` and the result must be in
991    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
992    ///   of the address space.
993    ///
994    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
995    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
996    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
997    /// safe.
998    ///
999    /// Consider using [`wrapping_add`] instead if these constraints are
1000    /// difficult to satisfy. The only advantage of this method is that it
1001    /// enables more aggressive compiler optimizations.
1002    ///
1003    /// [`wrapping_add`]: #method.wrapping_add
1004    /// [allocated object]: crate::ptr#allocated-object
1005    ///
1006    /// # Examples
1007    ///
1008    /// ```
1009    /// let s: &str = "123";
1010    /// let ptr: *const u8 = s.as_ptr();
1011    ///
1012    /// unsafe {
1013    ///     assert_eq!('2', *ptr.add(1) as char);
1014    ///     assert_eq!('3', *ptr.add(2) as char);
1015    /// }
1016    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // UB-check helper: at compile time addresses cannot be observed, so the
        // check is vacuously `true` there; at runtime it verifies that the byte
        // offset fits in `isize` and that adding it to the address cannot wrap
        // around the address space.
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
                    byte_offset <= (isize::MAX as usize) && !overflow
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::add requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_add_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }
1058
1059    /// Adds an unsigned offset in bytes to a pointer.
1060    ///
1061    /// `count` is in units of bytes.
1062    ///
1063    /// This is purely a convenience for casting to a `u8` pointer and
1064    /// using [add][pointer::add] on it. See that method for documentation
1065    /// and safety requirements.
1066    ///
1067    /// For non-`Sized` pointees this operation changes only the data pointer,
1068    /// leaving the metadata untouched.
1069    #[must_use]
1070    #[inline(always)]
1071    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1072    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1073    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1074    pub const unsafe fn byte_add(self, count: usize) -> Self {
1075        // SAFETY: the caller must uphold the safety contract for `add`.
1076        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
1077    }
1078
1079    /// Subtracts an unsigned offset from a pointer.
1080    ///
1081    /// This can only move the pointer backward (or not move it). If you need to move forward or
1082    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
1083    /// which takes a signed offset.
1084    ///
1085    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1086    /// offset of `3 * size_of::<T>()` bytes.
1087    ///
1088    /// # Safety
1089    ///
1090    /// If any of the following conditions are violated, the result is Undefined Behavior:
1091    ///
1092    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
1093    ///   "wrapping around"), must fit in an `isize`.
1094    ///
1095    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
1096    ///   [allocated object], and the entire memory range between `self` and the result must be in
1097    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
1098    ///   of the address space.
1099    ///
1100    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
1101    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
1102    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
1103    /// safe.
1104    ///
1105    /// Consider using [`wrapping_sub`] instead if these constraints are
1106    /// difficult to satisfy. The only advantage of this method is that it
1107    /// enables more aggressive compiler optimizations.
1108    ///
1109    /// [`wrapping_sub`]: #method.wrapping_sub
1110    /// [allocated object]: crate::ptr#allocated-object
1111    ///
1112    /// # Examples
1113    ///
1114    /// ```
1115    /// let s: &str = "123";
1116    ///
1117    /// unsafe {
1118    ///     let end: *const u8 = s.as_ptr().add(3);
1119    ///     assert_eq!('3', *end.sub(1) as char);
1120    ///     assert_eq!('2', *end.sub(2) as char);
1121    /// }
1122    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        // UB-check helper: at compile time addresses cannot be observed, so the
        // check is vacuously `true` there; at runtime it verifies that the byte
        // offset fits in `isize` and that the address is large enough for the
        // subtraction not to wrap below zero.
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::sub requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_sub_nowrap(this, count, size)
        );

        if T::IS_ZST {
            // Pointer arithmetic does nothing when the pointee is a ZST.
            self
        } else {
            // SAFETY: the caller must uphold the safety contract for `offset`.
            // Because the pointee is *not* a ZST, that means that `count` is
            // at most `isize::MAX`, and thus the negation cannot overflow.
            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
        }
    }
1170
1171    /// Subtracts an unsigned offset in bytes from a pointer.
1172    ///
1173    /// `count` is in units of bytes.
1174    ///
1175    /// This is purely a convenience for casting to a `u8` pointer and
1176    /// using [sub][pointer::sub] on it. See that method for documentation
1177    /// and safety requirements.
1178    ///
1179    /// For non-`Sized` pointees this operation changes only the data pointer,
1180    /// leaving the metadata untouched.
1181    #[must_use]
1182    #[inline(always)]
1183    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1184    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1185    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1186    pub const unsafe fn byte_sub(self, count: usize) -> Self {
1187        // SAFETY: the caller must uphold the safety contract for `sub`.
1188        unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
1189    }
1190
1191    /// Adds an unsigned offset to a pointer using wrapping arithmetic.
1192    ///
1193    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1194    /// offset of `3 * size_of::<T>()` bytes.
1195    ///
1196    /// # Safety
1197    ///
1198    /// This operation itself is always safe, but using the resulting pointer is not.
1199    ///
1200    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
1201    /// be used to read or write other allocated objects.
1202    ///
1203    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
1204    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1205    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1206    /// `x` and `y` point into the same allocated object.
1207    ///
1208    /// Compared to [`add`], this method basically delays the requirement of staying within the
1209    /// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
1210    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
1211    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
1212    /// can be optimized better and is thus preferable in performance-sensitive code.
1213    ///
1214    /// The delayed check only considers the value of the pointer that was dereferenced, not the
1215    /// intermediate values used during the computation of the final result. For example,
1216    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1217    /// allocated object and then re-entering it later is permitted.
1218    ///
1219    /// [`add`]: #method.add
1220    /// [allocated object]: crate::ptr#allocated-object
1221    ///
1222    /// # Examples
1223    ///
1224    /// ```
1225    /// // Iterate using a raw pointer in increments of two elements
1226    /// let data = [1u8, 2, 3, 4, 5];
1227    /// let mut ptr: *const u8 = data.as_ptr();
1228    /// let step = 2;
1229    /// let end_rounded_up = ptr.wrapping_add(6);
1230    ///
1231    /// // This loop prints "1, 3, 5, "
1232    /// while ptr != end_rounded_up {
1233    ///     unsafe {
1234    ///         print!("{}, ", *ptr);
1235    ///     }
1236    ///     ptr = ptr.wrapping_add(step);
1237    /// }
1238    /// ```
1239    #[stable(feature = "pointer_methods", since = "1.26.0")]
1240    #[must_use = "returns a new pointer rather than modifying its argument"]
1241    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1242    #[inline(always)]
1243    pub const fn wrapping_add(self, count: usize) -> Self
1244    where
1245        T: Sized,
1246    {
1247        self.wrapping_offset(count as isize)
1248    }
1249
1250    /// Adds an unsigned offset in bytes to a pointer using wrapping arithmetic.
1251    ///
1252    /// `count` is in units of bytes.
1253    ///
1254    /// This is purely a convenience for casting to a `u8` pointer and
1255    /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
1256    ///
1257    /// For non-`Sized` pointees this operation changes only the data pointer,
1258    /// leaving the metadata untouched.
1259    #[must_use]
1260    #[inline(always)]
1261    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1262    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1263    pub const fn wrapping_byte_add(self, count: usize) -> Self {
1264        self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
1265    }
1266
1267    /// Subtracts an unsigned offset from a pointer using wrapping arithmetic.
1268    ///
1269    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
1270    /// offset of `3 * size_of::<T>()` bytes.
1271    ///
1272    /// # Safety
1273    ///
1274    /// This operation itself is always safe, but using the resulting pointer is not.
1275    ///
1276    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
1277    /// be used to read or write other allocated objects.
1278    ///
1279    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
1280    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
1281    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
1282    /// `x` and `y` point into the same allocated object.
1283    ///
1284    /// Compared to [`sub`], this method basically delays the requirement of staying within the
1285    /// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object
1286    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
1287    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
1288    /// can be optimized better and is thus preferable in performance-sensitive code.
1289    ///
1290    /// The delayed check only considers the value of the pointer that was dereferenced, not the
1291    /// intermediate values used during the computation of the final result. For example,
1292    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
1293    /// allocated object and then re-entering it later is permitted.
1294    ///
1295    /// [`sub`]: #method.sub
1296    /// [allocated object]: crate::ptr#allocated-object
1297    ///
1298    /// # Examples
1299    ///
1300    /// ```
1301    /// // Iterate using a raw pointer in increments of two elements (backwards)
1302    /// let data = [1u8, 2, 3, 4, 5];
1303    /// let mut ptr: *const u8 = data.as_ptr();
1304    /// let start_rounded_down = ptr.wrapping_sub(2);
1305    /// ptr = ptr.wrapping_add(4);
1306    /// let step = 2;
1307    /// // This loop prints "5, 3, 1, "
1308    /// while ptr != start_rounded_down {
1309    ///     unsafe {
1310    ///         print!("{}, ", *ptr);
1311    ///     }
1312    ///     ptr = ptr.wrapping_sub(step);
1313    /// }
1314    /// ```
1315    #[stable(feature = "pointer_methods", since = "1.26.0")]
1316    #[must_use = "returns a new pointer rather than modifying its argument"]
1317    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
1318    #[inline(always)]
1319    pub const fn wrapping_sub(self, count: usize) -> Self
1320    where
1321        T: Sized,
1322    {
1323        self.wrapping_offset((count as isize).wrapping_neg())
1324    }
1325
1326    /// Subtracts an unsigned offset in bytes from a pointer using wrapping arithmetic.
1327    ///
1328    /// `count` is in units of bytes.
1329    ///
1330    /// This is purely a convenience for casting to a `u8` pointer and
1331    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
1332    ///
1333    /// For non-`Sized` pointees this operation changes only the data pointer,
1334    /// leaving the metadata untouched.
1335    #[must_use]
1336    #[inline(always)]
1337    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
1338    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
1339    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
1340        self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
1341    }
1342
1343    /// Reads the value from `self` without moving it. This leaves the
1344    /// memory in `self` unchanged.
1345    ///
1346    /// See [`ptr::read`] for safety concerns and examples.
1347    ///
1348    /// [`ptr::read`]: crate::ptr::read()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read`.
        unsafe { read(self) }
    }
1360
    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // Thin wrapper: all semantics and requirements are documented on the
        // free function. Note this is not `const`, unlike `read`.
        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
        unsafe { read_volatile(self) }
    }
1381
    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // Forwards to the free function; unlike `read`, there is no alignment
        // requirement on `self` (see the doc comment above).
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
        unsafe { read_unaligned(self) }
    }
1401
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // `self` is the *source* here and `dest` the destination.
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(self, dest, count) }
    }
1421
    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // `self` is the *source* here and `dest` the destination; the ranges
        // must be disjoint (see the doc comment above).
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }
1441
    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_from(self, src: *const T, count: usize)
    where
        T: Sized,
    {
        // `src` is the source and `self` the *destination* — note the swapped
        // argument order relative to the free function.
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(src, self, count) }
    }
1461
    /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
    where
        T: Sized,
    {
        // `src` is the source and `self` the *destination* — note the swapped
        // argument order relative to the free function.
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(src, self, count) }
    }
1481
    /// Executes the destructor (if any) of the pointed-to value.
    ///
    /// See [`ptr::drop_in_place`] for safety concerns and examples.
    ///
    /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    pub unsafe fn drop_in_place(self) {
        // Runs the destructor in place; the memory itself is not deallocated.
        // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
        unsafe { drop_in_place(self) }
    }
1493
    /// Overwrites a memory location with the given value without reading or
    /// dropping the old value.
    ///
    /// See [`ptr::write`] for safety concerns and examples.
    ///
    /// [`ptr::write`]: crate::ptr::write()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn write(self, val: T)
    where
        T: Sized,
    {
        // Takes ownership of `val`; the previous contents of `*self` are
        // neither read nor dropped (see the doc comment above).
        // SAFETY: the caller must uphold the safety contract for `write`.
        unsafe { write(self, val) }
    }
1511
    /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
    /// bytes of memory starting at `self` to `val`.
    ///
    /// See [`ptr::write_bytes`] for safety concerns and examples.
    ///
    /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
    #[doc(alias = "memset")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn write_bytes(self, val: u8, count: usize)
    where
        T: Sized,
    {
        // `count` is in units of `T`: the fill covers `count * size_of::<T>()`
        // bytes, not `count` bytes.
        // SAFETY: the caller must uphold the safety contract for `write_bytes`.
        unsafe { write_bytes(self, val, count) }
    }
1530
    /// Performs a volatile write of a memory location with the given value without
    /// reading or dropping the old value.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::write_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub unsafe fn write_volatile(self, val: T)
    where
        T: Sized,
    {
        // Thin wrapper around the free function. Note this is not a `const fn`,
        // unlike the plain `write` above.
        // SAFETY: the caller must uphold the safety contract for `write_volatile`.
        unsafe { write_volatile(self, val) }
    }
1551
    /// Overwrites a memory location with the given value without reading or
    /// dropping the old value.
    ///
    /// Unlike `write`, the pointer may be unaligned.
    ///
    /// See [`ptr::write_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn write_unaligned(self, val: T)
    where
        T: Sized,
    {
        // Forwards to the free function; unlike `write`, there is no alignment
        // requirement on `self` (see the doc comment above).
        // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
        unsafe { write_unaligned(self, val) }
    }
1571
    /// Replaces the value at `self` with `src`, returning the old
    /// value, without dropping either.
    ///
    /// See [`ptr::replace`] for safety concerns and examples.
    ///
    /// [`ptr::replace`]: crate::ptr::replace()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")]
    #[inline(always)]
    pub const unsafe fn replace(self, src: T) -> T
    where
        T: Sized,
    {
        // Moves `src` into `*self` and returns the previous value; neither
        // value is dropped (see the doc comment above).
        // SAFETY: the caller must uphold the safety contract for `replace`.
        unsafe { replace(self, src) }
    }
1588
    /// Swaps the values at two mutable locations of the same type, without
    /// deinitializing either. They may overlap, unlike `mem::swap` which is
    /// otherwise equivalent.
    ///
    /// See [`ptr::swap`] for safety concerns and examples.
    ///
    /// [`ptr::swap`]: crate::ptr::swap()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_swap", since = "1.85.0")]
    #[inline(always)]
    pub const unsafe fn swap(self, with: *mut T)
    where
        T: Sized,
    {
        // Unlike `mem::swap`, the two locations are allowed to overlap
        // (see the doc comment above).
        // SAFETY: the caller must uphold the safety contract for `swap`.
        unsafe { swap(self, with) }
    }
1606
    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// # unsafe {
    /// let mut x = [5_u8, 6, 7, 8, 9];
    /// let ptr = x.as_mut_ptr();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     *u16_ptr = 0;
    ///
    ///     assert!(x == [0, 0, 7, 8, 9] || x == [5, 0, 0, 8, 9]);
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "align_offset", since = "1.36.0")]
    pub fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        }

        // SAFETY: `align` has been checked to be a power of 2 above
        let ret = unsafe { align_offset(self, align) };

        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
        #[cfg(miri)]
        if ret != usize::MAX {
            intrinsics::miri_promise_symbolic_alignment(
                self.wrapping_add(ret).cast_const().cast(),
                align,
            );
        }

        // `usize::MAX` signals that the pointer cannot be aligned to `align`
        // (see the doc comment above).
        ret
    }
1670
    /// Returns whether the pointer is properly aligned for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let mut data = AlignedI32(42);
    /// let ptr = &mut data as *mut AlignedI32;
    ///
    /// assert!(ptr.is_aligned());
    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
    pub fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        // `align_of` always yields a power of two, so `is_aligned_to`
        // cannot panic here.
        self.is_aligned_to(align_of::<T>())
    }
1695
1696    /// Returns whether the pointer is aligned to `align`.
1697    ///
1698    /// For non-`Sized` pointees this operation considers only the data pointer,
1699    /// ignoring the metadata.
1700    ///
1701    /// # Panics
1702    ///
1703    /// The function panics if `align` is not a power-of-two (this includes 0).
1704    ///
1705    /// # Examples
1706    ///
1707    /// ```
1708    /// #![feature(pointer_is_aligned_to)]
1709    ///
1710    /// // On some platforms, the alignment of i32 is less than 4.
1711    /// #[repr(align(4))]
1712    /// struct AlignedI32(i32);
1713    ///
1714    /// let mut data = AlignedI32(42);
1715    /// let ptr = &mut data as *mut AlignedI32;
1716    ///
1717    /// assert!(ptr.is_aligned_to(1));
1718    /// assert!(ptr.is_aligned_to(2));
1719    /// assert!(ptr.is_aligned_to(4));
1720    ///
1721    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1722    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1723    ///
1724    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1725    /// ```
1726    #[must_use]
1727    #[inline]
1728    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1729    pub fn is_aligned_to(self, align: usize) -> bool {
1730        if !align.is_power_of_two() {
1731            panic!("is_aligned_to: align is not a power-of-two");
1732        }
1733
1734        self.addr() & (align - 1) == 0
1735    }
1736}
1737
impl<T> *mut [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline(always)]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn len(self) -> usize {
        // The metadata of a `*mut [T]` is its element count.
        metadata(self)
    }

    /// Returns `true` if the raw slice has a length of 0.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert!(!slice.is_empty());
    /// ```
    #[inline(always)]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn is_empty(self) -> bool {
        self.len() == 0
    }

    /// Gets a raw, mutable pointer to the underlying array.
    ///
    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
    #[unstable(feature = "slice_as_array", issue = "133508")]
    #[inline]
    #[must_use]
    pub const fn as_mut_array<const N: usize>(self) -> Option<*mut [T; N]> {
        if self.len() == N {
            // The lengths match, so reinterpreting the data pointer as a
            // pointer to an array of that length is a plain cast.
            let me = self.as_mut_ptr() as *mut [T; N];
            Some(me)
        } else {
            None
        }
    }

    /// Divides one mutable raw slice into two at an index.
    ///
    /// The first will contain all indices from `[0, mid)` (excluding
    /// the index `mid` itself) and the second will contain all
    /// indices from `[mid, len)` (excluding the index `len` itself).
    ///
    /// # Panics
    ///
    /// Panics if `mid > len`.
    ///
    /// # Safety
    ///
    /// `mid` must be [in-bounds] of the underlying [allocated object].
    /// Which means `self` must be dereferenceable and span a single allocation
    /// that is at least `mid * size_of::<T>()` bytes long. Not upholding these
    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
    ///
    /// Since `len` being in-bounds is not a safety invariant of `*mut [T]`, the
    /// safety requirements of this method are the same as for [`split_at_mut_unchecked`].
    /// The explicit bounds check is only as useful as `len` is correct.
    ///
    /// [`split_at_mut_unchecked`]: #method.split_at_mut_unchecked
    /// [in-bounds]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(raw_slice_split)]
    /// #![feature(slice_ptr_get)]
    ///
    /// let mut v = [1, 0, 3, 0, 5, 6];
    /// let ptr = &mut v as *mut [_];
    /// unsafe {
    ///     let (left, right) = ptr.split_at_mut(2);
    ///     assert_eq!(&*left, [1, 0]);
    ///     assert_eq!(&*right, [3, 0, 5, 6]);
    /// }
    /// ```
    #[inline(always)]
    #[track_caller]
    #[unstable(feature = "raw_slice_split", issue = "95595")]
    pub unsafe fn split_at_mut(self, mid: usize) -> (*mut [T], *mut [T]) {
        assert!(mid <= self.len());
        // SAFETY: The assert above is only a safety-net as long as `self.len()` is correct
        // The actual safety requirements of this function are the same as for `split_at_mut_unchecked`
        unsafe { self.split_at_mut_unchecked(mid) }
    }

    /// Divides one mutable raw slice into two at an index, without doing bounds checking.
    ///
    /// The first will contain all indices from `[0, mid)` (excluding
    /// the index `mid` itself) and the second will contain all
    /// indices from `[mid, len)` (excluding the index `len` itself).
    ///
    /// # Safety
    ///
    /// `mid` must be [in-bounds] of the underlying [allocated object].
    /// Which means `self` must be dereferenceable and span a single allocation
    /// that is at least `mid * size_of::<T>()` bytes long. Not upholding these
    /// requirements is *[undefined behavior]* even if the resulting pointers are not used.
    ///
    /// [in-bounds]: #method.add
    /// [out-of-bounds index]: #method.add
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(raw_slice_split)]
    ///
    /// let mut v = [1, 0, 3, 0, 5, 6];
    /// // scoped to restrict the lifetime of the borrows
    /// unsafe {
    ///     let ptr = &mut v as *mut [_];
    ///     let (left, right) = ptr.split_at_mut_unchecked(2);
    ///     assert_eq!(&*left, [1, 0]);
    ///     assert_eq!(&*right, [3, 0, 5, 6]);
    ///     (&mut *left)[1] = 2;
    ///     (&mut *right)[1] = 4;
    /// }
    /// assert_eq!(v, [1, 2, 3, 4, 5, 6]);
    /// ```
    #[inline(always)]
    #[unstable(feature = "raw_slice_split", issue = "95595")]
    pub unsafe fn split_at_mut_unchecked(self, mid: usize) -> (*mut [T], *mut [T]) {
        let len = self.len();
        let ptr = self.as_mut_ptr();

        // SAFETY: Caller must pass a valid pointer and an index that is in-bounds.
        let tail = unsafe { ptr.add(mid) };
        // Rebuild two slice pointers that partition the original: `mid`
        // elements in the head, the remaining `len - mid` in the tail.
        (
            crate::ptr::slice_from_raw_parts_mut(ptr, mid),
            crate::ptr::slice_from_raw_parts_mut(tail, len - mid),
        )
    }

    /// Returns a raw pointer to the slice's buffer.
    ///
    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr;
    ///
    /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
    /// assert_eq!(slice.as_mut_ptr(), ptr::null_mut());
    /// ```
    #[inline(always)]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    pub const fn as_mut_ptr(self) -> *mut T {
        // Discards the length metadata, keeping only the data pointer.
        self as *mut T
    }

    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// Calling this method with an [out-of-bounds index] or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [out-of-bounds index]: #method.add
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    ///
    /// let x = &mut [1, 2, 4] as *mut [i32];
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked_mut(1), x.as_mut_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[inline(always)]
    pub unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
    where
        I: SliceIndex<[T]>,
    {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        unsafe { index.get_unchecked_mut(self) }
    }

    /// Returns `None` if the pointer is null, or else returns a shared slice to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// that the value has to be initialized.
    ///
    /// For the mutable counterpart see [`as_uninit_slice_mut`].
    ///
    /// [`as_ref`]: pointer#method.as_ref-1
    /// [`as_uninit_slice_mut`]: #method.as_uninit_slice_mut
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocated object]!
    ///       Slices can never span across multiple allocated objects.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null-1
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
        }
    }

    /// Returns `None` if the pointer is null, or else returns a unique slice to
    /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
    /// that the value has to be initialized.
    ///
    /// For the shared counterpart see [`as_uninit_slice`].
    ///
    /// [`as_mut`]: #method.as_mut
    /// [`as_uninit_slice`]: #method.as_uninit_slice-1
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be [valid] for reads and writes for `ptr.len() * size_of::<T>()`
    ///   many bytes, and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocated object]!
    ///       Slices can never span across multiple allocated objects.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get accessed (read or written) through any other pointer.
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts_mut`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null-1
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice_mut<'a>(self) -> Option<&'a mut [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
            Some(unsafe { slice::from_raw_parts_mut(self as *mut MaybeUninit<T>, self.len()) })
        }
    }
}
2056
impl<T, const N: usize> *mut [T; N] {
    /// Returns a raw pointer to the array's buffer.
    ///
    /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(array_ptr_get)]
    /// use std::ptr;
    ///
    /// let arr: *mut [i8; 3] = ptr::null_mut();
    /// assert_eq!(arr.as_mut_ptr(), ptr::null_mut());
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_mut_ptr(self) -> *mut T {
        // An array pointer and a pointer to its first element have the same
        // address, so a plain cast suffices.
        self as *mut T
    }

    /// Returns a raw pointer to a mutable slice containing the entire array.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(array_ptr_get)]
    ///
    /// let mut arr = [1, 2, 5];
    /// let ptr: *mut [i32; 3] = &mut arr;
    /// unsafe {
    ///     (&mut *ptr.as_mut_slice())[..2].copy_from_slice(&[3, 4]);
    /// }
    /// assert_eq!(arr, [3, 4, 5]);
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_mut_slice(self) -> *mut [T] {
        // Implicit unsizing coercion from `*mut [T; N]` to `*mut [T]`;
        // the constant length `N` becomes the slice pointer's metadata.
        self
    }
}
2097
/// Pointer equality is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialEq for *mut T {
    #[inline(always)]
    // The `allow` silences the `ambiguous_wide_pointer_comparisons` lint,
    // which fires on comparisons of raw wide pointers.
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &*mut T) -> bool {
        *self == *other
    }
}
2107
/// Pointer equality is an equivalence relation.
// Marker impl: promotes the `PartialEq` above to a full equivalence relation.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Eq for *mut T {}
2111
2112/// Pointer comparison is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
2113#[stable(feature = "rust1", since = "1.0.0")]
2114impl<T: ?Sized> Ord for *mut T {
2115    #[inline]
2116    #[allow(ambiguous_wide_pointer_comparisons)]
2117    fn cmp(&self, other: &*mut T) -> Ordering {
2118        if self < other {
2119            Less
2120        } else if self == other {
2121            Equal
2122        } else {
2123            Greater
2124        }
2125    }
2126}
2127
/// Pointer comparison is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialOrd for *mut T {
    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
        // Pointer ordering is total, so this is always `Some`.
        Some(self.cmp(other))
    }

    // The individual comparison operators are overridden to use the primitive
    // pointer comparisons directly instead of going through `partial_cmp`.
    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn lt(&self, other: &*mut T) -> bool {
        *self < *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn le(&self, other: &*mut T) -> bool {
        *self <= *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn gt(&self, other: &*mut T) -> bool {
        *self > *other
    }

    #[inline(always)]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn ge(&self, other: &*mut T) -> bool {
        *self >= *other
    }
}
2161
#[stable(feature = "raw_ptr_default", since = "1.88.0")]
impl<T: ?Sized + Thin> Default for *mut T {
    /// Returns the default value of [`null_mut()`][crate::ptr::null_mut].
    fn default() -> Self {
        // The `T: Thin` bound means no metadata is needed to construct the
        // null pointer — presumably mirroring the bound on `ptr::null_mut`.
        crate::ptr::null_mut()
    }
}