alloc/
alloc.rs

1//! Memory allocation APIs
2
3#![stable(feature = "alloc_module", since = "1.28.0")]
4
5#[stable(feature = "alloc_module", since = "1.28.0")]
6#[doc(inline)]
7pub use core::alloc::*;
8use core::hint;
9use core::ptr::{self, NonNull};
10
unsafe extern "Rust" {
    // These are the magic symbols to call the global allocator. rustc generates
    // them to call the global allocator if there is a `#[global_allocator]` attribute
    // (the code expanding that attribute macro generates those functions), or to call
    // the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
    // otherwise.
    #[rustc_allocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_alloc(size: usize, align: usize) -> *mut u8;
    #[rustc_deallocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
    #[rustc_reallocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
    #[rustc_allocator_zeroed]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;

    // Marker symbol called by `alloc`/`alloc_zeroed` below so that stable code
    // cannot accidentally omit the allocator shim while that remains unstable
    // (see the comments at the call sites).
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_no_alloc_shim_is_unstable_v2();
}
38
/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](self#functions).
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
// the compiler needs to know when a Box uses the global allocator vs a custom one
#[lang = "global_alloc_ty"]
// A zero-sized unit struct: it carries no state of its own and merely
// forwards to the registered global allocator.
pub struct Global;
52
/// Allocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `allocate` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     *(ptr as *mut u16) = 42;
///     assert_eq!(*(ptr as *mut u16), 42);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        __rust_no_alloc_shim_is_unstable_v2();

        // SAFETY: the caller must uphold the safety contract of
        // `GlobalAlloc::alloc` (see the `# Safety` section above).
        __rust_alloc(layout.size(), layout.align())
    }
}
97
/// Deallocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `deallocate` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::dealloc`].
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
    // SAFETY: the caller must uphold the safety contract of
    // `GlobalAlloc::dealloc` (see the `# Safety` section above).
    unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
}
116
/// Reallocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::realloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `grow` and `shrink` methods
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::realloc`].
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
    // SAFETY: the caller must uphold the safety contract of
    // `GlobalAlloc::realloc` (see the `# Safety` section above).
    unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
}
136
/// Allocates zero-initialized memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `allocate_zeroed` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc_zeroed`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc_zeroed, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc_zeroed(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     assert_eq!(*(ptr as *mut u16), 0);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        __rust_no_alloc_shim_is_unstable_v2();

        // SAFETY: the caller must uphold the safety contract of
        // `GlobalAlloc::alloc_zeroed` (see the `# Safety` section above).
        __rust_alloc_zeroed(layout.size(), layout.align())
    }
}
180
impl Global {
    /// Shared implementation of `allocate` and `allocate_zeroed`.
    ///
    /// Zero-sized requests are served with a dangling, well-aligned pointer
    /// and never reach the global allocator.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
                // A null return from the allocator is reported as `AllocError`.
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    /// Shared implementation of `grow` and `grow_zeroed`.
    ///
    /// # Safety
    ///
    /// Same as `Allocator::grow`.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // The old allocation was zero-sized (a dangling pointer), so this
            // is really a fresh allocation.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero as it is greater than or equal to `old_size`,
            // which is non-zero in this arm. Other conditions must be upheld by the caller.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                // `realloc` does not zero the newly grown tail, so do it here
                // when a zeroed grow was requested.
                if zeroed {
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // The alignment changed, so `realloc` cannot be used: allocate a
            // new block, copy the old contents over, and free the old block.
            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
244
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl Allocator for Global {
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized allocations were never handed to the global allocator
        // (see `alloc_impl`), so there is nothing to free for them.
        if layout.size() != 0 {
            // SAFETY:
            // * We have checked that `layout` is non-zero in size.
            // * The caller is obligated to provide a layout that "fits", and in this case,
            //   "fit" always means a layout that is equal to the original, because our
            //   `allocate()`, `grow()`, and `shrink()` implementations never returns a larger
            //   allocation than requested.
            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
            //   safety documentation.
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // Shrinking to zero size: free the old block and return a
            // dangling, well-aligned pointer, mirroring `alloc_impl`.
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // The alignment changed, so `realloc` cannot be used: allocate a
            // new block, copy the retained prefix over, and free the old block.
            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
343
/// The allocator for `Box`.
///
/// On allocation failure this never returns: it diverges via
/// [`handle_alloc_error`].
#[cfg(not(no_global_oom_handling))]
#[lang = "exchange_malloc"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
    // SAFETY: the caller (compiler-generated code, via the lang item) must
    // pass a `size`/`align` pair that forms a valid `Layout`.
    let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
    match Global.allocate(layout) {
        Ok(ptr) => ptr.as_mut_ptr(),
        Err(_) => handle_alloc_error(layout),
    }
}
356
// # Allocation error handler

#[cfg(not(no_global_oom_handling))]
unsafe extern "Rust" {
    // This is the magic symbol to call the global alloc error handler. rustc generates
    // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
    // default implementations below (`__rdl_oom`) otherwise.
    // Note the `-> !` return type: the handler diverges and never returns.
    #[rustc_std_internal_symbol]
    fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}
367
/// Signals a memory allocation error.
///
/// Callers of memory allocation APIs wishing to cease execution
/// in response to an allocation error are encouraged to call this function,
/// rather than directly invoking [`panic!`] or similar.
///
/// This function is guaranteed to diverge (not return normally with a value), but depending on
/// global configuration, it may either panic (resulting in unwinding or aborting as per
/// configuration for all panics), or abort the process (with no unwinding).
///
/// The default behavior is:
///
///  * If the binary links against `std` (typically the case), then
///   print a message to standard error and abort the process.
///   This behavior can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
///   Future versions of Rust may panic by default instead.
///
/// * If the binary does not link against `std` (all of its crates are marked
///   [`#![no_std]`][no_std]), then call [`panic!`] with a message.
///   [The panic handler] applies as to any panic.
///
/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
/// [The panic handler]: https://doc.rust-lang.org/reference/runtime.html#the-panic_handler-attribute
/// [no_std]: https://doc.rust-lang.org/reference/names/preludes.html#the-no_std-attribute
#[stable(feature = "global_alloc", since = "1.28.0")]
#[rustc_const_unstable(feature = "const_alloc_error", issue = "92523")]
#[cfg(not(no_global_oom_handling))]
#[cold]
#[optimize(size)]
pub const fn handle_alloc_error(layout: Layout) -> ! {
    // Compile-time path: there is no runtime error handler during const
    // evaluation, so simply panic.
    const fn ct_error(_: Layout) -> ! {
        panic!("allocation failed");
    }

    // Runtime path: forward to the registered (or default) allocation error
    // handler, which diverges.
    #[inline]
    fn rt_error(layout: Layout) -> ! {
        unsafe {
            __rust_alloc_error_handler(layout.size(), layout.align());
        }
    }

    #[cfg(not(feature = "panic_immediate_abort"))]
    {
        // Dispatch to `ct_error` in const contexts and `rt_error` at runtime.
        core::intrinsics::const_eval_select((layout,), ct_error, rt_error)
    }

    // With `panic_immediate_abort`, skip the handler machinery and panic
    // directly in both contexts.
    #[cfg(feature = "panic_immediate_abort")]
    ct_error(layout)
}
418
#[cfg(not(no_global_oom_handling))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
pub mod __alloc_error_handler {
    // called via generated `__rust_alloc_error_handler` if there is no
    // `#[alloc_error_handler]`.
    #[rustc_std_internal_symbol]
    pub unsafe fn __rdl_oom(size: usize, _align: usize) -> ! {
        unsafe extern "Rust" {
            // This symbol is emitted by rustc next to __rust_alloc_error_handler.
            // Its value depends on the -Zoom={panic,abort} compiler option.
            #[rustc_std_internal_symbol]
            fn __rust_alloc_error_handler_should_panic_v2() -> u8;
        }

        if unsafe { __rust_alloc_error_handler_should_panic_v2() != 0 } {
            // -Zoom=panic: raise an ordinary panic (may unwind).
            panic!("memory allocation of {size} bytes failed")
        } else {
            // -Zoom=abort: panic without unwinding.
            core::panicking::panic_nounwind_fmt(
                format_args!("memory allocation of {size} bytes failed"),
                /* force_no_backtrace */ false,
            )
        }
    }
}
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy