alloc/
alloc.rs

1//! Memory allocation APIs
2
3#![stable(feature = "alloc_module", since = "1.28.0")]
4
5#[stable(feature = "alloc_module", since = "1.28.0")]
6#[doc(inline)]
7pub use core::alloc::*;
8use core::hint;
9use core::ptr::{self, NonNull};
10
unsafe extern "Rust" {
    // These are the magic symbols to call the global allocator. rustc generates
    // them to call `__rg_alloc` etc. if there is a `#[global_allocator]` attribute
    // (the code expanding that attribute macro generates those functions), or to call
    // the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
    // otherwise.
    // The rustc fork of LLVM 14 and earlier also special-cases these function names to be able to optimize them
    // like `malloc`, `realloc`, and `free`, respectively.
    #[rustc_allocator]
    #[rustc_nounwind]
    fn __rust_alloc(size: usize, align: usize) -> *mut u8;
    #[rustc_deallocator]
    #[rustc_nounwind]
    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
    #[rustc_reallocator]
    #[rustc_nounwind]
    fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
    #[rustc_allocator_zeroed]
    #[rustc_nounwind]
    fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;

    // Referenced (via a volatile read in `alloc`/`alloc_zeroed` below) so that the
    // allocator shim cannot be omitted by stable code while this mechanism is unstable.
    static __rust_no_alloc_shim_is_unstable: u8;
}
34
/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](self#functions).
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
// the compiler needs to know when a Box uses the global allocator vs a custom one
#[lang = "global_alloc_ty"]
pub struct Global;
48
/// Allocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `allocate` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     *(ptr as *mut u16) = 42;
///     assert_eq!(*(ptr as *mut u16), 42);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
    // SAFETY: the caller upholds `GlobalAlloc::alloc`'s contract (see `# Safety` above).
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized. The volatile read keeps
        // the reference to the symbol from being optimized away.
        core::ptr::read_volatile(&__rust_no_alloc_shim_is_unstable);

        __rust_alloc(layout.size(), layout.align())
    }
}
93
/// Deallocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `deallocate` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::dealloc`].
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
    // SAFETY: the caller upholds `GlobalAlloc::dealloc`'s contract (see `# Safety` above).
    unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
}
112
/// Reallocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::realloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `grow` and `shrink` methods
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::realloc`].
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
    // SAFETY: the caller upholds `GlobalAlloc::realloc`'s contract (see `# Safety` above).
    unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
}
132
/// Allocates zero-initialized memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `allocate_zeroed` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc_zeroed`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc_zeroed, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc_zeroed(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     assert_eq!(*(ptr as *mut u16), 0);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
    // SAFETY: the caller upholds `GlobalAlloc::alloc_zeroed`'s contract (see `# Safety` above).
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized. The volatile read keeps
        // the reference to the symbol from being optimized away.
        core::ptr::read_volatile(&__rust_no_alloc_shim_is_unstable);

        __rust_alloc_zeroed(layout.size(), layout.align())
    }
}
176
impl Global {
    /// Shared implementation of `allocate` and `allocate_zeroed`: allocates
    /// `layout` (zero-initialized when `zeroed` is true) and returns the
    /// allocation as a `NonNull<[u8]>` of exactly `layout.size()` bytes.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            // Zero-sized requests perform no real allocation; a well-aligned
            // dangling pointer is a valid zero-length allocation.
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
                // A null return from the global allocator is mapped to `AllocError`.
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    /// Shared implementation of `grow` and `grow_zeroed`. When `zeroed` is
    /// true, the bytes beyond the old allocation's length are zero-filled.
    ///
    /// # Safety
    ///
    /// Same as `Allocator::grow`.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // Growing a zero-sized allocation is just a fresh allocation:
            // there is nothing to copy and nothing to free.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero, because `new_size` is greater than or equal to
            // `old_size` (as required by the safety conditions) and `old_size` is non-zero in
            // this arm. Other conditions must be upheld by the caller.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    // `realloc` does not zero the newly extended tail, so do it here.
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // Alignment changed: `realloc` cannot be used, so allocate fresh,
            // copy, and free the old allocation.
            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
240
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl Allocator for Global {
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized allocations were never really allocated (see `alloc_impl`),
        // so there is nothing to free for them.
        if layout.size() != 0 {
            // SAFETY:
            // * We have checked that `layout` is non-zero in size.
            // * The caller is obligated to provide a layout that "fits", and in this case,
            //   "fit" always means a layout that is equal to the original, because our
            //   `allocate()`, `grow()`, and `shrink()` implementations never returns a larger
            //   allocation than requested.
            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
            //   safety documentation.
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // Shrinking to zero bytes frees the allocation and hands back a
            // well-aligned dangling pointer (a valid zero-length allocation).
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // Alignment changed: `realloc` cannot be used, so allocate fresh,
            // copy the surviving prefix, and free the old allocation.
            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
339
340/// The allocator for `Box`.
341#[cfg(not(no_global_oom_handling))]
342#[lang = "exchange_malloc"]
343#[inline]
344#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
345unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
346    let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
347    match Global.allocate(layout) {
348        Ok(ptr) => ptr.as_mut_ptr(),
349        Err(_) => handle_alloc_error(layout),
350    }
351}
352
// # Allocation error handler

#[cfg(not(no_global_oom_handling))]
unsafe extern "Rust" {
    // This is the magic symbol to call the global alloc error handler. rustc generates
    // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
    // default implementations below (`__rdl_oom`) otherwise.
    fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}
362
/// Signals a memory allocation error.
///
/// Callers of memory allocation APIs wishing to cease execution
/// in response to an allocation error are encouraged to call this function,
/// rather than directly invoking [`panic!`] or similar.
///
/// This function is guaranteed to diverge (not return normally with a value), but depending on
/// global configuration, it may either panic (resulting in unwinding or aborting as per
/// configuration for all panics), or abort the process (with no unwinding).
///
/// The default behavior is:
///
/// * If the binary links against `std` (typically the case), then
///   print a message to standard error and abort the process.
///   This behavior can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
///   Future versions of Rust may panic by default instead.
///
/// * If the binary does not link against `std` (all of its crates are marked
///   [`#![no_std]`][no_std]), then call [`panic!`] with a message.
///   [The panic handler] applies as to any panic.
///
/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
/// [The panic handler]: https://doc.rust-lang.org/reference/runtime.html#the-panic_handler-attribute
/// [no_std]: https://doc.rust-lang.org/reference/names/preludes.html#the-no_std-attribute
#[stable(feature = "global_alloc", since = "1.28.0")]
#[rustc_const_unstable(feature = "const_alloc_error", issue = "92523")]
#[cfg(not(no_global_oom_handling))]
#[cold]
#[optimize(size)]
pub const fn handle_alloc_error(layout: Layout) -> ! {
    // Compile-time path: diverge with a const-evaluable panic.
    const fn ct_error(_: Layout) -> ! {
        panic!("allocation failed");
    }

    // Runtime path: dispatch to the registered (or default) alloc error handler.
    #[inline]
    fn rt_error(layout: Layout) -> ! {
        // SAFETY: `__rust_alloc_error_handler` is provided by rustc (see the
        // extern block above) and diverges; `size`/`align` come from a `Layout`.
        unsafe {
            __rust_alloc_error_handler(layout.size(), layout.align());
        }
    }

    #[cfg(not(feature = "panic_immediate_abort"))]
    {
        // Select the const path during const evaluation, the runtime path otherwise.
        core::intrinsics::const_eval_select((layout,), ct_error, rt_error)
    }

    // With `panic_immediate_abort`, always take the direct panic path.
    #[cfg(feature = "panic_immediate_abort")]
    ct_error(layout)
}
413
#[cfg(not(no_global_oom_handling))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
pub mod __alloc_error_handler {
    // called via generated `__rust_alloc_error_handler` if there is no
    // `#[alloc_error_handler]`.
    #[rustc_std_internal_symbol]
    pub unsafe fn __rdl_oom(size: usize, _align: usize) -> ! {
        unsafe extern "Rust" {
            // This symbol is emitted by rustc next to __rust_alloc_error_handler.
            // Its value depends on the -Zoom={panic,abort} compiler option.
            static __rust_alloc_error_handler_should_panic: u8;
        }

        // Non-zero means `-Zoom=panic`: raise an ordinary (unwindable) panic.
        // Otherwise abort via a non-unwinding panic.
        if unsafe { __rust_alloc_error_handler_should_panic != 0 } {
            panic!("memory allocation of {size} bytes failed")
        } else {
            core::panicking::panic_nounwind_fmt(
                format_args!("memory allocation of {size} bytes failed"),
                /* force_no_backtrace */ false,
            )
        }
    }
}