// library/alloc/src/alloc.rs
1//! Memory allocation APIs
2
3#![stable(feature = "alloc_module", since = "1.28.0")]
4
5#[stable(feature = "alloc_module", since = "1.28.0")]
6#[doc(inline)]
7pub use core::alloc::*;
8use core::ptr::{self, Alignment, NonNull};
9use core::{cmp, hint};
10
unsafe extern "Rust" {
    // These are the magic symbols to call the global allocator. rustc generates
    // them to call the global allocator if there is a `#[global_allocator]` attribute
    // (the code expanding that attribute macro generates those functions), or to call
    // the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
    // otherwise.
    #[rustc_allocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    #[rustc_allocator_zeroed_variant = "__rust_alloc_zeroed"]
    fn __rust_alloc(size: usize, align: Alignment) -> *mut u8;
    #[rustc_deallocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_dealloc(ptr: NonNull<u8>, size: usize, align: Alignment);
    #[rustc_reallocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_realloc(
        ptr: NonNull<u8>,
        old_size: usize,
        align: Alignment,
        new_size: usize,
    ) -> *mut u8;
    #[rustc_allocator_zeroed]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_alloc_zeroed(size: usize, align: Alignment) -> *mut u8;

    // Marker symbol referenced from `alloc`/`alloc_zeroed` below so that stable
    // code cannot link without the allocator shim while the no-shim mechanism
    // is still unstable (see those call sites).
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_no_alloc_shim_is_unstable_v2();
}
44
/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](self#functions).
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
// the compiler needs to know when a Box uses the global allocator vs a custom one
#[lang = "global_alloc_ty"]
pub struct Global;
58
/// Allocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `allocate` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     *(ptr as *mut u16) = 42;
///     assert_eq!(*(ptr as *mut u16), 42);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        __rust_no_alloc_shim_is_unstable_v2();

        // A null return indicates allocation failure; callers are expected to
        // check for it (e.g. via `handle_alloc_error`).
        __rust_alloc(layout.size(), layout.alignment())
    }
}
103
/// Deallocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `deallocate` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::dealloc`].
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
    // SAFETY: per `GlobalAlloc::dealloc`, the caller passes a pointer to a live
    // allocation, which is therefore non-null; other conditions are forwarded.
    unsafe { dealloc_nonnull(NonNull::new_unchecked(ptr), layout) }
}
122
/// Same as [`dealloc`] but when you already have a non-null pointer
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn dealloc_nonnull(ptr: NonNull<u8>, layout: Layout) {
    // SAFETY: the caller upholds `GlobalAlloc::dealloc`'s contract.
    unsafe { __rust_dealloc(ptr, layout.size(), layout.alignment()) }
}
129
/// Reallocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::realloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `grow` and `shrink` methods
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::realloc`].
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
    // SAFETY: per `GlobalAlloc::realloc`, the caller passes a pointer to a live
    // allocation, which is therefore non-null; other conditions are forwarded.
    unsafe { realloc_nonnull(NonNull::new_unchecked(ptr), layout, new_size) }
}
149
/// Same as [`realloc`] but when you already have a non-null pointer
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn realloc_nonnull(ptr: NonNull<u8>, layout: Layout, new_size: usize) -> *mut u8 {
    // SAFETY: the caller upholds `GlobalAlloc::realloc`'s contract.
    unsafe { __rust_realloc(ptr, layout.size(), layout.alignment(), new_size) }
}
156
/// Allocates zero-initialized memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `allocate_zeroed` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc_zeroed`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc_zeroed, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc_zeroed(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     assert_eq!(*(ptr as *mut u16), 0);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        __rust_no_alloc_shim_is_unstable_v2();

        // A null return indicates allocation failure; callers are expected to
        // check for it (e.g. via `handle_alloc_error`).
        __rust_alloc_zeroed(layout.size(), layout.alignment())
    }
}
200
impl Global {
    // Runtime counterpart of `alloc_impl_const`, selected via `const_eval_select`.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn alloc_impl_runtime(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            // Zero-sized requests never reach the allocator: a well-aligned
            // dangling pointer is returned instead.
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling_ptr(), 0)),
            // SAFETY: `layout` is non-zero in size.
            size => unsafe {
                let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    // Runtime counterpart of `deallocate_impl_const`, selected via `const_eval_select`.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn deallocate_impl_runtime(ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized "allocations" were never passed to the allocator (see
        // `alloc_impl_runtime`), so there is nothing to free for them.
        if layout.size() != 0 {
            // SAFETY:
            // * We have checked that `layout` is non-zero in size.
            // * The caller is obligated to provide a layout that "fits", and in this case,
            //   "fit" always means a layout that is equal to the original, because our
            //   `allocate()`, `grow()`, and `shrink()` implementations never returns a larger
            //   allocation than requested.
            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
            //   safety documentation.
            unsafe { dealloc_nonnull(ptr, layout) }
        }
    }

    // SAFETY: Same as `Allocator::grow`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn grow_impl_runtime(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // The old allocation is zero-sized (a dangling pointer), so this is
            // really a fresh allocation; nothing needs to be copied or freed.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero because `old_size` is non-zero in this arm
            // and `new_size` is greater than or equal to `old_size`, as required by the
            // safety conditions. Other conditions must be upheld by the caller.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size >= old_layout.size());

                let raw_ptr = realloc_nonnull(ptr, old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    // `realloc` does not zero the newly grown tail; do it here.
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // The alignment changed, so `realloc` cannot be used: allocate fresh
            // memory at the new alignment, copy, then free the old block.
            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }

    // SAFETY: Same as `Allocator::shrink`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn shrink_impl_runtime(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        _zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // Shrinking to zero size: free the old block and hand back a
            // well-aligned dangling pointer.
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling_ptr(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size <= old_layout.size());

                let raw_ptr = realloc_nonnull(ptr, old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // The alignment changed, so `realloc` cannot be used: allocate fresh
            // memory at the new alignment, copy, then free the old block.
            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }

    // SAFETY: Same as `Allocator::allocate`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        // Dispatch to the const-eval or runtime implementation depending on context.
        core::intrinsics::const_eval_select(
            (layout, zeroed),
            Global::alloc_impl_const,
            Global::alloc_impl_runtime,
        )
    }

    // SAFETY: Same as `Allocator::deallocate`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const unsafe fn deallocate_impl(&self, ptr: NonNull<u8>, layout: Layout) {
        // Dispatch to the const-eval or runtime implementation depending on context.
        core::intrinsics::const_eval_select(
            (ptr, layout),
            Global::deallocate_impl_const,
            Global::deallocate_impl_runtime,
        )
    }

    // SAFETY: Same as `Allocator::grow`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // In const context grow and shrink share one allocate-copy-free implementation.
        core::intrinsics::const_eval_select(
            (self, ptr, old_layout, new_layout, zeroed),
            Global::grow_shrink_impl_const,
            Global::grow_impl_runtime,
        )
    }

    // SAFETY: Same as `Allocator::shrink`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const unsafe fn shrink_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // `zeroed` is always `false` for shrinking: no new bytes are exposed.
        core::intrinsics::const_eval_select(
            (self, ptr, old_layout, new_layout, false),
            Global::grow_shrink_impl_const,
            Global::shrink_impl_runtime,
        )
    }

    // Compile-time counterpart of `alloc_impl_runtime`, backed by `const_allocate`.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const fn alloc_impl_const(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            // Zero-sized requests get a well-aligned dangling pointer, mirroring
            // the runtime implementation.
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling_ptr(), 0)),
            // SAFETY: `layout` is non-zero in size.
            size => unsafe {
                let raw_ptr = core::intrinsics::const_allocate(layout.size(), layout.align());
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    // SAFETY: the pointer returned by `const_allocate` is valid to write to.
                    ptr.write_bytes(0, size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    // Compile-time counterpart of `deallocate_impl_runtime`, backed by `const_deallocate`.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const fn deallocate_impl_const(ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized "allocations" are dangling pointers; nothing to free.
        if layout.size() != 0 {
            // SAFETY: We checked for nonzero size; other preconditions must be upheld by caller.
            unsafe {
                core::intrinsics::const_deallocate(ptr.as_ptr(), layout.size(), layout.align());
            }
        }
    }

    // Compile-time implementation of both grow and shrink: always reallocates
    // and copies `min(old, new)` bytes, since `const_allocate` has no realloc.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const fn grow_shrink_impl_const(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        let new_ptr = self.alloc_impl(new_layout, zeroed)?;
        // SAFETY: both pointers are valid and this operation is in bounds.
        unsafe {
            ptr::copy_nonoverlapping(
                ptr.as_ptr(),
                new_ptr.as_mut_ptr(),
                cmp::min(old_layout.size(), new_layout.size()),
            );
        }
        unsafe {
            self.deallocate_impl(ptr, old_layout);
        }
        Ok(new_ptr)
    }
}
441
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
// SAFETY: `Global` is a ZST forwarding to the one global allocator, so memory
// blocks it returns stay valid until deallocated, and any clone or move of
// `Global` refers to that same allocator.
unsafe impl const Allocator for Global {
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.deallocate_impl(ptr, layout) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.shrink_impl(ptr, old_layout, new_layout) }
    }
}
500
501// # Allocation error handler
502
#[cfg(not(no_global_oom_handling))]
unsafe extern "Rust" {
    // This is the magic symbol to call the global alloc error handler. rustc generates
    // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
    // default implementations below (`__rdl_alloc_error_handler`) otherwise.
    // It diverges: allocation-error handling never returns to the caller.
    #[rustc_std_internal_symbol]
    fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}
511
/// Signals a memory allocation error.
///
/// Callers of memory allocation APIs wishing to cease execution
/// in response to an allocation error are encouraged to call this function,
/// rather than directly invoking [`panic!`] or similar.
///
/// This function is guaranteed to diverge (not return normally with a value), but depending on
/// global configuration, it may either panic (resulting in unwinding or aborting as per
/// configuration for all panics), or abort the process (with no unwinding).
///
/// The default behavior is:
///
/// * If the binary links against `std` (typically the case), then
///   print a message to standard error and abort the process.
///   This behavior can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
///   Future versions of Rust may panic by default instead.
///
/// * If the binary does not link against `std` (all of its crates are marked
///   [`#![no_std]`][no_std]), then call [`panic!`] with a message.
///   [The panic handler] applies as to any panic.
///
/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
/// [The panic handler]: https://doc.rust-lang.org/reference/runtime.html#the-panic_handler-attribute
/// [no_std]: https://doc.rust-lang.org/reference/names/preludes.html#the-no_std-attribute
#[stable(feature = "global_alloc", since = "1.28.0")]
#[rustc_const_unstable(feature = "const_alloc_error", issue = "92523")]
#[cfg(not(no_global_oom_handling))]
#[cold]
#[optimize(size)]
pub const fn handle_alloc_error(layout: Layout) -> ! {
    // Compile-time path: a const panic is the only way to abort const evaluation.
    const fn ct_error(_: Layout) -> ! {
        panic!("allocation failed");
    }

    // Runtime path: forward to the registered (or default) alloc error handler.
    #[inline]
    fn rt_error(layout: Layout) -> ! {
        unsafe {
            __rust_alloc_error_handler(layout.size(), layout.align());
        }
    }

    #[cfg(not(panic = "immediate-abort"))]
    {
        core::intrinsics::const_eval_select((layout,), ct_error, rt_error)
    }

    // Under `panic=immediate-abort` the panic path aborts directly, so the
    // simple const error path is used unconditionally.
    #[cfg(panic = "immediate-abort")]
    ct_error(layout)
}
562
#[cfg(not(no_global_oom_handling))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
pub mod __alloc_error_handler {
    // called via generated `__rust_alloc_error_handler` if there is no
    // `#[alloc_error_handler]`.
    #[rustc_std_internal_symbol]
    pub unsafe fn __rdl_alloc_error_handler(size: usize, _align: usize) -> ! {
        // Panic without unwinding: allocation-failure handling must not unwind
        // through arbitrary frames.
        core::panicking::panic_nounwind_fmt(
            format_args!("memory allocation of {size} bytes failed"),
            /* force_no_backtrace */ false,
        )
    }
}