std/alloc.rs
//! Memory allocation APIs.
//!
//! In a given program, the standard library has one “global” memory allocator
//! that is used for example by `Box<T>` and `Vec<T>`.
//!
//! Currently the default global allocator is unspecified. Libraries, however,
//! like `cdylib`s and `staticlib`s are guaranteed to use the [`System`] by
//! default.
//!
//! # The `#[global_allocator]` attribute
//!
//! This attribute allows configuring the choice of global allocator.
//! You can use this to implement a completely custom global allocator
//! to route all[^system-alloc] default allocation requests to a custom object.
//!
//! ```rust
//! use std::alloc::{GlobalAlloc, System, Layout};
//!
//! struct MyAllocator;
//!
//! unsafe impl GlobalAlloc for MyAllocator {
//!     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
//!         unsafe { System.alloc(layout) }
//!     }
//!
//!     unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
//!         unsafe { System.dealloc(ptr, layout) }
//!     }
//! }
//!
//! #[global_allocator]
//! static GLOBAL: MyAllocator = MyAllocator;
//!
//! fn main() {
//!     // This `Vec` will allocate memory through `GLOBAL` above
//!     let mut v = Vec::new();
//!     v.push(1);
//! }
//! ```
//!
//! The attribute is used on a `static` item whose type implements the
//! [`GlobalAlloc`] trait. This type can be provided by an external library:
//!
//! ```rust,ignore (demonstrates crates.io usage)
//! use jemallocator::Jemalloc;
//!
//! #[global_allocator]
//! static GLOBAL: Jemalloc = Jemalloc;
//!
//! fn main() {}
//! ```
//!
//! The `#[global_allocator]` can only be used once in a crate
//! or its recursive dependencies.
//!
//! [^system-alloc]: Note that the Rust standard library internals may still
//!     directly call [`System`] when necessary (for example for the runtime
//!     support typically required to implement a global allocator, see [re-entrance] on [`GlobalAlloc`]
//!     for more details).
//!
//! [re-entrance]: trait.GlobalAlloc.html#re-entrance
62
63#![deny(unsafe_op_in_unsafe_fn)]
64#![stable(feature = "alloc_module", since = "1.28.0")]
65
66use core::ptr::NonNull;
67use core::sync::atomic::{AtomicBool, AtomicPtr, Ordering};
68use core::{hint, mem, ptr};
69
70#[stable(feature = "alloc_module", since = "1.28.0")]
71#[doc(inline)]
72pub use alloc_crate::alloc::*;
73
/// The default memory allocator provided by the operating system.
///
/// This is based on `malloc` on Unix platforms and `HeapAlloc` on Windows,
/// plus related functions. However, it is not valid to mix use of the backing
/// system allocator with `System`, as this implementation may include extra
/// work, such as to serve alignment requests greater than the alignment
/// provided directly by the backing system allocator.
///
/// This type implements the [`GlobalAlloc`] trait. Currently the default
/// global allocator is unspecified. Libraries, however, like `cdylib`s and
/// `staticlib`s are guaranteed to use the [`System`] by default and as such
/// work as if they had this definition:
///
/// ```rust
/// use std::alloc::System;
///
/// #[global_allocator]
/// static A: System = System;
///
/// fn main() {
///     let a = Box::new(4); // Allocates from the system allocator.
///     println!("{a}");
/// }
/// ```
///
/// You can also define your own wrapper around `System` if you'd like, such as
/// keeping track of the number of all bytes allocated:
///
/// ```rust
/// use std::alloc::{System, GlobalAlloc, Layout};
/// use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};
///
/// struct Counter;
///
/// static ALLOCATED: AtomicUsize = AtomicUsize::new(0);
///
/// unsafe impl GlobalAlloc for Counter {
///     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
///         let ret = unsafe { System.alloc(layout) };
///         if !ret.is_null() {
///             ALLOCATED.fetch_add(layout.size(), Relaxed);
///         }
///         ret
///     }
///
///     unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
///         unsafe { System.dealloc(ptr, layout); }
///         ALLOCATED.fetch_sub(layout.size(), Relaxed);
///     }
/// }
///
/// #[global_allocator]
/// static A: Counter = Counter;
///
/// fn main() {
///     println!("allocated bytes before main: {}", ALLOCATED.load(Relaxed));
/// }
/// ```
///
/// It can also be used directly to allocate memory independently of whatever
/// global allocator has been selected for a Rust program. For example if a Rust
/// program opts in to using jemalloc as the global allocator, `System` will
/// still allocate memory using `malloc` and `HeapAlloc`.
#[stable(feature = "alloc_system_type", since = "1.28.0")]
#[derive(Debug, Default, Copy, Clone)]
pub struct System;
140
impl System {
    /// Shared implementation of `Allocator::allocate` and `allocate_zeroed`.
    ///
    /// Zero-sized requests are answered with a dangling (but suitably aligned)
    /// pointer rather than being forwarded, because the `GlobalAlloc`
    /// interface does not support zero-sized allocations.
    #[inline]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed {
                    GlobalAlloc::alloc_zeroed(self, layout)
                } else {
                    GlobalAlloc::alloc(self, layout)
                };
                // A null return from the system allocator is reported as `AllocError`.
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    // Shared implementation of `Allocator::grow` and `grow_zeroed`.
    //
    // SAFETY: Same as `Allocator::grow`
    #[inline]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // The old "allocation" was zero-sized (a dangling pointer), so
            // growing is just a fresh allocation.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero as `new_size` is greater than or equal to `old_size`
            // as required by safety conditions and the `old_size == 0` case was handled in the
            // previous match arm. Other conditions must be upheld by the caller
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size >= old_layout.size());

                let raw_ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    // `realloc` does not zero the newly grown tail; do it here.
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // Alignment changed, so `realloc` cannot be used: allocate fresh,
            // copy, then free the old block.
            //
            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                Allocator::deallocate(self, ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
207
// The Allocator impl checks the layout size to be non-zero and forwards to the GlobalAlloc impl,
// which is in `std::sys::*::alloc`.
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl Allocator for System {
    #[inline]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized "allocations" were never passed to the system allocator
        // (see `alloc_impl`), so there is nothing to free for them.
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size,
            // other conditions must be upheld by the caller
            unsafe { GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) }
        }
    }

    #[inline]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // Shrinking to zero: free the old block and hand back a dangling
            // (but aligned) zero-length slice pointer.
            //
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                Allocator::deallocate(self, ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size <= old_layout.size());

                let raw_ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // Alignment changed: allocate fresh, copy, free the old block.
            //
            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = Allocator::allocate(self, new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                Allocator::deallocate(self, ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
296
// The currently installed allocation error hook, type-erased for atomic
// storage: null means "no custom hook installed" (use
// `default_alloc_error_hook`); otherwise the value is a `fn(Layout)` that was
// cast to `*mut ()` by `set_alloc_error_hook`.
static HOOK: AtomicPtr<()> = AtomicPtr::new(ptr::null_mut());
298
299/// Registers a custom allocation error hook, replacing any that was previously registered.
300///
301/// The allocation error hook is invoked when an infallible memory allocation fails — that is,
302/// as a consequence of calling [`handle_alloc_error`] — before the runtime aborts.
303///
304/// The allocation error hook is a global resource. [`take_alloc_error_hook`] may be used to
305/// retrieve a previously registered hook and wrap or discard it.
306///
307/// # What the provided `hook` function should expect
308///
309/// The hook function is provided with a [`Layout`] struct which contains information
310/// about the allocation that failed.
311///
312/// The hook function may choose to panic or abort; in the event that it returns normally, this
313/// will cause an immediate abort.
314///
315/// Since [`take_alloc_error_hook`] is a safe function that allows retrieving the hook, the hook
316/// function must be _sound_ to call even if no memory allocations were attempted.
317///
318/// # The default hook
319///
320/// The default hook, used if [`set_alloc_error_hook`] is never called, prints a message to
321/// standard error (and then returns, causing the runtime to abort the process).
322/// Compiler options may cause it to panic instead, and the default behavior may be changed
323/// to panicking in future versions of Rust.
324///
325/// # Examples
326///
327/// ```
328/// #![feature(alloc_error_hook)]
329///
330/// use std::alloc::{Layout, set_alloc_error_hook};
331///
332/// fn custom_alloc_error_hook(layout: Layout) {
333/// panic!("memory allocation of {} bytes failed", layout.size());
334/// }
335///
336/// set_alloc_error_hook(custom_alloc_error_hook);
337/// ```
338#[unstable(feature = "alloc_error_hook", issue = "51245")]
339pub fn set_alloc_error_hook(hook: fn(Layout)) {
340 HOOK.store(hook as *mut (), Ordering::Release);
341}
342
343/// Unregisters the current allocation error hook, returning it.
344///
345/// *See also the function [`set_alloc_error_hook`].*
346///
347/// If no custom hook is registered, the default hook will be returned.
348#[unstable(feature = "alloc_error_hook", issue = "51245")]
349pub fn take_alloc_error_hook() -> fn(Layout) {
350 let hook = HOOK.swap(ptr::null_mut(), Ordering::Acquire);
351 if hook.is_null() { default_alloc_error_hook } else { unsafe { mem::transmute(hook) } }
352}
353
// Fallback hook used when no custom hook has been registered via
// `set_alloc_error_hook`: prints a message (and possibly a backtrace) to
// panic output, or panics outright depending on compiler options.
#[optimize(size)]
fn default_alloc_error_hook(layout: Layout) {
    // Under immediate-abort there is no formatting machinery; just return and
    // let the caller abort.
    if cfg!(panic = "immediate-abort") {
        return;
    }

    unsafe extern "Rust" {
        // This symbol is emitted by rustc next to __rust_alloc_error_handler.
        // Its value depends on the -Zoom={panic,abort} compiler option.
        #[rustc_std_internal_symbol]
        fn __rust_alloc_error_handler_should_panic_v2() -> u8;
    }

    if unsafe { __rust_alloc_error_handler_should_panic_v2() != 0 } {
        panic!("memory allocation of {} bytes failed", layout.size());
    }

    // This is the default path taken on OOM, and the only path taken on stable with std.
    // Crucially, it does *not* call any user-defined code, and therefore users do not have to
    // worry about allocation failure causing reentrancy issues. That makes it different from
    // the default `__rdl_alloc_error_handler` defined in alloc (i.e., the default alloc error
    // handler that is called when there is no `#[alloc_error_handler]`), which triggers a
    // regular panic and thus can invoke a user-defined panic hook, executing arbitrary
    // user-defined code.

    // One-shot flag: set permanently on the first allocation failure so that
    // subsequent (possibly concurrent) failures skip backtrace printing.
    static PREV_ALLOC_FAILURE: AtomicBool = AtomicBool::new(false);
    if PREV_ALLOC_FAILURE.swap(true, Ordering::Relaxed) {
        // Don't try to print a backtrace if a previous alloc error happened. This likely means
        // there is not enough memory to print a backtrace, although it could also mean that two
        // threads concurrently run out of memory.
        rtprintpanic!(
            "memory allocation of {} bytes failed\nskipping backtrace printing to avoid potential recursion\n",
            layout.size()
        );
        return;
    } else {
        rtprintpanic!("memory allocation of {} bytes failed\n", layout.size());
    }

    // No panic-output destination on this platform: nothing more to print.
    let Some(mut out) = crate::sys::stdio::panic_output() else {
        return;
    };

    // Use a lock to prevent mixed output in multithreading context.
    // Some platforms also require it when printing a backtrace, like `SymFromAddr` on Windows.
    // Make sure to not take this lock until after checking PREV_ALLOC_FAILURE to avoid deadlocks
    // when there is too little memory to print a backtrace.
    let mut lock = crate::sys::backtrace::lock();

    match crate::panic::get_backtrace_style() {
        Some(crate::panic::BacktraceStyle::Short) => {
            drop(lock.print(&mut out, crate::backtrace_rs::PrintFmt::Short))
        }
        Some(crate::panic::BacktraceStyle::Full) => {
            drop(lock.print(&mut out, crate::backtrace_rs::PrintFmt::Full))
        }
        Some(crate::panic::BacktraceStyle::Off) => {
            use crate::io::Write;
            let _ = writeln!(
                out,
                "note: run with `RUST_BACKTRACE=1` environment variable to display a \
                 backtrace"
            );
            if cfg!(miri) {
                let _ = writeln!(
                    out,
                    "note: in Miri, you may have to set `MIRIFLAGS=-Zmiri-env-forward=RUST_BACKTRACE` \
                     for the environment variable to have an effect"
                );
            }
        }
        // If backtraces aren't supported or are forced-off, do nothing.
        None => {}
    }
}
429
430#[cfg(not(test))]
431#[doc(hidden)]
432#[alloc_error_handler]
433#[unstable(feature = "alloc_internals", issue = "none")]
434pub fn rust_oom(layout: Layout) -> ! {
435 crate::sys::backtrace::__rust_end_short_backtrace(|| {
436 let hook = HOOK.load(Ordering::Acquire);
437 let hook: fn(Layout) =
438 if hook.is_null() { default_alloc_error_hook } else { unsafe { mem::transmute(hook) } };
439 hook(layout);
440 crate::process::abort()
441 })
442}
443
444#[cfg(not(test))]
445#[doc(hidden)]
446#[allow(unused_attributes)]
447#[unstable(feature = "alloc_internals", issue = "none")]
448pub mod __default_lib_allocator {
449 use super::{GlobalAlloc, Layout, System};
450 // These magic symbol names are used as a fallback for implementing the
451 // `__rust_alloc` etc symbols (see `src/liballoc/alloc.rs`) when there is
452 // no `#[global_allocator]` attribute.
453
454 // for symbol names src/librustc_ast/expand/allocator.rs
455 // for signatures src/librustc_allocator/lib.rs
456
457 // linkage directives are provided as part of the current compiler allocator
458 // ABI
459
460 #[rustc_std_internal_symbol]
461 pub unsafe extern "C" fn __rdl_alloc(size: usize, align: usize) -> *mut u8 {
462 // SAFETY: see the guarantees expected by `Layout::from_size_align` and
463 // `GlobalAlloc::alloc`.
464 unsafe {
465 let layout = Layout::from_size_align_unchecked(size, align);
466 System.alloc(layout)
467 }
468 }
469
470 #[rustc_std_internal_symbol]
471 pub unsafe extern "C" fn __rdl_dealloc(ptr: *mut u8, size: usize, align: usize) {
472 // SAFETY: see the guarantees expected by `Layout::from_size_align` and
473 // `GlobalAlloc::dealloc`.
474 unsafe { System.dealloc(ptr, Layout::from_size_align_unchecked(size, align)) }
475 }
476
477 #[rustc_std_internal_symbol]
478 pub unsafe extern "C" fn __rdl_realloc(
479 ptr: *mut u8,
480 old_size: usize,
481 align: usize,
482 new_size: usize,
483 ) -> *mut u8 {
484 // SAFETY: see the guarantees expected by `Layout::from_size_align` and
485 // `GlobalAlloc::realloc`.
486 unsafe {
487 let old_layout = Layout::from_size_align_unchecked(old_size, align);
488 System.realloc(ptr, old_layout, new_size)
489 }
490 }
491
492 #[rustc_std_internal_symbol]
493 pub unsafe extern "C" fn __rdl_alloc_zeroed(size: usize, align: usize) -> *mut u8 {
494 // SAFETY: see the guarantees expected by `Layout::from_size_align` and
495 // `GlobalAlloc::alloc_zeroed`.
496 unsafe {
497 let layout = Layout::from_size_align_unchecked(size, align);
498 System.alloc_zeroed(layout)
499 }
500 }
501}