std/alloc.rs
//! Memory allocation APIs.
//!
//! In a given program, the standard library has one “global” memory allocator
//! that is used for example by `Box<T>` and `Vec<T>`.
//!
//! Currently the default global allocator is unspecified. Libraries, however,
//! such as `cdylib`s and `staticlib`s, are guaranteed to use [`System`] by
//! default.
//!
//! # The `#[global_allocator]` attribute
//!
//! This attribute allows configuring the choice of global allocator.
//! You can use this to implement a completely custom global allocator
//! to route all default allocation requests to a custom object.
//!
//! ```rust
//! use std::alloc::{GlobalAlloc, System, Layout};
//!
//! struct MyAllocator;
//!
//! unsafe impl GlobalAlloc for MyAllocator {
//!     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
//!         System.alloc(layout)
//!     }
//!
//!     unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
//!         System.dealloc(ptr, layout)
//!     }
//! }
//!
//! #[global_allocator]
//! static GLOBAL: MyAllocator = MyAllocator;
//!
//! fn main() {
//!     // This `Vec` will allocate memory through `GLOBAL` above
//!     let mut v = Vec::new();
//!     v.push(1);
//! }
//! ```
//!
//! The attribute is used on a `static` item whose type implements the
//! [`GlobalAlloc`] trait. This type can be provided by an external library:
//!
//! ```rust,ignore (demonstrates crates.io usage)
//! use jemallocator::Jemalloc;
//!
//! #[global_allocator]
//! static GLOBAL: Jemalloc = Jemalloc;
//!
//! fn main() {}
//! ```
//!
//! The `#[global_allocator]` attribute can only be used once in a crate
//! or its recursive dependencies.

#![deny(unsafe_op_in_unsafe_fn)]
#![stable(feature = "alloc_module", since = "1.28.0")]

use core::ptr::NonNull;
use core::sync::atomic::{AtomicPtr, Ordering};
use core::{hint, mem, ptr};

#[stable(feature = "alloc_module", since = "1.28.0")]
#[doc(inline)]
pub use alloc_crate::alloc::*;

/// The default memory allocator provided by the operating system.
///
/// This is based on `malloc` on Unix platforms and `HeapAlloc` on Windows,
/// plus related functions. However, it is not valid to mix use of the backing
/// system allocator with `System`, as this implementation may include extra
/// work, such as to serve alignment requests greater than the alignment
/// provided directly by the backing system allocator.
///
/// This type implements the [`GlobalAlloc`] trait. Currently the default
/// global allocator is unspecified. Libraries, however, such as `cdylib`s and
/// `staticlib`s, are guaranteed to use [`System`] by default and as such
/// work as if they had this definition:
///
/// ```rust
/// use std::alloc::System;
///
/// #[global_allocator]
/// static A: System = System;
///
/// fn main() {
///     let a = Box::new(4); // Allocates from the system allocator.
///     println!("{a}");
/// }
/// ```
///
/// You can also define your own wrapper around `System` if you'd like, such as
/// keeping track of the total number of bytes allocated:
///
/// ```rust
/// use std::alloc::{System, GlobalAlloc, Layout};
/// use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};
///
/// struct Counter;
///
/// static ALLOCATED: AtomicUsize = AtomicUsize::new(0);
///
/// unsafe impl GlobalAlloc for Counter {
///     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
///         let ret = System.alloc(layout);
///         if !ret.is_null() {
///             ALLOCATED.fetch_add(layout.size(), Relaxed);
///         }
///         ret
///     }
///
///     unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
///         System.dealloc(ptr, layout);
///         ALLOCATED.fetch_sub(layout.size(), Relaxed);
///     }
/// }
///
/// #[global_allocator]
/// static A: Counter = Counter;
///
/// fn main() {
///     println!("allocated bytes before main: {}", ALLOCATED.load(Relaxed));
/// }
/// ```
///
/// It can also be used directly to allocate memory independently of whatever
/// global allocator has been selected for a Rust program. For example, if a Rust
/// program opts in to using jemalloc as the global allocator, `System` will
/// still allocate memory using `malloc` and `HeapAlloc`.
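///
/// For instance, here is a minimal sketch of allocating and freeing one block through
/// `System`'s [`GlobalAlloc`] implementation directly, independent of any registered
/// global allocator:
///
/// ```rust
/// use std::alloc::{GlobalAlloc, Layout, System};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     // The system allocator may return null on failure, so check before using the pointer.
///     let ptr = System.alloc(layout);
///     if !ptr.is_null() {
///         ptr.cast::<u16>().write(42);
///         System.dealloc(ptr, layout);
///     }
/// }
/// ```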
#[stable(feature = "alloc_system_type", since = "1.28.0")]
#[derive(Debug, Default, Copy, Clone)]
pub struct System;

impl System {
    #[inline]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed {
                    GlobalAlloc::alloc_zeroed(self, layout)
                } else {
                    GlobalAlloc::alloc(self, layout)
                };
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    // SAFETY: Same as `Allocator::grow`
    #[inline]
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero as `new_size` is greater than or equal to `old_size`
            // as required by safety conditions and the `old_size == 0` case was handled in the
            // previous match arm. Other conditions must be upheld by the caller
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size >= old_layout.size());

                let raw_ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                Allocator::deallocate(self, ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}

// The Allocator impl checks the layout size to be non-zero and forwards to the GlobalAlloc impl,
// which is in `std::sys::*::alloc`.
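//
// A minimal usage sketch of this (still unstable) `Allocator` impl, assuming a nightly
// toolchain with `#![feature(allocator_api)]` enabled: collections can be parameterized
// with `System` directly, bypassing whatever global allocator the final binary registers.
//
//     let mut v: Vec<u32, System> = Vec::new_in(System);
//     v.push(1);
//     assert_eq!(v.pop(), Some(1));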
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl Allocator for System {
    #[inline]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size,
            // other conditions must be upheld by the caller
            unsafe { GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) }
        }
    }

    #[inline]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                Allocator::deallocate(self, ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size <= old_layout.size());

                let raw_ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = Allocator::allocate(self, new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                Allocator::deallocate(self, ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}

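// The currently installed allocation error hook, stored as a type-erased pointer so that it
// fits in an `AtomicPtr`. A null pointer means "no custom hook"; `default_alloc_error_hook`
// is used in that case.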
static HOOK: AtomicPtr<()> = AtomicPtr::new(ptr::null_mut());

/// Registers a custom allocation error hook, replacing any that was previously registered.
///
/// The allocation error hook is invoked when an infallible memory allocation fails — that is,
/// as a consequence of calling [`handle_alloc_error`] — before the runtime aborts.
///
/// The allocation error hook is a global resource. [`take_alloc_error_hook`] may be used to
/// retrieve a previously registered hook and wrap or discard it.
///
/// # What the provided `hook` function should expect
///
/// The hook function is provided with a [`Layout`] struct which contains information
/// about the allocation that failed.
///
/// The hook function may choose to panic or abort; in the event that it returns normally, this
/// will cause an immediate abort.
///
/// Since [`take_alloc_error_hook`] is a safe function that allows retrieving the hook, the hook
/// function must be _sound_ to call even if no memory allocations were attempted.
///
/// # The default hook
///
/// The default hook, used if [`set_alloc_error_hook`] is never called, prints a message to
/// standard error (and then returns, causing the runtime to abort the process).
/// Compiler options may cause it to panic instead, and the default behavior may be changed
/// to panicking in future versions of Rust.
///
/// # Examples
///
/// ```
/// #![feature(alloc_error_hook)]
///
/// use std::alloc::{Layout, set_alloc_error_hook};
///
/// fn custom_alloc_error_hook(layout: Layout) {
///     panic!("memory allocation of {} bytes failed", layout.size());
/// }
///
/// set_alloc_error_hook(custom_alloc_error_hook);
/// ```
#[unstable(feature = "alloc_error_hook", issue = "51245")]
pub fn set_alloc_error_hook(hook: fn(Layout)) {
    HOOK.store(hook as *mut (), Ordering::Release);
}

/// Unregisters the current allocation error hook, returning it.
///
/// *See also the function [`set_alloc_error_hook`].*
///
/// If no custom hook is registered, the default hook will be returned.
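///
/// # Examples
///
/// A minimal sketch (nightly-only, since it relies on the unstable `alloc_error_hook`
/// feature) of installing a custom hook and then unregistering it again:
///
/// ```
/// #![feature(alloc_error_hook)]
///
/// use std::alloc::{Layout, set_alloc_error_hook, take_alloc_error_hook};
///
/// fn custom_alloc_error_hook(layout: Layout) {
///     panic!("memory allocation of {} bytes failed", layout.size());
/// }
///
/// set_alloc_error_hook(custom_alloc_error_hook);
///
/// // Unregister the custom hook; later allocation failures fall back to the default hook.
/// let _previous = take_alloc_error_hook();
/// ```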
#[unstable(feature = "alloc_error_hook", issue = "51245")]
pub fn take_alloc_error_hook() -> fn(Layout) {
    let hook = HOOK.swap(ptr::null_mut(), Ordering::Acquire);
    if hook.is_null() { default_alloc_error_hook } else { unsafe { mem::transmute(hook) } }
}

fn default_alloc_error_hook(layout: Layout) {
    unsafe extern "Rust" {
        // This symbol is emitted by rustc next to __rust_alloc_error_handler.
        // Its value depends on the -Zoom={panic,abort} compiler option.
        static __rust_alloc_error_handler_should_panic: u8;
    }

    if unsafe { __rust_alloc_error_handler_should_panic != 0 } {
        panic!("memory allocation of {} bytes failed", layout.size());
    } else {
        // This is the default path taken on OOM, and the only path taken on stable with std.
        // Crucially, it does *not* call any user-defined code, and therefore users do not have to
        // worry about allocation failure causing reentrancy issues. That makes it different from
        // the default `__rdl_oom` defined in alloc (i.e., the default alloc error handler that is
        // called when there is no `#[alloc_error_handler]`), which triggers a regular panic and
        // thus can invoke a user-defined panic hook, executing arbitrary user-defined code.
        rtprintpanic!("memory allocation of {} bytes failed\n", layout.size());
    }
}

#[cfg(not(test))]
#[doc(hidden)]
#[alloc_error_handler]
#[unstable(feature = "alloc_internals", issue = "none")]
pub fn rust_oom(layout: Layout) -> ! {
    let hook = HOOK.load(Ordering::Acquire);
    let hook: fn(Layout) =
        if hook.is_null() { default_alloc_error_hook } else { unsafe { mem::transmute(hook) } };
    hook(layout);
    crate::process::abort()
}

#[cfg(not(test))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
pub mod __default_lib_allocator {
    use super::{GlobalAlloc, Layout, System};
    // These magic symbol names are used as a fallback for implementing the
    // `__rust_alloc` etc symbols (see `src/liballoc/alloc.rs`) when there is
    // no `#[global_allocator]` attribute.

    // for symbol names src/librustc_ast/expand/allocator.rs
    // for signatures src/librustc_allocator/lib.rs

    // linkage directives are provided as part of the current compiler allocator
    // ABI

    #[rustc_std_internal_symbol]
    pub unsafe extern "C" fn __rdl_alloc(size: usize, align: usize) -> *mut u8 {
        // SAFETY: see the guarantees expected by `Layout::from_size_align` and
        // `GlobalAlloc::alloc`.
        unsafe {
            let layout = Layout::from_size_align_unchecked(size, align);
            System.alloc(layout)
        }
    }

    #[rustc_std_internal_symbol]
    pub unsafe extern "C" fn __rdl_dealloc(ptr: *mut u8, size: usize, align: usize) {
        // SAFETY: see the guarantees expected by `Layout::from_size_align` and
        // `GlobalAlloc::dealloc`.
        unsafe { System.dealloc(ptr, Layout::from_size_align_unchecked(size, align)) }
    }

    #[rustc_std_internal_symbol]
    pub unsafe extern "C" fn __rdl_realloc(
        ptr: *mut u8,
        old_size: usize,
        align: usize,
        new_size: usize,
    ) -> *mut u8 {
        // SAFETY: see the guarantees expected by `Layout::from_size_align` and
        // `GlobalAlloc::realloc`.
        unsafe {
            let old_layout = Layout::from_size_align_unchecked(old_size, align);
            System.realloc(ptr, old_layout, new_size)
        }
    }

    #[rustc_std_internal_symbol]
    pub unsafe extern "C" fn __rdl_alloc_zeroed(size: usize, align: usize) -> *mut u8 {
        // SAFETY: see the guarantees expected by `Layout::from_size_align` and
        // `GlobalAlloc::alloc_zeroed`.
        unsafe {
            let layout = Layout::from_size_align_unchecked(size, align);
            System.alloc_zeroed(layout)
        }
    }
}