// alloc/alloc.rs
1//! Memory allocation APIs
2
3#![stable(feature = "alloc_module", since = "1.28.0")]
4
5#[stable(feature = "alloc_module", since = "1.28.0")]
6#[doc(inline)]
7pub use core::alloc::*;
8use core::mem::Alignment;
9use core::ptr::{self, NonNull};
10use core::{cmp, hint};
11
unsafe extern "Rust" {
    // These are the magic symbols to call the global allocator. rustc generates
    // them to call the global allocator if there is a `#[global_allocator]` attribute
    // (the code expanding that attribute macro generates those functions), or to call
    // the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
    // otherwise.
    #[rustc_allocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    #[rustc_allocator_zeroed_variant = "__rust_alloc_zeroed"]
    fn __rust_alloc(size: usize, align: Alignment) -> *mut u8;
    #[rustc_deallocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_dealloc(ptr: NonNull<u8>, size: usize, align: Alignment);
    #[rustc_reallocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_realloc(
        ptr: NonNull<u8>,
        old_size: usize,
        align: Alignment,
        new_size: usize,
    ) -> *mut u8;
    #[rustc_allocator_zeroed]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_alloc_zeroed(size: usize, align: Alignment) -> *mut u8;

    // Called from `alloc`/`alloc_zeroed` below so that stable code cannot
    // accidentally rely on the allocator shim being omitted (see the comments
    // at those call sites).
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_no_alloc_shim_is_unstable_v2();
}
45
/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](self#functions).
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
// the compiler needs to know when a Box uses the global allocator vs a custom one
#[lang = "global_alloc_ty"]
pub struct Global;
59
/// Allocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `allocate` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     *(ptr as *mut u16) = 42;
///     assert_eq!(*(ptr as *mut u16), 42);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        __rust_no_alloc_shim_is_unstable_v2();

        __rust_alloc(layout.size(), layout.alignment())
    }
}
104
/// Deallocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `deallocate` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::dealloc`]. In particular, `ptr` must be non-null and
/// currently allocated (the `NonNull::new_unchecked` below relies on that).
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
    unsafe { dealloc_nonnull(NonNull::new_unchecked(ptr), layout) }
}
123
/// Same as [`dealloc`] but when you already have a non-null pointer
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn dealloc_nonnull(ptr: NonNull<u8>, layout: Layout) {
    // SAFETY: forwarded to `__rust_dealloc`; preconditions are the caller's.
    unsafe { __rust_dealloc(ptr, layout.size(), layout.alignment()) }
}
130
/// Reallocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::realloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `grow` and `shrink` methods
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::realloc`]. In particular, `ptr` must be non-null and
/// currently allocated (the `NonNull::new_unchecked` below relies on that).
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
    unsafe { realloc_nonnull(NonNull::new_unchecked(ptr), layout, new_size) }
}
150
/// Same as [`realloc`] but when you already have a non-null pointer
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn realloc_nonnull(ptr: NonNull<u8>, layout: Layout, new_size: usize) -> *mut u8 {
    // SAFETY: forwarded to `__rust_realloc`; preconditions are the caller's.
    unsafe { __rust_realloc(ptr, layout.size(), layout.alignment(), new_size) }
}
157
/// Allocates zero-initialized memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `allocate_zeroed` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc_zeroed`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc_zeroed, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc_zeroed(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     assert_eq!(*(ptr as *mut u16), 0);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        __rust_no_alloc_shim_is_unstable_v2();

        __rust_alloc_zeroed(layout.size(), layout.alignment())
    }
}
201
impl Global {
    // Runtime implementation of `alloc_impl`: dispatches zero-sized requests to
    // a dangling (but well-aligned) pointer, everything else to the global
    // allocator free functions above.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn alloc_impl_runtime(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            // Zero-sized allocations never touch the allocator.
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling_ptr(), 0)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
                // A null return from the allocator becomes `AllocError`.
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    // Runtime implementation of `deallocate_impl`: zero-sized "allocations"
    // were never handed to the allocator, so they are not handed back either.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn deallocate_impl_runtime(ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            // SAFETY:
            // * We have checked that `layout` is non-zero in size.
            // * The caller is obligated to provide a layout that "fits", and in this case,
            //   "fit" always means a layout that is equal to the original, because our
            //   `allocate()`, `grow()`, and `shrink()` implementations never returns a larger
            //   allocation than requested.
            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
            //   safety documentation.
            unsafe { dealloc_nonnull(ptr, layout) }
        }
    }

    // SAFETY: Same as `Allocator::grow`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn grow_impl_runtime(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // Growing a zero-sized allocation is just a fresh allocation.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero because it is greater than or equal to
            // `old_size`, which is non-zero in this arm. Other conditions must be
            // upheld by the caller.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size >= old_layout.size());

                let raw_ptr = realloc_nonnull(ptr, old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    // Only the newly-grown tail needs zeroing; `realloc` preserved
                    // the first `old_size` bytes.
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // Alignment changed: `realloc` cannot be used, so allocate-copy-free.
            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }

    // SAFETY: Same as `Allocator::shrink`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn shrink_impl_runtime(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        _zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // Shrinking to zero size frees the allocation and returns a dangling pointer.
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling_ptr(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size <= old_layout.size());

                let raw_ptr = realloc_nonnull(ptr, old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // Alignment changed: `realloc` cannot be used, so allocate-copy-free.
            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }

    // Const/runtime front door for allocation: `const_eval_select` picks the
    // `_const` variant in const eval and the `_runtime` variant at runtime.
    // SAFETY: Same as `Allocator::allocate`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        core::intrinsics::const_eval_select(
            (layout, zeroed),
            Global::alloc_impl_const,
            Global::alloc_impl_runtime,
        )
    }

    // SAFETY: Same as `Allocator::deallocate`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const unsafe fn deallocate_impl(&self, ptr: NonNull<u8>, layout: Layout) {
        core::intrinsics::const_eval_select(
            (ptr, layout),
            Global::deallocate_impl_const,
            Global::deallocate_impl_runtime,
        )
    }

    // SAFETY: Same as `Allocator::grow`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        core::intrinsics::const_eval_select(
            (self, ptr, old_layout, new_layout, zeroed),
            Global::grow_shrink_impl_const,
            Global::grow_impl_runtime,
        )
    }

    // SAFETY: Same as `Allocator::shrink`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const unsafe fn shrink_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // `zeroed` is irrelevant when shrinking; pass `false` so both grow and
        // shrink can share one const implementation.
        core::intrinsics::const_eval_select(
            (self, ptr, old_layout, new_layout, false),
            Global::grow_shrink_impl_const,
            Global::shrink_impl_runtime,
        )
    }

    // Const-eval implementation of allocation, backed by `const_allocate`.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const fn alloc_impl_const(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling_ptr(), 0)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = core::intrinsics::const_allocate(layout.size(), layout.align());
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    // SAFETY: the pointer returned by `const_allocate` is valid to write to.
                    ptr.write_bytes(0, size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    // Const-eval implementation of deallocation, backed by `const_deallocate`.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const fn deallocate_impl_const(ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            // SAFETY: We checked for nonzero size; other preconditions must be upheld by caller.
            unsafe {
                core::intrinsics::const_deallocate(ptr.as_ptr(), layout.size(), layout.align());
            }
        }
    }

    // Const-eval implementation shared by grow and shrink: there is no
    // `realloc` in const eval, so always allocate-copy-free, copying
    // `min(old, new)` bytes so the same code works for both directions.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
    const fn grow_shrink_impl_const(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        let new_ptr = self.alloc_impl(new_layout, zeroed)?;
        // SAFETY: both pointers are valid and this operations is in bounds.
        unsafe {
            ptr::copy_nonoverlapping(
                ptr.as_ptr(),
                new_ptr.as_mut_ptr(),
                cmp::min(old_layout.size(), new_layout.size()),
            );
        }
        unsafe {
            self.deallocate_impl(ptr, old_layout);
        }
        Ok(new_ptr)
    }
}
442
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
// SAFETY: the `*_impl` methods forwarded to below uphold the `Allocator`
// contract (valid, correctly-sized blocks; zero-sized requests handled via
// dangling pointers without touching the allocator).
unsafe impl const Allocator for Global {
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.deallocate_impl(ptr, layout) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.shrink_impl(ptr, old_layout, new_layout) }
    }
}
501
502// # Allocation error handler
503
#[cfg(not(no_global_oom_handling))]
unsafe extern "Rust" {
    // This is the magic symbol to call the global alloc error handler. rustc generates
    // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
    // default implementations below (`__rdl_alloc_error_handler`) otherwise.
    #[rustc_std_internal_symbol]
    fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}
512
/// Signals a memory allocation error.
///
/// Callers of memory allocation APIs wishing to cease execution
/// in response to an allocation error are encouraged to call this function,
/// rather than directly invoking [`panic!`] or similar.
///
/// This function is guaranteed to diverge (not return normally with a value), but depending on
/// global configuration, it may either panic (resulting in unwinding or aborting as per
/// configuration for all panics), or abort the process (with no unwinding).
///
/// The default behavior is:
///
///  * If the binary links against `std` (typically the case), then
///   print a message to standard error and abort the process.
///   This behavior can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
///   Future versions of Rust may panic by default instead.
///
/// * If the binary does not link against `std` (all of its crates are marked
///   [`#![no_std]`][no_std]), then call [`panic!`] with a message.
///   [The panic handler] applies as to any panic.
///
/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
/// [The panic handler]: https://doc.rust-lang.org/reference/runtime.html#the-panic_handler-attribute
/// [no_std]: https://doc.rust-lang.org/reference/names/preludes.html#the-no_std-attribute
#[stable(feature = "global_alloc", since = "1.28.0")]
#[rustc_const_unstable(feature = "const_alloc_error", issue = "92523")]
#[cfg(not(no_global_oom_handling))]
#[cold]
#[optimize(size)]
pub const fn handle_alloc_error(layout: Layout) -> ! {
    // Const-eval path: no handler symbol exists at compile time, so just panic.
    const fn ct_error(_: Layout) -> ! {
        panic!("allocation failed");
    }

    // Runtime path: dispatch to the rustc-generated handler symbol.
    #[inline]
    fn rt_error(layout: Layout) -> ! {
        unsafe {
            __rust_alloc_error_handler(layout.size(), layout.align());
        }
    }

    #[cfg(not(panic = "immediate-abort"))]
    {
        core::intrinsics::const_eval_select((layout,), ct_error, rt_error)
    }

    // With immediate-abort panics, use the plain panic path unconditionally.
    #[cfg(panic = "immediate-abort")]
    ct_error(layout)
}
563
#[cfg(not(no_global_oom_handling))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
pub mod __alloc_error_handler {
    // called via generated `__rust_alloc_error_handler` if there is no
    // `#[alloc_error_handler]`.
    #[rustc_std_internal_symbol]
    pub unsafe fn __rdl_alloc_error_handler(size: usize, _align: usize) -> ! {
        // Non-unwinding panic: an allocation-failure handler must not unwind.
        core::panicking::panic_nounwind_fmt(
            format_args!("memory allocation of {size} bytes failed"),
            /* force_no_backtrace */ false,
        )
    }
}