core/ptr/non_null.rs
1use crate::cmp::Ordering;
2use crate::marker::{Destruct, PointeeSized, Unsize};
3use crate::mem::{MaybeUninit, SizedTypeProperties};
4use crate::num::NonZero;
5use crate::ops::{CoerceUnsized, DispatchFromDyn};
6use crate::pin::PinCoerceUnsized;
7use crate::ptr::Unique;
8use crate::slice::{self, SliceIndex};
9use crate::ub_checks::assert_unsafe_precondition;
10use crate::{fmt, hash, intrinsics, mem, ptr};
11
12/// `*mut T` but non-zero and [covariant].
13///
14/// This is often the correct thing to use when building data structures using
15/// raw pointers, but is ultimately more dangerous to use because of its additional
16/// properties. If you're not sure if you should use `NonNull<T>`, just use `*mut T`!
17///
18/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
19/// is never dereferenced. This is so that enums may use this forbidden value
20/// as a discriminant -- `Option<NonNull<T>>` has the same size as `*mut T`.
21/// However the pointer may still dangle if it isn't dereferenced.
22///
23/// Unlike `*mut T`, `NonNull<T>` is covariant over `T`. This is usually the correct
24/// choice for most data structures and safe abstractions, such as `Box`, `Rc`, `Arc`, `Vec`,
25/// and `LinkedList`.
26///
27/// In rare cases, if your type exposes a way to mutate the value of `T` through a `NonNull<T>`,
28/// and you need to prevent unsoundness from variance (for example, if `T` could be a reference
29/// with a shorter lifetime), you should add a field to make your type invariant, such as
30/// `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
31///
32/// Example of a type that must be invariant:
33/// ```rust
34/// use std::cell::Cell;
35/// use std::marker::PhantomData;
36/// struct Invariant<T> {
///     ptr: std::ptr::NonNull<T>,
///     _invariant: PhantomData<Cell<T>>,
39/// }
40/// ```
41///
42/// Notice that `NonNull<T>` has a `From` instance for `&T`. However, this does
43/// not change the fact that mutating through a (pointer derived from a) shared
44/// reference is undefined behavior unless the mutation happens inside an
45/// [`UnsafeCell<T>`]. The same goes for creating a mutable reference from a shared
46/// reference. When using this `From` instance without an `UnsafeCell<T>`,
47/// it is your responsibility to ensure that `as_mut` is never called, and `as_ptr`
48/// is never used for mutation.
49///
50/// # Representation
51///
52/// Thanks to the [null pointer optimization],
53/// `NonNull<T>` and `Option<NonNull<T>>`
54/// are guaranteed to have the same size and alignment:
55///
56/// ```
57/// use std::ptr::NonNull;
58///
59/// assert_eq!(size_of::<NonNull<i16>>(), size_of::<Option<NonNull<i16>>>());
60/// assert_eq!(align_of::<NonNull<i16>>(), align_of::<Option<NonNull<i16>>>());
61///
62/// assert_eq!(size_of::<NonNull<str>>(), size_of::<Option<NonNull<str>>>());
63/// assert_eq!(align_of::<NonNull<str>>(), align_of::<Option<NonNull<str>>>());
64/// ```
65///
66/// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
67/// [`PhantomData`]: crate::marker::PhantomData
68/// [`UnsafeCell<T>`]: crate::cell::UnsafeCell
69/// [null pointer optimization]: crate::option#representation
70#[stable(feature = "nonnull", since = "1.25.0")]
71#[repr(transparent)]
72#[rustc_layout_scalar_valid_range_start(1)]
73#[rustc_nonnull_optimization_guaranteed]
74#[rustc_diagnostic_item = "NonNull"]
75pub struct NonNull<T: PointeeSized> {
76 // Remember to use `.as_ptr()` instead of `.pointer`, as field projecting to
77 // this is banned by <https://github.com/rust-lang/compiler-team/issues/807>.
78 pointer: *const T,
79}
80
81/// `NonNull` pointers are not `Send` because the data they reference may be aliased.
82// N.B., this impl is unnecessary, but should provide better error messages.
83#[stable(feature = "nonnull", since = "1.25.0")]
84impl<T: PointeeSized> !Send for NonNull<T> {}
85
86/// `NonNull` pointers are not `Sync` because the data they reference may be aliased.
87// N.B., this impl is unnecessary, but should provide better error messages.
88#[stable(feature = "nonnull", since = "1.25.0")]
89impl<T: PointeeSized> !Sync for NonNull<T> {}
90
91impl<T: Sized> NonNull<T> {
92 /// Creates a pointer with the given address and no [provenance][crate::ptr#provenance].
93 ///
94 /// For more details, see the equivalent method on a raw pointer, [`ptr::without_provenance_mut`].
95 ///
96 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
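    ///
    /// # Examples
    ///
    /// A minimal sketch; the address `0x1000` is arbitrary, and the resulting
    /// pointer carries no provenance, so it must not be dereferenced:
    ///
    /// ```
    /// use std::num::NonZero;
    /// use std::ptr::NonNull;
    ///
    /// let addr = NonZero::new(0x1000usize).unwrap();
    /// let ptr = NonNull::<u8>::without_provenance(addr);
    /// assert_eq!(ptr.addr(), addr);
    /// ```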
97 #[stable(feature = "nonnull_provenance", since = "1.89.0")]
98 #[rustc_const_stable(feature = "nonnull_provenance", since = "1.89.0")]
99 #[must_use]
100 #[inline]
101 pub const fn without_provenance(addr: NonZero<usize>) -> Self {
102 let pointer = crate::ptr::without_provenance(addr.get());
103 // SAFETY: we know `addr` is non-zero.
104 unsafe { NonNull { pointer } }
105 }
106
107 /// Creates a new `NonNull` that is dangling, but well-aligned.
108 ///
109 /// This is useful for initializing types which lazily allocate, like
110 /// `Vec::new` does.
111 ///
112 /// Note that the address of the returned pointer may potentially
113 /// be that of a valid pointer, which means this must not be used
114 /// as a "not yet initialized" sentinel value.
115 /// Types that lazily allocate must track initialization by some other means.
116 ///
117 /// # Examples
118 ///
119 /// ```
120 /// use std::ptr::NonNull;
121 ///
122 /// let ptr = NonNull::<u32>::dangling();
123 /// // Important: don't try to access the value of `ptr` without
124 /// // initializing it first! The pointer is not null but isn't valid either!
125 /// ```
126 #[stable(feature = "nonnull", since = "1.25.0")]
127 #[rustc_const_stable(feature = "const_nonnull_dangling", since = "1.36.0")]
128 #[must_use]
129 #[inline]
130 pub const fn dangling() -> Self {
131 let align = crate::ptr::Alignment::of::<T>();
132 NonNull::without_provenance(align.as_nonzero())
133 }
134
135 /// Converts an address back to a mutable pointer, picking up some previously 'exposed'
136 /// [provenance][crate::ptr#provenance].
137 ///
138 /// For more details, see the equivalent method on a raw pointer, [`ptr::with_exposed_provenance_mut`].
139 ///
140 /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
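    ///
    /// # Examples
    ///
    /// A small round-trip sketch: expose a pointer's provenance, keep only the
    /// address, and recover a usable pointer from it later:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 7u32;
    /// let ptr = NonNull::from_mut(&mut x);
    /// // Expose the provenance and remember only the address.
    /// let addr = ptr.expose_provenance();
    /// // Later, recover a pointer with that provenance from the address.
    /// let recovered = NonNull::<u32>::with_exposed_provenance(addr);
    /// assert_eq!(unsafe { recovered.read() }, 7);
    /// ```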
141 #[stable(feature = "nonnull_provenance", since = "1.89.0")]
142 #[inline]
143 pub fn with_exposed_provenance(addr: NonZero<usize>) -> Self {
144 // SAFETY: we know `addr` is non-zero.
145 unsafe {
146 let ptr = crate::ptr::with_exposed_provenance_mut(addr.get());
147 NonNull::new_unchecked(ptr)
148 }
149 }
150
    /// Returns a shared reference to the value. In contrast to [`as_ref`], this does not require
    /// that the value be initialized.
153 ///
154 /// For the mutable counterpart see [`as_uninit_mut`].
155 ///
156 /// [`as_ref`]: NonNull::as_ref
157 /// [`as_uninit_mut`]: NonNull::as_uninit_mut
158 ///
159 /// # Safety
160 ///
161 /// When calling this method, you have to ensure that
162 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
163 /// Note that because the created reference is to `MaybeUninit<T>`, the
164 /// source pointer can point to uninitialized memory.
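    ///
    /// # Examples
    ///
    /// A minimal sketch; here the value happens to be initialized, so it is
    /// sound to read it back out of the `MaybeUninit`:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 42u32;
    /// let ptr = NonNull::from_mut(&mut x);
    /// let uninit = unsafe { ptr.as_uninit_ref() };
    /// assert_eq!(unsafe { uninit.assume_init_ref() }, &42);
    /// ```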
165 #[inline]
166 #[must_use]
167 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
168 pub const unsafe fn as_uninit_ref<'a>(self) -> &'a MaybeUninit<T> {
169 // SAFETY: the caller must guarantee that `self` meets all the
170 // requirements for a reference.
171 unsafe { &*self.cast().as_ptr() }
172 }
173
    /// Returns a unique reference to the value. In contrast to [`as_mut`], this does not require
    /// that the value be initialized.
176 ///
177 /// For the shared counterpart see [`as_uninit_ref`].
178 ///
179 /// [`as_mut`]: NonNull::as_mut
180 /// [`as_uninit_ref`]: NonNull::as_uninit_ref
181 ///
182 /// # Safety
183 ///
184 /// When calling this method, you have to ensure that
185 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
186 /// Note that because the created reference is to `MaybeUninit<T>`, the
187 /// source pointer can point to uninitialized memory.
188 #[inline]
189 #[must_use]
190 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
191 pub const unsafe fn as_uninit_mut<'a>(self) -> &'a mut MaybeUninit<T> {
192 // SAFETY: the caller must guarantee that `self` meets all the
193 // requirements for a reference.
194 unsafe { &mut *self.cast().as_ptr() }
195 }
196
197 /// Casts from a pointer-to-`T` to a pointer-to-`[T; N]`.
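    ///
    /// # Examples
    ///
    /// A minimal sketch, starting from a pointer to the first element of an
    /// array (which has provenance over the whole array):
    ///
    /// ```
    /// #![feature(ptr_cast_array)]
    /// use std::ptr::NonNull;
    ///
    /// let mut data = [1u8, 2, 3, 4];
    /// let first = NonNull::new(data.as_mut_ptr()).unwrap();
    /// let array_ptr: NonNull<[u8; 4]> = first.cast_array::<4>();
    /// assert_eq!(unsafe { array_ptr.read() }, [1, 2, 3, 4]);
    /// ```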
198 #[inline]
199 #[unstable(feature = "ptr_cast_array", issue = "144514")]
200 pub const fn cast_array<const N: usize>(self) -> NonNull<[T; N]> {
201 self.cast()
202 }
203}
204
205impl<T: PointeeSized> NonNull<T> {
206 /// Creates a new `NonNull`.
207 ///
208 /// # Safety
209 ///
210 /// `ptr` must be non-null.
211 ///
212 /// # Examples
213 ///
214 /// ```
215 /// use std::ptr::NonNull;
216 ///
217 /// let mut x = 0u32;
218 /// let ptr = unsafe { NonNull::new_unchecked(&mut x as *mut _) };
219 /// ```
220 ///
221 /// *Incorrect* usage of this function:
222 ///
223 /// ```rust,no_run
224 /// use std::ptr::NonNull;
225 ///
226 /// // NEVER DO THAT!!! This is undefined behavior. ⚠️
227 /// let ptr = unsafe { NonNull::<u32>::new_unchecked(std::ptr::null_mut()) };
228 /// ```
229 #[stable(feature = "nonnull", since = "1.25.0")]
230 #[rustc_const_stable(feature = "const_nonnull_new_unchecked", since = "1.25.0")]
231 #[inline]
232 #[track_caller]
233 pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
234 // SAFETY: the caller must guarantee that `ptr` is non-null.
235 unsafe {
236 assert_unsafe_precondition!(
237 check_language_ub,
238 "NonNull::new_unchecked requires that the pointer is non-null",
239 (ptr: *mut () = ptr as *mut ()) => !ptr.is_null()
240 );
241 NonNull { pointer: ptr as _ }
242 }
243 }
244
245 /// Creates a new `NonNull` if `ptr` is non-null.
246 ///
247 /// # Panics during const evaluation
248 ///
249 /// This method will panic during const evaluation if the pointer cannot be
250 /// determined to be null or not. See [`is_null`] for more information.
251 ///
252 /// [`is_null`]: ../primitive.pointer.html#method.is_null-1
253 ///
254 /// # Examples
255 ///
256 /// ```
257 /// use std::ptr::NonNull;
258 ///
259 /// let mut x = 0u32;
260 /// let ptr = NonNull::<u32>::new(&mut x as *mut _).expect("ptr is null!");
261 ///
262 /// if let Some(ptr) = NonNull::<u32>::new(std::ptr::null_mut()) {
263 /// unreachable!();
264 /// }
265 /// ```
266 #[stable(feature = "nonnull", since = "1.25.0")]
267 #[rustc_const_stable(feature = "const_nonnull_new", since = "1.85.0")]
268 #[inline]
269 pub const fn new(ptr: *mut T) -> Option<Self> {
270 if !ptr.is_null() {
271 // SAFETY: The pointer is already checked and is not null
272 Some(unsafe { Self::new_unchecked(ptr) })
273 } else {
274 None
275 }
276 }
277
278 /// Converts a reference to a `NonNull` pointer.
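    ///
    /// # Examples
    ///
    /// A minimal sketch; reading through the pointer is fine, mutating through
    /// it would not be:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let x = 42u32;
    /// let ptr = NonNull::from_ref(&x);
    /// assert_eq!(unsafe { ptr.read() }, 42);
    /// ```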
279 #[stable(feature = "non_null_from_ref", since = "1.89.0")]
280 #[rustc_const_stable(feature = "non_null_from_ref", since = "1.89.0")]
281 #[inline]
282 pub const fn from_ref(r: &T) -> Self {
283 // SAFETY: A reference cannot be null.
284 unsafe { NonNull { pointer: r as *const T } }
285 }
286
287 /// Converts a mutable reference to a `NonNull` pointer.
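    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::from_mut(&mut x);
    /// unsafe { ptr.write(7) };
    /// assert_eq!(x, 7);
    /// ```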
288 #[stable(feature = "non_null_from_ref", since = "1.89.0")]
289 #[rustc_const_stable(feature = "non_null_from_ref", since = "1.89.0")]
290 #[inline]
291 pub const fn from_mut(r: &mut T) -> Self {
292 // SAFETY: A mutable reference cannot be null.
293 unsafe { NonNull { pointer: r as *mut T } }
294 }
295
296 /// Performs the same functionality as [`std::ptr::from_raw_parts`], except that a
297 /// `NonNull` pointer is returned, as opposed to a raw `*const` pointer.
298 ///
299 /// See the documentation of [`std::ptr::from_raw_parts`] for more details.
300 ///
301 /// [`std::ptr::from_raw_parts`]: crate::ptr::from_raw_parts
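    ///
    /// # Examples
    ///
    /// A minimal sketch, assembling a slice pointer from a thin data pointer
    /// and a length:
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    /// use std::ptr::NonNull;
    ///
    /// let mut arr = [1i32, 2, 3];
    /// let data = NonNull::new(arr.as_mut_ptr()).unwrap().cast::<()>();
    /// let slice_ptr: NonNull<[i32]> = NonNull::from_raw_parts(data, 3);
    /// assert_eq!(unsafe { slice_ptr.as_ref() }, &[1, 2, 3][..]);
    /// ```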
302 #[unstable(feature = "ptr_metadata", issue = "81513")]
303 #[inline]
304 pub const fn from_raw_parts(
305 data_pointer: NonNull<impl super::Thin>,
306 metadata: <T as super::Pointee>::Metadata,
307 ) -> NonNull<T> {
        // SAFETY: The result of `ptr::from_raw_parts_mut` is non-null because `data_pointer` is.
309 unsafe {
310 NonNull::new_unchecked(super::from_raw_parts_mut(data_pointer.as_ptr(), metadata))
311 }
312 }
313
314 /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
315 ///
316 /// The pointer can be later reconstructed with [`NonNull::from_raw_parts`].
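    ///
    /// # Examples
    ///
    /// A minimal sketch, splitting a slice pointer into its data pointer and
    /// length:
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    /// use std::ptr::NonNull;
    ///
    /// let mut arr = [1u8, 2, 3];
    /// let slice_ptr = NonNull::from_mut(&mut arr[..]);
    /// let (data, len) = slice_ptr.to_raw_parts();
    /// assert_eq!(len, 3);
    /// // The data pointer is the address of the first element.
    /// assert_eq!(data.cast::<u8>(), slice_ptr.cast::<u8>());
    /// ```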
317 #[unstable(feature = "ptr_metadata", issue = "81513")]
318 #[must_use = "this returns the result of the operation, \
319 without modifying the original"]
320 #[inline]
321 pub const fn to_raw_parts(self) -> (NonNull<()>, <T as super::Pointee>::Metadata) {
322 (self.cast(), super::metadata(self.as_ptr()))
323 }
324
325 /// Gets the "address" portion of the pointer.
326 ///
327 /// For more details, see the equivalent method on a raw pointer, [`pointer::addr`].
328 ///
329 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
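    ///
    /// # Examples
    ///
    /// A minimal sketch; the concrete address is of course not predictable,
    /// but it is always non-zero:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u8;
    /// let ptr = NonNull::from_mut(&mut x);
    /// assert_ne!(ptr.addr().get(), 0);
    /// ```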
330 #[must_use]
331 #[inline]
332 #[stable(feature = "strict_provenance", since = "1.84.0")]
333 pub fn addr(self) -> NonZero<usize> {
334 // SAFETY: The pointer is guaranteed by the type to be non-null,
335 // meaning that the address will be non-zero.
336 unsafe { NonZero::new_unchecked(self.as_ptr().addr()) }
337 }
338
339 /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
340 /// [`with_exposed_provenance`][NonNull::with_exposed_provenance] and returns the "address" portion.
341 ///
342 /// For more details, see the equivalent method on a raw pointer, [`pointer::expose_provenance`].
343 ///
344 /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
345 #[stable(feature = "nonnull_provenance", since = "1.89.0")]
346 pub fn expose_provenance(self) -> NonZero<usize> {
347 // SAFETY: The pointer is guaranteed by the type to be non-null,
348 // meaning that the address will be non-zero.
349 unsafe { NonZero::new_unchecked(self.as_ptr().expose_provenance()) }
350 }
351
352 /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
353 /// `self`.
354 ///
355 /// For more details, see the equivalent method on a raw pointer, [`pointer::with_addr`].
356 ///
357 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
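    ///
    /// # Examples
    ///
    /// A minimal sketch, rebuilding a pointer to the second element of an
    /// array from its address while reusing the provenance of `first`:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut arr = [1u8, 2, 3];
    /// let first = NonNull::new(arr.as_mut_ptr()).unwrap();
    /// let second = first.with_addr(first.addr().checked_add(1).unwrap());
    /// assert_eq!(unsafe { second.read() }, 2);
    /// ```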
358 #[must_use]
359 #[inline]
360 #[stable(feature = "strict_provenance", since = "1.84.0")]
361 pub fn with_addr(self, addr: NonZero<usize>) -> Self {
        // SAFETY: The result of `with_addr` is non-null because `addr` is guaranteed to be non-zero.
363 unsafe { NonNull::new_unchecked(self.as_ptr().with_addr(addr.get()) as *mut _) }
364 }
365
366 /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
367 /// [provenance][crate::ptr#provenance] of `self`.
368 ///
369 /// For more details, see the equivalent method on a raw pointer, [`pointer::map_addr`].
370 ///
371 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
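    ///
    /// # Examples
    ///
    /// A minimal sketch, stepping to the next element of an array while
    /// preserving provenance:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut arr = [1u8, 2, 3];
    /// let first = NonNull::new(arr.as_mut_ptr()).unwrap();
    /// let second = first.map_addr(|addr| addr.checked_add(1).unwrap());
    /// assert_eq!(unsafe { second.read() }, 2);
    /// ```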
372 #[must_use]
373 #[inline]
374 #[stable(feature = "strict_provenance", since = "1.84.0")]
375 pub fn map_addr(self, f: impl FnOnce(NonZero<usize>) -> NonZero<usize>) -> Self {
376 self.with_addr(f(self.addr()))
377 }
378
379 /// Acquires the underlying `*mut` pointer.
380 ///
381 /// # Examples
382 ///
383 /// ```
384 /// use std::ptr::NonNull;
385 ///
386 /// let mut x = 0u32;
387 /// let ptr = NonNull::new(&mut x).expect("ptr is null!");
388 ///
389 /// let x_value = unsafe { *ptr.as_ptr() };
390 /// assert_eq!(x_value, 0);
391 ///
392 /// unsafe { *ptr.as_ptr() += 2; }
393 /// let x_value = unsafe { *ptr.as_ptr() };
394 /// assert_eq!(x_value, 2);
395 /// ```
396 #[stable(feature = "nonnull", since = "1.25.0")]
397 #[rustc_const_stable(feature = "const_nonnull_as_ptr", since = "1.32.0")]
398 #[rustc_never_returns_null_ptr]
399 #[must_use]
400 #[inline(always)]
401 pub const fn as_ptr(self) -> *mut T {
402 // This is a transmute for the same reasons as `NonZero::get`.
403
404 // SAFETY: `NonNull` is `transparent` over a `*const T`, and `*const T`
405 // and `*mut T` have the same layout, so transitively we can transmute
406 // our `NonNull` to a `*mut T` directly.
407 unsafe { mem::transmute::<Self, *mut T>(self) }
408 }
409
410 /// Returns a shared reference to the value. If the value may be uninitialized, [`as_uninit_ref`]
411 /// must be used instead.
412 ///
413 /// For the mutable counterpart see [`as_mut`].
414 ///
415 /// [`as_uninit_ref`]: NonNull::as_uninit_ref
416 /// [`as_mut`]: NonNull::as_mut
417 ///
418 /// # Safety
419 ///
420 /// When calling this method, you have to ensure that
421 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
422 ///
423 /// # Examples
424 ///
425 /// ```
426 /// use std::ptr::NonNull;
427 ///
428 /// let mut x = 0u32;
429 /// let ptr = NonNull::new(&mut x as *mut _).expect("ptr is null!");
430 ///
431 /// let ref_x = unsafe { ptr.as_ref() };
432 /// println!("{ref_x}");
433 /// ```
434 ///
435 /// [the module documentation]: crate::ptr#safety
436 #[stable(feature = "nonnull", since = "1.25.0")]
437 #[rustc_const_stable(feature = "const_nonnull_as_ref", since = "1.73.0")]
438 #[must_use]
439 #[inline(always)]
440 pub const unsafe fn as_ref<'a>(&self) -> &'a T {
441 // SAFETY: the caller must guarantee that `self` meets all the
442 // requirements for a reference.
443 // `cast_const` avoids a mutable raw pointer deref.
444 unsafe { &*self.as_ptr().cast_const() }
445 }
446
447 /// Returns a unique reference to the value. If the value may be uninitialized, [`as_uninit_mut`]
448 /// must be used instead.
449 ///
450 /// For the shared counterpart see [`as_ref`].
451 ///
452 /// [`as_uninit_mut`]: NonNull::as_uninit_mut
453 /// [`as_ref`]: NonNull::as_ref
454 ///
455 /// # Safety
456 ///
457 /// When calling this method, you have to ensure that
458 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
460 ///
461 /// ```
462 /// use std::ptr::NonNull;
463 ///
464 /// let mut x = 0u32;
465 /// let mut ptr = NonNull::new(&mut x).expect("null pointer");
466 ///
467 /// let x_ref = unsafe { ptr.as_mut() };
468 /// assert_eq!(*x_ref, 0);
469 /// *x_ref += 2;
470 /// assert_eq!(*x_ref, 2);
471 /// ```
472 ///
473 /// [the module documentation]: crate::ptr#safety
474 #[stable(feature = "nonnull", since = "1.25.0")]
475 #[rustc_const_stable(feature = "const_ptr_as_ref", since = "1.83.0")]
476 #[must_use]
477 #[inline(always)]
478 pub const unsafe fn as_mut<'a>(&mut self) -> &'a mut T {
479 // SAFETY: the caller must guarantee that `self` meets all the
480 // requirements for a mutable reference.
481 unsafe { &mut *self.as_ptr() }
482 }
483
484 /// Casts to a pointer of another type.
485 ///
486 /// # Examples
487 ///
488 /// ```
489 /// use std::ptr::NonNull;
490 ///
491 /// let mut x = 0u32;
492 /// let ptr = NonNull::new(&mut x as *mut _).expect("null pointer");
493 ///
494 /// let casted_ptr = ptr.cast::<i8>();
495 /// let raw_ptr: *mut i8 = casted_ptr.as_ptr();
496 /// ```
497 #[stable(feature = "nonnull_cast", since = "1.27.0")]
498 #[rustc_const_stable(feature = "const_nonnull_cast", since = "1.36.0")]
499 #[must_use = "this returns the result of the operation, \
500 without modifying the original"]
501 #[inline]
502 pub const fn cast<U>(self) -> NonNull<U> {
503 // SAFETY: `self` is a `NonNull` pointer which is necessarily non-null
504 unsafe { NonNull { pointer: self.as_ptr() as *mut U } }
505 }
506
507 /// Try to cast to a pointer of another type by checking alignment.
508 ///
509 /// If the pointer is properly aligned to the target type, it will be
510 /// cast to the target type. Otherwise, `None` is returned.
511 ///
512 /// # Examples
513 ///
514 /// ```rust
515 /// #![feature(pointer_try_cast_aligned)]
516 /// use std::ptr::NonNull;
517 ///
518 /// let mut x = 0u64;
519 ///
520 /// let aligned = NonNull::from_mut(&mut x);
521 /// let unaligned = unsafe { aligned.byte_add(1) };
522 ///
523 /// assert!(aligned.try_cast_aligned::<u32>().is_some());
524 /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
525 /// ```
526 #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
527 #[must_use = "this returns the result of the operation, \
528 without modifying the original"]
529 #[inline]
530 pub fn try_cast_aligned<U>(self) -> Option<NonNull<U>> {
531 if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
532 }
533
534 /// Adds an offset to a pointer.
535 ///
536 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
537 /// offset of `3 * size_of::<T>()` bytes.
538 ///
539 /// # Safety
540 ///
541 /// If any of the following conditions are violated, the result is Undefined Behavior:
542 ///
543 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
544 ///
545 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
546 /// [allocation], and the entire memory range between `self` and the result must be in
547 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
548 /// of the address space.
549 ///
550 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
551 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
552 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
553 /// safe.
554 ///
555 /// [allocation]: crate::ptr#allocation
556 ///
557 /// # Examples
558 ///
559 /// ```
560 /// use std::ptr::NonNull;
561 ///
562 /// let mut s = [1, 2, 3];
563 /// let ptr: NonNull<u32> = NonNull::new(s.as_mut_ptr()).unwrap();
564 ///
565 /// unsafe {
566 /// println!("{}", ptr.offset(1).read());
567 /// println!("{}", ptr.offset(2).read());
568 /// }
569 /// ```
570 #[inline(always)]
571 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
572 #[must_use = "returns a new pointer rather than modifying its argument"]
573 #[stable(feature = "non_null_convenience", since = "1.80.0")]
574 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
575 pub const unsafe fn offset(self, count: isize) -> Self
576 where
577 T: Sized,
578 {
579 // SAFETY: the caller must uphold the safety contract for `offset`.
580 // Additionally safety contract of `offset` guarantees that the resulting pointer is
581 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
582 // construct `NonNull`.
583 unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
584 }
585
586 /// Calculates the offset from a pointer in bytes.
587 ///
588 /// `count` is in units of **bytes**.
589 ///
590 /// This is purely a convenience for casting to a `u8` pointer and
591 /// using [offset][pointer::offset] on it. See that method for documentation
592 /// and safety requirements.
593 ///
594 /// For non-`Sized` pointees this operation changes only the data pointer,
595 /// leaving the metadata untouched.
596 #[must_use]
597 #[inline(always)]
598 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
599 #[stable(feature = "non_null_convenience", since = "1.80.0")]
600 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
601 pub const unsafe fn byte_offset(self, count: isize) -> Self {
602 // SAFETY: the caller must uphold the safety contract for `offset` and `byte_offset` has
603 // the same safety contract.
604 // Additionally safety contract of `offset` guarantees that the resulting pointer is
605 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
606 // construct `NonNull`.
607 unsafe { NonNull { pointer: self.as_ptr().byte_offset(count) } }
608 }
609
610 /// Adds an offset to a pointer (convenience for `.offset(count as isize)`).
611 ///
612 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
613 /// offset of `3 * size_of::<T>()` bytes.
614 ///
615 /// # Safety
616 ///
617 /// If any of the following conditions are violated, the result is Undefined Behavior:
618 ///
619 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
620 ///
621 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
622 /// [allocation], and the entire memory range between `self` and the result must be in
623 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
624 /// of the address space.
625 ///
626 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
627 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
628 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
629 /// safe.
630 ///
631 /// [allocation]: crate::ptr#allocation
632 ///
633 /// # Examples
634 ///
635 /// ```
636 /// use std::ptr::NonNull;
637 ///
638 /// let s: &str = "123";
639 /// let ptr: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap();
640 ///
641 /// unsafe {
642 /// println!("{}", ptr.add(1).read() as char);
643 /// println!("{}", ptr.add(2).read() as char);
644 /// }
645 /// ```
646 #[inline(always)]
647 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
648 #[must_use = "returns a new pointer rather than modifying its argument"]
649 #[stable(feature = "non_null_convenience", since = "1.80.0")]
650 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
651 pub const unsafe fn add(self, count: usize) -> Self
652 where
653 T: Sized,
654 {
655 // SAFETY: the caller must uphold the safety contract for `offset`.
656 // Additionally safety contract of `offset` guarantees that the resulting pointer is
657 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
658 // construct `NonNull`.
659 unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
660 }
661
662 /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
663 ///
664 /// `count` is in units of bytes.
665 ///
666 /// This is purely a convenience for casting to a `u8` pointer and
667 /// using [`add`][NonNull::add] on it. See that method for documentation
668 /// and safety requirements.
669 ///
670 /// For non-`Sized` pointees this operation changes only the data pointer,
671 /// leaving the metadata untouched.
672 #[must_use]
673 #[inline(always)]
674 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
675 #[stable(feature = "non_null_convenience", since = "1.80.0")]
676 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
677 pub const unsafe fn byte_add(self, count: usize) -> Self {
678 // SAFETY: the caller must uphold the safety contract for `add` and `byte_add` has the same
679 // safety contract.
680 // Additionally safety contract of `add` guarantees that the resulting pointer is pointing
681 // to an allocation, there can't be an allocation at null, thus it's safe to construct
682 // `NonNull`.
683 unsafe { NonNull { pointer: self.as_ptr().byte_add(count) } }
684 }
685
686 /// Subtracts an offset from a pointer (convenience for
687 /// `.offset((count as isize).wrapping_neg())`).
688 ///
689 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
690 /// offset of `3 * size_of::<T>()` bytes.
691 ///
692 /// # Safety
693 ///
694 /// If any of the following conditions are violated, the result is Undefined Behavior:
695 ///
696 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
697 ///
698 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
699 /// [allocation], and the entire memory range between `self` and the result must be in
700 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
701 /// of the address space.
702 ///
703 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
704 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
705 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
706 /// safe.
707 ///
708 /// [allocation]: crate::ptr#allocation
709 ///
710 /// # Examples
711 ///
712 /// ```
713 /// use std::ptr::NonNull;
714 ///
715 /// let s: &str = "123";
716 ///
717 /// unsafe {
718 /// let end: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap().add(3);
719 /// println!("{}", end.sub(1).read() as char);
720 /// println!("{}", end.sub(2).read() as char);
721 /// }
722 /// ```
723 #[inline(always)]
724 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
725 #[must_use = "returns a new pointer rather than modifying its argument"]
726 #[stable(feature = "non_null_convenience", since = "1.80.0")]
727 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
728 pub const unsafe fn sub(self, count: usize) -> Self
729 where
730 T: Sized,
731 {
732 if T::IS_ZST {
733 // Pointer arithmetic does nothing when the pointee is a ZST.
734 self
735 } else {
736 // SAFETY: the caller must uphold the safety contract for `offset`.
737 // Because the pointee is *not* a ZST, that means that `count` is
738 // at most `isize::MAX`, and thus the negation cannot overflow.
739 unsafe { self.offset((count as isize).unchecked_neg()) }
740 }
741 }
742
743 /// Calculates the offset from a pointer in bytes (convenience for
744 /// `.byte_offset((count as isize).wrapping_neg())`).
745 ///
746 /// `count` is in units of bytes.
747 ///
748 /// This is purely a convenience for casting to a `u8` pointer and
749 /// using [`sub`][NonNull::sub] on it. See that method for documentation
750 /// and safety requirements.
751 ///
752 /// For non-`Sized` pointees this operation changes only the data pointer,
753 /// leaving the metadata untouched.
754 #[must_use]
755 #[inline(always)]
756 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
757 #[stable(feature = "non_null_convenience", since = "1.80.0")]
758 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
759 pub const unsafe fn byte_sub(self, count: usize) -> Self {
760 // SAFETY: the caller must uphold the safety contract for `sub` and `byte_sub` has the same
761 // safety contract.
762 // Additionally safety contract of `sub` guarantees that the resulting pointer is pointing
763 // to an allocation, there can't be an allocation at null, thus it's safe to construct
764 // `NonNull`.
765 unsafe { NonNull { pointer: self.as_ptr().byte_sub(count) } }
766 }
767
768 /// Calculates the distance between two pointers within the same allocation. The returned value is in
769 /// units of T: the distance in bytes divided by `size_of::<T>()`.
770 ///
771 /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
772 /// except that it has a lot more opportunities for UB, in exchange for the compiler
773 /// better understanding what you are doing.
774 ///
775 /// The primary motivation of this method is for computing the `len` of an array/slice
776 /// of `T` that you are currently representing as a "start" and "end" pointer
777 /// (and "end" is "one past the end" of the array).
778 /// In that case, `end.offset_from(start)` gets you the length of the array.
779 ///
780 /// All of the following safety requirements are trivially satisfied for this usecase.
781 ///
782 /// [`offset`]: #method.offset
783 ///
784 /// # Safety
785 ///
786 /// If any of the following conditions are violated, the result is Undefined Behavior:
787 ///
788 /// * `self` and `origin` must either
789 ///
790 /// * point to the same address, or
791 /// * both be *derived from* a pointer to the same [allocation], and the memory range between
    ///   the two pointers must be in bounds of that allocation. (See below for an example.)
793 ///
794 /// * The distance between the pointers, in bytes, must be an exact multiple
795 /// of the size of `T`.
796 ///
797 /// As a consequence, the absolute distance between the pointers, in bytes, computed on
798 /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
799 /// implied by the in-bounds requirement, and the fact that no allocation can be larger
800 /// than `isize::MAX` bytes.
801 ///
802 /// The requirement for pointers to be derived from the same allocation is primarily
803 /// needed for `const`-compatibility: the distance between pointers into *different* allocated
804 /// objects is not known at compile-time. However, the requirement also exists at
805 /// runtime and may be exploited by optimizations. If you wish to compute the difference between
806 /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
807 /// origin as isize) / size_of::<T>()`.
808 // FIXME: recommend `addr()` instead of `as usize` once that is stable.
809 ///
810 /// [`add`]: #method.add
811 /// [allocation]: crate::ptr#allocation
812 ///
813 /// # Panics
814 ///
815 /// This function panics if `T` is a Zero-Sized Type ("ZST").
816 ///
817 /// # Examples
818 ///
819 /// Basic usage:
820 ///
821 /// ```
822 /// use std::ptr::NonNull;
823 ///
824 /// let a = [0; 5];
825 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
826 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
827 /// unsafe {
828 /// assert_eq!(ptr2.offset_from(ptr1), 2);
829 /// assert_eq!(ptr1.offset_from(ptr2), -2);
830 /// assert_eq!(ptr1.offset(2), ptr2);
831 /// assert_eq!(ptr2.offset(-2), ptr1);
832 /// }
833 /// ```
834 ///
835 /// *Incorrect* usage:
836 ///
837 /// ```rust,no_run
838 /// use std::ptr::NonNull;
839 ///
840 /// let ptr1 = NonNull::new(Box::into_raw(Box::new(0u8))).unwrap();
841 /// let ptr2 = NonNull::new(Box::into_raw(Box::new(1u8))).unwrap();
842 /// let diff = (ptr2.addr().get() as isize).wrapping_sub(ptr1.addr().get() as isize);
843 /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
844 /// let diff_plus_1 = diff.wrapping_add(1);
845 /// let ptr2_other = NonNull::new(ptr1.as_ptr().wrapping_byte_offset(diff_plus_1)).unwrap();
846 /// assert_eq!(ptr2.addr(), ptr2_other.addr());
847 /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
848 /// // computing their offset is undefined behavior, even though
849 /// // they point to addresses that are in-bounds of the same object!
850 ///
851 /// let one = unsafe { ptr2_other.offset_from(ptr2) }; // Undefined Behavior! ⚠️
852 /// ```
853 #[inline]
854 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
855 #[stable(feature = "non_null_convenience", since = "1.80.0")]
856 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
857 pub const unsafe fn offset_from(self, origin: NonNull<T>) -> isize
858 where
859 T: Sized,
860 {
861 // SAFETY: the caller must uphold the safety contract for `offset_from`.
862 unsafe { self.as_ptr().offset_from(origin.as_ptr()) }
863 }
864
865 /// Calculates the distance between two pointers within the same allocation. The returned value is in
866 /// units of **bytes**.
867 ///
868 /// This is purely a convenience for casting to a `u8` pointer and
869 /// using [`offset_from`][NonNull::offset_from] on it. See that method for
870 /// documentation and safety requirements.
871 ///
872 /// For non-`Sized` pointees this operation considers only the data pointers,
873 /// ignoring the metadata.
874 #[inline(always)]
875 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
876 #[stable(feature = "non_null_convenience", since = "1.80.0")]
877 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
878 pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
879 // SAFETY: the caller must uphold the safety contract for `byte_offset_from`.
880 unsafe { self.as_ptr().byte_offset_from(origin.as_ptr()) }
881 }
882
    // N.B. `wrapping_offset`, `wrapping_add`, etc. are not implemented because they can wrap to null
884
885 /// Calculates the distance between two pointers within the same allocation, *where it's known that
886 /// `self` is equal to or greater than `origin`*. The returned value is in
887 /// units of T: the distance in bytes is divided by `size_of::<T>()`.
888 ///
889 /// This computes the same value that [`offset_from`](#method.offset_from)
890 /// would compute, but with the added precondition that the offset is
891 /// guaranteed to be non-negative. This method is equivalent to
892 /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
893 /// but it provides slightly more information to the optimizer, which can
894 /// sometimes allow it to optimize slightly better with some backends.
895 ///
    /// This method can be thought of as recovering the `count` that was passed
897 /// to [`add`](#method.add) (or, with the parameters in the other order,
898 /// to [`sub`](#method.sub)). The following are all equivalent, assuming
899 /// that their safety preconditions are met:
900 /// ```rust
901 /// # unsafe fn blah(ptr: std::ptr::NonNull<u32>, origin: std::ptr::NonNull<u32>, count: usize) -> bool { unsafe {
902 /// ptr.offset_from_unsigned(origin) == count
903 /// # &&
904 /// origin.add(count) == ptr
905 /// # &&
906 /// ptr.sub(count) == origin
907 /// # } }
908 /// ```
909 ///
910 /// # Safety
911 ///
912 /// - The distance between the pointers must be non-negative (`self >= origin`)
913 ///
914 /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
915 /// apply to this method as well; see it for the full details.
916 ///
917 /// Importantly, despite the return type of this method being able to represent
918 /// a larger offset, it's still *not permitted* to pass pointers which differ
919 /// by more than `isize::MAX` *bytes*. As such, the result of this method will
920 /// always be less than or equal to `isize::MAX as usize`.
921 ///
922 /// # Panics
923 ///
924 /// This function panics if `T` is a Zero-Sized Type ("ZST").
925 ///
926 /// # Examples
927 ///
928 /// ```
929 /// use std::ptr::NonNull;
930 ///
931 /// let a = [0; 5];
932 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
933 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
934 /// unsafe {
935 /// assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
936 /// assert_eq!(ptr1.add(2), ptr2);
937 /// assert_eq!(ptr2.sub(2), ptr1);
938 /// assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
939 /// }
940 ///
941 /// // This would be incorrect, as the pointers are not correctly ordered:
942 /// // ptr1.offset_from_unsigned(ptr2)
943 /// ```
944 #[inline]
945 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
946 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
947 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
948 pub const unsafe fn offset_from_unsigned(self, subtracted: NonNull<T>) -> usize
949 where
950 T: Sized,
951 {
952 // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
953 unsafe { self.as_ptr().offset_from_unsigned(subtracted.as_ptr()) }
954 }
955
956 /// Calculates the distance between two pointers within the same allocation, *where it's known that
957 /// `self` is equal to or greater than `origin`*. The returned value is in
958 /// units of **bytes**.
959 ///
960 /// This is purely a convenience for casting to a `u8` pointer and
961 /// using [`offset_from_unsigned`][NonNull::offset_from_unsigned] on it.
962 /// See that method for documentation and safety requirements.
963 ///
964 /// For non-`Sized` pointees this operation considers only the data pointers,
965 /// ignoring the metadata.
966 #[inline(always)]
967 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
968 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
969 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
970 pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: NonNull<U>) -> usize {
971 // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`.
972 unsafe { self.as_ptr().byte_offset_from_unsigned(origin.as_ptr()) }
973 }
974
975 /// Reads the value from `self` without moving it. This leaves the
976 /// memory in `self` unchanged.
977 ///
978 /// See [`ptr::read`] for safety concerns and examples.
979 ///
980 /// [`ptr::read`]: crate::ptr::read()
981 #[inline]
982 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
983 #[stable(feature = "non_null_convenience", since = "1.80.0")]
984 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
985 pub const unsafe fn read(self) -> T
986 where
987 T: Sized,
988 {
989 // SAFETY: the caller must uphold the safety contract for `read`.
990 unsafe { ptr::read(self.as_ptr()) }
991 }
992
993 /// Performs a volatile read of the value from `self` without moving it. This
994 /// leaves the memory in `self` unchanged.
995 ///
996 /// Volatile operations are intended to act on I/O memory, and are guaranteed
997 /// to not be elided or reordered by the compiler across other volatile
998 /// operations.
999 ///
1000 /// See [`ptr::read_volatile`] for safety concerns and examples.
1001 ///
1002 /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
1003 #[inline]
1004 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1005 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1006 pub unsafe fn read_volatile(self) -> T
1007 where
1008 T: Sized,
1009 {
1010 // SAFETY: the caller must uphold the safety contract for `read_volatile`.
1011 unsafe { ptr::read_volatile(self.as_ptr()) }
1012 }
1013
1014 /// Reads the value from `self` without moving it. This leaves the
1015 /// memory in `self` unchanged.
1016 ///
1017 /// Unlike `read`, the pointer may be unaligned.
1018 ///
1019 /// See [`ptr::read_unaligned`] for safety concerns and examples.
1020 ///
1021 /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
1022 #[inline]
1023 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1024 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1025 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
1026 pub const unsafe fn read_unaligned(self) -> T
1027 where
1028 T: Sized,
1029 {
1030 // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
1031 unsafe { ptr::read_unaligned(self.as_ptr()) }
1032 }
1033
1034 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1035 /// and destination may overlap.
1036 ///
1037 /// NOTE: this has the *same* argument order as [`ptr::copy`].
1038 ///
1039 /// See [`ptr::copy`] for safety concerns and examples.
1040 ///
1041 /// [`ptr::copy`]: crate::ptr::copy()
1042 #[inline(always)]
1043 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1044 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1045 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1046 pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize)
1047 where
1048 T: Sized,
1049 {
1050 // SAFETY: the caller must uphold the safety contract for `copy`.
1051 unsafe { ptr::copy(self.as_ptr(), dest.as_ptr(), count) }
1052 }
1053
1054 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1055 /// and destination may *not* overlap.
1056 ///
1057 /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
1058 ///
1059 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1060 ///
1061 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1062 #[inline(always)]
1063 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1064 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1065 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1066 pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize)
1067 where
1068 T: Sized,
1069 {
1070 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1071 unsafe { ptr::copy_nonoverlapping(self.as_ptr(), dest.as_ptr(), count) }
1072 }
1073
1074 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1075 /// and destination may overlap.
1076 ///
1077 /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
1078 ///
1079 /// See [`ptr::copy`] for safety concerns and examples.
1080 ///
1081 /// [`ptr::copy`]: crate::ptr::copy()
1082 #[inline(always)]
1083 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1084 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1085 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1086 pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize)
1087 where
1088 T: Sized,
1089 {
1090 // SAFETY: the caller must uphold the safety contract for `copy`.
1091 unsafe { ptr::copy(src.as_ptr(), self.as_ptr(), count) }
1092 }
1093
1094 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1095 /// and destination may *not* overlap.
1096 ///
1097 /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
1098 ///
1099 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1100 ///
1101 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1102 #[inline(always)]
1103 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1104 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1105 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1106 pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize)
1107 where
1108 T: Sized,
1109 {
1110 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1111 unsafe { ptr::copy_nonoverlapping(src.as_ptr(), self.as_ptr(), count) }
1112 }
1113
1114 /// Executes the destructor (if any) of the pointed-to value.
1115 ///
1116 /// See [`ptr::drop_in_place`] for safety concerns and examples.
1117 ///
1118 /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
1119 #[inline(always)]
1120 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1121 #[rustc_const_unstable(feature = "const_drop_in_place", issue = "109342")]
1122 pub const unsafe fn drop_in_place(self)
1123 where
1124 T: [const] Destruct,
1125 {
1126 // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
1127 unsafe { ptr::drop_in_place(self.as_ptr()) }
1128 }
1129
1130 /// Overwrites a memory location with the given value without reading or
1131 /// dropping the old value.
1132 ///
1133 /// See [`ptr::write`] for safety concerns and examples.
1134 ///
1135 /// [`ptr::write`]: crate::ptr::write()
1136 #[inline(always)]
1137 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1138 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1139 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1140 pub const unsafe fn write(self, val: T)
1141 where
1142 T: Sized,
1143 {
1144 // SAFETY: the caller must uphold the safety contract for `write`.
1145 unsafe { ptr::write(self.as_ptr(), val) }
1146 }
1147
1148 /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1149 /// bytes of memory starting at `self` to `val`.
1150 ///
1151 /// See [`ptr::write_bytes`] for safety concerns and examples.
1152 ///
1153 /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
1154 #[inline(always)]
1155 #[doc(alias = "memset")]
1156 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1157 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1158 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1159 pub const unsafe fn write_bytes(self, val: u8, count: usize)
1160 where
1161 T: Sized,
1162 {
1163 // SAFETY: the caller must uphold the safety contract for `write_bytes`.
1164 unsafe { ptr::write_bytes(self.as_ptr(), val, count) }
1165 }
1166
1167 /// Performs a volatile write of a memory location with the given value without
1168 /// reading or dropping the old value.
1169 ///
1170 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1171 /// to not be elided or reordered by the compiler across other volatile
1172 /// operations.
1173 ///
1174 /// See [`ptr::write_volatile`] for safety concerns and examples.
1175 ///
1176 /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
1177 #[inline(always)]
1178 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1179 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1180 pub unsafe fn write_volatile(self, val: T)
1181 where
1182 T: Sized,
1183 {
1184 // SAFETY: the caller must uphold the safety contract for `write_volatile`.
1185 unsafe { ptr::write_volatile(self.as_ptr(), val) }
1186 }
1187
1188 /// Overwrites a memory location with the given value without reading or
1189 /// dropping the old value.
1190 ///
1191 /// Unlike `write`, the pointer may be unaligned.
1192 ///
1193 /// See [`ptr::write_unaligned`] for safety concerns and examples.
1194 ///
1195 /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
1196 #[inline(always)]
1197 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1198 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1199 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1200 pub const unsafe fn write_unaligned(self, val: T)
1201 where
1202 T: Sized,
1203 {
1204 // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
1205 unsafe { ptr::write_unaligned(self.as_ptr(), val) }
1206 }
1207
1208 /// Replaces the value at `self` with `src`, returning the old
1209 /// value, without dropping either.
1210 ///
1211 /// See [`ptr::replace`] for safety concerns and examples.
1212 ///
1213 /// [`ptr::replace`]: crate::ptr::replace()
1214 #[inline(always)]
1215 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1216 #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")]
1217 pub const unsafe fn replace(self, src: T) -> T
1218 where
1219 T: Sized,
1220 {
1221 // SAFETY: the caller must uphold the safety contract for `replace`.
1222 unsafe { ptr::replace(self.as_ptr(), src) }
1223 }
1224
1225 /// Swaps the values at two mutable locations of the same type, without
1226 /// deinitializing either. They may overlap, unlike `mem::swap` which is
1227 /// otherwise equivalent.
1228 ///
1229 /// See [`ptr::swap`] for safety concerns and examples.
1230 ///
1231 /// [`ptr::swap`]: crate::ptr::swap()
1232 #[inline(always)]
1233 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1234 #[rustc_const_stable(feature = "const_swap", since = "1.85.0")]
1235 pub const unsafe fn swap(self, with: NonNull<T>)
1236 where
1237 T: Sized,
1238 {
1239 // SAFETY: the caller must uphold the safety contract for `swap`.
1240 unsafe { ptr::swap(self.as_ptr(), with.as_ptr()) }
1241 }
1242
1243 /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1244 /// `align`.
1245 ///
1246 /// If it is not possible to align the pointer, the implementation returns
1247 /// `usize::MAX`.
1248 ///
1249 /// The offset is expressed in number of `T` elements, and not bytes.
1250 ///
1251 /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1252 /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1253 /// the returned offset is correct in all terms other than alignment.
1254 ///
1255 /// When this is called during compile-time evaluation (which is unstable), the implementation
1256 /// may return `usize::MAX` in cases where that can never happen at runtime. This is because the
1257 /// actual alignment of pointers is not known yet during compile-time, so an offset with
1258 /// guaranteed alignment can sometimes not be computed. For example, a buffer declared as `[u8;
1259 /// N]` might be allocated at an odd or an even address, but at compile-time this is not yet
1260 /// known, so the execution has to be correct for either choice. It is therefore impossible to
1261 /// find an offset that is guaranteed to be 2-aligned. (This behavior is subject to change, as usual
1262 /// for unstable APIs.)
1263 ///
1264 /// # Panics
1265 ///
1266 /// The function panics if `align` is not a power-of-two.
1267 ///
1268 /// # Examples
1269 ///
1270 /// Accessing adjacent `u8` as `u16`
1271 ///
1272 /// ```
1273 /// use std::ptr::NonNull;
1274 ///
1275 /// # unsafe {
1276 /// let x = [5_u8, 6, 7, 8, 9];
1277 /// let ptr = NonNull::new(x.as_ptr() as *mut u8).unwrap();
1278 /// let offset = ptr.align_offset(align_of::<u16>());
1279 ///
1280 /// if offset < x.len() - 1 {
1281 /// let u16_ptr = ptr.add(offset).cast::<u16>();
1282 /// assert!(u16_ptr.read() == u16::from_ne_bytes([5, 6]) || u16_ptr.read() == u16::from_ne_bytes([6, 7]));
1283 /// } else {
1284 /// // while the pointer can be aligned via `offset`, it would point
1285 /// // outside the allocation
1286 /// }
1287 /// # }
1288 /// ```
1289 #[inline]
1290 #[must_use]
1291 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1292 pub fn align_offset(self, align: usize) -> usize
1293 where
1294 T: Sized,
1295 {
1296 if !align.is_power_of_two() {
1297 panic!("align_offset: align is not a power-of-two");
1298 }
1299
1300 {
1301 // SAFETY: `align` has been checked to be a power of 2 above.
1302 unsafe { ptr::align_offset(self.as_ptr(), align) }
1303 }
1304 }
1305
1306 /// Returns whether the pointer is properly aligned for `T`.
1307 ///
1308 /// # Examples
1309 ///
1310 /// ```
1311 /// use std::ptr::NonNull;
1312 ///
1313 /// // On some platforms, the alignment of i32 is less than 4.
1314 /// #[repr(align(4))]
1315 /// struct AlignedI32(i32);
1316 ///
1317 /// let data = AlignedI32(42);
1318 /// let ptr = NonNull::<AlignedI32>::from(&data);
1319 ///
1320 /// assert!(ptr.is_aligned());
1321 /// assert!(!NonNull::new(ptr.as_ptr().wrapping_byte_add(1)).unwrap().is_aligned());
1322 /// ```
1323 #[inline]
1324 #[must_use]
1325 #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
1326 pub fn is_aligned(self) -> bool
1327 where
1328 T: Sized,
1329 {
1330 self.as_ptr().is_aligned()
1331 }
1332
1333 /// Returns whether the pointer is aligned to `align`.
1334 ///
1335 /// For non-`Sized` pointees this operation considers only the data pointer,
1336 /// ignoring the metadata.
1337 ///
1338 /// # Panics
1339 ///
1340 /// The function panics if `align` is not a power-of-two (this includes 0).
1341 ///
1342 /// # Examples
1343 ///
1344 /// ```
1345 /// #![feature(pointer_is_aligned_to)]
1346 ///
1347 /// // On some platforms, the alignment of i32 is less than 4.
1348 /// #[repr(align(4))]
1349 /// struct AlignedI32(i32);
1350 ///
1351 /// let data = AlignedI32(42);
1352 /// let ptr = &data as *const AlignedI32;
1353 ///
1354 /// assert!(ptr.is_aligned_to(1));
1355 /// assert!(ptr.is_aligned_to(2));
1356 /// assert!(ptr.is_aligned_to(4));
1357 ///
1358 /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1359 /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1360 ///
1361 /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1362 /// ```
1363 #[inline]
1364 #[must_use]
1365 #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1366 pub fn is_aligned_to(self, align: usize) -> bool {
1367 self.as_ptr().is_aligned_to(align)
1368 }
1369}
1370
1371impl<T> NonNull<T> {
1372 /// Casts from a type to its maybe-uninitialized version.
1373 #[must_use]
1374 #[inline(always)]
1375 #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1376 pub const fn cast_uninit(self) -> NonNull<MaybeUninit<T>> {
1377 self.cast()
1378 }
1379}
1380impl<T> NonNull<MaybeUninit<T>> {
1381 /// Casts from a maybe-uninitialized type to its initialized version.
1382 ///
1383 /// This is always safe, since UB can only occur if the pointer is read
1384 /// before being initialized.
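    ///
    /// # Examples
    ///
    /// A minimal sketch, initializing a `MaybeUninit` slot through the cast
    /// pointer and then reading it back:
    ///
    /// ```
    /// #![feature(cast_maybe_uninit)]
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let mut slot = MaybeUninit::<u32>::uninit();
    /// let uninit_ptr = NonNull::from_mut(&mut slot);
    /// let init_ptr: NonNull<u32> = uninit_ptr.cast_init();
    /// unsafe { init_ptr.write(5) };
    /// assert_eq!(unsafe { init_ptr.read() }, 5);
    /// ```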
1385 #[must_use]
1386 #[inline(always)]
1387 #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1388 pub const fn cast_init(self) -> NonNull<T> {
1389 self.cast()
1390 }
1391}
1392
1393impl<T> NonNull<[T]> {
1394 /// Creates a non-null raw slice from a thin pointer and a length.
1395 ///
1396 /// The `len` argument is the number of **elements**, not the number of bytes.
1397 ///
1398 /// This function is safe, but dereferencing the return value is unsafe.
1399 /// See the documentation of [`slice::from_raw_parts`] for slice safety requirements.
1400 ///
1401 /// # Examples
1402 ///
1403 /// ```rust
1404 /// use std::ptr::NonNull;
1405 ///
1406 /// // create a slice pointer when starting out with a pointer to the first element
1407 /// let mut x = [5, 6, 7];
1408 /// let nonnull_pointer = NonNull::new(x.as_mut_ptr()).unwrap();
1409 /// let slice = NonNull::slice_from_raw_parts(nonnull_pointer, 3);
1410 /// assert_eq!(unsafe { slice.as_ref()[2] }, 7);
1411 /// ```
1412 ///
1413 /// (Note that this example artificially demonstrates a use of this method,
1414 /// but `let slice = NonNull::from(&x[..]);` would be a better way to write code like this.)
1415 #[stable(feature = "nonnull_slice_from_raw_parts", since = "1.70.0")]
1416 #[rustc_const_stable(feature = "const_slice_from_raw_parts_mut", since = "1.83.0")]
1417 #[must_use]
1418 #[inline]
1419 pub const fn slice_from_raw_parts(data: NonNull<T>, len: usize) -> Self {
1420 // SAFETY: `data` is a `NonNull` pointer which is necessarily non-null
1421 unsafe { Self::new_unchecked(super::slice_from_raw_parts_mut(data.as_ptr(), len)) }
1422 }
1423
1424 /// Returns the length of a non-null raw slice.
1425 ///
1426 /// The returned value is the number of **elements**, not the number of bytes.
1427 ///
1428 /// This function is safe, even when the non-null raw slice cannot be dereferenced to a slice
1429 /// because the pointer does not have a valid address.
1430 ///
1431 /// # Examples
1432 ///
1433 /// ```rust
1434 /// use std::ptr::NonNull;
1435 ///
1436 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1437 /// assert_eq!(slice.len(), 3);
1438 /// ```
1439 #[stable(feature = "slice_ptr_len_nonnull", since = "1.63.0")]
1440 #[rustc_const_stable(feature = "const_slice_ptr_len_nonnull", since = "1.63.0")]
1441 #[must_use]
1442 #[inline]
1443 pub const fn len(self) -> usize {
1444 self.as_ptr().len()
1445 }
1446
1447 /// Returns `true` if the non-null raw slice has a length of 0.
1448 ///
1449 /// # Examples
1450 ///
1451 /// ```rust
1452 /// use std::ptr::NonNull;
1453 ///
1454 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1455 /// assert!(!slice.is_empty());
1456 /// ```
1457 #[stable(feature = "slice_ptr_is_empty_nonnull", since = "1.79.0")]
1458 #[rustc_const_stable(feature = "const_slice_ptr_is_empty_nonnull", since = "1.79.0")]
1459 #[must_use]
1460 #[inline]
1461 pub const fn is_empty(self) -> bool {
1462 self.len() == 0
1463 }
1464
1465 /// Returns a non-null pointer to the slice's buffer.
1466 ///
1467 /// # Examples
1468 ///
1469 /// ```rust
1470 /// #![feature(slice_ptr_get)]
1471 /// use std::ptr::NonNull;
1472 ///
1473 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1474 /// assert_eq!(slice.as_non_null_ptr(), NonNull::<i8>::dangling());
1475 /// ```
1476 #[inline]
1477 #[must_use]
1478 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1479 pub const fn as_non_null_ptr(self) -> NonNull<T> {
1480 self.cast()
1481 }
1482
1483 /// Returns a raw pointer to the slice's buffer.
1484 ///
1485 /// # Examples
1486 ///
1487 /// ```rust
1488 /// #![feature(slice_ptr_get)]
1489 /// use std::ptr::NonNull;
1490 ///
1491 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1492 /// assert_eq!(slice.as_mut_ptr(), NonNull::<i8>::dangling().as_ptr());
1493 /// ```
1494 #[inline]
1495 #[must_use]
1496 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1497 #[rustc_never_returns_null_ptr]
1498 pub const fn as_mut_ptr(self) -> *mut T {
1499 self.as_non_null_ptr().as_ptr()
1500 }
1501
1502 /// Returns a shared reference to a slice of possibly uninitialized values. In contrast to
1503 /// [`as_ref`], this does not require that the value be initialized.
1504 ///
1505 /// For the mutable counterpart see [`as_uninit_slice_mut`].
1506 ///
1507 /// [`as_ref`]: NonNull::as_ref
1508 /// [`as_uninit_slice_mut`]: NonNull::as_uninit_slice_mut
1509 ///
1510 /// # Safety
1511 ///
1512 /// When calling this method, you have to ensure that all of the following is true:
1513 ///
1514 /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
1515 /// and it must be properly aligned. This means in particular:
1516 ///
1517 /// * The entire memory range of this slice must be contained within a single allocation!
1518 /// Slices can never span across multiple allocations.
1519 ///
1520 /// * The pointer must be aligned even for zero-length slices. One
1521 /// reason for this is that enum layout optimizations may rely on references
1522 /// (including slices of any length) being aligned and non-null to distinguish
1523 /// them from other data. You can obtain a pointer that is usable as `data`
1524 /// for zero-length slices using [`NonNull::dangling()`].
1525 ///
1526 /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1527 /// See the safety documentation of [`pointer::offset`].
1528 ///
1529 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1530 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1531 /// In particular, while this reference exists, the memory the pointer points to must
1532 /// not get mutated (except inside `UnsafeCell`).
1533 ///
1534 /// This applies even if the result of this method is unused!
1535 ///
1536 /// See also [`slice::from_raw_parts`].
1537 ///
1538 /// [valid]: crate::ptr#safety
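///
/// # Examples
///
/// A minimal sketch, viewing an already-initialized array through the
/// maybe-uninitialized slice API:
///
/// ```
/// #![feature(ptr_as_uninit)]
///
/// use std::mem::MaybeUninit;
/// use std::ptr::NonNull;
///
/// let x = [1_u8, 2, 3];
/// let slice: NonNull<[u8]> = NonNull::from(&x[..]);
/// // SAFETY: `slice` points at the live, initialized array `x`, and `x` is not
/// // mutated while the returned reference exists.
/// let uninit: &[MaybeUninit<u8>] = unsafe { slice.as_uninit_slice() };
/// assert_eq!(uninit.len(), 3);
/// // SAFETY: `x` is fully initialized.
/// assert_eq!(unsafe { uninit[0].assume_init() }, 1);
/// ```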
1539 #[inline]
1540 #[must_use]
1541 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1542 pub const unsafe fn as_uninit_slice<'a>(self) -> &'a [MaybeUninit<T>] {
1543 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
1544 unsafe { slice::from_raw_parts(self.cast().as_ptr(), self.len()) }
1545 }
1546
1547 /// Returns a unique reference to a slice of possibly uninitialized values. In contrast to
1548 /// [`as_mut`], this does not require that the value be initialized.
1549 ///
1550 /// For the shared counterpart see [`as_uninit_slice`].
1551 ///
1552 /// [`as_mut`]: NonNull::as_mut
1553 /// [`as_uninit_slice`]: NonNull::as_uninit_slice
1554 ///
1555 /// # Safety
1556 ///
1557 /// When calling this method, you have to ensure that all of the following is true:
1558 ///
1559 /// * The pointer must be [valid] for reads and writes for `ptr.len() * size_of::<T>()`
1560 /// many bytes, and it must be properly aligned. This means in particular:
1561 ///
1562 /// * The entire memory range of this slice must be contained within a single allocation!
1563 /// Slices can never span across multiple allocations.
1564 ///
1565 /// * The pointer must be aligned even for zero-length slices. One
1566 /// reason for this is that enum layout optimizations may rely on references
1567 /// (including slices of any length) being aligned and non-null to distinguish
1568 /// them from other data. You can obtain a pointer that is usable as `data`
1569 /// for zero-length slices using [`NonNull::dangling()`].
1570 ///
1571 /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1572 /// See the safety documentation of [`pointer::offset`].
1573 ///
1574 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1575 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1576 /// In particular, while this reference exists, the memory the pointer points to must
1577 /// not get accessed (read or written) through any other pointer.
1578 ///
1579 /// This applies even if the result of this method is unused!
1580 ///
1581 /// See also [`slice::from_raw_parts_mut`].
1582 ///
1583 /// [valid]: crate::ptr#safety
1584 ///
1585 /// # Examples
1586 ///
1587 /// ```rust
1588 /// #![feature(allocator_api, ptr_as_uninit)]
1589 ///
1590 /// use std::alloc::{Allocator, Layout, Global};
1591 /// use std::mem::MaybeUninit;
1592 /// use std::ptr::NonNull;
1593 ///
1594 /// let memory: NonNull<[u8]> = Global.allocate(Layout::new::<[u8; 32]>())?;
1595 /// // This is safe as `memory` is valid for reads and writes for `memory.len()` many bytes.
1596 /// // Note that calling `memory.as_mut()` is not allowed here as the content may be uninitialized.
1597 /// # #[allow(unused_variables)]
1598 /// let slice: &mut [MaybeUninit<u8>] = unsafe { memory.as_uninit_slice_mut() };
1599 /// # // Prevent leaks for Miri.
1600 /// # unsafe { Global.deallocate(memory.cast(), Layout::new::<[u8; 32]>()); }
1601 /// # Ok::<_, std::alloc::AllocError>(())
1602 /// ```
1603 #[inline]
1604 #[must_use]
1605 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1606 pub const unsafe fn as_uninit_slice_mut<'a>(self) -> &'a mut [MaybeUninit<T>] {
1607 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
1608 unsafe { slice::from_raw_parts_mut(self.cast().as_ptr(), self.len()) }
1609 }
1610
1611 /// Returns a non-null pointer to an element or subslice, without doing bounds
1612 /// checking.
1613 ///
1614 /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
1615 /// is *[undefined behavior]* even if the resulting pointer is not used.
1616 ///
1617 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1618 ///
1619 /// # Examples
1620 ///
1621 /// ```
1622 /// #![feature(slice_ptr_get)]
1623 /// use std::ptr::NonNull;
1624 ///
1625 /// let x = &mut [1, 2, 4];
1626 /// let x = NonNull::slice_from_raw_parts(NonNull::new(x.as_mut_ptr()).unwrap(), x.len());
1627 ///
1628 /// unsafe {
1629 /// assert_eq!(x.get_unchecked_mut(1).as_ptr(), x.as_non_null_ptr().as_ptr().add(1));
1630 /// }
1631 /// ```
1632 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1633 #[rustc_const_unstable(feature = "const_index", issue = "143775")]
1634 #[inline]
1635 pub const unsafe fn get_unchecked_mut<I>(self, index: I) -> NonNull<I::Output>
1636 where
1637 I: [const] SliceIndex<[T]>,
1638 {
1639 // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
1640 // As a consequence, the resulting pointer cannot be null.
1641 unsafe { NonNull::new_unchecked(self.as_ptr().get_unchecked_mut(index)) }
1642 }
1643}
1644
1645#[stable(feature = "nonnull", since = "1.25.0")]
1646impl<T: PointeeSized> Clone for NonNull<T> {
1647 #[inline(always)]
1648 fn clone(&self) -> Self {
1649 *self
1650 }
1651}
1652
1653#[stable(feature = "nonnull", since = "1.25.0")]
1654impl<T: PointeeSized> Copy for NonNull<T> {}
1655
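// `CoerceUnsized` allows unsizing coercions of the pointee,
// e.g. `NonNull<[i32; 3]>` coercing to `NonNull<[i32]>`.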
1656#[unstable(feature = "coerce_unsized", issue = "18598")]
1657impl<T: PointeeSized, U: PointeeSized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
1658
1659#[unstable(feature = "dispatch_from_dyn", issue = "none")]
1660impl<T: PointeeSized, U: PointeeSized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
1661
1662#[stable(feature = "pin", since = "1.33.0")]
1663unsafe impl<T: PointeeSized> PinCoerceUnsized for NonNull<T> {}
1664
1665#[stable(feature = "nonnull", since = "1.25.0")]
1666impl<T: PointeeSized> fmt::Debug for NonNull<T> {
1667 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1668 fmt::Pointer::fmt(&self.as_ptr(), f)
1669 }
1670}
1671
1672#[stable(feature = "nonnull", since = "1.25.0")]
1673impl<T: PointeeSized> fmt::Pointer for NonNull<T> {
1674 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1675 fmt::Pointer::fmt(&self.as_ptr(), f)
1676 }
1677}
1678
1679#[stable(feature = "nonnull", since = "1.25.0")]
1680impl<T: PointeeSized> Eq for NonNull<T> {}
1681
1682#[stable(feature = "nonnull", since = "1.25.0")]
1683impl<T: PointeeSized> PartialEq for NonNull<T> {
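// Comparing wide pointers compares both the address and the metadata (e.g. a slice
// length or vtable pointer); the lint is allowed because this impl intentionally
// forwards to that raw-pointer comparison.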
1684 #[inline]
1685 #[allow(ambiguous_wide_pointer_comparisons)]
1686 fn eq(&self, other: &Self) -> bool {
1687 self.as_ptr() == other.as_ptr()
1688 }
1689}
1690
1691#[stable(feature = "nonnull", since = "1.25.0")]
1692impl<T: PointeeSized> Ord for NonNull<T> {
1693 #[inline]
1694 #[allow(ambiguous_wide_pointer_comparisons)]
1695 fn cmp(&self, other: &Self) -> Ordering {
1696 self.as_ptr().cmp(&other.as_ptr())
1697 }
1698}
1699
1700#[stable(feature = "nonnull", since = "1.25.0")]
1701impl<T: PointeeSized> PartialOrd for NonNull<T> {
1702 #[inline]
1703 #[allow(ambiguous_wide_pointer_comparisons)]
1704 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
1705 self.as_ptr().partial_cmp(&other.as_ptr())
1706 }
1707}
1708
1709#[stable(feature = "nonnull", since = "1.25.0")]
1710impl<T: PointeeSized> hash::Hash for NonNull<T> {
1711 #[inline]
1712 fn hash<H: hash::Hasher>(&self, state: &mut H) {
1713 self.as_ptr().hash(state)
1714 }
1715}
1716
1717#[unstable(feature = "ptr_internals", issue = "none")]
1718#[rustc_const_unstable(feature = "const_convert", issue = "143773")]
1719impl<T: PointeeSized> const From<Unique<T>> for NonNull<T> {
1720 #[inline]
1721 fn from(unique: Unique<T>) -> Self {
1722 unique.as_non_null_ptr()
1723 }
1724}
1725
1726#[stable(feature = "nonnull", since = "1.25.0")]
1727#[rustc_const_unstable(feature = "const_convert", issue = "143773")]
1728impl<T: PointeeSized> const From<&mut T> for NonNull<T> {
1729 /// Converts a `&mut T` to a `NonNull<T>`.
1730 ///
1731 /// This conversion is safe and infallible since references cannot be null.
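///
/// # Examples
///
/// A minimal sketch of writing through a pointer obtained from an exclusive reference:
///
/// ```
/// use std::ptr::NonNull;
///
/// let mut x = 0_u32;
/// let mut ptr: NonNull<u32> = NonNull::from(&mut x);
/// // SAFETY: `ptr` was just derived from an exclusive reference to `x`, and no
/// // other reference to `x` is used while the pointer is in use.
/// unsafe { *ptr.as_mut() = 7 };
/// assert_eq!(x, 7);
/// ```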
1732 #[inline]
1733 fn from(r: &mut T) -> Self {
1734 NonNull::from_mut(r)
1735 }
1736}
1737
1738#[stable(feature = "nonnull", since = "1.25.0")]
1739#[rustc_const_unstable(feature = "const_convert", issue = "143773")]
1740impl<T: PointeeSized> const From<&T> for NonNull<T> {
1741 /// Converts a `&T` to a `NonNull<T>`.
1742 ///
1743 /// This conversion is safe and infallible since references cannot be null.
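///
/// # Examples
///
/// A minimal sketch of reading through a pointer obtained from a shared reference:
///
/// ```
/// use std::ptr::NonNull;
///
/// let x = 5_i32;
/// let ptr: NonNull<i32> = NonNull::from(&x);
/// // SAFETY: `ptr` points at the live, initialized value `x`.
/// assert_eq!(unsafe { ptr.read() }, 5);
/// ```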
1744 #[inline]
1745 fn from(r: &T) -> Self {
1746 NonNull::from_ref(r)
1747 }
1748}