core/ptr/const_ptr.rs
use super::*;
use crate::cmp::Ordering::{Equal, Greater, Less};
use crate::intrinsics::const_eval_select;
use crate::mem::{self, SizedTypeProperties};
use crate::slice::{self, SliceIndex};

impl<T: ?Sized> *const T {
    /// Returns `true` if the pointer is null.
    ///
    /// Note that unsized types have many possible null pointers, as only the
    /// raw data pointer is considered, not their length, vtable, etc.
    /// Therefore, two pointers that are null may still not compare equal to
    /// each other.
    ///
    /// # Panics during const evaluation
    ///
    /// If this method is used during const evaluation, and `self` is a pointer
    /// that is offset beyond the bounds of the memory it initially pointed to,
    /// then there might not be enough information to determine whether the
    /// pointer is null. This is because the absolute address in memory is not
    /// known at compile time. If the nullness of the pointer cannot be
    /// determined, this method will panic.
    ///
    /// In-bounds pointers are never null, so the method will never panic for
    /// such pointers.
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "Follow the rabbit";
    /// let ptr: *const u8 = s.as_ptr();
    /// assert!(!ptr.is_null());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[rustc_diagnostic_item = "ptr_const_is_null"]
    #[inline]
    #[rustc_allow_const_fn_unstable(const_eval_select)]
    pub const fn is_null(self) -> bool {
        // Compare via a cast to a thin pointer, so fat pointers only consider
        // their "data" part for null-ness.
        let ptr = self as *const u8;
        const_eval_select!(
            @capture { ptr: *const u8 } -> bool:
            // This use of `const_raw_ptr_comparison` has been explicitly blessed by t-lang.
            if const #[rustc_allow_const_fn_unstable(const_raw_ptr_comparison)] {
                match (ptr).guaranteed_eq(null_mut()) {
                    Some(res) => res,
                    // To remain maximally conservative, we stop execution when we don't
                    // know whether the pointer is null or not.
                    // We can *not* return `false` here, that would be unsound in `NonNull::new`!
                    None => panic!("null-ness of this pointer cannot be determined in const context"),
                }
            } else {
                ptr.addr() == 0
            }
        )
    }

    /// Casts to a pointer of another type.
    #[stable(feature = "ptr_cast", since = "1.38.0")]
    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
    #[rustc_diagnostic_item = "const_ptr_cast"]
    #[inline(always)]
    pub const fn cast<U>(self) -> *const U {
        self as _
    }

    /// Uses the address value in a new pointer of another type.
    ///
    /// This operation will ignore the address part of its `meta` operand and discard existing
    /// metadata of `self`. For pointers to sized types (thin pointers), this has the same effect
    /// as a simple cast. For pointers to an unsized type (fat pointers), this recombines the
    /// address with new metadata such as a slice length or a `dyn`-vtable.
    ///
    /// The resulting pointer will have the provenance of `self`. This operation is semantically
    /// the same as creating a new pointer with the data pointer value of `self` but the metadata
    /// of `meta`, being fat or thin depending on the `meta` operand.
    ///
    /// # Examples
    ///
    /// This function is primarily useful for enabling pointer arithmetic on potentially fat
    /// pointers. The pointer is cast to a sized pointee to utilize offset operations and then
    /// recombined with its own original metadata.
    ///
    /// ```
    /// #![feature(set_ptr_value)]
    /// # use core::fmt::Debug;
    /// let arr: [i32; 3] = [1, 2, 3];
    /// let mut ptr = arr.as_ptr() as *const dyn Debug;
    /// let thin = ptr as *const u8;
    /// unsafe {
    ///     ptr = thin.add(8).with_metadata_of(ptr);
    ///     # assert_eq!(*(ptr as *const i32), 3);
    ///     println!("{:?}", &*ptr); // will print "3"
    /// }
    /// ```
    ///
    /// # *Incorrect* usage
    ///
    /// The provenance of the two pointers is *not* combined: the result carries only the
    /// provenance of `self`, and must only be used to refer to addresses allowed by `self`.
    ///
    /// ```rust,no_run
    /// #![feature(set_ptr_value)]
    /// let x = 0u32;
    /// let y = 1u32;
    ///
    /// let x = (&x) as *const u32;
    /// let y = (&y) as *const u32;
    ///
    /// let offset = (x as usize - y as usize) / 4;
    /// let bad = x.wrapping_add(offset).with_metadata_of(y);
    ///
    /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`.
    /// println!("{:?}", unsafe { &*bad });
    /// ```
    #[unstable(feature = "set_ptr_value", issue = "75091")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline]
    pub const fn with_metadata_of<U>(self, meta: *const U) -> *const U
    where
        U: ?Sized,
    {
        from_raw_parts::<U>(self as *const (), metadata(meta))
    }

    /// Changes constness without changing the type.
    ///
    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
    /// refactored.
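    ///
    /// # Examples
    ///
    /// A small illustrative sketch; note that the cast alone does not make writes through the
    /// resulting pointer sound:
    ///
    /// ```
    /// let val: u8 = 5;
    /// let ptr: *const u8 = &val;
    /// let mut_ptr: *mut u8 = ptr.cast_mut();
    /// // Only the type changed; the address and provenance are the same, and writing
    /// // through `mut_ptr` would still be UB since it was derived from a shared reference.
    /// assert_eq!(mut_ptr as *const u8, ptr);
    /// ```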
    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
    #[rustc_diagnostic_item = "ptr_cast_mut"]
    #[inline(always)]
    pub const fn cast_mut(self) -> *mut T {
        self as _
    }

    /// Gets the "address" portion of the pointer.
    ///
    /// This is similar to `self as usize`, except that the [provenance][crate::ptr#provenance] of
    /// the pointer is discarded and not [exposed][crate::ptr#exposed-provenance]. This means that
    /// casting the returned address back to a pointer yields a [pointer without
    /// provenance][without_provenance], which is undefined behavior to dereference. To properly
    /// restore the lost information and obtain a dereferenceable pointer, use
    /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
    ///
    /// If using those APIs is not possible because there is no way to preserve a pointer with the
    /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts
    /// or [`expose_provenance`][pointer::expose_provenance] and [`with_exposed_provenance`][with_exposed_provenance]
    /// instead. However, note that this makes your code less portable and less amenable to tools
    /// that check for compliance with the Rust memory model.
    ///
    /// On most platforms this will produce a value with the same bytes as the original
    /// pointer, because all the bytes are dedicated to describing the address.
    /// Platforms which need to store additional information in the pointer may
    /// perform a change of representation to produce a value containing only the address
    /// portion of the pointer. What that means is up to the platform to define.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
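    ///
    /// # Examples
    ///
    /// A minimal sketch of round-tripping an address through [`with_addr`][pointer::with_addr]:
    ///
    /// ```
    /// let x = 42u32;
    /// let ptr: *const u32 = &x;
    /// let addr = ptr.addr();
    /// // Re-attach the provenance of `ptr` to the (unchanged) address.
    /// let ptr2 = ptr.with_addr(addr);
    /// assert_eq!(unsafe { *ptr2 }, 42);
    /// ```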
    #[must_use]
    #[inline(always)]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn addr(self) -> usize {
        // A pointer-to-integer transmute currently has exactly the right semantics: it returns the
        // address without exposing the provenance. Note that this is *not* a stable guarantee about
        // transmute semantics, it relies on sysroot crates having special status.
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        unsafe { mem::transmute(self.cast::<()>()) }
    }

    /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
    /// [`with_exposed_provenance`] and returns the "address" portion.
    ///
    /// This is equivalent to `self as usize`, which semantically discards provenance information.
    /// Furthermore, this (like the `as` cast) has the implicit side-effect of marking the
    /// provenance as 'exposed', so on platforms that support it you can later call
    /// [`with_exposed_provenance`] to reconstitute the original pointer including its provenance.
    ///
    /// Due to its inherent ambiguity, [`with_exposed_provenance`] may not be supported by tools
    /// that help you to stay conformant with the Rust memory model. It is recommended to use
    /// [Strict Provenance][crate::ptr#strict-provenance] APIs such as [`with_addr`][pointer::with_addr]
    /// wherever possible, in which case [`addr`][pointer::addr] should be used instead of `expose_provenance`.
    ///
    /// On most platforms this will produce a value with the same bytes as the original pointer,
    /// because all the bytes are dedicated to describing the address. Platforms which need to store
    /// additional information in the pointer may not support this operation, since the 'expose'
    /// side-effect which is required for [`with_exposed_provenance`] to work is typically not
    /// available.
    ///
    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
    ///
    /// [`with_exposed_provenance`]: with_exposed_provenance
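    ///
    /// # Examples
    ///
    /// A minimal sketch of the expose/reconstitute round trip:
    ///
    /// ```
    /// use std::ptr;
    ///
    /// let x = 3u8;
    /// let x_ptr: *const u8 = &x;
    /// let addr = x_ptr.expose_provenance();
    /// // Later (e.g., after passing `addr` through an integer-only interface),
    /// // reconstitute a pointer from the exposed address.
    /// let p: *const u8 = ptr::with_exposed_provenance(addr);
    /// assert_eq!(unsafe { *p }, 3);
    /// ```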
    #[inline(always)]
    #[stable(feature = "exposed_provenance", since = "1.84.0")]
    pub fn expose_provenance(self) -> usize {
        self.cast::<()>() as usize
    }

    /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
    /// `self`.
    ///
    /// This is similar to an `addr as *const T` cast, but copies
    /// the *provenance* of `self` to the new pointer.
    /// This avoids the inherent ambiguity of the unary cast.
    ///
    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
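    ///
    /// # Examples
    ///
    /// A small sketch of in-bounds pointer arithmetic via addresses:
    ///
    /// ```
    /// let data = [1u8, 2, 3];
    /// let ptr = data.as_ptr();
    /// // Move to the address of the third element while keeping the provenance of
    /// // `ptr`, which covers the whole array.
    /// let third = ptr.with_addr(ptr.addr() + 2);
    /// assert_eq!(unsafe { *third }, 3);
    /// ```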
    #[must_use]
    #[inline]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn with_addr(self, addr: usize) -> Self {
        // This should probably be an intrinsic to avoid doing any sort of arithmetic, but
        // meanwhile, we can implement it with `wrapping_offset`, which preserves the pointer's
        // provenance.
        let self_addr = self.addr() as isize;
        let dest_addr = addr as isize;
        let offset = dest_addr.wrapping_sub(self_addr);
        self.wrapping_byte_offset(offset)
    }

    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
    /// [provenance][crate::ptr#provenance] of `self`.
    ///
    /// This is a convenience for [`with_addr`][pointer::with_addr], see that method for details.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
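    ///
    /// # Examples
    ///
    /// A sketch of address manipulation without dereferencing:
    ///
    /// ```
    /// let v = 0u64;
    /// let ptr: *const u64 = &v;
    /// // Round the address down to a 16-byte boundary. The result keeps the provenance
    /// // of `ptr`, but is only dereferenceable if it still points into the allocation.
    /// let aligned = ptr.map_addr(|a| a & !0b1111);
    /// assert_eq!(aligned.addr() % 16, 0);
    /// ```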
    #[must_use]
    #[inline]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
        self.with_addr(f(self.addr()))
    }

    /// Decomposes a (possibly wide) pointer into its data pointer and metadata components.
    ///
    /// The pointer can later be reconstructed with [`from_raw_parts`].
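    ///
    /// # Examples
    ///
    /// A sketch of decomposing and reconstructing a slice pointer:
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    /// let arr: [i32; 3] = [1, 2, 3];
    /// let slice: *const [i32] = &arr[..];
    /// // For slices, the metadata is the element count.
    /// let (data, len) = slice.to_raw_parts();
    /// assert_eq!(len, 3);
    /// let rebuilt: *const [i32] = std::ptr::from_raw_parts(data, len);
    /// assert_eq!(unsafe { &*rebuilt }, &arr[..]);
    /// ```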
    #[unstable(feature = "ptr_metadata", issue = "81513")]
    #[inline]
    pub const fn to_raw_parts(self) -> (*const (), <T as super::Pointee>::Metadata) {
        (self.cast(), metadata(self))
    }

    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
    /// must be used instead.
    ///
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null
    ///
    /// # Examples
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_ref() {
    ///         assert_eq!(val_back, &10);
    ///     }
    /// }
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
    /// dereference the pointer directly.
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     let val_back = &*ptr;
    ///     assert_eq!(val_back, &10);
    /// }
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[inline]
    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
        // SAFETY: the caller must guarantee that `self` is valid
        // for a reference if it isn't null.
        if self.is_null() { None } else { unsafe { Some(&*self) } }
    }

    /// Returns a shared reference to the value behind the pointer.
    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_ref`] must be used instead.
    /// If the pointer may be null, but the value is known to have been initialized, [`as_ref`] must be used instead.
    ///
    /// [`as_ref`]: #method.as_ref
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_ref_unchecked)]
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     assert_eq!(ptr.as_ref_unchecked(), &10);
    /// }
    /// ```
    // FIXME: mention it in the docs for `as_ref` and `as_uninit_ref` once stabilized.
    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
    #[inline]
    #[must_use]
    pub const unsafe fn as_ref_unchecked<'a>(self) -> &'a T {
        // SAFETY: the caller must guarantee that `self` is valid for a reference
        unsafe { &*self }
    }

    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// the value to be initialized.
    ///
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_uninit_ref() {
    ///         assert_eq!(val_back.assume_init(), 10);
    ///     }
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
    where
        T: Sized,
    {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
    }

    /// Adds a signed offset to a pointer.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_offset`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_offset`]: #method.wrapping_offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(*ptr.offset(1) as char, '2');
    ///     assert_eq!(*ptr.offset(2) as char, '3');
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
            // We can use const_eval_select here because this is only for UB checks.
            const_eval_select!(
                @capture { this: *const (), count: isize, size: usize } -> bool:
                if const {
                    true
                } else {
                    // `size` is the size of a Rust type, so we know that
                    // `size <= isize::MAX` and thus `as` cast here is not lossy.
                    let Some(byte_offset) = count.checked_mul(size as isize) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
                    !overflow
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset requires the address calculation to not overflow",
            (
                this: *const () = self as *const (),
                count: isize = count,
                size: usize = size_of::<T>(),
            ) => runtime_offset_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }

    /// Adds a signed offset in bytes to a pointer.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [offset][pointer::offset] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
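    ///
    /// # Examples
    ///
    /// A small sketch of stepping by bytes rather than elements:
    ///
    /// ```
    /// let arr: [u16; 3] = [5, 6, 7];
    /// let ptr: *const u16 = arr.as_ptr();
    /// unsafe {
    ///     // One `u16` element is 2 bytes; step forward, then back again.
    ///     assert_eq!(*ptr.byte_offset(2), 6);
    ///     assert_eq!(*ptr.byte_offset(2).byte_offset(-2), 5);
    /// }
    /// ```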
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset(self, count: isize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
    }

    /// Adds a signed offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying within the
    /// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
    /// words, leaving the allocated object and then re-entering it later is permitted.
    ///
    /// [`offset`]: #method.offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// let mut out = String::new();
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// assert_eq!(out.as_str(), "1, 3, 5, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
        unsafe { intrinsics::arith_offset(self, count) }
    }

    /// Adds a signed offset in bytes to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
    /// for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
    }

    /// Masks out bits of the pointer according to a mask.
    ///
    /// This is a convenience for `ptr.map_addr(|a| a & mask)`.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    ///
    /// ## Examples
    ///
    /// ```
    /// #![feature(ptr_mask)]
    /// let v = 17_u32;
    /// let ptr: *const u32 = &v;
    ///
    /// // `u32` is 4-byte aligned,
    /// // which means that the lower 2 bits are always 0.
    /// let tag_mask = 0b11;
    /// let ptr_mask = !tag_mask;
    ///
    /// // We can store something in these lower bits
    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
    ///
    /// // Get the "tag" back
    /// let tag = tagged_ptr.addr() & tag_mask;
    /// assert_eq!(tag, 0b10);
    ///
    /// // Note that `tagged_ptr` is unaligned, so it's UB to read from it.
    /// // To get the original pointer, `mask` can be used:
    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
    /// assert_eq!(unsafe { *masked_ptr }, 17);
    /// ```
    #[unstable(feature = "ptr_mask", issue = "98290")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline(always)]
    pub fn mask(self, mask: usize) -> *const T {
        intrinsics::ptr_mask(self.cast::<()>(), mask).with_metadata_of(self)
    }

    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of T: the distance in bytes divided by `size_of::<T>()`.
    ///
    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
    /// except that it has a lot more opportunities for UB, in exchange for the compiler
    /// better understanding what you are doing.
    ///
    /// The primary motivation of this method is for computing the `len` of an array/slice
    /// of `T` that you are currently representing as a "start" and "end" pointer
    /// (and "end" is "one past the end" of the array).
    /// In that case, `end.offset_from(start)` gets you the length of the array.
    ///
    /// All of the following safety requirements are trivially satisfied for this use case.
    ///
    /// [`offset`]: #method.offset
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * `self` and `origin` must either
    ///
    ///   * point to the same address, or
    ///   * both be [derived from][crate::ptr#provenance] a pointer to the same [allocated object], and the memory range between
    ///     the two pointers must be in bounds of that object. (See below for an example.)
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
    /// implied by the in-bounds requirement, and the fact that no allocated object can be larger
    /// than `isize::MAX` bytes.
    ///
    /// The requirement for pointers to be derived from the same allocated object is primarily
    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
    /// objects is not known at compile-time. However, the requirement also exists at
    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
    /// origin as isize) / size_of::<T>()`.
    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
    ///
    /// [`add`]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    ///
    /// *Incorrect* usage:
    ///
    /// ```rust,no_run
    /// let ptr1 = Box::into_raw(Box::new(0u8)) as *const u8;
    /// let ptr2 = Box::into_raw(Box::new(1u8)) as *const u8;
    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
    /// let ptr2_other = (ptr1 as *const u8).wrapping_offset(diff).wrapping_offset(1);
    /// assert_eq!(ptr2 as usize + 1, ptr2_other as usize);
    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
    /// // computing their offset is undefined behavior, even though
    /// // they point to addresses that are in-bounds of the same object!
    /// unsafe {
    ///     let one = ptr2_other.offset_from(ptr2); // Undefined Behavior! ⚠️
    /// }
    /// ```
    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        let pointee_size = size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from`.
        unsafe { intrinsics::ptr_offset_from(self, origin) }
    }

    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset_from`][pointer::offset_from] on it. See that method for
    /// documentation and safety requirements.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
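    ///
    /// # Examples
    ///
    /// A small sketch of byte-wise distances within one array:
    ///
    /// ```
    /// let a = [0u32; 3];
    /// let ptr1: *const u32 = &a[0];
    /// let ptr2: *const u32 = &a[2];
    /// unsafe {
    ///     // Two `u32` elements apart is 8 bytes.
    ///     assert_eq!(ptr2.byte_offset_from(ptr1), 8);
    ///     assert_eq!(ptr1.byte_offset_from(ptr2), -8);
    /// }
    /// ```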
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
        // SAFETY: the caller must uphold the safety contract for `offset_from`.
        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
    }

    /// Calculates the distance between two pointers within the same allocation, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of T: the distance in bytes divided by `size_of::<T>()`.
    ///
    /// This computes the same value that [`offset_from`](#method.offset_from)
    /// would compute, but with the added precondition that the offset is
    /// guaranteed to be non-negative. This method is equivalent to
    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
    /// but it provides slightly more information to the optimizer, which can
    /// sometimes allow it to optimize slightly better with some backends.
    ///
    /// This method can be thought of as recovering the `count` that was passed
    /// to [`add`](#method.add) (or, with the parameters in the other order,
    /// to [`sub`](#method.sub)). The following are all equivalent, assuming
    /// that their safety preconditions are met:
    /// ```rust
    /// # unsafe fn blah(ptr: *const i32, origin: *const i32, count: usize) -> bool { unsafe {
    /// ptr.offset_from_unsigned(origin) == count
    /// # &&
    /// origin.add(count) == ptr
    /// # &&
    /// ptr.sub(count) == origin
    /// # } }
    /// ```
    ///
    /// # Safety
    ///
    /// - The distance between the pointers must be non-negative (`self >= origin`).
    ///
    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
    ///   apply to this method as well; see it for the full details.
    ///
    /// Importantly, despite the return type of this method being able to represent
    /// a larger offset, it's still *not permitted* to pass pointers which differ
    /// by more than `isize::MAX` *bytes*. As such, the result of this method will
    /// always be less than or equal to `isize::MAX as usize`.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
    ///     assert_eq!(ptr1.add(2), ptr2);
    ///     assert_eq!(ptr2.sub(2), ptr1);
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
    /// }
    ///
    /// // This would be incorrect, as the pointers are not correctly ordered:
    /// // ptr1.offset_from_unsigned(ptr2)
    /// ```
    #[stable(feature = "ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")]
    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize
    where
        T: Sized,
    {
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_ptr_ge(this: *const (), origin: *const ()) -> bool {
            const_eval_select!(
                @capture { this: *const (), origin: *const () } -> bool:
                if const {
                    true
                } else {
                    this >= origin
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset_from_unsigned requires `self >= origin`",
            (
                this: *const () = self as *const (),
                origin: *const () = origin as *const (),
            ) => runtime_ptr_ge(this, origin)
        );

        let pointee_size = size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from_unsigned`.
        unsafe { intrinsics::ptr_offset_from_unsigned(self, origin) }
    }

    /// Calculates the distance between two pointers within the same allocation, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it. See that method for
    /// documentation and safety requirements.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
    #[stable(feature = "ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")]
    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "CURRENT_RUSTC_VERSION")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: *const U) -> usize {
        // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
        unsafe { self.cast::<u8>().offset_from_unsigned(origin.cast::<u8>()) }
    }

    /// Returns whether two pointers are guaranteed to be equal.
    ///
    /// At runtime this function behaves like `Some(self == other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine equality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their equality known.
    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version, and unsafe code must not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `None` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
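    ///
    /// # Examples
    ///
    /// An illustrative runtime use; at compile time the result may instead be `None`:
    ///
    /// ```
    /// #![feature(const_raw_ptr_comparison)]
    /// let x = 1u8;
    /// let ptr: *const u8 = &x;
    /// // At runtime, the comparison always resolves to a definite answer.
    /// assert_eq!(ptr.guaranteed_eq(ptr), Some(true));
    /// ```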
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_eq(self, other: *const T) -> Option<bool>
    where
        T: Sized,
    {
        match intrinsics::ptr_guaranteed_cmp(self, other) {
            2 => None,
            other => Some(other == 1),
        }
    }

    /// Returns whether two pointers are guaranteed to be unequal.
    ///
    /// At runtime this function behaves like `Some(self != other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine inequality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their inequality known.
    /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version, and unsafe code must not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `None` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_ne(self, other: *const T) -> Option<bool>
    where
        T: Sized,
    {
        match self.guaranteed_eq(other) {
            None => None,
            Some(eq) => Some(!eq),
        }
    }

    /// Adds an unsigned offset to a pointer.
    ///
    /// This can only move the pointer forward (or not move it). If you need to move forward or
    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
    /// which takes a signed offset.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_add`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_add`]: #method.wrapping_add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(*ptr.add(1), b'2');
    ///     assert_eq!(*ptr.add(2), b'3');
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
                    byte_offset <= (isize::MAX as usize) && !overflow
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::add requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_add_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }

    /// Adds an unsigned offset in bytes to a pointer.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [add][pointer::add] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
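    ///
    /// # Examples
    ///
    /// A small sketch of advancing by whole bytes:
    ///
    /// ```
    /// let arr: [u32; 3] = [10, 20, 30];
    /// let ptr: *const u32 = arr.as_ptr();
    /// unsafe {
    ///     // Advance by one element's worth of bytes.
    ///     assert_eq!(*ptr.byte_add(size_of::<u32>()), 20);
    /// }
    /// ```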
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_add(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `add`.
        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
    }

    /// Subtracts an unsigned offset from a pointer.
    ///
    /// This can only move the pointer backward (or not move it). If you need to move forward or
    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
    /// which takes a signed offset.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_sub`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_sub`]: #method.wrapping_sub
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: *const u8 = s.as_ptr().add(3);
    ///     assert_eq!(*end.sub(1), b'3');
    ///     assert_eq!(*end.sub(2), b'2');
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::sub requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_sub_nowrap(this, count, size)
        );

        if T::IS_ZST {
            // Pointer arithmetic does nothing when the pointee is a ZST.
            self
        } else {
            // SAFETY: the caller must uphold the safety contract for `offset`.
            // Because the pointee is *not* a ZST, that means that `count` is
            // at most `isize::MAX`, and thus the negation cannot overflow.
            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
        }
    }

    /// Subtracts an unsigned offset in bytes from a pointer.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [sub][pointer::sub] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
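    ///
    /// # Examples
    ///
    /// A small sketch of stepping back by whole bytes:
    ///
    /// ```
    /// let arr: [u32; 3] = [10, 20, 30];
    /// unsafe {
    ///     let end: *const u32 = arr.as_ptr().add(3);
    ///     // Step back by one element's worth of bytes.
    ///     assert_eq!(*end.byte_sub(size_of::<u32>()), 30);
    /// }
    /// ```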
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_sub(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `sub`.
        unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
    }

    /// Adds an unsigned offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`add`], this method basically delays the requirement of staying within the
    /// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocated object and then re-entering it later is permitted.
    ///
    /// [`add`]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_add(6);
    ///
    /// let mut out = String::new();
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_add(step);
    /// }
    /// assert_eq!(out, "1, 3, 5, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_add(self, count: usize) -> Self
    where
        T: Sized,
    {
        self.wrapping_offset(count as isize)
    }

    /// Adds an unsigned offset in bytes to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    pub const fn wrapping_byte_add(self, count: usize) -> Self {
        self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
    }

    /// Subtracts an unsigned offset from a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`sub`], this method basically delays the requirement of staying within the
    /// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocated object and then re-entering it later is permitted.
    ///
    /// [`sub`]: #method.sub
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements (backwards)
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let start_rounded_down = ptr.wrapping_sub(2);
    /// ptr = ptr.wrapping_add(4);
    /// let step = 2;
    /// let mut out = String::new();
    /// while ptr != start_rounded_down {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_sub(step);
    /// }
    /// assert_eq!(out, "5, 3, 1, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        self.wrapping_offset((count as isize).wrapping_neg())
    }

    /// Subtracts an unsigned offset in bytes from a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
        self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
    }

    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: crate::ptr::read()
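    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// let x = 12u8;
    /// let ptr: *const u8 = &x;
    /// // SAFETY: `ptr` is valid for reads and points to an initialized `u8`.
    /// assert_eq!(unsafe { ptr.read() }, 12);
    /// ```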
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read`.
        unsafe { read(self) }
    }

    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
        unsafe { read_volatile(self) }
    }

    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
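    ///
    /// # Examples
    ///
    /// A sketch of reading a potentially misaligned field of a packed struct:
    ///
    /// ```
    /// #[repr(packed)]
    /// struct Packed {
    ///     a: u8,
    ///     b: u32,
    /// }
    ///
    /// let p = Packed { a: 1, b: 0xDDCC_BBAA };
    /// // `&raw const` avoids creating an intermediate (misaligned) reference.
    /// let b_ptr: *const u32 = &raw const p.b;
    /// // SAFETY: the pointer is valid for reads; `read_unaligned` tolerates
    /// // the misalignment caused by `repr(packed)`.
    /// assert_eq!(unsafe { b_ptr.read_unaligned() }, 0xDDCC_BBAA);
    /// ```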
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
        unsafe { read_unaligned(self) }
    }

    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
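    ///
    /// # Examples
    ///
    /// A minimal sketch of copying between two buffers:
    ///
    /// ```
    /// let src = [1u8, 2, 3, 4];
    /// let mut dst = [0u8; 4];
    /// // SAFETY: both pointers are valid for 4 `u8`s; the regions may overlap
    /// // (here they happen not to).
    /// unsafe { src.as_ptr().copy_to(dst.as_mut_ptr(), 4) };
    /// assert_eq!(dst, src);
    /// ```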
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(self, dest, count) }
    }

    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
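    ///
    /// # Examples
    ///
    /// A minimal sketch; the two buffers are distinct locals, so they cannot
    /// overlap:
    ///
    /// ```
    /// let src = [1u16, 2, 3];
    /// let mut dst = [0u16; 3];
    /// // SAFETY: both pointers are valid and aligned for 3 `u16` elements,
    /// // and the two regions do not overlap.
    /// unsafe { src.as_ptr().copy_to_nonoverlapping(dst.as_mut_ptr(), 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```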
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }

    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// # unsafe {
    /// let x = [5_u8, 6, 7, 8, 9];
    /// let ptr = x.as_ptr();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     assert!(*u16_ptr == u16::from_ne_bytes([5, 6]) || *u16_ptr == u16::from_ne_bytes([6, 7]));
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "align_offset", since = "1.36.0")]
    pub fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        }

        // SAFETY: `align` has been checked to be a power of 2 above
        let ret = unsafe { align_offset(self, align) };

        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
        #[cfg(miri)]
        if ret != usize::MAX {
            intrinsics::miri_promise_symbolic_alignment(self.wrapping_add(ret).cast(), align);
        }

        ret
    }

    /// Returns whether the pointer is properly aligned for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let data = AlignedI32(42);
    /// let ptr = &data as *const AlignedI32;
    ///
    /// assert!(ptr.is_aligned());
    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
    pub fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        self.is_aligned_to(align_of::<T>())
    }

    /// Returns whether the pointer is aligned to `align`.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointer,
    /// ignoring the metadata.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two (this includes 0).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(pointer_is_aligned_to)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let data = AlignedI32(42);
    /// let ptr = &data as *const AlignedI32;
    ///
    /// assert!(ptr.is_aligned_to(1));
    /// assert!(ptr.is_aligned_to(2));
    /// assert!(ptr.is_aligned_to(4));
    ///
    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
    ///
    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
    /// ```
    #[must_use]
    #[inline]
    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
    pub fn is_aligned_to(self, align: usize) -> bool {
        if !align.is_power_of_two() {
            panic!("is_aligned_to: align is not a power-of-two");
        }

        self.addr() & (align - 1) == 0
    }
}

impl<T> *const [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn len(self) -> usize {
        metadata(self)
    }

    /// Returns `true` if the raw slice has a length of 0.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert!(!slice.is_empty());
    /// ```
    #[inline(always)]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn is_empty(self) -> bool {
        self.len() == 0
    }

    /// Returns a raw pointer to the slice's buffer.
    ///
    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.as_ptr(), ptr::null());
    /// ```
    #[inline]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    pub const fn as_ptr(self) -> *const T {
        self as *const T
    }

    /// Gets a raw pointer to the underlying array.
    ///
    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
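    ///
    /// # Examples
    ///
    /// A small sketch of the length check; a null pointer is fine here because
    /// only the slice metadata is inspected:
    ///
    /// ```
    /// #![feature(slice_as_array)]
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert!(slice.as_array::<3>().is_some());
    /// assert!(slice.as_array::<4>().is_none());
    /// ```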
    #[unstable(feature = "slice_as_array", issue = "133508")]
    #[inline]
    #[must_use]
    pub const fn as_array<const N: usize>(self) -> Option<*const [T; N]> {
        if self.len() == N {
            let me = self.as_ptr() as *const [T; N];
            Some(me)
        } else {
            None
        }
    }

    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    ///
    /// let x = &[1, 2, 4] as *const [i32];
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked(1), x.as_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[inline]
    pub unsafe fn get_unchecked<I>(self, index: I) -> *const I::Output
    where
        I: SliceIndex<[T]>,
    {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        unsafe { index.get_unchecked(self) }
    }

    /// Returns `None` if the pointer is null, or else returns a shared slice to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// that the value has to be initialized.
    ///
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocated object]!
    ///       Slices can never span across multiple allocated objects.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null
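    ///
    /// # Examples
    ///
    /// A minimal sketch starting from fully initialized data, so every safety
    /// condition above is met:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let data = [1u8, 2, 3];
    /// let ptr: *const [u8] = &data[..];
    /// // SAFETY: `ptr` comes from a live shared reference and the memory is
    /// // not mutated while the returned slice exists.
    /// let slice = unsafe { ptr.as_uninit_slice() }.unwrap();
    /// assert_eq!(slice.len(), 3);
    /// ```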
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
        }
    }
}

impl<T, const N: usize> *const [T; N] {
    /// Returns a raw pointer to the array's buffer.
    ///
    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(array_ptr_get)]
    /// use std::ptr;
    ///
    /// let arr: *const [i8; 3] = ptr::null();
    /// assert_eq!(arr.as_ptr(), ptr::null());
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_ptr(self) -> *const T {
        self as *const T
    }

    /// Returns a raw pointer to a slice containing the entire array.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(array_ptr_get)]
    ///
    /// let arr: *const [i32; 3] = &[1, 2, 4] as *const [i32; 3];
    /// let slice: *const [i32] = arr.as_slice();
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_slice(self) -> *const [T] {
        self
    }
}

/// Pointer equality is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
1679#[stable(feature = "rust1", since = "1.0.0")]
1680impl<T: ?Sized> PartialEq for *const T {
1681 #[inline]
1682 #[allow(ambiguous_wide_pointer_comparisons)]
1683 fn eq(&self, other: &*const T) -> bool {
1684 *self == *other
1685 }
1686}
1687
1688/// Pointer equality is an equivalence relation.
1689#[stable(feature = "rust1", since = "1.0.0")]
1690impl<T: ?Sized> Eq for *const T {}
1691
1692/// Pointer comparison is by address, as produced by the `[`<*const T>::addr`](pointer::addr)` method.
1693#[stable(feature = "rust1", since = "1.0.0")]
1694impl<T: ?Sized> Ord for *const T {
1695 #[inline]
1696 #[allow(ambiguous_wide_pointer_comparisons)]
1697 fn cmp(&self, other: &*const T) -> Ordering {
1698 if self < other {
1699 Less
1700 } else if self == other {
1701 Equal
1702 } else {
1703 Greater
1704 }
1705 }
1706}
1707
1708/// Pointer comparison is by address, as produced by the `[`<*const T>::addr`](pointer::addr)` method.
1709#[stable(feature = "rust1", since = "1.0.0")]
1710impl<T: ?Sized> PartialOrd for *const T {
1711 #[inline]
1712 #[allow(ambiguous_wide_pointer_comparisons)]
1713 fn partial_cmp(&self, other: &*const T) -> Option<Ordering> {
1714 Some(self.cmp(other))
1715 }
1716
1717 #[inline]
1718 #[allow(ambiguous_wide_pointer_comparisons)]
1719 fn lt(&self, other: &*const T) -> bool {
1720 *self < *other
1721 }
1722
1723 #[inline]
1724 #[allow(ambiguous_wide_pointer_comparisons)]
1725 fn le(&self, other: &*const T) -> bool {
1726 *self <= *other
1727 }
1728
1729 #[inline]
1730 #[allow(ambiguous_wide_pointer_comparisons)]
1731 fn gt(&self, other: &*const T) -> bool {
1732 *self > *other
1733 }
1734
1735 #[inline]
1736 #[allow(ambiguous_wide_pointer_comparisons)]
1737 fn ge(&self, other: &*const T) -> bool {
1738 *self >= *other
1739 }
1740}