// rustc_middle/mir/interpret/pointer.rs
1use std::fmt;
2use std::num::NonZero;
3
4use rustc_abi::{HasDataLayout, Size};
5use rustc_data_structures::static_assert_size;
6use rustc_macros::{HashStable, TyDecodable, TyEncodable};
7
8use super::AllocId;
9
10pub trait PointerArithmetic: HasDataLayout {
15 #[inline(always)]
18 fn pointer_size(&self) -> Size {
19 self.data_layout().pointer_size
20 }
21
22 #[inline(always)]
23 fn max_size_of_val(&self) -> Size {
24 Size::from_bytes(self.target_isize_max())
25 }
26
27 #[inline]
28 fn target_usize_max(&self) -> u64 {
29 self.pointer_size().unsigned_int_max().try_into().unwrap()
30 }
31
32 #[inline]
33 fn target_isize_min(&self) -> i64 {
34 self.pointer_size().signed_int_min().try_into().unwrap()
35 }
36
37 #[inline]
38 fn target_isize_max(&self) -> i64 {
39 self.pointer_size().signed_int_max().try_into().unwrap()
40 }
41
42 #[inline]
43 fn truncate_to_target_usize(&self, val: u64) -> u64 {
44 self.pointer_size().truncate(val.into()).try_into().unwrap()
45 }
46
47 #[inline]
48 fn sign_extend_to_target_isize(&self, val: u64) -> i64 {
49 self.pointer_size().sign_extend(val.into()).try_into().unwrap()
50 }
51}
52
// Blanket impl: anything that can provide a data layout gets pointer arithmetic for free.
impl<T: HasDataLayout> PointerArithmetic for T {}
54
/// The "provenance" attached to a pointer: what it is allowed to access.
pub trait Provenance: Copy + fmt::Debug + 'static {
    /// Whether a pointer's `offset` field holds an absolute machine address
    /// (`true`) or an offset relative to its allocation (`false`).
    /// `Pointer::addr` asserts this flag.
    const OFFSET_IS_ADDR: bool;

    /// The value used for "wildcard" (unknown) provenance, if this
    /// implementation supports one; `None` otherwise.
    const WILDCARD: Option<Self>;

    /// Formats a pointer carrying this provenance; used by `Pointer`'s `Debug` impl.
    fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result;

    /// The `AllocId` this provenance refers to, if any.
    fn get_alloc_id(self) -> Option<AllocId>;

    /// Combines two optional provenance values into one.
    /// NOTE(review): both impls in this file panic here, stating that merging
    /// is only supported when `OFFSET_IS_ADDR` is `true`.
    fn join(left: Option<Self>, right: Option<Self>) -> Option<Self>;
}
84
/// Provenance used during CTFE: an `AllocId` packed together with two flag
/// bits in the topmost positions (see the `IMMUTABLE_MASK` / `SHARED_REF_MASK`
/// constants below). The `NonZero` wrapper lets `Option<CtfeProvenance>` use
/// the zero niche and cost no extra space.
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct CtfeProvenance(NonZero<u64>);
95
96impl From<AllocId> for CtfeProvenance {
97 fn from(value: AllocId) -> Self {
98 let prov = CtfeProvenance(value.0);
99 assert!(
100 prov.alloc_id() == value,
101 "`AllocId` with the highest bits set cannot be used in CTFE"
102 );
103 prov
104 }
105}
106
107impl fmt::Debug for CtfeProvenance {
108 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
109 fmt::Debug::fmt(&self.alloc_id(), f)?; if self.immutable() {
111 write!(f, "<imm>")?;
112 }
113 Ok(())
114 }
115}
116
/// Top bit: flags the pointed-to data as immutable (see `CtfeProvenance::immutable`).
const IMMUTABLE_MASK: u64 = 1 << 63;
/// Second-highest bit: the shared-reference flag (see `CtfeProvenance::shared_ref`).
const SHARED_REF_MASK: u64 = 1 << 62;
/// Everything below the two flag bits carries the `AllocId` payload.
const ALLOC_ID_MASK: u64 = !(IMMUTABLE_MASK | SHARED_REF_MASK);
120
121impl CtfeProvenance {
122 #[inline(always)]
124 pub fn alloc_id(self) -> AllocId {
125 AllocId(NonZero::new(self.0.get() & ALLOC_ID_MASK).unwrap())
126 }
127
128 #[inline]
130 pub fn immutable(self) -> bool {
131 self.0.get() & IMMUTABLE_MASK != 0
132 }
133
134 #[inline]
136 pub fn shared_ref(self) -> bool {
137 self.0.get() & SHARED_REF_MASK != 0
138 }
139
140 pub fn into_parts(self) -> (AllocId, bool, bool) {
141 (self.alloc_id(), self.immutable(), self.shared_ref())
142 }
143
144 pub fn from_parts((alloc_id, immutable, shared_ref): (AllocId, bool, bool)) -> Self {
145 let prov = CtfeProvenance::from(alloc_id);
146 if immutable {
147 prov.as_immutable()
149 } else if shared_ref {
150 prov.as_shared_ref()
151 } else {
152 prov
153 }
154 }
155
156 #[inline]
158 pub fn as_immutable(self) -> Self {
159 CtfeProvenance(self.0 | IMMUTABLE_MASK | SHARED_REF_MASK)
160 }
161
162 #[inline]
164 pub fn as_shared_ref(self) -> Self {
165 CtfeProvenance(self.0 | SHARED_REF_MASK)
166 }
167}
168
169impl Provenance for CtfeProvenance {
170 const OFFSET_IS_ADDR: bool = false;
173
174 const WILDCARD: Option<Self> = None;
176
177 fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
178 fmt::Debug::fmt(&ptr.provenance.alloc_id(), f)?; if ptr.offset.bytes() > 0 {
182 write!(f, "+{:#x}", ptr.offset.bytes())?;
183 }
184 if ptr.provenance.immutable() {
186 write!(f, "<imm>")?;
187 }
188 Ok(())
189 }
190
191 fn get_alloc_id(self) -> Option<AllocId> {
192 Some(self.alloc_id())
193 }
194
195 fn join(_left: Option<Self>, _right: Option<Self>) -> Option<Self> {
196 panic!("merging provenance is not supported when `OFFSET_IS_ADDR` is false")
197 }
198}
199
200impl Provenance for AllocId {
202 const OFFSET_IS_ADDR: bool = false;
205
206 const WILDCARD: Option<Self> = None;
208
209 fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
210 if f.alternate() {
212 write!(f, "{:#?}", ptr.provenance)?;
213 } else {
214 write!(f, "{:?}", ptr.provenance)?;
215 }
216 if ptr.offset.bytes() > 0 {
218 write!(f, "+{:#x}", ptr.offset.bytes())?;
219 }
220 Ok(())
221 }
222
223 fn get_alloc_id(self) -> Option<AllocId> {
224 Some(self)
225 }
226
227 fn join(_left: Option<Self>, _right: Option<Self>) -> Option<Self> {
228 panic!("merging provenance is not supported when `OFFSET_IS_ADDR` is false")
229 }
230}
231
/// A pointer: some provenance plus an offset.
// NOTE: field order matters for the encode/hash derives — do not reorder.
#[derive(Copy, Clone, Eq, PartialEq, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)]
pub struct Pointer<Prov = CtfeProvenance> {
    // Whether this is a relative offset or an absolute address depends on
    // `Prov::OFFSET_IS_ADDR` — see `Pointer::addr`, which asserts the latter.
    pub(super) offset: Size,
    pub provenance: Prov,
}
241
// Guard `Pointer`'s memory footprint. `CtfeProvenance` wraps `NonZero<u64>`,
// so the `Option` in the second assert is niche-packed and costs nothing.
static_assert_size!(Pointer, 16);
static_assert_size!(Pointer<Option<CtfeProvenance>>, 16);
246
247impl<Prov: Provenance> fmt::Debug for Pointer<Prov> {
250 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
251 Provenance::fmt(self, f)
252 }
253}
254
255impl<Prov: Provenance> fmt::Debug for Pointer<Option<Prov>> {
256 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
257 match self.provenance {
258 Some(prov) => Provenance::fmt(&Pointer::new(prov, self.offset), f),
259 None => write!(f, "{:#x}[noalloc]", self.offset.bytes()),
260 }
261 }
262}
263
264impl<Prov: Provenance> fmt::Display for Pointer<Option<Prov>> {
265 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
266 if self.provenance.is_none() && self.offset.bytes() == 0 {
267 write!(f, "null pointer")
268 } else {
269 fmt::Debug::fmt(self, f)
270 }
271 }
272}
273
274impl From<AllocId> for Pointer {
276 #[inline(always)]
277 fn from(alloc_id: AllocId) -> Self {
278 Pointer::new(alloc_id.into(), Size::ZERO)
279 }
280}
281impl From<CtfeProvenance> for Pointer {
282 #[inline(always)]
283 fn from(prov: CtfeProvenance) -> Self {
284 Pointer::new(prov, Size::ZERO)
285 }
286}
287
288impl<Prov> From<Pointer<Prov>> for Pointer<Option<Prov>> {
289 #[inline(always)]
290 fn from(ptr: Pointer<Prov>) -> Self {
291 let (prov, offset) = ptr.into_parts();
292 Pointer::new(Some(prov), offset)
293 }
294}
295
296impl<Prov> Pointer<Option<Prov>> {
297 pub fn into_pointer_or_addr(self) -> Result<Pointer<Prov>, Size> {
302 match self.provenance {
303 Some(prov) => Ok(Pointer::new(prov, self.offset)),
304 None => Err(self.offset),
305 }
306 }
307
308 pub fn addr(self) -> Size
311 where
312 Prov: Provenance,
313 {
314 assert!(Prov::OFFSET_IS_ADDR);
315 self.offset
316 }
317}
318
319impl<Prov> Pointer<Option<Prov>> {
320 #[inline(always)]
323 pub fn from_addr_invalid(addr: u64) -> Self {
324 Pointer { provenance: None, offset: Size::from_bytes(addr) }
325 }
326
327 #[inline(always)]
328 pub fn null() -> Self {
329 Pointer::from_addr_invalid(0)
330 }
331}
332
333impl<Prov> Pointer<Prov> {
334 #[inline(always)]
335 pub fn new(provenance: Prov, offset: Size) -> Self {
336 Pointer { provenance, offset }
337 }
338
339 #[inline(always)]
343 pub fn into_parts(self) -> (Prov, Size) {
344 (self.provenance, self.offset)
345 }
346
347 pub fn map_provenance(self, f: impl FnOnce(Prov) -> Prov) -> Self {
348 Pointer { provenance: f(self.provenance), ..self }
349 }
350
351 #[inline(always)]
352 pub fn wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
353 let res =
354 cx.data_layout().truncate_to_target_usize(self.offset.bytes().wrapping_add(i.bytes()));
355 Pointer { offset: Size::from_bytes(res), ..self }
356 }
357
358 #[inline(always)]
359 pub fn wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
360 self.wrapping_offset(Size::from_bytes(i as u64), cx)
362 }
363}