//! Tree Borrows implementation (`miri/src/borrow_tracker/tree_borrows/mod.rs`).

1use rustc_abi::{BackendRepr, Size};
2use rustc_middle::mir::{Mutability, RetagKind};
3use rustc_middle::ty::layout::HasTypingEnv;
4use rustc_middle::ty::{self, Ty};
5use rustc_span::def_id::DefId;
6
7use crate::borrow_tracker::{GlobalState, GlobalStateInner, ProtectorKind};
8use crate::concurrency::data_race::NaReadType;
9use crate::*;
10
11pub mod diagnostics;
12mod foreign_access_skipping;
13mod perms;
14mod tree;
15mod unimap;
16
17#[cfg(test)]
18mod exhaustive;
19
20use self::perms::Permission;
21pub use self::tree::Tree;
22
23pub type AllocState = Tree;
24
25impl<'tcx> Tree {
26    /// Create a new allocation, i.e. a new tree
27    pub fn new_allocation(
28        id: AllocId,
29        size: Size,
30        state: &mut GlobalStateInner,
31        _kind: MemoryKind,
32        machine: &MiriMachine<'tcx>,
33    ) -> Self {
34        let tag = state.root_ptr_tag(id, machine); // Fresh tag for the root
35        let span = machine.current_span();
36        Tree::new(tag, size, span)
37    }
38
39    /// Check that an access on the entire range is permitted, and update
40    /// the tree.
41    pub fn before_memory_access(
42        &mut self,
43        access_kind: AccessKind,
44        alloc_id: AllocId,
45        prov: ProvenanceExtra,
46        range: AllocRange,
47        machine: &MiriMachine<'tcx>,
48    ) -> InterpResult<'tcx> {
49        trace!(
50            "{} with tag {:?}: {:?}, size {}",
51            access_kind,
52            prov,
53            interpret::Pointer::new(alloc_id, range.start),
54            range.size.bytes(),
55        );
56        // TODO: for now we bail out on wildcard pointers. Eventually we should
57        // handle them as much as we can.
58        let tag = match prov {
59            ProvenanceExtra::Concrete(tag) => tag,
60            ProvenanceExtra::Wildcard => return interp_ok(()),
61        };
62        let global = machine.borrow_tracker.as_ref().unwrap();
63        let span = machine.current_span();
64        self.perform_access(
65            tag,
66            Some((range, access_kind, diagnostics::AccessCause::Explicit(access_kind))),
67            global,
68            alloc_id,
69            span,
70        )
71    }
72
73    /// Check that this pointer has permission to deallocate this range.
74    pub fn before_memory_deallocation(
75        &mut self,
76        alloc_id: AllocId,
77        prov: ProvenanceExtra,
78        size: Size,
79        machine: &MiriMachine<'tcx>,
80    ) -> InterpResult<'tcx> {
81        // TODO: for now we bail out on wildcard pointers. Eventually we should
82        // handle them as much as we can.
83        let tag = match prov {
84            ProvenanceExtra::Concrete(tag) => tag,
85            ProvenanceExtra::Wildcard => return interp_ok(()),
86        };
87        let global = machine.borrow_tracker.as_ref().unwrap();
88        let span = machine.current_span();
89        self.dealloc(tag, alloc_range(Size::ZERO, size), global, alloc_id, span)
90    }
91
92    pub fn expose_tag(&mut self, _tag: BorTag) {
93        // TODO
94    }
95
96    /// A tag just lost its protector.
97    ///
98    /// This emits a special kind of access that is only applied
99    /// to initialized locations, as a protection against other
100    /// tags not having been made aware of the existence of this
101    /// protector.
102    pub fn release_protector(
103        &mut self,
104        machine: &MiriMachine<'tcx>,
105        global: &GlobalState,
106        tag: BorTag,
107        alloc_id: AllocId, // diagnostics
108    ) -> InterpResult<'tcx> {
109        let span = machine.current_span();
110        // `None` makes it the magic on-protector-end operation
111        self.perform_access(tag, None, global, alloc_id, span)
112    }
113}
114
/// Policy for a new borrow: how the fresh tag is inserted into the tree.
#[derive(Debug, Clone, Copy)]
struct NewPermission {
    /// Optionally ignore the actual size to do a zero-size reborrow.
    /// If this is set then `dereferenceable` is not enforced.
    zero_size: bool,
    /// Which permission should the pointer start with.
    /// Must be an "initial" permission (asserted in `tb_reborrow`).
    initial_state: Permission,
    /// Whether this pointer is part of the arguments of a function call.
    /// `protector` is `Some(_)` for all pointers marked `noalias`:
    /// strong for references (`from_ref_ty`), weak for `Box`-like
    /// pointers (`from_unique_ty`) since those may be deallocated.
    protector: Option<ProtectorKind>,
}
127
128impl<'tcx> NewPermission {
129    /// Determine NewPermission of the reference from the type of the pointee.
130    fn from_ref_ty(
131        pointee: Ty<'tcx>,
132        mutability: Mutability,
133        kind: RetagKind,
134        cx: &crate::MiriInterpCx<'tcx>,
135    ) -> Option<Self> {
136        let ty_is_freeze = pointee.is_freeze(*cx.tcx, cx.typing_env());
137        let ty_is_unpin = pointee.is_unpin(*cx.tcx, cx.typing_env());
138        let is_protected = kind == RetagKind::FnEntry;
139        // As demonstrated by `tests/fail/tree_borrows/reservedim_spurious_write.rs`,
140        // interior mutability and protectors interact poorly.
141        // To eliminate the case of Protected Reserved IM we override interior mutability
142        // in the case of a protected reference: protected references are always considered
143        // "freeze" in their reservation phase.
144        let initial_state = match mutability {
145            Mutability::Mut if ty_is_unpin => Permission::new_reserved(ty_is_freeze, is_protected),
146            Mutability::Not if ty_is_freeze => Permission::new_frozen(),
147            // Raw pointers never enter this function so they are not handled.
148            // However raw pointers are not the only pointers that take the parent
149            // tag, this also happens for `!Unpin` `&mut`s and interior mutable
150            // `&`s, which are excluded above.
151            _ => return None,
152        };
153
154        let protector = is_protected.then_some(ProtectorKind::StrongProtector);
155        Some(Self { zero_size: false, initial_state, protector })
156    }
157
158    /// Compute permission for `Box`-like type (`Box` always, and also `Unique` if enabled).
159    /// These pointers allow deallocation so need a different kind of protector not handled
160    /// by `from_ref_ty`.
161    fn from_unique_ty(
162        ty: Ty<'tcx>,
163        kind: RetagKind,
164        cx: &crate::MiriInterpCx<'tcx>,
165        zero_size: bool,
166    ) -> Option<Self> {
167        let pointee = ty.builtin_deref(true).unwrap();
168        pointee.is_unpin(*cx.tcx, cx.typing_env()).then_some(()).map(|()| {
169            // Regular `Unpin` box, give it `noalias` but only a weak protector
170            // because it is valid to deallocate it within the function.
171            let ty_is_freeze = ty.is_freeze(*cx.tcx, cx.typing_env());
172            let protected = kind == RetagKind::FnEntry;
173            let initial_state = Permission::new_reserved(ty_is_freeze, protected);
174            Self {
175                zero_size,
176                initial_state,
177                protector: protected.then_some(ProtectorKind::WeakProtector),
178            }
179        })
180    }
181}
182
183/// Retagging/reborrowing.
184/// Policy on which permission to grant to each pointer should be left to
185/// the implementation of NewPermission.
// Blanket impl: makes the private retagging helpers below callable on the interpreter context.
impl<'tcx> EvalContextPrivExt<'tcx> for crate::MiriInterpCx<'tcx> {}
trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
    /// Performs the reborrow of `place` with the fresh `new_tag` according to
    /// `new_perm`, registering the new tag in the tree (and the protector maps
    /// if applicable).
    ///
    /// Returns the provenance that should be used henceforth.
    fn tb_reborrow(
        &mut self,
        place: &MPlaceTy<'tcx>, // parent tag extracted from here
        ptr_size: Size,
        new_perm: NewPermission,
        new_tag: BorTag,
    ) -> InterpResult<'tcx, Option<Provenance>> {
        let this = self.eval_context_mut();
        // Make sure the new permission makes sense as the initial permission of a fresh tag.
        assert!(new_perm.initial_state.is_initial());
        // Ensure we bail out if the pointer goes out-of-bounds (see miri#1050).
        this.check_ptr_access(place.ptr(), ptr_size, CheckInAllocMsg::InboundsTest)?;

        // It is crucial that this gets called on all code paths, to ensure we track tag creation.
        // Emits a `CreatedPointerTag` diagnostic when `new_tag` is in the tracked set.
        let log_creation = |this: &MiriInterpCx<'tcx>,
                            loc: Option<(AllocId, Size, ProvenanceExtra)>| // alloc_id, base_offset, orig_tag
         -> InterpResult<'tcx> {
            let global = this.machine.borrow_tracker.as_ref().unwrap().borrow();
            let ty = place.layout.ty;
            if global.tracked_pointer_tags.contains(&new_tag) {
                let kind_str = format!("initial state {} (pointee type {ty})", new_perm.initial_state);
                this.emit_diagnostic(NonHaltingDiagnostic::CreatedPointerTag(
                    new_tag.inner(),
                    Some(kind_str),
                    loc.map(|(alloc_id, base_offset, orig_tag)| (alloc_id, alloc_range(base_offset, ptr_size), orig_tag)),
                ));
            }
            drop(global); // don't hold that reference any longer than we have to
            interp_ok(())
        };

        trace!("Reborrow of size {:?}", ptr_size);
        let (alloc_id, base_offset, parent_prov) = match this.ptr_try_get_alloc_id(place.ptr(), 0) {
            Ok(data) => {
                // Unlike SB, we *do* a proper retag for size 0 if can identify the allocation.
                // After all, the pointer may be lazily initialized outside this initial range.
                data
            }
            Err(_) => {
                assert_eq!(ptr_size, Size::ZERO); // we did the deref check above, size has to be 0 here
                // This pointer doesn't come with an AllocId, so there's no
                // memory to do retagging in.
                trace!(
                    "reborrow of size 0: reference {:?} derived from {:?} (pointee {})",
                    new_tag,
                    place.ptr(),
                    place.layout.ty,
                );
                log_creation(this, None)?;
                // Keep original provenance.
                return interp_ok(place.ptr().provenance);
            }
        };
        log_creation(this, Some((alloc_id, base_offset, parent_prov)))?;

        let orig_tag = match parent_prov {
            ProvenanceExtra::Wildcard => return interp_ok(place.ptr().provenance), // TODO: handle wildcard pointers
            ProvenanceExtra::Concrete(tag) => tag,
        };

        trace!(
            "reborrow: reference {:?} derived from {:?} (pointee {}): {:?}, size {}",
            new_tag,
            orig_tag,
            place.layout.ty,
            interpret::Pointer::new(alloc_id, base_offset),
            ptr_size.bytes()
        );

        if let Some(protect) = new_perm.protector {
            // We register the protection in two different places.
            // This makes creating a protector slower, but checking whether a tag
            // is protected faster.
            // 1. Per-frame list, consulted when the frame is popped to release protectors.
            this.frame_mut()
                .extra
                .borrow_tracker
                .as_mut()
                .unwrap()
                .protected_tags
                .push((alloc_id, new_tag));
            // 2. Global map, for fast "is this tag protected?" lookups during accesses.
            this.machine
                .borrow_tracker
                .as_mut()
                .expect("We should have borrow tracking data")
                .get_mut()
                .protected_tags
                .insert(new_tag, protect);
        }

        let alloc_kind = this.get_alloc_info(alloc_id).kind;
        if !matches!(alloc_kind, AllocKind::LiveData) {
            assert_eq!(ptr_size, Size::ZERO); // we did the deref check above, size has to be 0 here
            // There's not actually any bytes here where accesses could even be tracked.
            // Just produce the new provenance, nothing else to do.
            return interp_ok(Some(Provenance::Concrete { alloc_id, tag: new_tag }));
        }

        let span = this.machine.current_span();
        let alloc_extra = this.get_alloc_extra(alloc_id)?;
        let range = alloc_range(base_offset, ptr_size);
        let mut tree_borrows = alloc_extra.borrow_tracker_tb().borrow_mut();

        // All reborrows incur a (possibly zero-sized) read access to the parent
        tree_borrows.perform_access(
            orig_tag,
            Some((range, AccessKind::Read, diagnostics::AccessCause::Reborrow)),
            this.machine.borrow_tracker.as_ref().unwrap(),
            alloc_id,
            this.machine.current_span(),
        )?;
        // Record the parent-child pair in the tree.
        tree_borrows.new_child(
            orig_tag,
            new_tag,
            new_perm.initial_state,
            range,
            span,
            new_perm.protector.is_some(),
        )?;
        drop(tree_borrows);

        // Also inform the data race model (but only if any bytes are actually affected).
        if range.size.bytes() > 0 {
            if let Some(data_race) = alloc_extra.data_race.as_ref() {
                data_race.read(
                    alloc_id,
                    range,
                    NaReadType::Retag,
                    Some(place.layout.ty),
                    &this.machine,
                )?;
            }
        }

        interp_ok(Some(Provenance::Concrete { alloc_id, tag: new_tag }))
    }

    /// Retags the given place: performs a reborrow with a fresh tag and returns
    /// the same place carrying the new provenance.
    fn tb_retag_place(
        &mut self,
        place: &MPlaceTy<'tcx>,
        new_perm: NewPermission,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
        let this = self.eval_context_mut();

        // Determine the size of the reborrow.
        // For most types this is the entire size of the place, however
        // - when `extern type` is involved we use the size of the known prefix,
        // - if the pointer is not reborrowed (raw pointer) or if `zero_size` is set
        // then we override the size to do a zero-length reborrow.
        let reborrow_size = match new_perm {
            NewPermission { zero_size: false, .. } =>
                this.size_and_align_of_mplace(place)?
                    .map(|(size, _)| size)
                    .unwrap_or(place.layout.size),
            _ => Size::from_bytes(0),
        };
        trace!("Creating new permission: {:?} with size {:?}", new_perm, reborrow_size);

        // This new tag is not guaranteed to actually be used.
        //
        // If you run out of tags, consider the following optimization: adjust `tb_reborrow`
        // so that rather than taking as input a fresh tag and deciding whether it uses this
        // one or the parent it instead just returns whether a new tag should be created.
        // This will avoid creating tags than end up never being used.
        let new_tag = this.machine.borrow_tracker.as_mut().unwrap().get_mut().new_ptr();

        // Compute the actual reborrow.
        let new_prov = this.tb_reborrow(place, reborrow_size, new_perm, new_tag)?;

        // Adjust place.
        // (If the closure gets called, that means the old provenance was `Some`, and hence the new
        // one must also be `Some`.)
        interp_ok(place.clone().map_provenance(|_| new_prov.unwrap()))
    }

    /// Retags an individual pointer, returning the retagged version.
    fn tb_retag_reference(
        &mut self,
        val: &ImmTy<'tcx>,
        new_perm: NewPermission,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        let this = self.eval_context_mut();
        // Turn the pointer value into a place, retag that, and convert back to a value.
        let place = this.ref_to_mplace(val)?;
        let new_place = this.tb_retag_place(&place, new_perm)?;
        interp_ok(ImmTy::from_immediate(new_place.to_ref(this), val.layout))
    }
}
376
// Blanket impl: exposes the public Tree Borrows entry points on the interpreter context.
impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {}
pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
    /// Retag a pointer. References are passed to `from_ref_ty` and
    /// raw pointers are never reborrowed.
    fn tb_retag_ptr_value(
        &mut self,
        kind: RetagKind,
        val: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        let this = self.eval_context_mut();
        // Only references get a new permission; everything else keeps its provenance.
        let new_perm = match val.layout.ty.kind() {
            &ty::Ref(_, pointee, mutability) =>
                NewPermission::from_ref_ty(pointee, mutability, kind, this),
            _ => None,
        };
        if let Some(new_perm) = new_perm {
            this.tb_retag_reference(val, new_perm)
        } else {
            interp_ok(val.clone())
        }
    }

    /// Retag all pointers that are stored in this place.
    fn tb_retag_place_contents(
        &mut self,
        kind: RetagKind,
        place: &PlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        let options = this.machine.borrow_tracker.as_mut().unwrap().get_mut();
        let retag_fields = options.retag_fields;
        // When `unique_is_unique` is enabled, also retag `core::ptr::Unique`;
        // it is identified by its lang-item `DefId`.
        let unique_did =
            options.unique_is_unique.then(|| this.tcx.lang_items().ptr_unique()).flatten();
        let mut visitor = RetagVisitor { ecx: this, kind, retag_fields, unique_did };
        return visitor.visit_value(place);

        // The actual visitor.
        struct RetagVisitor<'ecx, 'tcx> {
            ecx: &'ecx mut MiriInterpCx<'tcx>,
            kind: RetagKind,
            retag_fields: RetagFields,
            unique_did: Option<DefId>,
        }
        impl<'ecx, 'tcx> RetagVisitor<'ecx, 'tcx> {
            #[inline(always)] // yes this helps in our benchmarks
            fn retag_ptr_inplace(
                &mut self,
                place: &PlaceTy<'tcx>,
                new_perm: Option<NewPermission>,
            ) -> InterpResult<'tcx> {
                // `None` means this pointer kind is not retagged; leave it untouched.
                if let Some(new_perm) = new_perm {
                    let val = self.ecx.read_immediate(&self.ecx.place_to_op(place)?)?;
                    let val = self.ecx.tb_retag_reference(&val, new_perm)?;
                    self.ecx.write_immediate(*val, place)?;
                }
                interp_ok(())
            }
        }
        impl<'ecx, 'tcx> ValueVisitor<'tcx, MiriMachine<'tcx>> for RetagVisitor<'ecx, 'tcx> {
            type V = PlaceTy<'tcx>;

            #[inline(always)]
            fn ecx(&self) -> &MiriInterpCx<'tcx> {
                self.ecx
            }

            /// Regardless of how `Unique` is handled, Boxes are always reborrowed.
            /// When `Unique` is also reborrowed, then it behaves exactly like `Box`
            /// except for the fact that `Box` has a non-zero-sized reborrow.
            fn visit_box(&mut self, box_ty: Ty<'tcx>, place: &PlaceTy<'tcx>) -> InterpResult<'tcx> {
                // Only boxes for the global allocator get any special treatment.
                if box_ty.is_box_global(*self.ecx.tcx) {
                    let new_perm = NewPermission::from_unique_ty(
                        place.layout.ty,
                        self.kind,
                        self.ecx,
                        /* zero_size */ false,
                    );
                    self.retag_ptr_inplace(place, new_perm)?;
                }
                interp_ok(())
            }

            fn visit_value(&mut self, place: &PlaceTy<'tcx>) -> InterpResult<'tcx> {
                // If this place is smaller than a pointer, we know that it can't contain any
                // pointers we need to retag, so we can stop recursion early.
                // This optimization is crucial for ZSTs, because they can contain way more fields
                // than we can ever visit.
                if place.layout.is_sized() && place.layout.size < self.ecx.pointer_size() {
                    return interp_ok(());
                }

                // Check the type of this value to see what to do with it (retag, or recurse).
                match place.layout.ty.kind() {
                    &ty::Ref(_, pointee, mutability) => {
                        let new_perm =
                            NewPermission::from_ref_ty(pointee, mutability, self.kind, self.ecx);
                        self.retag_ptr_inplace(place, new_perm)?;
                    }
                    ty::RawPtr(_, _) => {
                        // We definitely do *not* want to recurse into raw pointers -- wide raw
                        // pointers have fields, and for dyn Trait pointees those can have reference
                        // type!
                        // We also do not want to reborrow them.
                    }
                    ty::Adt(adt, _) if adt.is_box() => {
                        // Recurse for boxes, they require some tricky handling and will end up in `visit_box` above.
                        // (Yes this means we technically also recursively retag the allocator itself
                        // even if field retagging is not enabled. *shrug*)
                        self.walk_value(place)?;
                    }
                    ty::Adt(adt, _) if self.unique_did == Some(adt.did()) => {
                        // `unique_is_unique` mode: treat `Unique` like `Box`, but with
                        // a zero-sized reborrow of its inner raw pointer.
                        let place = inner_ptr_of_unique(self.ecx, place)?;
                        let new_perm = NewPermission::from_unique_ty(
                            place.layout.ty,
                            self.kind,
                            self.ecx,
                            /* zero_size */ true,
                        );
                        self.retag_ptr_inplace(&place, new_perm)?;
                    }
                    _ => {
                        // Not a reference/pointer/box. Only recurse if configured appropriately.
                        let recurse = match self.retag_fields {
                            RetagFields::No => false,
                            RetagFields::Yes => true,
                            RetagFields::OnlyScalar => {
                                // Matching `ArgAbi::new` at the time of writing, only fields of
                                // `Scalar` and `ScalarPair` ABI are considered.
                                matches!(
                                    place.layout.backend_repr,
                                    BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)
                                )
                            }
                        };
                        if recurse {
                            self.walk_value(place)?;
                        }
                    }
                }
                interp_ok(())
            }
        }
    }

    /// Protect a place so that it cannot be used any more for the duration of the current function
    /// call.
    ///
    /// This is used to ensure soundness of in-place function argument/return passing.
    fn tb_protect_place(&mut self, place: &MPlaceTy<'tcx>) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
        let this = self.eval_context_mut();

        // Note: if we were to inline `new_reserved` below we would find out that
        // `ty_is_freeze` is eventually unused because it appears in a `ty_is_freeze || true`.
        // We are nevertheless including it here for clarity.
        let ty_is_freeze = place.layout.ty.is_freeze(*this.tcx, this.typing_env());
        // Retag it. With protection! That is the entire point.
        let new_perm = NewPermission {
            initial_state: Permission::new_reserved(ty_is_freeze, /* protected */ true),
            zero_size: false,
            protector: Some(ProtectorKind::StrongProtector),
        };
        this.tb_retag_place(place, new_perm)
    }

    /// Mark the given tag as exposed. It was found on a pointer with the given AllocId.
    fn tb_expose_tag(&self, alloc_id: AllocId, tag: BorTag) -> InterpResult<'tcx> {
        let this = self.eval_context_ref();

        // Function pointers and dead objects don't have an alloc_extra so we ignore them.
        // This is okay because accessing them is UB anyway, no need for any Tree Borrows checks.
        // NOT using `get_alloc_extra_mut` since this might be a read-only allocation!
        let kind = this.get_alloc_info(alloc_id).kind;
        match kind {
            AllocKind::LiveData => {
                // This should have alloc_extra data, but `get_alloc_extra` can still fail
                // if converting this alloc_id from a global to a local one
                // uncovers a non-supported `extern static`.
                let alloc_extra = this.get_alloc_extra(alloc_id)?;
                trace!("Tree Borrows tag {tag:?} exposed in {alloc_id:?}");
                alloc_extra.borrow_tracker_tb().borrow_mut().expose_tag(tag);
            }
            AllocKind::Function | AllocKind::VTable | AllocKind::Dead => {
                // No tree borrows on these allocations.
            }
        }
        interp_ok(())
    }

    /// Display the tree.
    fn print_tree(&mut self, alloc_id: AllocId, show_unnamed: bool) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        let alloc_extra = this.get_alloc_extra(alloc_id)?;
        let tree_borrows = alloc_extra.borrow_tracker_tb().borrow();
        // Protected-tags map is needed so the printout can mark protected nodes.
        let borrow_tracker = &this.machine.borrow_tracker.as_ref().unwrap().borrow();
        tree_borrows.print_tree(&borrow_tracker.protected_tags, show_unnamed)
    }

    /// Give a name to the pointer, usually the name it has in the source code (for debugging).
    /// The name given is `name` and the pointer that receives it is the `nth_parent`
    /// of `ptr` (with 0 representing `ptr` itself)
    fn tb_give_pointer_debug_name(
        &mut self,
        ptr: Pointer,
        nth_parent: u8,
        name: &str,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        // Only pointers with concrete provenance live in a tree and can be named.
        let (tag, alloc_id) = match ptr.provenance {
            Some(Provenance::Concrete { tag, alloc_id }) => (tag, alloc_id),
            _ => {
                eprintln!("Can't give the name {name} to Wildcard pointer");
                return interp_ok(());
            }
        };
        let alloc_extra = this.get_alloc_extra(alloc_id)?;
        let mut tree_borrows = alloc_extra.borrow_tracker_tb().borrow_mut();
        tree_borrows.give_pointer_debug_name(tag, nth_parent, name)
    }
}
597
598/// Takes a place for a `Unique` and turns it into a place with the inner raw pointer.
599/// I.e. input is what you get from the visitor upon encountering an `adt` that is `Unique`,
600/// and output can be used by `retag_ptr_inplace`.
601fn inner_ptr_of_unique<'tcx>(
602    ecx: &MiriInterpCx<'tcx>,
603    place: &PlaceTy<'tcx>,
604) -> InterpResult<'tcx, PlaceTy<'tcx>> {
605    // Follows the same layout as `interpret/visitor.rs:walk_value` for `Box` in
606    // `rustc_const_eval`, just with one fewer layer.
607    // Here we have a `Unique(NonNull(*mut), PhantomData)`
608    assert_eq!(place.layout.fields.count(), 2, "Unique must have exactly 2 fields");
609    let (nonnull, phantom) = (ecx.project_field(place, 0)?, ecx.project_field(place, 1)?);
610    assert!(
611        phantom.layout.ty.ty_adt_def().is_some_and(|adt| adt.is_phantom_data()),
612        "2nd field of `Unique` should be `PhantomData` but is `{:?}`",
613        phantom.layout.ty,
614    );
615    // Now down to `NonNull(*mut)`
616    assert_eq!(nonnull.layout.fields.count(), 1, "NonNull must have exactly 1 field");
617    let ptr = ecx.project_field(&nonnull, 0)?;
618    // Finally a plain `*mut`
619    interp_ok(ptr)
620}