rustc_const_eval/interpret/intern.rs

1//! This module specifies the type-based interner for constants.
2//!
3//! After a const evaluation has computed a value, before we destroy the const evaluator's session
4//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
5//!
6//! In principle, this is not very complicated: we recursively walk the final value, follow all the
7//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
8//! is picking the right mutability: the outermost allocation generally has a clear mutability, but
9//! what about the other allocations it points to that have also been created with this value? We
10//! don't want to do guesswork here. The rules are: `static`, `const`, and promoted can only create
11//! immutable allocations that way. `static mut` can be initialized with expressions like `&mut 42`,
12//! so all inner allocations are marked mutable. Some of them could potentially be made immutable,
13//! but that would require relying on type information, and given how many ways Rust has to lie
14//! about type information, we want to avoid doing that.
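// A minimal sketch of those rules in terms of user-level code (illustrative only, not
// part of this module's API): only the `static mut` case interns inner allocations
// mutably.
//
//     static A: &i32 = &42;             // inner allocation for `42` interned immutable
//     const B: &i32 = &42;              // inner allocation for `42` interned immutable
//     static mut C: &mut i32 = &mut 42; // inner allocation for `42` interned mutable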
15
16use hir::def::DefKind;
17use rustc_ast::Mutability;
18use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
19use rustc_hir as hir;
20use rustc_hir::definitions::{DefPathData, DisambiguatorState};
21use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
22use rustc_middle::mir::interpret::{
23    AllocBytes, ConstAllocation, CtfeProvenance, InterpResult, Provenance,
24};
25use rustc_middle::query::TyCtxtAt;
26use rustc_middle::span_bug;
27use rustc_middle::ty::TyCtxt;
28use rustc_middle::ty::layout::TyAndLayout;
29use rustc_span::def_id::LocalDefId;
30use tracing::{instrument, trace};
31
32use super::{AllocId, Allocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy, interp_ok};
33use crate::const_eval::DummyMachine;
34use crate::{const_eval, errors};
35
36pub trait CompileTimeMachine<'tcx> = Machine<
37        'tcx,
38        MemoryKind = const_eval::MemoryKind,
39        Provenance = CtfeProvenance,
40        ExtraFnVal = !,
41        FrameExtra = (),
42        AllocExtra = (),
43        MemoryMap = FxIndexMap<AllocId, (MemoryKind<const_eval::MemoryKind>, Allocation)>,
44    > + HasStaticRootDefId;
45
46pub trait HasStaticRootDefId {
47    /// Returns the `DefId` of the static item that is currently being evaluated.
48    /// Used for interning to be able to handle nested allocations.
49    fn static_def_id(&self) -> Option<LocalDefId>;
50}
51
52impl HasStaticRootDefId for const_eval::CompileTimeMachine<'_> {
53    fn static_def_id(&self) -> Option<LocalDefId> {
54        Some(self.static_root_ids?.1)
55    }
56}
57
58fn prepare_alloc<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
59    tcx: TyCtxt<'tcx>,
60    kind: MemoryKind<const_eval::MemoryKind>,
61    alloc: &mut Allocation<Prov, Extra, Bytes>,
62    mutability: Mutability,
63) -> Result<(), InternError> {
64    match kind {
65        MemoryKind::Machine(const_eval::MemoryKind::Heap { was_made_global }) => {
66            if !was_made_global {
67                // Attempting to intern a `const_allocate`d pointer that was not made global via
68                // `const_make_global`.
69                tcx.dcx().delayed_bug("non-global heap allocation in const value");
70                return Err(InternError::ConstAllocNotGlobal);
71            }
72        }
73        MemoryKind::Stack | MemoryKind::CallerLocation => {}
74    }
75
76    if !alloc.provenance_merge_bytes(&tcx) {
77        // Per-byte provenance is not supported by backends, so we cannot accept it here.
78        tcx.dcx().delayed_bug("partial pointer in const value");
79        return Err(InternError::PartialPointer);
80    }
81
82    // Set allocation mutability as appropriate. This is used by LLVM to put things into
83    // read-only memory, and also by Miri when evaluating other globals that
84    // access this one.
85    match mutability {
86        Mutability::Not => {
87            alloc.mutability = Mutability::Not;
88        }
89        Mutability::Mut => {
90            // This must be already mutable, we won't "un-freeze" allocations ever.
91            assert_eq!(alloc.mutability, Mutability::Mut);
92        }
93    }
94    Ok(())
95}
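// A hedged sketch of the `Heap` rule above, assuming the unstable `core_intrinsics` and
// `const_heap` features with the `const_allocate`/`const_make_global` intrinsics: a
// heap pointer may only end up in the final value after `const_make_global`, otherwise
// interning reports `InternError::ConstAllocNotGlobal`.
//
//     #![feature(core_intrinsics, const_heap)]
//     use core::intrinsics::{const_allocate, const_make_global};
//
//     const OK: *const u8 = unsafe {
//         let p = const_allocate(4, 4); // `was_made_global` is still false here
//         const_make_global(p)          // now the allocation can be interned
//     };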
96
97/// Intern an allocation. Returns `Err` if the allocation does not exist in the local memory.
98///
99/// `mutability` can be used to force immutable interning: if it is `Mutability::Not`, the
100/// allocation is interned immutably; if it is `Mutability::Mut`, then the allocation *must be*
101/// already mutable (as a sanity check).
102///
103/// Returns an iterator over all relocations referred to by this allocation.
104fn intern_shallow<'tcx, M: CompileTimeMachine<'tcx>>(
105    ecx: &mut InterpCx<'tcx, M>,
106    alloc_id: AllocId,
107    mutability: Mutability,
108    disambiguator: Option<&mut DisambiguatorState>,
109) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, InternError> {
110    trace!("intern_shallow {:?}", alloc_id);
111    // remove allocation
112    // FIXME(#120456) - is `swap_remove` correct?
113    let Some((kind, mut alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
114        return Err(InternError::DanglingPointer);
115    };
116
117    if let Err(err) = prepare_alloc(*ecx.tcx, kind, &mut alloc, mutability) {
118        // We want to error here, but we have to first put the
119        // allocation back into the `alloc_map` to keep things in a consistent state.
120        ecx.memory.alloc_map.insert(alloc_id, (kind, alloc));
121        return Err(err);
122    }
123
124    // link the alloc id to the actual allocation
125    let alloc = ecx.tcx.mk_const_alloc(alloc);
126    if let Some(static_id) = ecx.machine.static_def_id() {
127        intern_as_new_static(
128            ecx.tcx,
129            static_id,
130            alloc_id,
131            alloc,
132            disambiguator.expect("disambiguator needed"),
133        );
134    } else {
135        ecx.tcx.set_alloc_id_memory(alloc_id, alloc);
136    }
137    Ok(alloc.inner().provenance().ptrs().iter().map(|&(_, prov)| prov))
138}
139
140/// Creates a new `DefId` and feeds all the right queries to make this `DefId`
141/// appear as if it were a user-written `static` (though it has no HIR).
142fn intern_as_new_static<'tcx>(
143    tcx: TyCtxtAt<'tcx>,
144    static_id: LocalDefId,
145    alloc_id: AllocId,
146    alloc: ConstAllocation<'tcx>,
147    disambiguator: &mut DisambiguatorState,
148) {
149    // `intern_const_alloc_recursive` is called once per static and it contains the `DisambiguatorState`.
150    //  The `<static_id>::{{nested}}` path is thus unique to `intern_const_alloc_recursive` and the
151    // `DisambiguatorState` ensures the generated path is unique for this call as we generate
152    // `<static_id>::{{nested#n}}` where `n` is the `n`th `intern_as_new_static` call.
153    let feed = tcx.create_def(
154        static_id,
155        None,
156        DefKind::Static { safety: hir::Safety::Safe, mutability: alloc.0.mutability, nested: true },
157        Some(DefPathData::NestedStatic),
158        disambiguator,
159    );
160    tcx.set_nested_alloc_id_static(alloc_id, feed.def_id());
161
162    if tcx.is_thread_local_static(static_id.into()) {
163        tcx.dcx().emit_err(errors::NestedStaticInThreadLocal { span: tcx.def_span(static_id) });
164    }
165
166    // These do not inherit the codegen attrs of the parent static allocation, since
167    // it doesn't make sense for them to inherit their `#[no_mangle]` and `#[link_name = ..]`
168    // and the like.
169    feed.codegen_fn_attrs(CodegenFnAttrs::new());
170
171    feed.eval_static_initializer(Ok(alloc));
172    feed.generics_of(tcx.generics_of(static_id).clone());
173    feed.def_ident_span(tcx.def_ident_span(static_id));
174    feed.explicit_predicates_of(tcx.explicit_predicates_of(static_id));
175    feed.feed_hir();
176}
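// A hedged illustration of what this achieves (not taken from this file): when the
// initializer of
//
//     static FOO: &i32 = &42;
//
// is interned, the allocation holding `42` becomes a synthetic nested static with a
// def path of the form `FOO::{{nested#0}}`, fed with the queries above so later stages
// can treat it like an ordinary (HIR-less) `static`.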
177
178/// How a constant value should be interned.
179#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
180pub enum InternKind {
181    /// The `mutability` of the static, ignoring the type, which may have interior mutability.
182    Static(hir::Mutability),
183    /// A `const` item
184    Constant,
185    Promoted,
186}
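// A hedged, illustrative mapping from items to the `InternKind` used for their value:
//
//     static S: i32 = 0;             // InternKind::Static(Mutability::Not)
//     static mut M: i32 = 0;         // InternKind::Static(Mutability::Mut)
//     const C: i32 = 0;              // InternKind::Constant
//     fn f() -> &'static i32 { &42 } // the promoted `&42` uses InternKind::Promoted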
187
188#[derive(Debug)]
189pub enum InternError {
190    BadMutablePointer,
191    DanglingPointer,
192    ConstAllocNotGlobal,
193    PartialPointer,
194}
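// A rough summary (hedged) of when each variant is produced by the code in this module:
//
//     DanglingPointer      - `intern_shallow` found no allocation behind an `AllocId`
//     ConstAllocNotGlobal  - `const_allocate`d memory was never passed to `const_make_global`
//     PartialPointer       - an allocation still carries per-byte (partial) provenance
//     BadMutablePointer    - a mutable pointer where inner allocations must be immutable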
195
196/// Intern `ret` and everything it references.
197///
198/// This *cannot raise an interpreter error*. Doing so is left to validation, which
199/// tracks where in the value we are and thus can show much better error messages.
200///
201/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
202#[instrument(level = "debug", skip(ecx))]
203pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx>>(
204    ecx: &mut InterpCx<'tcx, M>,
205    intern_kind: InternKind,
206    ret: &MPlaceTy<'tcx>,
207) -> Result<(), InternError> {
208    let mut disambiguator = DisambiguatorState::new();
209
210    // We are interning recursively, and for mutability we are distinguishing the "root" allocation
211    // that we are starting in, and all other allocations that we are encountering recursively.
212    let (base_mutability, inner_mutability, is_static) = match intern_kind {
213        InternKind::Constant | InternKind::Promoted => {
214            // Completely immutable. Interning anything mutably here can only lead to unsoundness,
215            // since all consts are conceptually independent values but share the same underlying
216            // memory.
217            (Mutability::Not, Mutability::Not, false)
218        }
219        InternKind::Static(Mutability::Not) => {
220            (
221                // Outermost allocation is mutable if `!Freeze` i.e. contains interior mutable types.
222                if ret.layout.ty.is_freeze(*ecx.tcx, ecx.typing_env) {
223                    Mutability::Not
224                } else {
225                    Mutability::Mut
226                },
227                // Inner allocations are never mutable. They can only arise via the "tail
228                // expression" / "outer scope" rule, and we treat them consistently with `const`.
229                Mutability::Not,
230                true,
231            )
232        }
233        InternKind::Static(Mutability::Mut) => {
234            // Just make everything mutable. We accept code like
235            // `static mut X = &mut [42]`, so even inner allocations need to be mutable.
236            (Mutability::Mut, Mutability::Mut, true)
237        }
238    };
239
240    // Intern the base allocation, and initialize todo list for recursive interning.
241    let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
242    trace!(?base_alloc_id, ?base_mutability);
243    // First we intern the base allocation, as it requires a different mutability.
244    // This gives us the initial set of nested allocations, which will then all be processed
245    // recursively in the loop below.
246    let mut todo: Vec<_> = if is_static {
247        // Do not steal the root allocation; we need it later to create the return value of `eval_static_initializer`.
248        // But still change its mutability to match the requested one.
249        let (kind, alloc) = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
250        prepare_alloc(*ecx.tcx, *kind, alloc, base_mutability)?;
251        alloc.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
252    } else {
253        intern_shallow(ecx, base_alloc_id, base_mutability, Some(&mut disambiguator))?.collect()
254    };
255    // We need to distinguish "has just been interned" from "was already in `tcx`",
256    // so we track this in a separate set.
257    let mut just_interned: FxHashSet<_> = std::iter::once(base_alloc_id).collect();
258    // Whether we encountered a bad mutable pointer.
259    // We want to first report "dangling" and then "mutable", so we need to delay reporting these
260    // errors.
261    let mut found_bad_mutable_ptr = false;
262
263    // Keep interning as long as there are things to intern.
264    // We show errors if there are dangling pointers, or mutable pointers in immutable contexts
265    // (i.e., everything except for `static mut`). We only return these errors as a `Result`
266    // so that the caller can run validation, and subsequently only report interning errors
267    // if validation fails. Validation has the better error messages so we prefer those, but
268    // interning has better coverage since it "sees" *all* pointers, including raw pointers and
269    // references stored in unions.
270    while let Some(prov) = todo.pop() {
271        trace!(?prov);
272        let alloc_id = prov.alloc_id();
273
274        if base_alloc_id == alloc_id && is_static {
275            // This is a pointer to the static itself. It's ok for a static to refer to itself,
276            // even mutably. Whether that mutable pointer is legal at all is checked in validation.
277            // See tests/ui/statics/recursive_interior_mut.rs for how such a situation can occur.
278            // We also already collected all the nested allocations, so there's no need to do that again.
279            continue;
280        }
281
282        // Ensure that this is derived from a shared reference. Crucially, we check this *before*
283        // checking whether the `alloc_id` has already been interned. The point of this check is to
284        // ensure that when there are multiple pointers to the same allocation, they are *all*
285        // derived from a shared reference. Therefore it would be bad if we only checked the first
286        // pointer to any given allocation.
287        // (It is likely not possible to actually have multiple pointers to the same allocation,
288        // so alternatively we could also check that and ICE if there are multiple such pointers.)
289        // See <https://github.com/rust-lang/rust/pull/128543> for why we are checking for "shared
290        // reference" and not "immutable", i.e., for why we are allowing interior-mutable shared
291        // references: they can actually be created in safe code while pointing to apparently
292        // "immutable" values, via promotion or tail expression lifetime extension of
293        // `&None::<Cell<T>>`.
294        // We also exclude promoteds from this as `&mut []` can be promoted, which is a mutable
295        // reference pointing to an immutable (zero-sized) allocation. We rely on the promotion
296        // analysis not screwing up to ensure that it is sound to intern promoteds as immutable.
297        if intern_kind != InternKind::Promoted
298            && inner_mutability == Mutability::Not
299            && !prov.shared_ref()
300        {
301            let is_already_global = ecx.tcx.try_get_global_alloc(alloc_id).is_some();
302            if is_already_global && !just_interned.contains(&alloc_id) {
303                // This is a pointer to some memory from another constant. We encounter mutable
304                // pointers to such memory since we do not always track immutability through
305                // these "global" pointers. Allowing them is harmless; the point of these checks
306                // during interning is to justify why we intern the *new* allocations immutably,
307                // so we can completely ignore existing allocations.
308                // We can also skip the rest of this loop iteration, since after all it is already
309                // interned.
310                continue;
311            }
312            // If this is a dangling pointer, that's actually fine -- the problematic case is
313            // when there is memory there that someone might expect to be mutable, but we make it immutable.
314            let dangling = !is_already_global && !ecx.memory.alloc_map.contains_key(&alloc_id);
315            if !dangling {
316                found_bad_mutable_ptr = true;
317            }
318        }
319        if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
320            // Already interned.
321            debug_assert!(!ecx.memory.alloc_map.contains_key(&alloc_id));
322            continue;
323        }
324        // We always intern with `inner_mutability`, and furthermore we ensured above that if
325        // that is "immutable", then there are *no* mutable pointers anywhere in the newly
326        // interned memory -- justifying that we can indeed intern immutably. However this also
327        // means we can *not* easily intern immutably here if `prov.immutable()` is true and
328        // `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
329        // we'd have to somehow check that they are *all* immutable before deciding that this
330        // allocation can be made immutable. In the future we could consider analyzing all
331        // pointers before deciding which allocations can be made immutable; but for now we are
332        // okay with losing some potential for immutability here. This can anyway only affect
333        // `static mut`.
334        just_interned.insert(alloc_id);
335        let next = intern_shallow(ecx, alloc_id, inner_mutability, Some(&mut disambiguator))?;
336        todo.extend(next);
337    }
338    if found_bad_mutable_ptr {
339        // We found a mutable pointer inside a const where inner allocations should be immutable,
340        // and there was no other error. This should usually never happen! However, this can happen
341        // in unleash-miri mode, so report it as a normal error then.
342        if ecx.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you {
343            return Err(InternError::BadMutablePointer);
344        } else {
345            span_bug!(
346                ecx.tcx.span,
347                "the static const safety checks accepted a mutable pointer they should not have accepted"
348            );
349        }
350    }
351    Ok(())
352}
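// A hedged walk-through of the loop above for a nested example:
//
//     static S: &&i32 = &&42;
//
// The root allocation (holding the outer reference) stays in local memory because
// `is_static` is true; its provenance seeds `todo` with the allocation for the inner
// `&42`, which in turn yields the allocation holding `42`. Both are interned immutably
// (`inner_mutability` is `Not` for a non-`mut` static) and become nested statics.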
353
354/// Intern the allocation behind `alloc_id`. This function assumes that it references no other allocations.
355#[instrument(level = "debug", skip(ecx))]
356pub fn intern_const_alloc_for_constprop<'tcx, M: CompileTimeMachine<'tcx>>(
357    ecx: &mut InterpCx<'tcx, M>,
358    alloc_id: AllocId,
359) -> InterpResult<'tcx, ()> {
360    if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
361        // The constant is already in global memory. Do nothing.
362        return interp_ok(());
363    }
364    // Move allocation to `tcx`.
365    if let Some(_) = intern_shallow(ecx, alloc_id, Mutability::Not, None).unwrap().next() {
366        // We are not doing recursive interning, so we don't currently support provenance.
367        // (If this assertion ever triggers, we should just implement a
368    // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.)
369        panic!("`intern_const_alloc_for_constprop` called on allocation with nested provenance")
370    }
371    interp_ok(())
372}
373
374impl<'tcx> InterpCx<'tcx, DummyMachine> {
375    /// A helper function that allocates memory for the layout given and gives you access to mutate
376    /// it. Once your own mutation code is done, the backing `Allocation` is removed from the
377    /// current `Memory` and interned as read-only into the global memory.
378    pub fn intern_with_temp_alloc(
379        &mut self,
380        layout: TyAndLayout<'tcx>,
381        f: impl FnOnce(
382            &mut InterpCx<'tcx, DummyMachine>,
383            &PlaceTy<'tcx, CtfeProvenance>,
384        ) -> InterpResult<'tcx, ()>,
385    ) -> InterpResult<'tcx, AllocId> {
386        // `allocate` picks a fresh AllocId that we will associate with its data below.
387        let dest = self.allocate(layout, MemoryKind::Stack)?;
388        f(self, &dest.clone().into())?;
389        let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
390        for prov in intern_shallow(self, alloc_id, Mutability::Not, None).unwrap() {
391            // We are not doing recursive interning, so we don't currently support provenance.
392            // (If this assertion ever triggers, we should just implement a
393            // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.)
394            if self.tcx.try_get_global_alloc(prov.alloc_id()).is_none() {
395                panic!("`intern_with_temp_alloc` with nested allocations");
396            }
397        }
398        interp_ok(alloc_id)
399    }
400}