rustc_middle/mir/interpret/mod.rs

//! An interpreter for MIR used in CTFE and by miri.

#[macro_use]
mod error;

mod allocation;
mod pointer;
mod queries;
mod value;

use std::io::{Read, Write};
use std::num::NonZero;
use std::{fmt, io};

use rustc_abi::{AddressSpace, Align, Endian, HasDataLayout, Size};
use rustc_ast::{LitKind, Mutability};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sharded::ShardedHashMap;
use rustc_data_structures::sync::{AtomicU64, Lock};
use rustc_hir::def::DefKind;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable};
use rustc_serialize::{Decodable, Encodable};
use tracing::{debug, trace};
// Also make the error macros available from this module.
pub use {
    err_exhaust, err_inval, err_machine_stop, err_ub, err_ub_custom, err_ub_format, err_unsup,
    err_unsup_format, throw_exhaust, throw_inval, throw_machine_stop, throw_ub, throw_ub_custom,
    throw_ub_format, throw_unsup, throw_unsup_format,
};

pub use self::allocation::{
    AllocBytes, AllocError, AllocInit, AllocRange, AllocResult, Allocation, ConstAllocation,
    InitChunk, InitChunkIter, alloc_range,
};
pub use self::error::{
    BadBytesAccess, CheckAlignMsg, CheckInAllocMsg, ErrorHandled, EvalStaticInitializerRawResult,
    EvalToAllocationRawResult, EvalToConstValueResult, EvalToValTreeResult, ExpectedKind,
    InterpErrorInfo, InterpErrorKind, InterpResult, InvalidMetaKind, InvalidProgramInfo,
    MachineStopType, Misalignment, PointerKind, ReportedErrorInfo, ResourceExhaustionInfo,
    ScalarSizeMismatch, UndefinedBehaviorInfo, UnsupportedOpInfo, ValidationErrorInfo,
    ValidationErrorKind, interp_ok,
};
pub use self::pointer::{CtfeProvenance, Pointer, PointerArithmetic, Provenance};
pub use self::value::Scalar;
use crate::mir;
use crate::ty::codec::{TyDecoder, TyEncoder};
use crate::ty::print::with_no_trimmed_paths;
use crate::ty::{self, Instance, Ty, TyCtxt};

/// Uniquely identifies one of the following:
/// - A constant
/// - A static
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable, TypeVisitable)]
pub struct GlobalId<'tcx> {
    /// For a constant or static, the `Instance` of the item itself.
    /// For a promoted global, the `Instance` of the function it belongs to.
    pub instance: ty::Instance<'tcx>,

    /// The index for promoted globals within their function's `mir::Body`.
    pub promoted: Option<mir::Promoted>,
}

impl<'tcx> GlobalId<'tcx> {
    pub fn display(self, tcx: TyCtxt<'tcx>) -> String {
        let instance_name = with_no_trimmed_paths!(tcx.def_path_str(self.instance.def.def_id()));
        if let Some(promoted) = self.promoted {
            format!("{instance_name}::{promoted:?}")
        } else {
            instance_name
        }
    }
}
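
// Illustrative example (not from the original source): for a promoted constant
// inside a function `my_crate::foo`, `display` yields something like
// "my_crate::foo::promoted[0]", via the `Debug` format of `mir::Promoted`.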

/// Input argument for `tcx.lit_to_const`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, HashStable)]
pub struct LitToConstInput<'tcx> {
    /// The absolute value of the resultant constant.
    pub lit: &'tcx LitKind,
    /// The type of the constant.
    pub ty: Ty<'tcx>,
    /// If the constant is negative.
    pub neg: bool,
}

#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct AllocId(pub NonZero<u64>);

// We want the `Debug` output to be readable as it is used by `derive(Debug)` for
// all the Miri types.
impl fmt::Debug for AllocId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if f.alternate() { write!(f, "a{}", self.0) } else { write!(f, "alloc{}", self.0) }
    }
}
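
// Illustrative (not from the source): `format!("{:?}", some_alloc_id)` prints
// e.g. "alloc7", while the alternate form `format!("{:#?}", some_alloc_id)`
// prints the more compact "a7".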

// No "Display" since AllocIds are not usually user-visible.

#[derive(TyDecodable, TyEncodable)]
enum AllocDiscriminant {
    Alloc,
    Fn,
    VTable,
    Static,
}

pub fn specialized_encode_alloc_id<'tcx, E: TyEncoder<I = TyCtxt<'tcx>>>(
    encoder: &mut E,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
) {
    match tcx.global_alloc(alloc_id) {
        GlobalAlloc::Memory(alloc) => {
            trace!("encoding {:?} with {:#?}", alloc_id, alloc);
            AllocDiscriminant::Alloc.encode(encoder);
            alloc.encode(encoder);
        }
        GlobalAlloc::Function { instance } => {
            trace!("encoding {:?} with {:#?}", alloc_id, instance);
            AllocDiscriminant::Fn.encode(encoder);
            instance.encode(encoder);
        }
        GlobalAlloc::VTable(ty, poly_trait_ref) => {
            trace!("encoding {:?} with {ty:#?}, {poly_trait_ref:#?}", alloc_id);
            AllocDiscriminant::VTable.encode(encoder);
            ty.encode(encoder);
            poly_trait_ref.encode(encoder);
        }
        GlobalAlloc::Static(did) => {
            assert!(!tcx.is_thread_local_static(did));
            // References to statics don't need to know about their allocations,
            // just about the static's `DefId`.
            AllocDiscriminant::Static.encode(encoder);
            // Cannot use `did.encode(encoder)` because of a bug around
            // specializations and method calls.
            Encodable::<E>::encode(&did, encoder);
        }
    }
}
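
// Illustrative summary (not from the source): the metadata encoding produced
// above is a single `AllocDiscriminant` tag followed by its payload -- the
// allocation bytes, the `Instance`, the type plus predicate list, or the
// `DefId` -- and `AllocDecodingSession::decode_alloc_id` below reads it back
// in exactly that order.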

#[derive(Clone)]
enum State {
    Empty,
    Done(AllocId),
}

pub struct AllocDecodingState {
    // For each `AllocId`, we keep track of which decoding state it's currently in.
    decoding_state: Vec<Lock<State>>,
    // The offsets of each allocation in the data stream.
    data_offsets: Vec<u64>,
}

impl AllocDecodingState {
    #[inline]
    pub fn new_decoding_session(&self) -> AllocDecodingSession<'_> {
        AllocDecodingSession { state: self }
    }

    pub fn new(data_offsets: Vec<u64>) -> Self {
        let decoding_state =
            std::iter::repeat_with(|| Lock::new(State::Empty)).take(data_offsets.len()).collect();

        Self { decoding_state, data_offsets }
    }
}

#[derive(Copy, Clone)]
pub struct AllocDecodingSession<'s> {
    state: &'s AllocDecodingState,
}

impl<'s> AllocDecodingSession<'s> {
    /// Decodes an `AllocId` in a thread-safe way.
    pub fn decode_alloc_id<'tcx, D>(&self, decoder: &mut D) -> AllocId
    where
        D: TyDecoder<I = TyCtxt<'tcx>>,
    {
        // Read the index of the allocation.
        let idx = usize::try_from(decoder.read_u32()).unwrap();
        let pos = usize::try_from(self.state.data_offsets[idx]).unwrap();

        // Decode the `AllocDiscriminant` now so that we know if we have to reserve an
        // `AllocId`.
        let (alloc_kind, pos) = decoder.with_position(pos, |decoder| {
            let alloc_kind = AllocDiscriminant::decode(decoder);
            (alloc_kind, decoder.position())
        });

        // We are going to hold this lock during the entire decoding of this allocation, which may
        // require that we decode other allocations. This cannot deadlock for two reasons:
        //
        // At the time of writing, it is only possible to create an allocation that contains a pointer
        // to itself using the `const_allocate` intrinsic (which is for testing only), and even attempting
        // to evaluate such consts blows the stack. If we ever grow a mechanism for producing
        // cyclic allocations, we will need a new strategy for decoding that doesn't bring back
        // https://github.com/rust-lang/rust/issues/126741.
        //
        // It is also impossible to create two allocations (call them A and B) where A is a pointer to B, and B
        // is a pointer to A, because attempting to evaluate either of those consts will produce a
        // query cycle, failing compilation.
        let mut entry = self.state.decoding_state[idx].lock();
        // Check the decoding state to see if it's already decoded or if we should
        // decode it here.
        if let State::Done(alloc_id) = *entry {
            return alloc_id;
        }

        // Now decode the actual data.
        let alloc_id = decoder.with_position(pos, |decoder| match alloc_kind {
            AllocDiscriminant::Alloc => {
                trace!("creating memory alloc ID");
                let alloc = <ConstAllocation<'tcx> as Decodable<_>>::decode(decoder);
                trace!("decoded alloc {:?}", alloc);
                decoder.interner().reserve_and_set_memory_alloc(alloc)
            }
            AllocDiscriminant::Fn => {
                trace!("creating fn alloc ID");
                let instance = ty::Instance::decode(decoder);
                trace!("decoded fn alloc instance: {:?}", instance);
                decoder.interner().reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT)
            }
            AllocDiscriminant::VTable => {
                trace!("creating vtable alloc ID");
                let ty = Decodable::decode(decoder);
                let poly_trait_ref = Decodable::decode(decoder);
                trace!("decoded vtable alloc instance: {ty:?}, {poly_trait_ref:?}");
                decoder.interner().reserve_and_set_vtable_alloc(ty, poly_trait_ref, CTFE_ALLOC_SALT)
            }
            AllocDiscriminant::Static => {
                trace!("creating extern static alloc ID");
                let did = <DefId as Decodable<D>>::decode(decoder);
                trace!("decoded static def-ID: {:?}", did);
                decoder.interner().reserve_and_set_static_alloc(did)
            }
        });

        *entry = State::Done(alloc_id);

        alloc_id
    }
}
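
// Illustrative wiring (not from the source): a crate-metadata decoder owns a
// single `AllocDecodingState` built from the recorded `data_offsets`, and each
// time an `AllocId` occurs in the stream it calls
// `state.new_decoding_session().decode_alloc_id(decoder)` to obtain the local ID.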

/// An allocation in the global (tcx-managed) memory can be either a function pointer,
/// a static, or a "real" allocation with some data in it.
#[derive(Debug, Clone, Eq, PartialEq, Hash, TyDecodable, TyEncodable, HashStable)]
pub enum GlobalAlloc<'tcx> {
    /// The alloc ID is used as a function pointer.
    Function { instance: Instance<'tcx> },
    /// This alloc ID points to a symbolic (not-reified) vtable.
    /// We remember the full dyn type, not just the principal trait, so that
    /// const-eval and Miri can detect UB due to invalid transmutes of
    /// `dyn Trait` types.
    VTable(Ty<'tcx>, &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>),
    /// The alloc ID points to a "lazy" static variable that did not get computed (yet).
    /// This is also used to break the cycle in recursive statics.
    Static(DefId),
    /// The alloc ID points to memory.
    Memory(ConstAllocation<'tcx>),
}

impl<'tcx> GlobalAlloc<'tcx> {
    /// Panics if the `GlobalAlloc` does not refer to a `GlobalAlloc::Memory`
    #[track_caller]
    #[inline]
    pub fn unwrap_memory(&self) -> ConstAllocation<'tcx> {
        match *self {
            GlobalAlloc::Memory(mem) => mem,
            _ => bug!("expected memory, got {:?}", self),
        }
    }

    /// Panics if the `GlobalAlloc` is not `GlobalAlloc::Function`
    #[track_caller]
    #[inline]
    pub fn unwrap_fn(&self) -> Instance<'tcx> {
        match *self {
            GlobalAlloc::Function { instance, .. } => instance,
            _ => bug!("expected function, got {:?}", self),
        }
    }

    /// Panics if the `GlobalAlloc` is not `GlobalAlloc::VTable`
    #[track_caller]
    #[inline]
    pub fn unwrap_vtable(&self) -> (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>) {
        match *self {
            GlobalAlloc::VTable(ty, dyn_ty) => (ty, dyn_ty.principal()),
            _ => bug!("expected vtable, got {:?}", self),
        }
    }

    /// The address space that this `GlobalAlloc` should be placed in.
    #[inline]
    pub fn address_space(&self, cx: &impl HasDataLayout) -> AddressSpace {
        match self {
            GlobalAlloc::Function { .. } => cx.data_layout().instruction_address_space,
            GlobalAlloc::Static(..) | GlobalAlloc::Memory(..) | GlobalAlloc::VTable(..) => {
                AddressSpace::DATA
            }
        }
    }

    pub fn mutability(&self, tcx: TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>) -> Mutability {
        // Let's see what kind of memory we are.
        match self {
            GlobalAlloc::Static(did) => {
                let DefKind::Static { safety: _, mutability, nested } = tcx.def_kind(did) else {
                    bug!()
                };
                if nested {
                    // Nested statics in a `static` are never interior mutable,
                    // so just use the declared mutability.
                    if cfg!(debug_assertions) {
                        let alloc = tcx.eval_static_initializer(did).unwrap();
                        assert_eq!(alloc.0.mutability, mutability);
                    }
                    mutability
                } else {
                    // An immutable `static` whose type has interior mutability
                    // (i.e., is not `Freeze`) must still be treated as mutable.
                    match mutability {
                        Mutability::Not
                            if !tcx
                                .type_of(did)
                                .no_bound_vars()
                                .expect("statics should not have generic parameters")
                                .is_freeze(tcx, typing_env) =>
                        {
                            Mutability::Mut
                        }
                        _ => mutability,
                    }
                }
            }
            GlobalAlloc::Memory(alloc) => alloc.inner().mutability,
            GlobalAlloc::Function { .. } | GlobalAlloc::VTable(..) => {
                // These are immutable.
                Mutability::Not
            }
        }
    }

    pub fn size_and_align(
        &self,
        tcx: TyCtxt<'tcx>,
        typing_env: ty::TypingEnv<'tcx>,
    ) -> (Size, Align) {
        match self {
            GlobalAlloc::Static(def_id) => {
                let DefKind::Static { nested, .. } = tcx.def_kind(def_id) else {
                    bug!("GlobalAlloc::Static is not a static")
                };

                if nested {
                    // Nested anonymous statics are untyped, so let's get their
                    // size and alignment from the allocation itself. This always
                    // succeeds, as the query is fed at DefId creation time, so no
                    // evaluation actually occurs.
                    let alloc = tcx.eval_static_initializer(def_id).unwrap();
                    (alloc.0.size(), alloc.0.align)
                } else {
                    // Use size and align of the type for everything else. We need
                    // to do that to
                    // * avoid cycle errors in case of self-referential statics,
                    // * be able to get information on extern statics.
                    let ty = tcx
                        .type_of(def_id)
                        .no_bound_vars()
                        .expect("statics should not have generic parameters");
                    let layout = tcx.layout_of(typing_env.as_query_input(ty)).unwrap();
                    assert!(layout.is_sized());
                    (layout.size, layout.align.abi)
                }
            }
            GlobalAlloc::Memory(alloc) => {
                let alloc = alloc.inner();
                (alloc.size(), alloc.align)
            }
            GlobalAlloc::Function { .. } => (Size::ZERO, Align::ONE),
            GlobalAlloc::VTable(..) => {
                // No data to be accessed here. But vtables are pointer-aligned.
                (Size::ZERO, tcx.data_layout.pointer_align.abi)
            }
        }
    }
}

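/// The salt used for allocations created by CTFE itself and for allocations
/// decoded from crate metadata (see `decode_alloc_id` above). Using one fixed
/// salt means deduplication is maximal in those contexts; Miri instead varies
/// the salt to keep deduplication imperfect (see `AllocMap::dedup`).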
pub const CTFE_ALLOC_SALT: usize = 0;

pub(crate) struct AllocMap<'tcx> {
    /// Maps `AllocId`s to their corresponding allocations.
    // Note that this map seems to be rather dense on rustc workloads, but should be pretty
    // sparse on Miri workloads. In #136105 we considered replacing it with a (dense) Vec-based
    // map, but since there are workloads where it can be sparse we decided to go with sharding
    // for now. At least up to 32 cores, the one workload tested didn't exhibit much difference
    // between the two.
    //
    // Should be locked *after* locking `dedup` if locking both, to avoid deadlocks.
    to_alloc: ShardedHashMap<AllocId, GlobalAlloc<'tcx>>,

    /// Used to deduplicate global allocations: functions, vtables, string literals, ...
    ///
    /// The `usize` is a "salt" used by Miri to make deduplication imperfect, thus better emulating
    /// the actual guarantees.
    dedup: Lock<FxHashMap<(GlobalAlloc<'tcx>, usize), AllocId>>,

    /// The `AllocId` to assign to the next requested ID.
    /// Always incremented; never gets smaller.
    next_id: AtomicU64,
}

impl<'tcx> AllocMap<'tcx> {
    pub(crate) fn new() -> Self {
        AllocMap {
            to_alloc: Default::default(),
            dedup: Default::default(),
            next_id: AtomicU64::new(1),
        }
    }
    fn reserve(&self) -> AllocId {
        // Technically there is a window here where we overflow and then another thread
        // increments `next_id` *again* and uses it before we panic and tear down the entire session.
        // We consider this fine since such overflows cannot realistically occur.
        let next_id = self.next_id.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        AllocId(NonZero::new(next_id).unwrap())
    }
}

impl<'tcx> TyCtxt<'tcx> {
    /// Obtains a new allocation ID that can be referenced but does not
    /// yet have an allocation backing it.
    ///
    /// Make sure to call `set_alloc_id_memory` or `set_alloc_id_same_memory` before returning such
    /// an `AllocId` from a query.
    pub fn reserve_alloc_id(self) -> AllocId {
        self.alloc_map.reserve()
    }

    /// Reserves a new ID *if* this allocation has not been dedup-reserved before.
    /// Should not be used for mutable memory.
    fn reserve_and_set_dedup(self, alloc: GlobalAlloc<'tcx>, salt: usize) -> AllocId {
        if let GlobalAlloc::Memory(mem) = alloc {
            if mem.inner().mutability.is_mut() {
                bug!("trying to dedup-reserve mutable memory");
            }
        }
        let alloc_salt = (alloc, salt);
        // We lock this *before* `to_alloc` to maintain the documented lock order.
        let mut dedup = self.alloc_map.dedup.lock();
        if let Some(&alloc_id) = dedup.get(&alloc_salt) {
            return alloc_id;
        }
        let id = self.alloc_map.reserve();
        debug!("creating alloc {:?} with id {id:?}", alloc_salt.0);
        let had_previous = self
            .alloc_map
            .to_alloc
            .lock_shard_by_value(&id)
            .insert(id, alloc_salt.0.clone())
            .is_some();
        // We just reserved, so should always be unique.
        assert!(!had_previous);
        dedup.insert(alloc_salt, id);
        id
    }

    /// Generates an `AllocId` for a memory allocation. If the exact same memory has been
    /// allocated before, this will return the same `AllocId`.
    pub fn reserve_and_set_memory_dedup(self, mem: ConstAllocation<'tcx>, salt: usize) -> AllocId {
        self.reserve_and_set_dedup(GlobalAlloc::Memory(mem), salt)
    }

    /// Generates an `AllocId` for a static, or returns a cached one if this function has
    /// already been called on the same static.
    pub fn reserve_and_set_static_alloc(self, static_id: DefId) -> AllocId {
        let salt = 0; // Statics have a guaranteed unique address, no salt added.
        self.reserve_and_set_dedup(GlobalAlloc::Static(static_id), salt)
    }

    /// Generates an `AllocId` for a function. Will get deduplicated.
    pub fn reserve_and_set_fn_alloc(self, instance: Instance<'tcx>, salt: usize) -> AllocId {
        self.reserve_and_set_dedup(GlobalAlloc::Function { instance }, salt)
    }
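
    // Illustrative (not from the source): calling `reserve_and_set_fn_alloc`
    // twice with the same `instance` and salt yields the same `AllocId`, while
    // different salts may yield distinct `AllocId`s. Miri varies the salt to
    // model that function pointers are not guaranteed unique addresses; CTFE
    // always passes `CTFE_ALLOC_SALT`.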

    /// Generates an `AllocId` for a (symbolic, not-reified) vtable. Will get deduplicated.
    pub fn reserve_and_set_vtable_alloc(
        self,
        ty: Ty<'tcx>,
        dyn_ty: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
        salt: usize,
    ) -> AllocId {
        self.reserve_and_set_dedup(GlobalAlloc::VTable(ty, dyn_ty), salt)
    }

    /// Interns the `Allocation` and returns a new `AllocId`, even if there's already an identical
    /// `Allocation` with a different `AllocId`.
    /// Statics with identical content will still point to the same `Allocation`, i.e.,
    /// their data will be deduplicated through `Allocation` interning -- but they
    /// are different places in memory and as such need different IDs.
    pub fn reserve_and_set_memory_alloc(self, mem: ConstAllocation<'tcx>) -> AllocId {
        let id = self.reserve_alloc_id();
        self.set_alloc_id_memory(id, mem);
        id
    }

    /// Returns `None` in case the `AllocId` is dangling. An `InterpCx` can still have a
    /// local `Allocation` for that `AllocId`, but having such an `AllocId` in a constant is
    /// illegal and will likely ICE.
    /// This function exists to allow const eval to detect the difference between evaluation-
    /// local dangling pointers and allocations in constants/statics.
    #[inline]
    pub fn try_get_global_alloc(self, id: AllocId) -> Option<GlobalAlloc<'tcx>> {
        self.alloc_map.to_alloc.lock_shard_by_value(&id).get(&id).cloned()
    }

    #[inline]
    #[track_caller]
    /// Panics in case the `AllocId` is dangling. Since that is impossible for `AllocId`s in
    /// constants (as all constants must pass interning and validation that check for dangling
    /// ids), this function is frequently used throughout rustc, but should not be used within
    /// the interpreter.
    pub fn global_alloc(self, id: AllocId) -> GlobalAlloc<'tcx> {
        match self.try_get_global_alloc(id) {
            Some(alloc) => alloc,
            None => bug!("could not find allocation for {id:?}"),
        }
    }

    /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
    /// call this function twice, even with the same `Allocation`, will ICE the compiler.
    pub fn set_alloc_id_memory(self, id: AllocId, mem: ConstAllocation<'tcx>) {
        if let Some(old) =
            self.alloc_map.to_alloc.lock_shard_by_value(&id).insert(id, GlobalAlloc::Memory(mem))
        {
            bug!("tried to set allocation ID {id:?}, but it already exists as {old:#?}");
        }
    }
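
    // Illustrative two-phase usage (a sketch, not from the source):
    //     let id = tcx.reserve_alloc_id();
    //     // ... build `mem`, possibly creating pointers that refer to `id` ...
    //     tcx.set_alloc_id_memory(id, mem);
    // After this, `tcx.global_alloc(id)` returns `GlobalAlloc::Memory(mem)`.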

    /// Freezes an `AllocId` created with `reserve` by pointing it at a static item. Trying to
    /// call this function twice, even with the same `DefId`, will ICE the compiler.
    pub fn set_nested_alloc_id_static(self, id: AllocId, def_id: LocalDefId) {
        if let Some(old) = self
            .alloc_map
            .to_alloc
            .lock_shard_by_value(&id)
            .insert(id, GlobalAlloc::Static(def_id.to_def_id()))
        {
            bug!("tried to set allocation ID {id:?}, but it already exists as {old:#?}");
        }
    }
}

////////////////////////////////////////////////////////////////////////////////
// Methods to access integers in the target endianness
////////////////////////////////////////////////////////////////////////////////

#[inline]
pub fn write_target_uint(
    endianness: Endian,
    mut target: &mut [u8],
    data: u128,
) -> Result<(), io::Error> {
    // This u128 holds an "any-size uint" (since smaller uints can fit in it),
    // so we do not write all bytes of the u128, just the "payload".
    match endianness {
        Endian::Little => target.write(&data.to_le_bytes())?,
        Endian::Big => target.write(&data.to_be_bytes()[16 - target.len()..])?,
    };
    debug_assert!(target.len() == 0); // We should have filled the target buffer.
    Ok(())
}
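
// Illustrative (not from the source): writing `0x0102` into a 2-byte buffer
// stores only the low two bytes, in the target's endianness:
//     let mut buf = [0u8; 2];
//     write_target_uint(Endian::Little, &mut buf, 0x0102).unwrap();
//     assert_eq!(buf, [0x02, 0x01]);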

#[inline]
pub fn read_target_uint(endianness: Endian, mut source: &[u8]) -> Result<u128, io::Error> {
    // This u128 holds an "any-size uint" (since smaller uints can fit in it).
    let mut buf = [0u8; std::mem::size_of::<u128>()];
    // So we do not read exactly 16 bytes into the u128, just the "payload".
    let uint = match endianness {
        Endian::Little => {
            source.read_exact(&mut buf[..source.len()])?;
            Ok(u128::from_le_bytes(buf))
        }
        Endian::Big => {
            source.read_exact(&mut buf[16 - source.len()..])?;
            Ok(u128::from_be_bytes(buf))
        }
    };
    debug_assert!(source.len() == 0); // We should have consumed the source buffer.
    uint
}
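
// Illustrative round-trip (not from the source): reading the two bytes written
// above recovers the original value:
//     let val = read_target_uint(Endian::Little, &[0x02, 0x01]).unwrap();
//     assert_eq!(val, 0x0102);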