rustc_middle/mir/syntax.rs

//! This defines the syntax of MIR, i.e., the set of available MIR operations, and other definitions
//! closely related to MIR semantics.
//! This is in a dedicated file so that changes to this file can be reviewed more carefully.
//! The intention is that this file only contains datatype declarations, no code.

use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece, Mutability};
use rustc_data_structures::packed::Pu128;
use rustc_hir::CoroutineKind;
use rustc_hir::def_id::DefId;
use rustc_index::IndexVec;
use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable};
use rustc_span::Span;
use rustc_span::def_id::LocalDefId;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::Symbol;
use rustc_target::abi::{FieldIdx, VariantIdx};
use rustc_target::asm::InlineAsmRegOrRegClass;
use smallvec::SmallVec;

use super::{BasicBlock, Const, Local, UserTypeProjection};
use crate::mir::coverage::CoverageKind;
use crate::traits::Reveal;
use crate::ty::adjustment::PointerCoercion;
use crate::ty::{self, GenericArgsRef, List, Region, Ty, UserTypeAnnotationIndex};

/// Represents the "flavors" of MIR.
///
/// All flavors of MIR use the same data structure, but there are some important differences. These
/// differences come in two forms: Dialects and phases.
///
/// Dialects represent a stronger distinction than phases. This is because the transitions between
/// dialects are semantic changes, and therefore technically *lowerings* between distinct IRs. In
/// other words, the same [`Body`](crate::mir::Body) might be well-formed for multiple dialects, but
/// have different semantic meaning and different behavior at runtime.
///
/// Each dialect additionally has a number of phases. However, phase changes never involve semantic
/// changes. If some MIR is well-formed both before and after a phase change, it is also guaranteed
/// that it has the same semantic meaning. In this sense, phase changes can only add additional
/// restrictions on what MIR is well-formed.
///
/// When adding phases, remember to update [`MirPhase::phase_index`].
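///
/// As an illustrative sketch (nothing below relies on it), the derived `Ord` means that phases
/// compare in the order in which MIR passes through them:
/// ```ignore (illustrative)
/// assert!(MirPhase::Built < MirPhase::Analysis(AnalysisPhase::Initial));
/// assert!(MirPhase::Analysis(AnalysisPhase::PostCleanup) < MirPhase::Runtime(RuntimePhase::Initial));
/// ```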
#[derive(Copy, Clone, TyEncodable, TyDecodable, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[derive(HashStable)]
pub enum MirPhase {
    /// The MIR that is generated by MIR building.
    ///
    /// The only things that operate on this dialect are unsafeck, the various MIR lints, and const
    /// qualifs.
    ///
    /// This has no distinct phases.
    Built,
    /// The MIR used for most analysis.
    ///
    /// The only semantic change between analysis and built MIR is constant promotion. In built MIR,
    /// sequences of statements that would generally be subject to constant promotion are
    /// semantically constants, while in analysis MIR all constants are explicit.
    ///
    /// The result of const promotion is available from the `mir_promoted` and `promoted_mir` queries.
    ///
    /// This is the version of MIR used by borrowck and friends.
    Analysis(AnalysisPhase),
    /// The MIR used for CTFE, optimizations, and codegen.
    ///
    /// The semantic changes that occur in the lowering from analysis to runtime MIR are as follows:
    ///
    ///  - Drops: In analysis MIR, `Drop` terminators represent *conditional* drops; roughly speaking,
    ///    if dataflow analysis determines that the place being dropped is uninitialized, the drop will
    ///    not be executed. The exact semantics of this aren't written down anywhere, which means they
    ///    are essentially "what drop elaboration does." In runtime MIR, the drops are unconditional;
    ///    when a `Drop` terminator is reached, if the type has drop glue that drop glue is always
    ///    executed. This may be UB if the underlying place is not initialized.
    ///  - Packed drops: Places might in general be misaligned - in most cases this is UB; the exception
    ///    is fields of packed structs. In analysis MIR, `Drop(P)` for a `P` that might be misaligned
    ///    for this reason implicitly moves `P` to a temporary before dropping. Runtime MIR has no such
    ///    rules, and dropping a misaligned place is simply UB.
    ///  - Unwinding: in analysis MIR, unwinding from a function which may not unwind aborts. In runtime
    ///    MIR, this is UB.
    ///  - Retags: If `-Zmir-emit-retag` is enabled, analysis MIR has "implicit" retags in the same way
    ///    that Rust itself has them. Where exactly these are is generally subject to change, and so we
    ///    don't document this here. Runtime MIR has most retags explicit (though implicit retags
    ///    can still occur at `Rvalue::{Ref,AddrOf}`).
    ///  - Coroutine bodies: In analysis MIR, locals may actually be behind a pointer that user code has
    ///    access to. This occurs in coroutine bodies. Such locals do not behave like other locals,
    ///    because they may, e.g., be aliased in surprising ways. Runtime MIR has no such special locals -
    ///    all coroutine bodies are lowered and so all places that look like locals really are locals.
    ///
    /// Also note that the lint pass which reports e.g. `200_u8 + 200_u8` as an error is run as a part
    /// of analysis to runtime MIR lowering. To ensure lints are reported reliably, this means that
    /// transformations which may suppress such errors should not run on analysis MIR.
    Runtime(RuntimePhase),
}

impl MirPhase {
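    /// Returns a short, human-readable name for this phase (and sub-phase),
    /// e.g. `"runtime-optimized"` for `MirPhase::Runtime(RuntimePhase::Optimized)`.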
    pub fn name(&self) -> &'static str {
        match *self {
            MirPhase::Built => "built",
            MirPhase::Analysis(AnalysisPhase::Initial) => "analysis",
            MirPhase::Analysis(AnalysisPhase::PostCleanup) => "analysis-post-cleanup",
            MirPhase::Runtime(RuntimePhase::Initial) => "runtime",
            MirPhase::Runtime(RuntimePhase::PostCleanup) => "runtime-post-cleanup",
            MirPhase::Runtime(RuntimePhase::Optimized) => "runtime-optimized",
        }
    }

    pub fn reveal(&self) -> Reveal {
        match *self {
            MirPhase::Built | MirPhase::Analysis(_) => Reveal::UserFacing,
            MirPhase::Runtime(_) => Reveal::All,
        }
    }
}

/// See [`MirPhase::Analysis`].
#[derive(Copy, Clone, TyEncodable, TyDecodable, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[derive(HashStable)]
pub enum AnalysisPhase {
    Initial = 0,
    /// Beginning in this phase, the following variants are disallowed:
    /// * [`TerminatorKind::FalseUnwind`]
    /// * [`TerminatorKind::FalseEdge`]
    /// * [`StatementKind::FakeRead`]
    /// * [`StatementKind::AscribeUserType`]
    /// * [`StatementKind::Coverage`] with [`CoverageKind::BlockMarker`] or [`CoverageKind::SpanMarker`]
    /// * [`Rvalue::Ref`] with `BorrowKind::Fake`
    /// * [`CastKind::PointerCoercion`] with any of the following:
    ///   * [`PointerCoercion::ArrayToPointer`]
    ///   * [`PointerCoercion::MutToConstPointer`]
    ///
    /// Furthermore, `Deref` projections must be the first projection within any place (if they
    /// appear at all)
    PostCleanup = 1,
}

/// See [`MirPhase::Runtime`].
#[derive(Copy, Clone, TyEncodable, TyDecodable, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[derive(HashStable)]
pub enum RuntimePhase {
    /// In addition to the semantic changes, beginning with this phase, the following variants are
    /// disallowed:
    /// * [`TerminatorKind::Yield`]
    /// * [`TerminatorKind::CoroutineDrop`]
    /// * [`Rvalue::Aggregate`] for any `AggregateKind` except `Array`
    /// * [`PlaceElem::OpaqueCast`]
    ///
    /// And the following variants are allowed:
    /// * [`StatementKind::Retag`]
    /// * [`StatementKind::SetDiscriminant`]
    /// * [`StatementKind::Deinit`]
    ///
    /// Furthermore, `Copy` operands are allowed for non-`Copy` types.
    Initial = 0,
    /// Beginning with this phase, the following variant is disallowed:
    /// * [`ProjectionElem::Deref`] of `Box`
    PostCleanup = 1,
    Optimized = 2,
}

///////////////////////////////////////////////////////////////////////////
// Borrow kinds

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, TyEncodable, TyDecodable)]
#[derive(Hash, HashStable)]
pub enum BorrowKind {
    /// Data must be immutable and is aliasable.
    Shared,

    /// An immutable, aliasable borrow that is discarded after borrow-checking. Can behave either
    /// like a normal shared borrow or like a special shallow borrow (see [`FakeBorrowKind`]).
    ///
    /// This is used when lowering index expressions and matches, in order to prevent code like
    /// the following from compiling:
    /// ```compile_fail,E0510
    /// let mut x: &[_] = &[[0, 1]];
    /// let y: &[_] = &[];
    /// let _ = x[0][{x = y; 1}];
    /// ```
    /// ```compile_fail,E0510
    /// let mut x = &Some(0);
    /// match *x {
    ///     None => (),
    ///     Some(_) if { x = &None; false } => (),
    ///     Some(_) => (),
    /// }
    /// ```
    /// We can also report errors with this kind of borrow differently.
    Fake(FakeBorrowKind),

    /// Data is mutable and not aliasable.
    Mut { kind: MutBorrowKind },
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, TyEncodable, TyDecodable)]
#[derive(Hash, HashStable)]
pub enum MutBorrowKind {
    Default,
    /// This borrow arose from method-call auto-ref (i.e., `adjustment::Adjust::Borrow`).
    TwoPhaseBorrow,
    /// Data must be immutable but not aliasable. This kind of borrow
    /// cannot currently be expressed by the user and is used only in
    /// implicit closure bindings. It is needed when the closure is
    /// borrowing or mutating a mutable referent, e.g.:
    /// ```
    /// let mut z = 3;
    /// let x: &mut isize = &mut z;
    /// let y = || *x += 5;
    /// ```
    /// If we were to try to translate this closure into a more explicit
    /// form, we'd encounter an error with the code as written:
    /// ```compile_fail,E0594
    /// struct Env<'a> { x: &'a &'a mut isize }
    /// let mut z = 3;
    /// let x: &mut isize = &mut z;
    /// let y = (&mut Env { x: &x }, fn_ptr);  // Closure is pair of env and fn
    /// fn fn_ptr(env: &mut Env) { **env.x += 5; }
    /// ```
    /// This is then illegal because you cannot mutate an `&mut` found
    /// in an aliasable location. To solve, you'd have to translate with
    /// an `&mut` borrow:
    /// ```compile_fail,E0596
    /// struct Env<'a> { x: &'a mut &'a mut isize }
    /// let mut z = 3;
    /// let x: &mut isize = &mut z;
    /// let y = (&mut Env { x: &mut x }, fn_ptr); // changed from &x to &mut x
    /// fn fn_ptr(env: &mut Env) { **env.x += 5; }
    /// ```
    /// Now the assignment to `**env.x` is legal, but creating a
    /// mutable pointer to `x` is not because `x` is not mutable. We
    /// could fix this by declaring `x` as `let mut x`. This is ok in
    /// user code, if awkward, but extra weird for closures, since the
    /// borrow is hidden.
    ///
    /// So we introduce a `ClosureCapture` borrow -- the user will not have to mark the variable
    /// containing the mutable reference as `mut`, as they never
    /// intended to mutate the mutable reference itself. We still capture it mutably in order to
    /// mutate the pointed-to value through it (without mutating the reference itself).
    ///
    /// This solves the problem. For simplicity, we don't give users a way to express this
    /// borrow; it's just used when translating closures.
    ClosureCapture,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, TyEncodable, TyDecodable)]
#[derive(Hash, HashStable)]
pub enum FakeBorrowKind {
    /// A shared shallow borrow. The immediately borrowed place must be immutable, but projections
    /// from it don't need to be. For example, a shallow borrow of `a.b` doesn't conflict with a
    /// mutable borrow of `a.b.c`.
    ///
    /// This is used when lowering matches: when matching on a place we want to ensure that the place
    /// has the same value from the start of the match until an arm is selected. This prevents this
    /// code from compiling:
    /// ```compile_fail,E0510
    /// let mut x = &Some(0);
    /// match *x {
    ///     None => (),
    ///     Some(_) if { x = &None; false } => (),
    ///     Some(_) => (),
    /// }
    /// ```
    /// This can't be a shared borrow because mutably borrowing `(*x as Some).0` should not conflict
    /// with checking the discriminant or accessing other variants, because mutating `(*x as Some).0`
    /// can't affect the discriminant of `x`. E.g. the following is allowed:
    /// ```rust
    /// let mut x = Some(0);
    /// match x {
    ///     Some(_)
    ///         if {
    ///             if let Some(ref mut y) = x {
    ///                 *y += 1;
    ///             };
    ///             true
    ///         } => {}
    ///     _ => {}
    /// }
    /// ```
    Shallow,
    /// A shared (deep) borrow. Data must be immutable and is aliasable.
    ///
    /// This is used when lowering deref patterns, where shallow borrows wouldn't prevent something
    /// like:
    // ```compile_fail
    // let mut b = Box::new(false);
    // match b {
    //     deref!(true) => {} // not reached because `*b == false`
    //     _ if { *b = true; false } => {} // not reached because the guard is `false`
    //     deref!(false) => {} // not reached because the guard changed it
    //     // UB because we reached the unreachable.
    // }
    // ```
    Deep,
}

///////////////////////////////////////////////////////////////////////////
// Statements

/// The various kinds of statements that can appear in MIR.
///
/// Not all of these are allowed at every [`MirPhase`]. Check the documentation there to see which
/// ones you do not have to worry about. The MIR validator will generally enforce such restrictions,
/// causing an ICE if they are violated.
#[derive(Clone, Debug, PartialEq, TyEncodable, TyDecodable, Hash, HashStable)]
#[derive(TypeFoldable, TypeVisitable)]
pub enum StatementKind<'tcx> {
    /// Assign statements roughly correspond to an assignment in Rust proper (`x = ...`) except
    /// without the possibility of dropping the previous value (that must be done separately, if at
    /// all). The *exact* way this works is undecided. It probably does something like evaluating
    /// the LHS to a place and the RHS to a value, and then storing the value to the place. Various
    /// parts of this may do type specific things that are more complicated than simply copying
    /// bytes.
    ///
    /// **Needs clarification**: The implication of the above idea would be that assignment implies
    /// that the resulting value is initialized. I believe we could commit to this separately from
    /// committing to whatever part of the memory model we would need to decide on to make the above
    /// paragraph precise. Do we want to?
    ///
    /// Assignments in which the types of the place and rvalue differ are not well-formed.
    ///
    /// **Needs clarification**: Do we ever want to worry about non-free (in the body) lifetimes for
    /// the typing requirement in post drop-elaboration MIR? I think probably not - I'm not sure we
    /// could meaningfully require this anyway. How about free lifetimes? Is ignoring this
    /// interesting for optimizations? Do we want to allow such optimizations?
    ///
    /// **Needs clarification**: We currently require that the LHS place not overlap with any place
    /// read as part of computation of the RHS for some rvalues (generally those not producing
    /// primitives). This requirement is under discussion in [#68364]. As a part of this discussion,
    /// it is also unclear in what order the components are evaluated.
    ///
    /// [#68364]: https://github.com/rust-lang/rust/issues/68364
    ///
    /// See [`Rvalue`] documentation for details on each of those.
    Assign(Box<(Place<'tcx>, Rvalue<'tcx>)>),

    /// This represents all the reading that a pattern match may do (e.g., inspecting constants and
    /// discriminant values), and the kind of pattern it comes from, so that potential
    /// error messages can be adapted to these specific patterns.
    ///
    /// Note that this also is emitted for regular `let` bindings to ensure that locals that are
    /// never accessed still get some sanity checks for, e.g., `let x: ! = ..;`
    ///
    /// When executed at runtime this is a nop.
    ///
    /// Disallowed after drop elaboration.
    FakeRead(Box<(FakeReadCause, Place<'tcx>)>),

    /// Write the discriminant for a variant to the enum Place.
    ///
    /// This is permitted for both coroutines and ADTs. This does not necessarily write to the
    /// entire place; instead, it writes to the minimum set of bytes as required by the layout for
    /// the type.
    SetDiscriminant { place: Box<Place<'tcx>>, variant_index: VariantIdx },

    /// Deinitializes the place.
    ///
    /// This writes `uninit` bytes to the entire place.
    Deinit(Box<Place<'tcx>>),

    /// `StorageLive` and `StorageDead` statements mark the live range of a local.
    ///
    /// At any point during the execution of a function, each local is either allocated or
    /// unallocated. Except as noted below, all locals except function parameters are initially
    /// unallocated. `StorageLive` statements cause memory to be allocated for the local while
    /// `StorageDead` statements cause the memory to be freed. In other words,
    /// `StorageLive`/`StorageDead` act like the heap operations `allocate`/`deallocate`, but for
    /// stack-allocated local variables. Using a local in any way (not only reading/writing from it)
    /// while it is unallocated is UB.
    ///
    /// Some locals have no `StorageLive` or `StorageDead` statements within the entire MIR body.
    /// These locals are implicitly allocated for the full duration of the function. There is a
    /// convenience method at `rustc_mir_dataflow::storage::always_storage_live_locals` for
    /// computing these locals.
    ///
    /// If the local is already allocated, calling `StorageLive` again will implicitly free the
    /// local and then allocate fresh uninitialized memory. If a local is already deallocated,
    /// calling `StorageDead` again is a NOP.
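    ///
    /// As an illustrative sketch (not the exact MIR the compiler emits), a scoped binding
    /// roughly corresponds to a `StorageLive`/`StorageDead` pair:
    /// ```ignore (illustrative)
    /// {              // StorageLive(x)
    ///     let x = 5; // x = const 5_i32
    /// }              // StorageDead(x)
    /// ```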
    StorageLive(Local),

    /// See `StorageLive` above.
    StorageDead(Local),

    /// Retag references in the given place, ensuring they got fresh tags.
    ///
    /// This is part of the Stacked Borrows model. These statements are currently only interpreted
    /// by miri and only generated when `-Z mir-emit-retag` is passed. See
    /// <https://internals.rust-lang.org/t/stacked-borrows-an-aliasing-model-for-rust/8153/> for
    /// more details.
    ///
    /// For code that is not specific to Stacked Borrows, you should treat retags as reading and
    /// modifying the place in an opaque way.
    ///
    /// Only `RetagKind::Default` and `RetagKind::FnEntry` are permitted.
    Retag(RetagKind, Box<Place<'tcx>>),

    /// This statement exists to preserve a trace of a scrutinee matched against a wildcard binding.
    /// This is especially useful for `let _ = PLACE;` bindings that desugar to a single
    /// `PlaceMention(PLACE)`.
    ///
    /// When executed at runtime, this computes the given place, but then discards
    /// it without doing a load. `let _ = *ptr;` is fine even if the pointer is dangling.
    PlaceMention(Box<Place<'tcx>>),

    /// Encodes a user's type ascription. These need to be preserved
    /// intact so that NLL can respect them. For example:
    /// ```ignore (illustrative)
    /// let a: T = y;
    /// ```
    /// The effect of this annotation is to relate the type `T_y` of the place `y`
    /// to the user-given type `T`. The effect depends on the specified variance:
    ///
    /// - `Covariant` -- requires that `T_y <: T`
    /// - `Contravariant` -- requires that `T_y :> T`
    /// - `Invariant` -- requires that `T_y == T`
    /// - `Bivariant` -- no effect
    ///
    /// When executed at runtime this is a nop.
    ///
    /// Disallowed after drop elaboration.
    AscribeUserType(Box<(Place<'tcx>, UserTypeProjection)>, ty::Variance),

    /// Carries control-flow-sensitive information injected by `-Cinstrument-coverage`,
    /// such as where to generate physical coverage-counter-increments during codegen.
    ///
    /// Coverage statements are used in conjunction with the coverage mappings and other
    /// information stored in the function's
    /// [`mir::Body::function_coverage_info`](crate::mir::Body::function_coverage_info).
    /// (For inlined MIR, take care to look up the *original function's* coverage info.)
    ///
    /// Interpreters and codegen backends that don't support coverage instrumentation
    /// can usually treat this as a no-op.
    Coverage(CoverageKind),

    /// Denotes a call to an intrinsic that does not require an unwind path and always returns.
    /// This avoids adding a new block and a terminator for simple intrinsics.
    Intrinsic(Box<NonDivergingIntrinsic<'tcx>>),

    /// Instructs the const eval interpreter to increment a counter; this counter is used to track
    /// how many steps the interpreter has taken. It is used to prevent the user from writing const
    /// code that runs for too long or infinitely. Other than in the const eval interpreter, this
    /// is a no-op.
    ConstEvalCounter,

    /// No-op. Useful for deleting instructions without affecting statement indices.
    Nop,
}

impl StatementKind<'_> {
    /// Returns a simple string representation of a `StatementKind` variant, independent of any
    /// values it might hold (e.g. `StatementKind::Assign` always returns `"Assign"`).
    pub const fn name(&self) -> &'static str {
        match self {
            StatementKind::Assign(..) => "Assign",
            StatementKind::FakeRead(..) => "FakeRead",
            StatementKind::SetDiscriminant { .. } => "SetDiscriminant",
            StatementKind::Deinit(..) => "Deinit",
            StatementKind::StorageLive(..) => "StorageLive",
            StatementKind::StorageDead(..) => "StorageDead",
            StatementKind::Retag(..) => "Retag",
            StatementKind::PlaceMention(..) => "PlaceMention",
            StatementKind::AscribeUserType(..) => "AscribeUserType",
            StatementKind::Coverage(..) => "Coverage",
            StatementKind::Intrinsic(..) => "Intrinsic",
            StatementKind::ConstEvalCounter => "ConstEvalCounter",
            StatementKind::Nop => "Nop",
        }
    }
}

#[derive(
    Clone,
    TyEncodable,
    TyDecodable,
    Debug,
    PartialEq,
    Hash,
    HashStable,
    TypeFoldable,
    TypeVisitable
)]
pub enum NonDivergingIntrinsic<'tcx> {
    /// Denotes a call to the intrinsic function `assume`.
    ///
    /// The operand must be a boolean. Optimizers may use the value of the boolean to backtrack its
    /// computation to infer information about other variables. So if the boolean came from an
    /// `x < y` operation, subsequent operations on `x` and `y` could elide various bounds checks.
    /// If the argument is `false`, this operation is equivalent to `TerminatorKind::Unreachable`.
    Assume(Operand<'tcx>),

    /// Denotes a call to the intrinsic function `copy_nonoverlapping`.
    ///
    /// First, all three operands are evaluated. `src` and `dest` must each be a reference, pointer,
    /// or `Box` pointing to the same type `T`. `count` must evaluate to a `usize`. Then, `src` and
    /// `dest` are dereferenced, and `count * size_of::<T>()` bytes beginning with the first byte of
    /// the `src` place are copied to the contiguous range of bytes beginning with the first byte
    /// of `dest`.
    ///
    /// **Needs clarification**: In what order are operands computed and dereferenced? It should
    /// probably match the order for assignment, but that is also undecided.
    ///
    /// **Needs clarification**: Is this typed or not, i.e. is there a typed load and store involved?
    /// I vaguely remember Ralf saying somewhere that he thought it should not be.
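    ///
    /// A rough surface-level analogue (a sketch only; this statement is lower-level than the
    /// library function, and its operands need not be raw pointers):
    /// ```ignore (illustrative)
    /// unsafe { std::ptr::copy_nonoverlapping(src, dst, count) }
    /// ```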
    CopyNonOverlapping(CopyNonOverlapping<'tcx>),
}

/// Describes what kind of retag is to be performed.
#[derive(Copy, Clone, TyEncodable, TyDecodable, Debug, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
pub enum RetagKind {
    /// The initial retag of arguments when entering a function.
    FnEntry,
    /// Retag preparing for a two-phase borrow.
    TwoPhase,
    /// Retagging raw pointers.
    Raw,
    /// A "normal" retag.
    Default,
}

/// The `FakeReadCause` describes the kind of pattern for which a `FakeRead` statement exists.
#[derive(Copy, Clone, TyEncodable, TyDecodable, Debug, Hash, HashStable, PartialEq)]
pub enum FakeReadCause {
    /// Inject a fake read of the borrowed input at the end of each guard's
    /// code.
    ///
    /// This should ensure that you cannot change the variant for an enum while
    /// you are in the midst of matching on it.
    ForMatchGuard,

    /// `let x: !; match x {}` doesn't generate any read of `x` so we need to
    /// generate a read of `x` to check that it is initialized and safe.
    ///
    /// If a closure pattern matches a Place starting with an Upvar, then we introduce a
    /// FakeRead for that Place outside the closure; in such a case this option would be
    /// `Some(closure_def_id)`.
    /// Otherwise, the value of the optional LocalDefId will be None.
    //
    // We can use LocalDefId here since fake read statements are removed
    // before codegen in the `CleanupNonCodegenStatements` pass.
    ForMatchedPlace(Option<LocalDefId>),

    /// A fake read of the RefWithinGuard version of a bind-by-value variable
    /// in a match guard to ensure that its value hasn't changed by the time
    /// we create the OutsideGuard version.
    ForGuardBinding,

    /// Officially, the semantics of
    ///
    /// `let pattern = <expr>;`
    ///
    /// is that `<expr>` is evaluated into a temporary and then this temporary is
    /// matched against the pattern.
    ///
    /// However, if we see the simple pattern `let var = <expr>`, we optimize this to
    /// evaluate `<expr>` directly into the variable `var`. This is mostly unobservable,
    /// but in some cases it can affect the borrow checker, as in #53695.
    /// Therefore, we insert a "fake read" here to ensure that we get
    /// appropriate errors.
    ///
    /// If a closure pattern matches a Place starting with an Upvar, then we introduce a
    /// FakeRead for that Place outside the closure; in such a case this option would be
    /// `Some(closure_def_id)`.
    /// Otherwise, the value of the optional LocalDefId will be None.
    ForLet(Option<LocalDefId>),

    /// If we have an index expression like
    ///
    /// `(*x)[1][{ x = y; 4}]`
    ///
    /// then the first bounds check is invalidated when we evaluate the second
    /// index expression. Thus we create a fake borrow of `x` across the second
    /// indexer, which will cause a borrow check error.
    ForIndex,
}

#[derive(Clone, Debug, PartialEq, TyEncodable, TyDecodable, Hash, HashStable)]
#[derive(TypeFoldable, TypeVisitable)]
pub struct CopyNonOverlapping<'tcx> {
    pub src: Operand<'tcx>,
    pub dst: Operand<'tcx>,
    /// Number of elements to copy from `src` to `dst`, not bytes.
    pub count: Operand<'tcx>,
}

/// Represents how a [`TerminatorKind::Call`] was constructed.
/// Used only for diagnostics.
#[derive(Clone, Copy, TyEncodable, TyDecodable, Debug, PartialEq, Hash, HashStable)]
#[derive(TypeFoldable, TypeVisitable)]
pub enum CallSource {
    /// This came from something such as `a > b` or `a + b`. In THIR, if `from_hir_call`
    /// is false then this is the desugaring.
    OverloadedOperator,
    /// This came from a comparison generated by a match, and is used by const-eval for better errors
    /// when the comparison cannot be done at compile time.
    ///
    /// (see <https://github.com/rust-lang/rust/issues/90237>)
    MatchCmp,
    /// Other types of desugaring that did not come from the HIR, but we don't care about
    /// for diagnostics (yet).
    Misc,
    /// Normal function call, no special source
    Normal,
}

impl CallSource {
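    /// Returns `true` only for [`CallSource::Normal`], i.e. for calls that were written as
    /// actual call expressions in the source rather than introduced by a desugaring.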
    pub fn from_hir_call(self) -> bool {
        matches!(self, CallSource::Normal)
    }
}

#[derive(Clone, Copy, Debug, TyEncodable, TyDecodable, Hash, HashStable, PartialEq)]
#[derive(TypeFoldable, TypeVisitable)]
/// The macro that an inline assembly block was created by
pub enum InlineAsmMacro {
    /// The `asm!` macro
    Asm,
    /// The `naked_asm!` macro
    NakedAsm,
}

impl InlineAsmMacro {
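    /// Whether the inline assembly block produced by this macro can never return to the code
    /// after it: `naked_asm!` never falls through, and `asm!` only diverges when the
    /// `noreturn` option is set.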
    pub const fn diverges(self, options: InlineAsmOptions) -> bool {
        match self {
            InlineAsmMacro::Asm => options.contains(InlineAsmOptions::NORETURN),
            InlineAsmMacro::NakedAsm => true,
        }
    }
}

///////////////////////////////////////////////////////////////////////////
// Terminators

/// The various kinds of terminators, representing ways of exiting from a basic block.
///
/// A note on unwinding: Panics may occur during the execution of some terminators. Depending on the
/// `-C panic` flag, this may either cause the program to abort or the call stack to unwind. Such
/// terminators have an `unwind: UnwindAction` field on them. If stack unwinding occurs, then
/// once the current function is reached, an action will be taken based on the `unwind` field.
/// If the action is `Cleanup`, then the execution continues at the given basic block. If the
/// action is `Continue` then no cleanup is performed, and the stack continues unwinding.
///
/// The basic block pointed to by a `Cleanup` unwind action must have its `cleanup` flag set.
/// `cleanup` basic blocks have a couple restrictions:
///  1. All `unwind` fields in them must be `UnwindAction::Terminate` or `UnwindAction::Unreachable`.
///  2. `Return` terminators are not allowed in them. `Terminate` and `Resume` terminators are.
///  3. All other basic blocks (in the current body) that are reachable from `cleanup` basic blocks
///     must also be `cleanup`. This is a part of the type system and checked statically, so it is
///     still an error to have such an edge in the CFG even if it's known that it won't be taken at
///     runtime.
///  4. The control flow between cleanup blocks must look like an upside down tree. Roughly
///     speaking, this means that control flow that looks like a V is allowed, while control flow
///     that looks like a W is not. This is necessary to ensure that landing pad information can be
///     correctly codegened on MSVC. More precisely:
///
///     Begin with the standard control flow graph `G`. Modify `G` as follows: for any two cleanup
///     vertices `u` and `v` such that `u` dominates `v`, contract `u` and `v` into a single vertex,
///     deleting self edges and duplicate edges in the process. Now remove all vertices from `G`
///     that are not cleanup vertices or are not reachable. The resulting graph must be an inverted
///     tree, that is each vertex may have at most one successor and there may be no cycles.
#[derive(Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq, TypeFoldable, TypeVisitable)]
pub enum TerminatorKind<'tcx> {
    /// Block has one successor; we continue execution there.
    Goto { target: BasicBlock },

    /// Switches based on the computed value.
    ///
    /// First, evaluates the `discr` operand. The type of the operand must be a signed or unsigned
    /// integer, char, or bool, and must match the given type. Then, if the list of switch targets
    /// contains the computed value, continues execution at the associated basic block. Otherwise,
    /// continues execution at the "otherwise" basic block.
    ///
    /// Target values may not appear more than once.
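    ///
    /// As an illustrative sketch (local and block numbers hypothetical), a `match` on an
    /// integer is lowered to something like the following, in MIR pretty-printer notation:
    /// ```ignore (illustrative)
    /// // match x { 0 => f(), 7 => g(), _ => h() }
    /// // becomes roughly:
    /// //     switchInt(copy _1) -> [0: bb1, 7: bb2, otherwise: bb3];
    /// // where bb1 calls `f`, bb2 calls `g`, and bb3 calls `h`.
    /// ```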
    SwitchInt {
        /// The discriminant value being tested.
        discr: Operand<'tcx>,
        targets: SwitchTargets,
    },

    /// Indicates that the landing pad is finished and that the process should continue unwinding.
    ///
    /// Like a return, this marks the end of this invocation of the function.
    ///
    /// Only permitted in cleanup blocks. `Resume` is not permitted with `-C unwind=abort` after
    /// deaggregation runs.
    UnwindResume,

    /// Indicates that the landing pad is finished and that the process should terminate.
    ///
    /// Used to prevent unwinding for foreign items or with `-C unwind=abort`. Only permitted in
    /// cleanup blocks.
    UnwindTerminate(UnwindTerminateReason),

    /// Returns from the function.
    ///
    /// Like function calls, the exact semantics of returns in Rust are unclear. Returning very
    /// likely at least assigns the value currently in the return place (`_0`) to the place
    /// specified in the associated `Call` terminator in the calling function, as if assigned via
    /// `dest = move _0`. It might additionally do other things, like have side-effects in the
    /// aliasing model.
    ///
    /// If the body is a coroutine body, this has slightly different semantics; it instead causes a
    /// `CoroutineState::Returned(_0)` to be created (as if by an `Aggregate` rvalue) and assigned
    /// to the return place.
    Return,

    /// Indicates a terminator that can never be reached.
    ///
    /// Executing this terminator is UB.
    Unreachable,

    /// The behavior of this statement differs significantly before and after drop elaboration.
    ///
    /// After drop elaboration: `Drop` terminators are a complete nop for types that have no drop
    /// glue. For other types, `Drop` terminators behave exactly like a call to
    /// `core::mem::drop_in_place` with a pointer to the given place.
    ///
    /// `Drop` before drop elaboration is a *conditional* execution of the drop glue. Specifically,
    /// the `Drop` will be executed if...
    ///
    /// **Needs clarification**: End of that sentence. This in effect should document the exact
    /// behavior of drop elaboration. The following sounds vaguely right, but I'm not quite sure:
    ///
    /// > The drop glue is executed if, among all statements executed within this `Body`, an assignment to
    /// > the place or one of its "parents" occurred more recently than a move out of it. This does not
    /// > consider indirect assignments.
    ///
    /// The `replace` flag indicates whether this terminator was created as part of an assignment.
    /// This should only be used for diagnostic purposes, and does not have any operational
    /// meaning.
    Drop { place: Place<'tcx>, target: BasicBlock, unwind: UnwindAction, replace: bool },

    /// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of
    /// the referred to function. The operand types must match the argument types of the function.
    /// The return place type must match the return type. The type of the `func` operand must be
    /// callable, meaning either a function pointer, a function type, or a closure type.
    ///
    /// **Needs clarification**: The exact semantics of this. Current backends rely on `move`
    /// operands not aliasing the return place. It is unclear how this is justified in MIR, see
    /// [#71117].
    ///
    /// [#71117]: https://github.com/rust-lang/rust/issues/71117
    Call {
        /// The function that’s being called.
        func: Operand<'tcx>,
        /// Arguments the function is called with.
        /// These are owned by the callee, which is free to modify them.
        /// This allows the memory occupied by "by-value" arguments to be
        /// reused across function calls without duplicating the contents.
        /// The span for each arg is also included
        /// (e.g. `a` and `b` in `x.foo(a, b)`).
        args: Box<[Spanned<Operand<'tcx>>]>,
        /// Where the returned value will be written
        destination: Place<'tcx>,
        /// Where to go after this call returns. If none, the call necessarily diverges.
        target: Option<BasicBlock>,
        /// Action to be taken if the call unwinds.
        unwind: UnwindAction,
        /// Where this call came from in HIR/THIR.
        call_source: CallSource,
        /// This `Span` is the span of the function, without the dot and receiver
        /// e.g. `foo(a, b)` in `x.foo(a, b)`
        fn_span: Span,
    },

    /// Tail call.
    ///
    /// Roughly speaking this is a chimera of [`Call`] and [`Return`], with some caveats.
    /// Semantically, tail calls consist of two actions:
    /// - pop of the current stack frame
    /// - a call to the `func`, with the return address of the **current** caller
    ///   - so that a `return` inside `func` returns directly to the caller of the
    ///     function that executed this tail call
    ///
    /// Note that, unlike [`Call`], this is missing
    /// - `destination` (because it's always the return place)
    /// - `target` (because it's always taken from the current stack frame)
    /// - `unwind` (because it's always taken from the current stack frame)
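    ///
    /// In surface Rust this corresponds to the (unstable) `become` expression, sketched here
    /// for illustration only:
    /// ```ignore (illustrative)
    /// fn f(x: u32) -> u32 {
    ///     become g(x) // tail-calls `g`, popping `f`'s frame first
    /// }
    /// ```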
    ///
    /// [`Call`]: TerminatorKind::Call
    /// [`Return`]: TerminatorKind::Return
    TailCall {
        /// The function that’s being called.
        func: Operand<'tcx>,
        /// Arguments the function is called with.
        /// These are owned by the callee, which is free to modify them.
        /// This allows the memory occupied by "by-value" arguments to be
        /// reused across function calls without duplicating the contents.
        args: Box<[Spanned<Operand<'tcx>>]>,
        // FIXME(explicit_tail_calls): should we have the span for `become`? is this span accurate? do we need it?
        /// This `Span` is the span of the function, without the dot and receiver
        /// (e.g. `foo(a, b)` in `x.foo(a, b)`)
        fn_span: Span,
    },

    /// Evaluates the operand, which must have type `bool`. If it is not equal to `expected`,
    /// initiates a panic. Initiating a panic corresponds to a `Call` terminator with some
    /// unspecified constant as the function to call, all the operands stored in the `AssertMessage`
    /// as parameters, and `None` for the destination. Keep in mind that the `cleanup` path is not
    /// necessarily executed even in the case of a panic, for example in `-C panic=abort`. If the
    /// assertion does not fail, execution continues at the specified basic block.
    ///
    /// When overflow checking is disabled and this is run-time MIR (as opposed to compile-time MIR
    /// that is used for CTFE), the following variants of this terminator behave as `goto target`:
    /// - `OverflowNeg(..)`,
    /// - `Overflow(op, ..)` if op is add, sub, mul, shl, shr, but NOT div or rem.
    Assert {
        cond: Operand<'tcx>,
        expected: bool,
        msg: Box<AssertMessage<'tcx>>,
        target: BasicBlock,
        unwind: UnwindAction,
    },

    /// Marks a suspend point.
    ///
    /// Like `Return` terminators in coroutine bodies, this computes `value` and then creates a
    /// `CoroutineState::Yielded(value)` as if by an `Aggregate` rvalue. That value is then assigned to
    /// the return place of the function calling this one, and execution continues in the calling
    /// function. When next invoked with the same first argument, execution of this function
    /// continues at the `resume` basic block, with the second argument written to the `resume_arg`
    /// place. If the coroutine is dropped before then, the `drop` basic block is invoked.
    ///
    /// Not permitted in bodies that are not coroutine bodies, or after coroutine lowering.
    ///
    /// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`?
    Yield {
        /// The value to return.
        value: Operand<'tcx>,
        /// Where to resume to.
        resume: BasicBlock,
        /// The place to store the resume argument in.
        resume_arg: Place<'tcx>,
        /// Cleanup to be done if the coroutine is dropped at this suspend point.
        drop: Option<BasicBlock>,
    },

    /// Indicates the end of dropping a coroutine.
    ///
    /// Semantically just a `return` (from the coroutine's drop glue). Only permitted in the same situations
    /// as `yield`.
    ///
    /// **Needs clarification**: Is that even correct? The coroutine drop code is always confusing
    /// to me, because it's not even really in the current body.
    ///
    /// **Needs clarification**: Are there type system constraints on these terminators? Should
    /// there be a "block type" like `cleanup` blocks for them?
    CoroutineDrop,

    /// A block where control flow only ever takes one real path, but borrowck needs to be more
    /// conservative.
    ///
    /// At runtime this is semantically just a goto.
    ///
    /// Disallowed after drop elaboration.
    FalseEdge {
        /// The target normal control flow will take.
        real_target: BasicBlock,
        /// A block control flow could conceptually jump to, but won't in
        /// practice.
        imaginary_target: BasicBlock,
    },

    /// A terminator for blocks that only take one path in reality, but where we reserve the right
    /// to unwind in borrowck, even if it won't happen in practice. This can arise in infinite loops
    /// with no function calls for example.
    ///
    /// At runtime this is semantically just a goto.
    ///
    /// Disallowed after drop elaboration.
    FalseUnwind {
        /// The target normal control flow will take.
        real_target: BasicBlock,
        /// The imaginary cleanup block link. This particular path will never be taken
        /// in practice, but in order to avoid fragility we want to always
        /// consider it in borrowck. We don't want to accept programs which
        /// pass borrowck only when `panic=abort` or some assertions are disabled
        /// due to release vs. debug mode builds.
        unwind: UnwindAction,
    },

    /// Block ends with an inline assembly block. This is a terminator since
    /// inline assembly is allowed to diverge.
    InlineAsm {
        /// Macro used to create this inline asm: one of `asm!` or `naked_asm!`
        asm_macro: InlineAsmMacro,

        /// The template for the inline assembly, with placeholders.
        template: &'tcx [InlineAsmTemplatePiece],

        /// The operands for the inline assembly, as `Operand`s or `Place`s.
        operands: Box<[InlineAsmOperand<'tcx>]>,

        /// Miscellaneous options for the inline assembly.
        options: InlineAsmOptions,

        /// Source spans for each line of the inline assembly code. These are
        /// used to map assembler errors back to the line in the source code.
        line_spans: &'tcx [Span],

        /// Valid targets for the inline assembly.
        /// The first element is the fallthrough destination, unless
        /// `asm_macro == InlineAsmMacro::NakedAsm` or `InlineAsmOptions::NORETURN` is set.
        targets: Box<[BasicBlock]>,

        /// Action to be taken if the inline assembly unwinds. This is present
        /// if and only if `InlineAsmOptions::MAY_UNWIND` is set.
        unwind: UnwindAction,
    },
}

impl TerminatorKind<'_> {
    /// Returns a simple string representation of a `TerminatorKind` variant, independent of any
    /// values it might hold (e.g. `TerminatorKind::Call` always returns `"Call"`).
    pub const fn name(&self) -> &'static str {
        match self {
            TerminatorKind::Goto { .. } => "Goto",
            TerminatorKind::SwitchInt { .. } => "SwitchInt",
            TerminatorKind::UnwindResume => "UnwindResume",
            TerminatorKind::UnwindTerminate(_) => "UnwindTerminate",
            TerminatorKind::Return => "Return",
            TerminatorKind::Unreachable => "Unreachable",
            TerminatorKind::Drop { .. } => "Drop",
            TerminatorKind::Call { .. } => "Call",
            TerminatorKind::TailCall { .. } => "TailCall",
            TerminatorKind::Assert { .. } => "Assert",
            TerminatorKind::Yield { .. } => "Yield",
            TerminatorKind::CoroutineDrop => "CoroutineDrop",
            TerminatorKind::FalseEdge { .. } => "FalseEdge",
            TerminatorKind::FalseUnwind { .. } => "FalseUnwind",
            TerminatorKind::InlineAsm { .. } => "InlineAsm",
        }
    }
}

#[derive(Debug, Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq)]
pub struct SwitchTargets {
    /// Possible values. The locations to branch to in each case
    /// are found in the corresponding indices from the `targets` vector.
    pub(super) values: SmallVec<[Pu128; 1]>,

    /// Possible branch sites. The last element of this vector is used
    /// for the otherwise branch, so `targets.len() == values.len() + 1`
    /// should hold.
    //
    // This invariant is quite non-obvious and also could be improved.
    // One way to improve this would be to have something like this instead:
    //
    // branches: Vec<(ConstInt, BasicBlock)>,
    // otherwise: Option<BasicBlock> // exhaustive if None
    //
    // However, we've decided to keep this as-is until we find a case
    // where some other approach seems to be strictly better than this one.
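    //
    // As an illustrative sketch of the layout (block names hypothetical): a switch lowered
    // from `match x { 0 => .., 7 => .., _ => .. }` would have `values = [0, 7]` and
    // `targets = [bb_for_0, bb_for_7, bb_otherwise]`, the final element being the mandatory
    // "otherwise" block.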
    pub(super) targets: SmallVec<[BasicBlock; 2]>,
}

/// Action to be taken when a stack unwind happens.
#[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
#[derive(TypeFoldable, TypeVisitable)]
pub enum UnwindAction {
    /// No action is to be taken. Continue unwinding.
    ///
    /// This is similar to `Cleanup(bb)` where `bb` does nothing but `Resume`, but they are not
    /// equivalent, as the presence of `Cleanup(_)` makes a frame no longer a POF ("Plain Old Frame").
    Continue,
    /// Triggers undefined behavior if unwind happens.
    Unreachable,
    /// Terminates the execution if unwind happens.
    ///
    /// Depending on the platform and situation this may cause a non-unwindable panic or abort.
    Terminate(UnwindTerminateReason),
    /// Cleanups to be done.
    Cleanup(BasicBlock),
}

/// The reason we are terminating the process during unwinding.
#[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
#[derive(TypeFoldable, TypeVisitable)]
pub enum UnwindTerminateReason {
    /// Unwinding is just not possible given the ABI of this function.
    Abi,
    /// We were already cleaning up for an ongoing unwind, and a *second*, *nested* unwind was
    /// triggered by the drop glue.
    InCleanup,
}

/// Information about an assertion failure.
#[derive(Clone, Hash, HashStable, PartialEq, Debug)]
#[derive(TyEncodable, TyDecodable, TypeFoldable, TypeVisitable)]
pub enum AssertKind<O> {
    BoundsCheck { len: O, index: O },
    Overflow(BinOp, O, O),
    OverflowNeg(O),
    DivisionByZero(O),
    RemainderByZero(O),
    ResumedAfterReturn(CoroutineKind),
    ResumedAfterPanic(CoroutineKind),
    MisalignedPointerDereference { required: O, found: O },
}

#[derive(Clone, Debug, PartialEq, TyEncodable, TyDecodable, Hash, HashStable)]
#[derive(TypeFoldable, TypeVisitable)]
pub enum InlineAsmOperand<'tcx> {
    In {
        reg: InlineAsmRegOrRegClass,
        value: Operand<'tcx>,
    },
    Out {
        reg: InlineAsmRegOrRegClass,
        late: bool,
        place: Option<Place<'tcx>>,
    },
    InOut {
        reg: InlineAsmRegOrRegClass,
        late: bool,
        in_value: Operand<'tcx>,
        out_place: Option<Place<'tcx>>,
    },
    Const {
        value: Box<ConstOperand<'tcx>>,
    },
    SymFn {
        value: Box<ConstOperand<'tcx>>,
    },
    SymStatic {
        def_id: DefId,
    },
    Label {
        /// This represents the index into the `targets` array in `TerminatorKind::InlineAsm`.
        target_index: usize,
    },
}

/// Type for MIR `Assert` terminator error messages.
pub type AssertMessage<'tcx> = AssertKind<Operand<'tcx>>;

///////////////////////////////////////////////////////////////////////////
// Places

/// Places roughly correspond to a "location in memory." Places in MIR are the same mathematical
/// object as places in Rust. This of course means that what exactly they are is undecided and part
/// of the Rust memory model. However, they will likely contain at least the following pieces of
/// information in some form:
///
///  1. The address in memory that the place refers to.
///  2. The provenance with which the place is being accessed.
///  3. The type of the place and an optional variant index. See [`PlaceTy`][super::tcx::PlaceTy].
///  4. Optionally, some metadata. This exists if and only if the type of the place is not `Sized`.
///
/// We'll give a description below of how all pieces of the place except for the provenance are
/// calculated. We cannot give a description of the provenance, because that is part of the
/// undecided aliasing model - we only include it here at all to acknowledge its existence.
///
/// Each local naturally corresponds to the place `Place { local, projection: [] }`. This place has
/// the address of the local's allocation and the type of the local.
///
/// **Needs clarification:** Unsized locals seem to present a bit of an issue. Their allocation
/// can't actually be created on `StorageLive`, because it's unclear how big to make the allocation.
/// Furthermore, MIR produces assignments to unsized locals, although that is not permitted under
/// `#![feature(unsized_locals)]` in Rust. Besides just putting "unsized locals are special and
/// different" in a bunch of places, I (JakobDegen) don't know how to incorporate this behavior into
/// the current MIR semantics in a clean way - possibly this needs some design work first.
///
/// For places that are not locals, i.e. they have a non-empty list of projections, we define the
/// values as a function of the parent place, that is the place with its last [`ProjectionElem`]
/// stripped. The way this is computed of course depends on the kind of that last projection
/// element (a short worked sketch follows the list below):
///
///  - [`Downcast`](ProjectionElem::Downcast): This projection sets the place's variant index to the
///    given one, and makes no other changes. A `Downcast` projection must always be followed
///    immediately by a `Field` projection.
///  - [`Field`](ProjectionElem::Field): `Field` projections take their parent place and create a
///    place referring to one of the fields of the type. The resulting address is the parent
///    address, plus the offset of the field. The type becomes the type of the field. If the parent
///    was unsized and so had metadata associated with it, then the metadata is retained if the
///    field is unsized and thrown out if it is sized.
///
///    These projections are only legal for tuples, ADTs, closures, and coroutines. If the ADT or
///    coroutine has more than one variant, the parent place's variant index must be set, indicating
///    which variant is being used. If it has just one variant, the variant index may or may not be
///    included - the single possible variant is inferred if it is not included.
///  - [`OpaqueCast`](ProjectionElem::OpaqueCast): This projection changes the place's type to the
///    given one, and makes no other changes. An `OpaqueCast` projection on any type other than an
///    opaque type from the current crate is not well-formed.
///  - [`ConstantIndex`](ProjectionElem::ConstantIndex): Computes an offset in units of `T` into the
///    place as described in the documentation for the `ProjectionElem`. The resulting address is
///    the parent's address plus that offset, and the type is `T`. This is only legal if the parent
///    place has type `[T; N]` or `[T]` (*not* `&[T]`). Since such a `T` is always sized, any
///    resulting metadata is thrown out.
///  - [`Subslice`](ProjectionElem::Subslice): This projection calculates an offset and a new
///    address in a similar manner as `ConstantIndex`. It is also only legal on `[T; N]` and `[T]`.
///    However, this yields a `Place` of type `[T]`, and additionally sets the metadata to be the
///    length of the subslice.
///  - [`Index`](ProjectionElem::Index): Like `ConstantIndex`, only legal on `[T; N]` or `[T]`.
///    However, `Index` additionally takes a local from which the value of the index is computed at
///    runtime. Computing the value of the index involves interpreting the `Local` as a
///    `Place { local, projection: [] }`, and then computing its value as if done via
///    [`Operand::Copy`]. The array/slice is then indexed with the resulting value. The local must
///    have type `usize`.
///  - [`Deref`](ProjectionElem::Deref): Derefs are the last type of projection, and the most
///    complicated. They are only legal on parent places that are references, pointers, or `Box`. A
///    `Deref` projection begins by loading a value from the parent place, as if by
///    [`Operand::Copy`]. It then dereferences the resulting pointer, creating a place of the
///    pointee's type. The resulting address is the address that was stored in the pointer. If the
///    pointee type is unsized, the pointer additionally stores the value of the metadata.
///
/// The "validity invariant" of places is the same as that of raw pointers, meaning that e.g.
/// `*ptr` on a dangling or unaligned pointer is never UB. (Later doing a load/store on that place
/// or turning it into a reference can be UB though!) The only ways a place computation can
/// cause UB are:
/// - On a `Deref` projection, we do an actual load of the inner place, with all the usual
///   consequences (the inner place must be based on an aligned pointer, it must point to allocated
///   memory, the aliasing model must allow reads, and this must not be a data race).
/// - For the projections that perform pointer arithmetic, the offset must be in-bounds of an
///   allocation (i.e., the preconditions of `ptr::offset` must be met).
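///
/// As an illustrative sketch (local numbering and types made up), given `_1: &[i32]` and
/// `_2: usize`, the source expression `(*x)[i]` corresponds to a place roughly like:
///
/// ```ignore (illustrative)
/// Place { local: _1, projection: [Deref, Index(_2)] }
/// ```
///
/// i.e. the reference in `_1` is first loaded and dereferenced, and the resulting slice place is
/// then indexed by the runtime value of `_2`, per the `Deref` and `Index` rules above.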
#[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, HashStable, TypeFoldable, TypeVisitable)]
pub struct Place<'tcx> {
    pub local: Local,

    /// projection out of a place (access a field, deref a pointer, etc)
    pub projection: &'tcx List<PlaceElem<'tcx>>,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[derive(TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
pub enum ProjectionElem<V, T> {
    Deref,

    /// A field (e.g., `f` in `_1.f`) is one variant of [`ProjectionElem`]. Conceptually,
    /// rustc can tell whether a field projection refers to the same region of memory as its base
    /// or to a different one.
    /// Read more about projections in the [rustc-dev-guide][mir-datatypes]
    ///
    /// [mir-datatypes]: https://rustc-dev-guide.rust-lang.org/mir/index.html#mir-data-types
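    ///
    /// As an illustrative sketch (numbering made up), given `_1: (i32, u64)`, the place `_1.1`
    /// is roughly:
    ///
    /// ```ignore (illustrative)
    /// Place { local: _1, projection: [Field(1, u64)] }
    /// ```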
    Field(FieldIdx, T),

    /// Index into a slice/array.
    ///
    /// Note that this does not also dereference, and so it does not exactly correspond to slice
    /// indexing in Rust. In other words, in the below Rust code:
    ///
    /// ```rust
    /// let x = &[1, 2, 3, 4];
    /// let i = 2;
    /// x[i];
    /// ```
    ///
    /// The `x[i]` is turned into a `Deref` followed by an `Index`, not just an `Index`. The same
    /// thing is true of the `ConstantIndex` and `Subslice` projections below.
    Index(V),

    /// These indices are generated by slice patterns. Easiest to explain
    /// by example:
    ///
    /// ```ignore (illustrative)
    /// [X, _, .._, _, _] => { offset: 0, min_length: 4, from_end: false },
    /// [_, X, .._, _, _] => { offset: 1, min_length: 4, from_end: false },
    /// [_, _, .._, X, _] => { offset: 2, min_length: 4, from_end: true },
    /// [_, _, .._, _, X] => { offset: 1, min_length: 4, from_end: true },
    /// ```
    ConstantIndex {
        /// index or -index (in Python terms), depending on from_end
        offset: u64,
        /// The thing being indexed must be at least this long -- otherwise, the
        /// projection is UB.
        ///
        /// For arrays this is always the exact length.
        min_length: u64,
        /// Counting backwards from end? This is always false when indexing an
        /// array.
        from_end: bool,
    },

    /// These indices are generated by slice patterns.
    ///
    /// If `from_end` is true, this is `slice[from..slice.len() - to]`;
    /// otherwise it is `array[from..to]`.
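    ///
    /// As an illustrative sketch consistent with the formulas above (the values are inferred from
    /// those formulas, not normative): binding `rest` in the slice pattern `[first, rest @ ..]`
    /// against a `[T]` yields roughly:
    ///
    /// ```ignore (illustrative)
    /// Subslice { from: 1, to: 0, from_end: true }   // i.e. slice[1..slice.len() - 0]
    /// ```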
    Subslice {
        from: u64,
        to: u64,
        /// Whether `to` counts from the start or end of the array/slice.
        /// For `PlaceElem`s this is `true` if and only if the base is a slice.
        /// For `ProjectionKind`, this can also be `true` for arrays.
        from_end: bool,
    },

    /// "Downcast" to a variant of an enum or a coroutine.
    ///
    /// The included Symbol is the name of the variant, used for printing MIR.
    ///
    /// This operation itself is never UB; all it does is change the type of the place.
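    ///
    /// As an illustrative sketch, reading the payload of `Some` out of `_1: Option<i32>` in a
    /// match arm uses a place that MIR prints roughly as:
    ///
    /// ```ignore (illustrative)
    /// ((_1 as Some).0)   // projection: [Downcast(Some, 1), Field(0, i32)]
    /// ```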
    Downcast(Option<Symbol>, VariantIdx),

    /// Like an explicit cast from an opaque type to a concrete type, but without
    /// requiring an intermediate variable.
    OpaqueCast(T),

    /// A `Subtype(T)` projection is applied to any `StatementKind::Assign` where the
    /// type of the lvalue doesn't match the type of the rvalue. The primary goal is to make
    /// subtyping explicit during optimizations and codegen.
    ///
    /// This projection doesn't impact the runtime behavior of the program except for potentially changing
    /// some type metadata of the interpreter or codegen backend.
    ///
    /// This goal is achieved with the `rustc_mir_transform` pass `Subtyper`, which runs right
    /// after the borrow checker, as we only care about subtyping that can affect trait selection
    /// and `TypeId`.
    Subtype(T),
}

/// Alias for projections as they appear in places, where the base is a place
/// and the index is a local.
pub type PlaceElem<'tcx> = ProjectionElem<Local, Ty<'tcx>>;

///////////////////////////////////////////////////////////////////////////
// Operands

/// An operand in MIR represents a "value" in Rust, the definition of which is undecided and part of
/// the memory model. One proposal for a definition of values can be found [on UCG][value-def].
///
/// [value-def]: https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/value-domain.md
///
/// The most common way to create values is via loading a place. Loading a place is an operation
/// which reads the memory of the place and converts it to a value. This is a fundamentally *typed*
/// operation. The nature of the value produced depends on the type of the conversion. Furthermore,
/// there may be other effects: if the type has a validity constraint, loading the place might be
/// UB if the validity constraint is not met.
///
/// **Needs clarification:** Is loading a place that has its variant index set well-formed? Miri
/// currently implements it, but it seems like this may be something to check against in the
/// validator.
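///
/// As an illustrative sketch (MIR text simplified, numbering made up), the choice between the
/// operand kinds below looks roughly like this in MIR dumps:
///
/// ```ignore (illustrative)
/// _2 = copy _1;        // Operand::Copy: `_1` remains usable afterwards
/// _3 = move _2;        // Operand::Move: `_2` must not be read again without re-initializing
/// _4 = const 5_i32;    // Operand::Constant
/// ```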
#[derive(Clone, PartialEq, TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub enum Operand<'tcx> {
    /// Creates a value by loading the given place.
    ///
    /// Before drop elaboration, the type of the place must be `Copy`. After drop elaboration there
    /// is no such requirement.
    Copy(Place<'tcx>),

    /// Creates a value by loading the given place, just like the `Copy` operand.
    ///
    /// This *may* additionally overwrite the place with `uninit` bytes, depending on how we decide
    /// in [UCG#188]. You should not emit MIR that may attempt a subsequent second load of this
    /// place without first re-initializing it.
    ///
    /// **Needs clarification:** The operational impact of `Move` is unclear. Currently (both in
    /// Miri and codegen) it has no effect at all unless it appears in an argument to `Call`; for
    /// `Call` it allows the argument to be passed to the callee "in-place", i.e. the callee might
    /// just get a reference to this place instead of a full copy. Miri implements this with a
    /// combination of aliasing model "protectors" and putting `uninit` into the place. Ralf
    /// proposes that we don't want these semantics for `Move` in regular assignments, because
    /// loading a place should not have side-effects, and the aliasing model "protectors" are
    /// inherently tied to a function call. Are these the semantics we want for MIR? Is this
    /// something we can even decide without knowing more about Rust's memory model?
    ///
    /// [UCG#188]: https://github.com/rust-lang/unsafe-code-guidelines/issues/188
    Move(Place<'tcx>),

    /// Constants are already semantically values, and remain unchanged.
    Constant(Box<ConstOperand<'tcx>>),
}

#[derive(Clone, Copy, PartialEq, TyEncodable, TyDecodable, Hash, HashStable)]
#[derive(TypeFoldable, TypeVisitable)]
pub struct ConstOperand<'tcx> {
    pub span: Span,

    /// Optional user-given type: for something like
    /// `collect::<Vec<_>>`, this would be present and would
    /// indicate that `Vec<_>` was explicitly specified.
    ///
    /// Needed for NLL to impose user-given type constraints.
    pub user_ty: Option<UserTypeAnnotationIndex>,

    pub const_: Const<'tcx>,
}

///////////////////////////////////////////////////////////////////////////
// Rvalues

/// The various kinds of rvalues that can appear in MIR.
///
/// Not all of these are allowed at every [`MirPhase`] - when this is the case, it's stated below.
///
/// Computing any rvalue begins by evaluating the places and operands in some order (**Needs
/// clarification**: Which order?). These are then used to produce a "value" - the same kind of
/// value that an [`Operand`] produces.
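///
/// As an illustrative sketch (MIR text simplified, types and numbering made up), some of the
/// variants below print roughly like this in MIR dumps:
///
/// ```ignore (illustrative)
/// _2 = copy _1;                      // Rvalue::Use
/// _3 = [const 0_u8; 32];             // Rvalue::Repeat
/// _4 = &_1;                          // Rvalue::Ref (shared borrow)
/// _5 = Add(copy _1, const 1_i32);    // Rvalue::BinaryOp
/// _6 = discriminant(_7);             // Rvalue::Discriminant
/// ```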
#[derive(Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq, TypeFoldable, TypeVisitable)]
pub enum Rvalue<'tcx> {
    /// Yields the operand unchanged
    Use(Operand<'tcx>),

    /// Creates an array where each element is the value of the operand.
    ///
    /// This is the cause of a bug in the case where the repetition count is zero, because the
    /// value is not dropped; see [#74836].
    ///
    /// Corresponds to source code like `[x; 32]`.
    ///
    /// [#74836]: https://github.com/rust-lang/rust/issues/74836
    Repeat(Operand<'tcx>, ty::Const<'tcx>),

    /// Creates a reference of the indicated kind to the place.
    ///
    /// There is not much to document here, because besides the obvious parts the semantics of this
    /// are essentially entirely a part of the aliasing model. There are many UCG issues discussing
    /// exactly what the behavior of this operation should be.
    ///
    /// `Shallow` borrows are disallowed after drop lowering.
    Ref(Region<'tcx>, BorrowKind, Place<'tcx>),

    /// Creates a pointer/reference to the given thread local.
    ///
    /// The yielded type is `*mut T` if the static is mutable; otherwise, if the static is
    /// `extern`, it is `*const T`; and if neither of those applies, it is `&T`.
    ///
    /// **Note:** This is a runtime operation that actually executes code and is in this sense more
    /// like a function call. Also, eliminating dead stores of this rvalue causes `fn main() {}` to
    /// SIGILL for some reason that I (JakobDegen) never got a chance to look into.
    ///
    /// **Needs clarification**: Are there weird additional semantics here related to the runtime
    /// nature of this operation?
    ThreadLocalRef(DefId),

    /// Creates a raw pointer with the indicated mutability to the place.
    ///
    /// This is generated by pointer casts like `&v as *const _` or raw borrow expressions like
    /// `&raw const v`.
    ///
    /// Like with references, the semantics of this operation are heavily dependent on the aliasing
    /// model.
    RawPtr(Mutability, Place<'tcx>),

    /// Yields the length of the place, as a `usize`.
    ///
    /// If the type of the place is an array, this is the array length. For slices (`[T]`, not
    /// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is
    /// ill-formed for places of other types.
    ///
    /// This cannot be a `UnOp(PtrMetadata, _)` because that expects a value, and we only
    /// have a place, and `UnOp(PtrMetadata, RawPtr(place))` is not a thing.
    Len(Place<'tcx>),

    /// Performs essentially all of the casts that can be performed via `as`.
    ///
    /// This allows for casts from/to a variety of types.
    ///
    /// **FIXME**: Document exactly which `CastKind`s allow which types of casts.
    Cast(CastKind, Operand<'tcx>, Ty<'tcx>),

    /// * `Offset` has the same semantics as [`offset`](pointer::offset), except that the second
    ///   parameter may be a `usize` as well.
    /// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats,
    ///   raw pointers, or function pointers and return a `bool`. The types of the operands must be
    ///   matching, up to the usual caveat of the lifetimes in function pointers.
    /// * Left and right shift operations accept signed or unsigned integers not necessarily of the
    ///   same type and return a value of the same type as their LHS. Like in Rust, the RHS is
    ///   truncated as needed.
    /// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching
    ///   types and return a value of that type.
    /// * The `FooWithOverflow` operations are like the corresponding `Foo`, but return `(T, bool)`
    ///   instead of just `T`, where the `bool` is true if the result is not equal to the
    ///   infinite-precision result.
    /// * The remaining operations accept signed integers, unsigned integers, or floats with
    ///   matching types and return a value of that type.
    BinaryOp(BinOp, Box<(Operand<'tcx>, Operand<'tcx>)>),

    /// Computes a value as described by the operation.
    NullaryOp(NullOp<'tcx>, Ty<'tcx>),

    /// Exactly like `BinaryOp`, but with fewer operands.
    ///
    /// Also does two's-complement arithmetic. Negation requires a signed integer or a float;
    /// bitwise not requires a signed integer, unsigned integer, or bool. Both operation kinds
    /// return a value with the same type as their operand.
    UnaryOp(UnOp, Operand<'tcx>),

    /// Computes the discriminant of the place, returning it as an integer of type
    /// [`discriminant_ty`]. Returns zero for types without discriminant.
    ///
    /// The validity requirements for the underlying value are undecided for this rvalue, see
    /// [#91095]. Note too that the value of the discriminant is not the same thing as the
    /// variant index; use [`discriminant_for_variant`] to convert.
    ///
    /// [`discriminant_ty`]: crate::ty::Ty::discriminant_ty
    /// [#91095]: https://github.com/rust-lang/rust/issues/91095
    /// [`discriminant_for_variant`]: crate::ty::Ty::discriminant_for_variant
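    ///
    /// As an illustrative sketch (simplified), `match` lowering typically reads the discriminant
    /// and then switches on it:
    ///
    /// ```ignore (illustrative)
    /// _2 = discriminant(_1);
    /// switchInt(move _2) -> [0: bb1, 1: bb2, otherwise: bb3];
    /// ```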
    Discriminant(Place<'tcx>),

    /// Creates an aggregate value, like a tuple or struct.
    ///
    /// This is needed because dataflow analysis needs to distinguish
    /// `dest = Foo { x: ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case that `Foo`
    /// has a destructor.
    ///
    /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Coroutine`. After
    /// coroutine lowering, `Coroutine` aggregate kinds are disallowed too.
    Aggregate(Box<AggregateKind<'tcx>>, IndexVec<FieldIdx, Operand<'tcx>>),

    /// Transmutes a `*mut u8` into shallow-initialized `Box<T>`.
    ///
    /// This is different from a normal transmute because dataflow analysis will treat the box as
    /// initialized but its content as uninitialized. Like other pointer casts, this in general
    /// affects alias analysis.
    ShallowInitBox(Operand<'tcx>, Ty<'tcx>),

    /// A CopyForDeref is equivalent to a read from a place at the
    /// codegen level, but is treated specially by drop elaboration. When such a read happens, it
    /// is guaranteed (via nature of the mir_opt `Derefer` in rustc_mir_transform/src/deref_separator)
    /// that the only use of the returned value is a deref operation, immediately
    /// followed by one or more projections. Drop elaboration treats this rvalue as if the
    /// read never happened and just projects further. This allows simplifying various MIR
    /// optimizations and codegen backends that previously had to handle deref operations anywhere
    /// in a place.
    CopyForDeref(Place<'tcx>),
}

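/// The kind of conversion performed by a [`Rvalue::Cast`].
///
/// As an illustrative sketch (an informal reading of the variant docs below, not normative),
/// some surface `as` casts and the variants they use:
///
/// ```ignore (illustrative)
/// let p = &x as *const i32;   // not a `Cast` at all: lowered to `&raw const x`
/// let a = p as usize;         // CastKind::PointerExposeProvenance
/// let q = a as *const i32;    // CastKind::PointerWithExposedProvenance
/// let n = 1_u8 as u32;        // CastKind::IntToInt
/// let f = n as f64;           // CastKind::IntToFloat
/// ```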
#[derive(Clone, Copy, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
pub enum CastKind {
    /// An exposing pointer to address cast. A cast between a pointer and an integer type, or
    /// between a function pointer and an integer type.
    /// See the docs on `expose_provenance` for more details.
    PointerExposeProvenance,
    /// An address-to-pointer cast that picks up an exposed provenance.
    /// See the docs on `with_exposed_provenance` for more details.
    PointerWithExposedProvenance,
    /// Pointer related casts that are done by coercions. Note that reference-to-raw-ptr casts are
    /// translated into `&raw mut/const *r`, i.e., they are not actually casts.
    ///
    /// The following are allowed in [`AnalysisPhase::Initial`] as they're needed for borrowck,
    /// but after that are forbidden (including in all phases of runtime MIR):
    /// * [`PointerCoercion::ArrayToPointer`]
    /// * [`PointerCoercion::MutToConstPointer`]
    ///
    /// Both are runtime nops, so should be [`CastKind::PtrToPtr`] instead in runtime MIR.
    PointerCoercion(PointerCoercion, CoercionSource),
    IntToInt,
    FloatToInt,
    FloatToFloat,
    IntToFloat,
    PtrToPtr,
    FnPtrToPtr,
    /// Reinterpret the bits of the input as a different type.
    ///
    /// MIR is well-formed if the input and output types have different sizes,
    /// but running a transmute between differently-sized types is UB.
    ///
    /// Allowed only in [`MirPhase::Runtime`]; earlier it's a [`TerminatorKind::Call`].
    Transmute,
}

/// Represents how a [`CastKind::PointerCoercion`] was constructed.
/// Used only for diagnostics.
#[derive(Clone, Copy, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
pub enum CoercionSource {
    /// The coercion was manually written by the user with an `as` cast.
    AsCast,
    /// The coercion was automatically inserted by the compiler.
    Implicit,
}

#[derive(Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
#[derive(TypeFoldable, TypeVisitable)]
pub enum AggregateKind<'tcx> {
    /// The type here is that of the array's element, not of the whole array.
    Array(Ty<'tcx>),
    Tuple,

    /// The second field is the variant index. It's equal to 0 for struct
    /// and union expressions. The last field is the
    /// active field number and is present only for union expressions
    /// -- e.g., for a union expression `SomeUnion { c: .. }`, the
    /// active field index would identify the field `c`.
    Adt(DefId, VariantIdx, GenericArgsRef<'tcx>, Option<UserTypeAnnotationIndex>, Option<FieldIdx>),

    Closure(DefId, GenericArgsRef<'tcx>),
    Coroutine(DefId, GenericArgsRef<'tcx>),
    CoroutineClosure(DefId, GenericArgsRef<'tcx>),

    /// Construct a raw pointer from the data pointer and metadata.
    ///
    /// The `Ty` here is the type of the *pointee*, not the pointer itself.
    /// The `Mutability` indicates whether this produces a `*const` or `*mut`.
    ///
    /// The [`Rvalue::Aggregate`] operands for this must be:
    ///
    /// 0. A raw pointer of matching mutability with any [`core::ptr::Thin`] pointee
    /// 1. A value of the appropriate [`core::ptr::Pointee::Metadata`] type
    ///
    /// *Both* operands must always be included, even the unit value if this is
    /// creating a thin pointer. If you're just converting between thin pointers,
    /// you may want an [`Rvalue::Cast`] with [`CastKind::PtrToPtr`] instead.
    RawPtr(Ty<'tcx>, Mutability),
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
pub enum NullOp<'tcx> {
    /// Returns the size of a value of that type
    SizeOf,
    /// Returns the minimum alignment of a type
    AlignOf,
    /// Returns the offset of a field
    OffsetOf(&'tcx List<(VariantIdx, FieldIdx)>),
    /// Returns whether we should perform some UB-checking at runtime.
    /// See the `ub_checks` intrinsic docs for details.
    UbChecks,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[derive(HashStable, TyEncodable, TyDecodable, TypeFoldable, TypeVisitable)]
pub enum UnOp {
    /// The `!` operator for logical inversion
    Not,
    /// The `-` operator for negation
    Neg,
    /// Gets the metadata `M` from a `*const`/`*mut`/`&`/`&mut` to
    /// `impl Pointee<Metadata = M>`.
    ///
    /// For example, this will give a `()` from `*const i32`, a `usize` from
    /// `&mut [u8]`, or a `ptr::DynMetadata<dyn Foo>` (internally a pointer)
    /// from a `*mut dyn Foo`.
    ///
    /// Allowed only in [`MirPhase::Runtime`]; earlier it's an intrinsic.
    PtrMetadata,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[derive(TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
pub enum BinOp {
    /// The `+` operator (addition)
    Add,
    /// Like `Add`, but with UB on overflow.  (Integers only.)
    AddUnchecked,
    /// Like `Add`, but returns `(T, bool)` of both the wrapped result
    /// and a bool indicating whether it overflowed.
    AddWithOverflow,
    /// The `-` operator (subtraction)
    Sub,
    /// Like `Sub`, but with UB on overflow.  (Integers only.)
    SubUnchecked,
    /// Like `Sub`, but returns `(T, bool)` of both the wrapped result
    /// and a bool indicating whether it overflowed.
    SubWithOverflow,
    /// The `*` operator (multiplication)
    Mul,
    /// Like `Mul`, but with UB on overflow.  (Integers only.)
    MulUnchecked,
    /// Like `Mul`, but returns `(T, bool)` of both the wrapped result
    /// and a bool indicating whether it overflowed.
    MulWithOverflow,
    /// The `/` operator (division)
    ///
    /// For integer types, division by zero is UB, as is `MIN / -1` for signed.
    /// The compiler should have inserted checks prior to this.
    ///
    /// Floating-point division by zero is safe, and does not need guards.
    Div,
    /// The `%` operator (modulus)
    ///
    /// For integer types, using zero as the modulus (second operand) is UB,
    /// as is `MIN % -1` for signed.
    /// The compiler should have inserted checks prior to this.
    ///
    /// Floating-point remainder by zero is safe, and does not need guards.
    Rem,
    /// The `^` operator (bitwise xor)
    BitXor,
    /// The `&` operator (bitwise and)
    BitAnd,
    /// The `|` operator (bitwise or)
    BitOr,
    /// The `<<` operator (shift left)
    ///
    /// The offset is given by `RHS.rem_euclid(LHS::BITS)`.
    /// In other words, it is (uniquely) determined as follows:
    /// - it is "equal modulo LHS::BITS" to the RHS
    /// - it is in the range `0..LHS::BITS`
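    ///
    /// A worked instance of the formula above (illustrative, not normative):
    ///
    /// ```ignore (illustrative)
    /// // LHS: u8 (8 bits), RHS = 10
    /// // offset = 10.rem_euclid(8) = 2
    /// // Shl(1_u8, 10) == 1 << 2 == 4
    /// ```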
    Shl,
    /// Like `Shl`, but is UB if the RHS >= LHS::BITS or RHS < 0
    ShlUnchecked,
    /// The `>>` operator (shift right)
    ///
    /// The offset is given by `RHS.rem_euclid(LHS::BITS)`.
    /// In other words, it is (uniquely) determined as follows:
    /// - it is "equal modulo LHS::BITS" to the RHS
    /// - it is in the range `0..LHS::BITS`
    ///
    /// This is an arithmetic shift if the LHS is signed
    /// and a logical shift if the LHS is unsigned.
    Shr,
    /// Like `Shr`, but is UB if the RHS >= LHS::BITS or RHS < 0
    ShrUnchecked,
    /// The `==` operator (equality)
    Eq,
    /// The `<` operator (less than)
    Lt,
    /// The `<=` operator (less than or equal to)
    Le,
    /// The `!=` operator (not equal to)
    Ne,
    /// The `>=` operator (greater than or equal to)
    Ge,
    /// The `>` operator (greater than)
    Gt,
    /// The `<=>` operator (three-way comparison, like `Ord::cmp`)
    ///
    /// This is supported only on the integer types and `char`, always returning
    /// [`rustc_hir::LangItem::OrderingEnum`] (aka [`std::cmp::Ordering`]).
    ///
    /// [`Rvalue::BinaryOp`]`(BinOp::Cmp, A, B)` returns
    /// - `Ordering::Less` (`-1_i8`, as a Scalar) if `A < B`
    /// - `Ordering::Equal` (`0_i8`, as a Scalar) if `A == B`
    /// - `Ordering::Greater` (`+1_i8`, as a Scalar) if `A > B`
    Cmp,
    /// The `ptr.offset` operator
    Offset,
}

// Some nodes are used a lot. Make sure they don't unintentionally get bigger.
#[cfg(target_pointer_width = "64")]
mod size_asserts {
    use rustc_data_structures::static_assert_size;

    use super::*;
    // tidy-alphabetical-start
    static_assert_size!(AggregateKind<'_>, 32);
    static_assert_size!(Operand<'_>, 24);
    static_assert_size!(Place<'_>, 16);
    static_assert_size!(PlaceElem<'_>, 24);
    static_assert_size!(Rvalue<'_>, 40);
    static_assert_size!(StatementKind<'_>, 16);
    static_assert_size!(TerminatorKind<'_>, 80);
    // tidy-alphabetical-end
}