hir_ty/
mir.rs

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
//! MIR definitions and implementation

use std::{collections::hash_map::Entry, fmt::Display, iter};

use crate::{
    consteval::usize_const,
    db::HirDatabase,
    display::HirDisplay,
    infer::{normalize, PointerCast},
    lang_items::is_box,
    mapping::ToChalk,
    CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap,
    Substitution, TraitEnvironment, Ty, TyKind,
};
use base_db::CrateId;
use chalk_ir::Mutability;
use either::Either;
use hir_def::{
    body::Body,
    hir::{BindingAnnotation, BindingId, Expr, ExprId, Ordering, PatId},
    DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId,
};
use la_arena::{Arena, ArenaMap, Idx, RawIdx};

mod borrowck;
mod eval;
mod lower;
mod monomorphization;
mod pretty;

pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
pub use eval::{
    interpret_mir, pad16, render_const_using_debug_impl, Evaluator, MirEvalError, VTableMap,
};
pub use lower::{
    lower_to_mir, mir_body_for_closure_query, mir_body_query, mir_body_recover, MirLowerError,
};
pub use monomorphization::{
    monomorphize_mir_body_bad, monomorphized_mir_body_for_closure_query,
    monomorphized_mir_body_query, monomorphized_mir_body_recover,
};
use rustc_hash::FxHashMap;
use smallvec::{smallvec, SmallVec};
use stdx::{impl_from, never};

use super::consteval::{intern_const_scalar, try_const_usize};

// Index of a basic block within a MIR body's arena.
pub type BasicBlockId = Idx<BasicBlock>;
// Index of a local within a MIR body's arena.
pub type LocalId = Idx<Local>;

/// The local holding the return value (`_0` in MIR): always the first local
/// allocated in a body's arena.
fn return_slot() -> LocalId {
    LocalId::from_raw(RawIdx::from(0))
}

/// A local variable or temporary slot in a MIR body. Only the type is stored
/// here; any name/debug information is kept elsewhere.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Local {
    pub ty: Ty,
}

/// An operand in MIR represents a "value" in Rust, the definition of which is undecided and part of
/// the memory model. One proposal for a definition of values can be found [on UCG][value-def].
///
/// [value-def]: https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/value-domain.md
///
/// The most common way to create values is via loading a place. Loading a place is an operation
/// which reads the memory of the place and converts it to a value. This is a fundamentally *typed*
/// operation. The nature of the value produced depends on the type of the conversion. Furthermore,
/// there may be other effects: if the type has a validity constraint loading the place might be UB
/// if the validity constraint is not met.
///
/// **Needs clarification:** Ralf proposes that loading a place not have side-effects.
/// This is what is implemented in miri today. Are these the semantics we want for MIR? Is this
/// something we can even decide without knowing more about Rust's memory model?
///
/// **Needs clarification:** Is loading a place that has its variant index set well-formed? Miri
/// currently implements it, but it seems like this may be something to check against in the
/// validator.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Operand {
    /// Creates a value by loading the given place.
    ///
    /// Before drop elaboration, the type of the place must be `Copy`. After drop elaboration there
    /// is no such requirement.
    Copy(Place),

    /// Creates a value by performing a load of the place, just like the `Copy` operand.
    ///
    /// This *may* additionally overwrite the place with `uninit` bytes, depending on how we decide
    /// in [UCG#188]. You should not emit MIR that may attempt a subsequent second load of this
    /// place without first re-initializing it.
    ///
    /// [UCG#188]: https://github.com/rust-lang/unsafe-code-guidelines/issues/188
    Move(Place),
    /// Constants are already semantically values, and remain unchanged.
    Constant(Const),
    /// NON STANDARD: This kind of operand returns an immutable reference to that static memory. Rustc
    /// handles it with the `Constant` variant somehow.
    Static(StaticId),
}

impl Operand {
    /// Builds a constant operand from raw bytes plus the memory map needed to
    /// interpret any pointers inside them.
    fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap, ty: Ty) -> Self {
        let scalar = ConstScalar::Bytes(data, memory_map);
        Self::Constant(intern_const_scalar(scalar, ty))
    }

    /// Builds a constant operand from plain bytes with an empty memory map.
    fn from_bytes(data: Box<[u8]>, ty: Ty) -> Self {
        Self::from_concrete_const(data, MemoryMap::default(), ty)
    }

    /// A zero-sized constant of the given type.
    fn const_zst(ty: Ty) -> Operand {
        Self::from_bytes(Box::default(), ty)
    }

    /// A zero-sized constant whose type is the `FnDef` of `func_id` applied to
    /// `generic_args`.
    fn from_fn(
        db: &dyn HirDatabase,
        func_id: hir_def::FunctionId,
        generic_args: Substitution,
    ) -> Operand {
        let callable = CallableDefId::FunctionId(func_id).to_chalk(db);
        let ty = chalk_ir::TyKind::FnDef(callable, generic_args).intern(Interner);
        Self::from_bytes(Box::default(), ty)
    }
}

/// A single step of a place projection (dereference, field access, indexing,
/// slicing, ...). Generic over `V` (the representation of index operands) and
/// `T` (the representation of opaque-cast targets).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ProjectionElem<V, T> {
    Deref,
    // A named (ADT) field or a numbered tuple field.
    Field(Either<FieldId, TupleFieldId>),
    // FIXME: get rid of this, and use FieldId for tuples and closures
    ClosureField(usize),
    // Indexing with a runtime value, e.g. `a[i]`.
    Index(V),
    // Indexing with a compile-time known offset, counted from the start or
    // (when `from_end` is set) from the end.
    ConstantIndex { offset: u64, from_end: bool },
    // A subslice `a[from..len - to]` pattern projection.
    Subslice { from: u64, to: u64 },
    //Downcast(Option<Symbol>, VariantIdx),
    OpaqueCast(T),
}

impl<V, T> ProjectionElem<V, T> {
    /// Computes the type produced by applying this projection to a place of
    /// type `base`.
    ///
    /// `closure_field` resolves the type of a captured closure field, and
    /// `krate` is used for normalization and for error display. Whenever the
    /// projection does not fit the shape of `base`, this logs via `never!` and
    /// falls back to `TyKind::Error` rather than panicking.
    pub fn projected_ty(
        &self,
        mut base: Ty,
        db: &dyn HirDatabase,
        closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty,
        krate: CrateId,
    ) -> Ty {
        // Projections need a concrete type to inspect, so normalize away
        // aliases/associated types up front.
        if matches!(base.kind(Interner), TyKind::Alias(_) | TyKind::AssociatedType(..)) {
            base = normalize(
                db,
                // FIXME: we should get this from caller
                TraitEnvironment::empty(krate),
                base,
            );
        }
        match self {
            ProjectionElem::Deref => match &base.kind(Interner) {
                TyKind::Raw(_, inner) | TyKind::Ref(_, _, inner) => inner.clone(),
                // `Box` is the only ADT with built-in deref; its pointee is the
                // first generic argument.
                TyKind::Adt(adt, subst) if is_box(db, adt.0) => {
                    subst.at(Interner, 0).assert_ty_ref(Interner).clone()
                }
                _ => {
                    never!(
                        "Overloaded deref on type {} is not a projection",
                        base.display(db, db.crate_graph()[krate].edition)
                    );
                    TyKind::Error.intern(Interner)
                }
            },
            // Named field: look up the field's type and instantiate it with the
            // ADT's substitution.
            ProjectionElem::Field(Either::Left(f)) => match &base.kind(Interner) {
                TyKind::Adt(_, subst) => {
                    db.field_types(f.parent)[f.local_id].clone().substitute(Interner, subst)
                }
                ty => {
                    never!("Only adt has field, found {:?}", ty);
                    TyKind::Error.intern(Interner)
                }
            },
            // Tuple field: the type is the n-th element of the tuple substitution.
            ProjectionElem::Field(Either::Right(f)) => match &base.kind(Interner) {
                TyKind::Tuple(_, subst) => subst
                    .as_slice(Interner)
                    .get(f.index as usize)
                    .map(|x| x.assert_ty_ref(Interner))
                    .cloned()
                    .unwrap_or_else(|| {
                        never!("Out of bound tuple field");
                        TyKind::Error.intern(Interner)
                    }),
                ty => {
                    never!("Only tuple has tuple field: {:?}", ty);
                    TyKind::Error.intern(Interner)
                }
            },
            // Captured closure field: delegate to the caller-supplied resolver.
            ProjectionElem::ClosureField(f) => match &base.kind(Interner) {
                TyKind::Closure(id, subst) => closure_field(*id, subst, *f),
                _ => {
                    never!("Only closure has closure field");
                    TyKind::Error.intern(Interner)
                }
            },
            // Indexing an array or slice yields the element type.
            ProjectionElem::ConstantIndex { .. } | ProjectionElem::Index(_) => {
                match &base.kind(Interner) {
                    TyKind::Array(inner, _) | TyKind::Slice(inner) => inner.clone(),
                    _ => {
                        never!("Overloaded index is not a projection");
                        TyKind::Error.intern(Interner)
                    }
                }
            }
            // Subslicing an array shrinks its length by `from + to` (saturating
            // to an unknown length if the original length is unknown or too
            // small); subslicing a slice keeps the slice type.
            &ProjectionElem::Subslice { from, to } => match &base.kind(Interner) {
                TyKind::Array(inner, c) => {
                    let next_c = usize_const(
                        db,
                        match try_const_usize(db, c) {
                            None => None,
                            Some(x) => x.checked_sub(u128::from(from + to)),
                        },
                        krate,
                    );
                    TyKind::Array(inner.clone(), next_c).intern(Interner)
                }
                TyKind::Slice(_) => base.clone(),
                _ => {
                    never!("Subslice projection should only happen on slice and array");
                    TyKind::Error.intern(Interner)
                }
            },
            ProjectionElem::OpaqueCast(_) => {
                never!("We don't emit these yet");
                TyKind::Error.intern(Interner)
            }
        }
    }
}

// Projection step as used inside `Place`s: locals as index operands, `Ty` as
// opaque-cast targets.
type PlaceElem = ProjectionElem<LocalId, Ty>;

/// Interned handle for a projection list; resolved through a [`ProjectionStore`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ProjectionId(u32);

/// Bidirectional interner for projection lists, letting `Place` store a cheap
/// copyable `ProjectionId` instead of owning a `Box<[PlaceElem]>`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ProjectionStore {
    id_to_proj: FxHashMap<ProjectionId, Box<[PlaceElem]>>,
    proj_to_id: FxHashMap<Box<[PlaceElem]>, ProjectionId>,
}

impl Default for ProjectionStore {
    fn default() -> Self {
        let mut this = Self { id_to_proj: Default::default(), proj_to_id: Default::default() };
        // Ensure that [] will get the id 0 which is used in `ProjectionId::Empty`
        this.intern(Box::new([]));
        this
    }
}

impl ProjectionStore {
    /// Shrinks both interner maps down to their current contents.
    pub fn shrink_to_fit(&mut self) {
        self.id_to_proj.shrink_to_fit();
        self.proj_to_id.shrink_to_fit();
    }

    /// Looks up an already-interned projection without inserting it.
    pub fn intern_if_exist(&self, projection: &[PlaceElem]) -> Option<ProjectionId> {
        self.proj_to_id.get(projection).copied()
    }

    /// Interns `projection`, returning the previously assigned id when this
    /// exact projection list has been seen before.
    pub fn intern(&mut self, projection: Box<[PlaceElem]>) -> ProjectionId {
        if let Some(id) = self.proj_to_id.get(&projection) {
            return *id;
        }
        // Ids are handed out densely, so the next free one equals the current
        // number of interned projections.
        let id = ProjectionId(self.proj_to_id.len() as u32);
        self.id_to_proj.insert(id, projection.clone());
        self.proj_to_id.insert(projection, id);
        id
    }
}

impl ProjectionId {
    /// The id of the empty projection list; guaranteed by
    /// `ProjectionStore::default`, which interns `[]` first.
    pub const EMPTY: ProjectionId = ProjectionId(0);

    /// Whether this id denotes the empty projection list.
    pub fn is_empty(self) -> bool {
        self == Self::EMPTY
    }

    /// Resolves this id back to its projection list. Panics if the id was not
    /// produced by `store`.
    pub fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] {
        &store.id_to_proj[&self]
    }

    /// Returns the id of this projection list extended by one more step.
    pub fn project(self, projection: PlaceElem, store: &mut ProjectionStore) -> ProjectionId {
        let mut extended = self.lookup(store).to_vec();
        extended.push(projection);
        store.intern(extended.into_boxed_slice())
    }
}

/// A path to a memory location: a base local plus an interned list of
/// projections applied to it.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Place {
    pub local: LocalId,
    pub projection: ProjectionId,
}

impl Place {
    /// Whether `child` is rooted at the same local and its projection list
    /// extends (or equals) this place's projection list.
    fn is_parent(&self, child: &Place, store: &ProjectionStore) -> bool {
        if self.local != child.local {
            return false;
        }
        child.projection.lookup(store).starts_with(self.projection.lookup(store))
    }

    /// The place itself is not included; only strict prefixes whose projection
    /// list happens to be interned already are yielded.
    fn iterate_over_parents<'a>(
        &'a self,
        store: &'a ProjectionStore,
    ) -> impl Iterator<Item = Place> + 'a {
        let projection = self.projection.lookup(store);
        (0..projection.len())
            .filter_map(move |prefix_len| store.intern_if_exist(&projection[..prefix_len]))
            .map(move |projection| Place { local: self.local, projection })
    }

    /// Returns this place extended by one more projection step.
    fn project(&self, projection: PlaceElem, store: &mut ProjectionStore) -> Place {
        let projection = self.projection.project(projection, store);
        Place { local: self.local, projection }
    }
}

impl From<LocalId> for Place {
    fn from(local: LocalId) -> Self {
        Self { local, projection: ProjectionId::EMPTY }
    }
}

/// The kind of value built by an aggregate rvalue (array, tuple, ADT, union
/// or closure).
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum AggregateKind {
    /// The type is of the element
    Array(Ty),
    /// The type is of the tuple
    Tuple(Ty),
    /// The variant being constructed and the ADT's substitution.
    Adt(VariantId, Substitution),
    /// The union and the single field being written.
    Union(UnionId, FieldId),
    Closure(Ty),
    //Coroutine(LocalDefId, SubstsRef, Movability),
}

/// The jump table of a `SwitchInt` terminator: a list of matched values and
/// the blocks to branch to, plus a trailing "otherwise" block.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct SwitchTargets {
    /// Possible values. The locations to branch to in each case
    /// are found in the corresponding indices from the `targets` vector.
    values: SmallVec<[u128; 1]>,

    /// Possible branch sites. The last element of this vector is used
    /// for the otherwise branch, so targets.len() == values.len() + 1
    /// should hold.
    //
    // This invariant is quite non-obvious and also could be improved.
    // One way to make this invariant is to have something like this instead:
    //
    // branches: Vec<(ConstInt, BasicBlock)>,
    // otherwise: Option<BasicBlock> // exhaustive if None
    //
    // However we’ve decided to keep this as-is until we figure a case
    // where some other approach seems to be strictly better than other.
    targets: SmallVec<[BasicBlockId; 2]>,
}

impl SwitchTargets {
    /// Creates switch targets from an iterator of values and target blocks.
    ///
    /// The iterator may be empty, in which case the `SwitchInt` instruction is equivalent to
    /// `goto otherwise;`.
    pub fn new(
        targets: impl Iterator<Item = (u128, BasicBlockId)>,
        otherwise: BasicBlockId,
    ) -> Self {
        let (values, mut targets): (SmallVec<_>, SmallVec<_>) = targets.unzip();
        targets.push(otherwise);
        Self { values, targets }
    }

    /// Builds a switch targets definition that jumps to `then` if the tested value equals `value`,
    /// and to `else_` if not.
    pub fn static_if(value: u128, then: BasicBlockId, else_: BasicBlockId) -> Self {
        Self { values: smallvec![value], targets: smallvec![then, else_] }
    }

    /// Returns the fallback target that is jumped to when none of the values match the operand.
    pub fn otherwise(&self) -> BasicBlockId {
        *self.targets.last().unwrap()
    }

    /// Returns an iterator over the switch targets.
    ///
    /// The iterator will yield tuples containing the value and corresponding target to jump to, not
    /// including the `otherwise` fallback target.
    ///
    /// Note that this may yield 0 elements. Only the `otherwise` branch is mandatory.
    pub fn iter(&self) -> impl Iterator<Item = (u128, BasicBlockId)> + '_ {
        iter::zip(&self.values, &self.targets).map(|(x, y)| (*x, *y))
    }

    /// Returns a slice with all possible jump targets (including the fallback target).
    pub fn all_targets(&self) -> &[BasicBlockId] {
        &self.targets
    }

    /// Finds the `BasicBlock` to which this `SwitchInt` will branch given the
    /// specific value. This cannot fail, as it'll return the `otherwise`
    /// branch if there's not a specific match for the value.
    pub fn target_for_value(&self, value: u128) -> BasicBlockId {
        self.iter().find_map(|(v, t)| (v == value).then_some(t)).unwrap_or_else(|| self.otherwise())
    }
}

/// A basic-block terminator paired with the span it was lowered from.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Terminator {
    pub span: MirSpan,
    pub kind: TerminatorKind,
}

/// The action a basic block ends with; every block has exactly one terminator,
/// which determines the block's successors.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum TerminatorKind {
    /// Block has one successor; we continue execution there.
    Goto { target: BasicBlockId },

    /// Switches based on the computed value.
    ///
    /// First, evaluates the `discr` operand. The type of the operand must be a signed or unsigned
    /// integer, char, or bool, and must match the given type. Then, if the list of switch targets
    /// contains the computed value, continues execution at the associated basic block. Otherwise,
    /// continues execution at the "otherwise" basic block.
    ///
    /// Target values may not appear more than once.
    SwitchInt {
        /// The discriminant value being tested.
        discr: Operand,

        targets: SwitchTargets,
    },

    /// Indicates that the landing pad is finished and that the process should continue unwinding.
    ///
    /// Like a return, this marks the end of this invocation of the function.
    ///
    /// Only permitted in cleanup blocks. `Resume` is not permitted with `-C unwind=abort` after
    /// deaggregation runs.
    UnwindResume,

    /// Indicates that the landing pad is finished and that the process should abort.
    ///
    /// Used to prevent unwinding for foreign items or with `-C unwind=abort`. Only permitted in
    /// cleanup blocks.
    Abort,

    /// Returns from the function.
    ///
    /// Like function calls, the exact semantics of returns in Rust are unclear. Returning very
    /// likely at least assigns the value currently in the return place (`_0`) to the place
    /// specified in the associated `Call` terminator in the calling function, as if assigned via
    /// `dest = move _0`. It might additionally do other things, like have side-effects in the
    /// aliasing model.
    ///
    /// If the body is a coroutine body, this has slightly different semantics; it instead causes a
    /// `CoroutineState::Returned(_0)` to be created (as if by an `Aggregate` rvalue) and assigned
    /// to the return place.
    Return,

    /// Indicates a terminator that can never be reached.
    ///
    /// Executing this terminator is UB.
    Unreachable,

    /// The behavior of this statement differs significantly before and after drop elaboration.
    /// After drop elaboration, `Drop` executes the drop glue for the specified place, after which
    /// it continues execution/unwinds at the given basic blocks. It is possible that executing drop
    /// glue is special - this would be part of Rust's memory model. (**FIXME**: do we have an
    /// issue tracking if drop glue has any interesting semantics in addition to those of a function
    /// call?)
    ///
    /// `Drop` before drop elaboration is a *conditional* execution of the drop glue. Specifically, the
    /// `Drop` will be executed if...
    ///
    /// **Needs clarification**: End of that sentence. This in effect should document the exact
    /// behavior of drop elaboration. The following sounds vaguely right, but I'm not quite sure:
    ///
    /// > The drop glue is executed if, among all statements executed within this `Body`, an assignment to
    /// > the place or one of its "parents" occurred more recently than a move out of it. This does not
    /// > consider indirect assignments.
    Drop { place: Place, target: BasicBlockId, unwind: Option<BasicBlockId> },

    /// Drops the place and assigns a new value to it.
    ///
    /// This first performs the exact same operation as the pre drop-elaboration `Drop` terminator;
    /// it then additionally assigns the `value` to the `place` as if by an assignment statement.
    /// This assignment occurs both in the unwind and the regular code paths. The semantics are best
    /// explained by the elaboration:
    ///
    /// ```ignore (MIR)
    /// BB0 {
    ///   DropAndReplace(P <- V, goto BB1, unwind BB2)
    /// }
    /// ```
    ///
    /// becomes
    ///
    /// ```ignore (MIR)
    /// BB0 {
    ///   Drop(P, goto BB1, unwind BB2)
    /// }
    /// BB1 {
    ///   // P is now uninitialized
    ///   P <- V
    /// }
    /// BB2 {
    ///   // P is now uninitialized -- its dtor panicked
    ///   P <- V
    /// }
    /// ```
    ///
    /// Disallowed after drop elaboration.
    DropAndReplace {
        place: Place,
        value: Operand,
        target: BasicBlockId,
        unwind: Option<BasicBlockId>,
    },

    /// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of
    /// the referred to function. The operand types must match the argument types of the function.
    /// The return place type must match the return type. The type of the `func` operand must be
    /// callable, meaning either a function pointer, a function type, or a closure type.
    ///
    /// **Needs clarification**: The exact semantics of this. Current backends rely on `move`
    /// operands not aliasing the return place. It is unclear how this is justified in MIR, see
    /// [#71117].
    ///
    /// [#71117]: https://github.com/rust-lang/rust/issues/71117
    Call {
        /// The function that’s being called.
        func: Operand,
        /// Arguments the function is called with.
        /// These are owned by the callee, which is free to modify them.
        /// This allows the memory occupied by "by-value" arguments to be
        /// reused across function calls without duplicating the contents.
        args: Box<[Operand]>,
        /// Where the returned value will be written
        destination: Place,
        /// Where to go after this call returns. If none, the call necessarily diverges.
        target: Option<BasicBlockId>,
        /// Cleanups to be done if the call unwinds.
        cleanup: Option<BasicBlockId>,
        /// `true` if this is from a call in HIR rather than from an overloaded
        /// operator (i.e. `false` when the call was desugared from an operator).
        from_hir_call: bool,
        // This `Span` is the span of the function, without the dot and receiver
        // (e.g. `foo(a, b)` in `x.foo(a, b)`)
        //fn_span: Span,
    },

    /// Evaluates the operand, which must have type `bool`. If it is not equal to `expected`,
    /// initiates a panic. Initiating a panic corresponds to a `Call` terminator with some
    /// unspecified constant as the function to call, all the operands stored in the `AssertMessage`
    /// as parameters, and `None` for the destination. Keep in mind that the `cleanup` path is not
    /// necessarily executed even in the case of a panic, for example in `-C panic=abort`. If the
    /// assertion does not fail, execution continues at the specified basic block.
    Assert {
        cond: Operand,
        expected: bool,
        //msg: AssertMessage,
        target: BasicBlockId,
        cleanup: Option<BasicBlockId>,
    },

    /// Marks a suspend point.
    ///
    /// Like `Return` terminators in coroutine bodies, this computes `value` and then a
    /// `CoroutineState::Yielded(value)` as if by `Aggregate` rvalue. That value is then assigned to
    /// the return place of the function calling this one, and execution continues in the calling
    /// function. When next invoked with the same first argument, execution of this function
    /// continues at the `resume` basic block, with the second argument written to the `resume_arg`
    /// place. If the coroutine is dropped before then, the `drop` basic block is invoked.
    ///
    /// Not permitted in bodies that are not coroutine bodies, or after coroutine lowering.
    ///
    /// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`?
    Yield {
        /// The value to return.
        value: Operand,
        /// Where to resume to.
        resume: BasicBlockId,
        /// The place to store the resume argument in.
        resume_arg: Place,
        /// Cleanup to be done if the coroutine is dropped at this suspend point.
        drop: Option<BasicBlockId>,
    },

    /// Indicates the end of dropping a coroutine.
    ///
    /// Semantically just a `return` (from the coroutine's drop glue). Only permitted in the same situations
    /// as `yield`.
    ///
    /// **Needs clarification**: Is that even correct? The coroutine drop code is always confusing
    /// to me, because it's not even really in the current body.
    ///
    /// **Needs clarification**: Are there type system constraints on these terminators? Should
    /// there be a "block type" like `cleanup` blocks for them?
    CoroutineDrop,

    /// A block where control flow only ever takes one real path, but borrowck needs to be more
    /// conservative.
    ///
    /// At runtime this is semantically just a goto.
    ///
    /// Disallowed after drop elaboration.
    FalseEdge {
        /// The target normal control flow will take.
        real_target: BasicBlockId,
        /// A block control flow could conceptually jump to, but won't in
        /// practice.
        imaginary_target: BasicBlockId,
    },

    /// A terminator for blocks that only take one path in reality, but where we reserve the right
    /// to unwind in borrowck, even if it won't happen in practice. This can arise in infinite loops
    /// with no function calls for example.
    ///
    /// At runtime this is semantically just a goto.
    ///
    /// Disallowed after drop elaboration.
    FalseUnwind {
        /// The target normal control flow will take.
        real_target: BasicBlockId,
        /// The imaginary cleanup block link. This particular path will never be taken
        /// in practice, but in order to avoid fragility we want to always
        /// consider it in borrowck. We don't want to accept programs which
        /// pass borrowck only when `panic=abort` or some assertions are disabled
        /// due to release vs. debug mode builds. This needs to be an `Option` because
        /// of the `remove_noop_landing_pads` and `abort_unwinding_calls` passes.
        unwind: Option<BasicBlockId>,
    },
}

// Order of variants in this enum matters: the derived `Ord` is used to compare
// borrow kinds (Shared < Shallow < Mut).
#[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)]
pub enum BorrowKind {
    /// Data must be immutable and is aliasable.
    Shared,

    /// The immediately borrowed place must be immutable, but projections from
    /// it don't need to be. For example, a shallow borrow of `a.b` doesn't
    /// conflict with a mutable borrow of `a.b.c`.
    ///
    /// This is used when lowering matches: when matching on a place we want to
    /// ensure that place have the same value from the start of the match until
    /// an arm is selected. This prevents this code from compiling:
    /// ```compile_fail,E0510
    /// let mut x = &Some(0);
    /// match *x {
    ///     None => (),
    ///     Some(_) if { x = &None; false } => (),
    ///     Some(_) => (),
    /// }
    /// ```
    /// This can't be a shared borrow because mutably borrowing (*x as Some).0
    /// should not prevent `if let None = x { ... }`, for example, because the
    /// mutating `(*x as Some).0` can't affect the discriminant of `x`.
    /// We can also report errors with this kind of borrow differently.
    Shallow,

    /// Data is mutable and not aliasable.
    Mut { kind: MutBorrowKind },
}

// Order of variants in this enum matters: the derived `Ord` is used to compare
// borrow kinds.
#[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)]
pub enum MutBorrowKind {
    /// Data must be immutable but not aliasable. This kind of borrow cannot currently
    /// be expressed by the user and is used only in implicit closure bindings.
    ClosureCapture,
    /// An ordinary `&mut` borrow; this is what explicit mutable references
    /// lower to (see `BorrowKind::from_hir`/`from_chalk`).
    Default,
    /// This borrow arose from method-call auto-ref
    /// (i.e., adjustment::Adjust::Borrow).
    TwoPhasedBorrow,
}

impl BorrowKind {
    /// Translates a HIR-level mutability marker into the borrow kind it
    /// implies: a default `&mut` borrow for `mut`, a shared borrow otherwise.
    fn from_hir(m: hir_def::type_ref::Mutability) -> Self {
        match m {
            hir_def::type_ref::Mutability::Mut => BorrowKind::Mut { kind: MutBorrowKind::Default },
            hir_def::type_ref::Mutability::Shared => BorrowKind::Shared,
        }
    }

    /// Translates a chalk-level mutability into the borrow kind it implies,
    /// mirroring [`BorrowKind::from_hir`].
    fn from_chalk(m: Mutability) -> Self {
        match m {
            Mutability::Mut => BorrowKind::Mut { kind: MutBorrowKind::Default },
            Mutability::Not => BorrowKind::Shared,
        }
    }
}

/// A unary operation appearing in MIR rvalues.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum UnOp {
    /// The `!` operator for logical inversion
    Not,
    /// The `-` operator for negation
    Neg,
}

/// A binary operation appearing in MIR rvalues.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum BinOp {
    /// The `+` operator (addition)
    Add,
    /// The `-` operator (subtraction)
    Sub,
    /// The `*` operator (multiplication)
    Mul,
    /// The `/` operator (division)
    ///
    /// Division by zero is UB, because the compiler should have inserted checks
    /// prior to this.
    Div,
    /// The `%` operator (modulus)
    ///
    /// Using zero as the modulus (second operand) is UB, because the compiler
    /// should have inserted checks prior to this.
    Rem,
    /// The `^` operator (bitwise xor)
    BitXor,
    /// The `&` operator (bitwise and)
    BitAnd,
    /// The `|` operator (bitwise or)
    BitOr,
    /// The `<<` operator (shift left)
    ///
    /// The offset is truncated to the size of the first operand before shifting.
    Shl,
    /// The `>>` operator (shift right)
    ///
    /// The offset is truncated to the size of the first operand before shifting.
    Shr,
    /// The `==` operator (equality)
    Eq,
    /// The `<` operator (less than)
    Lt,
    /// The `<=` operator (less than or equal to)
    Le,
    /// The `!=` operator (not equal to)
    Ne,
    /// The `>=` operator (greater than or equal to)
    Ge,
    /// The `>` operator (greater than)
    Gt,
    /// The `ptr.offset` operator
    Offset,
}

impl BinOp {
    /// Evaluates a comparison operator (`Eq`, `Ne`, `Lt`, `Le`, `Gt`, `Ge`)
    /// on the two given values.
    ///
    /// Note: `T: PartialOrd` alone suffices, since `PartialOrd` has
    /// `PartialEq` as a supertrait (the former explicit `PartialEq` bound was
    /// redundant).
    ///
    /// # Panics
    ///
    /// Panics if `self` is not one of the six comparison operators.
    fn run_compare<T: PartialOrd>(&self, l: T, r: T) -> bool {
        match self {
            BinOp::Ge => l >= r,
            BinOp::Gt => l > r,
            BinOp::Le => l <= r,
            BinOp::Lt => l < r,
            BinOp::Eq => l == r,
            BinOp::Ne => l != r,
            x => panic!("`run_compare` called on operator {x:?}"),
        }
    }
}

impl Display for BinOp {
    /// Renders the operator as its Rust surface syntax (`Offset` has no
    /// surface syntax and is rendered as `` `offset` ``).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let sym = match self {
            BinOp::Add => "+",
            BinOp::Sub => "-",
            BinOp::Mul => "*",
            BinOp::Div => "/",
            BinOp::Rem => "%",
            BinOp::BitXor => "^",
            BinOp::BitAnd => "&",
            BinOp::BitOr => "|",
            BinOp::Shl => "<<",
            BinOp::Shr => ">>",
            BinOp::Eq => "==",
            BinOp::Lt => "<",
            BinOp::Le => "<=",
            BinOp::Ne => "!=",
            BinOp::Ge => ">=",
            BinOp::Gt => ">",
            BinOp::Offset => "`offset`",
        };
        f.write_str(sym)
    }
}

impl From<hir_def::hir::ArithOp> for BinOp {
    /// Maps a HIR arithmetic/bitwise operator onto the identically-named MIR
    /// binary operator.
    fn from(value: hir_def::hir::ArithOp) -> Self {
        use hir_def::hir::ArithOp;
        match value {
            ArithOp::Add => BinOp::Add,
            ArithOp::Sub => BinOp::Sub,
            ArithOp::Mul => BinOp::Mul,
            ArithOp::Div => BinOp::Div,
            ArithOp::Rem => BinOp::Rem,
            ArithOp::Shl => BinOp::Shl,
            ArithOp::Shr => BinOp::Shr,
            ArithOp::BitXor => BinOp::BitXor,
            ArithOp::BitAnd => BinOp::BitAnd,
            ArithOp::BitOr => BinOp::BitOr,
        }
    }
}

impl From<hir_def::hir::CmpOp> for BinOp {
    /// Maps a HIR comparison operator onto the corresponding MIR binary
    /// operator, folding the `negated`/`strict` flags into the variant choice.
    fn from(value: hir_def::hir::CmpOp) -> Self {
        use hir_def::hir::CmpOp;
        match value {
            CmpOp::Eq { negated } => {
                if negated { BinOp::Ne } else { BinOp::Eq }
            }
            CmpOp::Ord { ordering: Ordering::Greater, strict } => {
                if strict { BinOp::Gt } else { BinOp::Ge }
            }
            CmpOp::Ord { ordering: Ordering::Less, strict } => {
                if strict { BinOp::Lt } else { BinOp::Le }
            }
        }
    }
}

impl From<Operand> for Rvalue {
    fn from(x: Operand) -> Self {
        Self::Use(x)
    }
}

/// The different kinds of casts a [`Rvalue::Cast`] can perform.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum CastKind {
    /// An exposing pointer to address cast. A cast between a pointer and an integer type, or
    /// between a function pointer and an integer type.
    /// See the docs on `expose_addr` for more details.
    PointerExposeAddress,
    /// An address-to-pointer cast that picks up an exposed provenance.
    /// See the docs on `from_exposed_addr` for more details.
    PointerFromExposedAddress,
    /// All sorts of pointer-to-pointer casts. Note that reference-to-raw-ptr casts are
    /// translated into `&raw mut/const *r`, i.e., they are not actually casts.
    PtrToPtr,
    /// Pointer related casts that are done by coercions.
    PointerCoercion(PointerCast),
    /// Cast into a dyn* object.
    DynStar,
    /// A cast between integer types (e.g. `u8 as u32`, `i64 as u8`).
    IntToInt,
    /// A cast from a floating-point type to an integer type.
    FloatToInt,
    /// A cast between floating-point types (e.g. `f32 as f64`).
    FloatToFloat,
    /// A cast from an integer type to a floating-point type.
    IntToFloat,
    /// A cast from a function pointer to a raw pointer.
    FnPtrToPtr,
}

/// The various kinds of rvalues (the right-hand side of an assignment) in MIR.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Rvalue {
    /// Yields the operand unchanged
    Use(Operand),

    /// Creates an array where each element is the value of the operand.
    ///
    /// Corresponds to source code like `[x; 32]`.
    Repeat(Operand, Const),

    /// Creates a reference of the indicated kind to the place.
    ///
    /// There is not much to document here, because besides the obvious parts the semantics of this
    /// are essentially entirely a part of the aliasing model. There are many UCG issues discussing
    /// exactly what the behavior of this operation should be.
    ///
    /// `Shallow` borrows are disallowed after drop lowering.
    Ref(BorrowKind, Place),

    /// Creates a pointer/reference to the given thread local.
    ///
    /// The yielded type is a `*mut T` if the static is mutable, otherwise if the static is extern a
    /// `*const T`, and if neither of those apply a `&T`.
    ///
    /// **Note:** This is a runtime operation that actually executes code and is in this sense more
    /// like a function call. Also, eliminating dead stores of this rvalue causes `fn main() {}` to
    /// SIGILL for some reason that I (JakobDegen) never got a chance to look into.
    ///
    /// **Needs clarification**: Are there weird additional semantics here related to the runtime
    /// nature of this operation?
    // ThreadLocalRef(DefId),
    // Not supported here: the `Infallible` payload makes this variant
    // impossible to construct, while keeping the enum shape close to rustc's.
    ThreadLocalRef(std::convert::Infallible),

    /// Creates a pointer with the indicated mutability to the place.
    ///
    /// This is generated by pointer casts like `&v as *const _` or raw address of expressions like
    /// `&raw v` or `addr_of!(v)`.
    ///
    /// Like with references, the semantics of this operation are heavily dependent on the aliasing
    /// model.
    // AddressOf(Mutability, Place),
    // Not supported here: unconstructible (`Infallible` payload).
    AddressOf(std::convert::Infallible),

    /// Yields the length of the place, as a `usize`.
    ///
    /// If the type of the place is an array, this is the array length. For slices (`[T]`, not
    /// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is
    /// ill-formed for places of other types.
    Len(Place),

    /// Performs essentially all of the casts that can be performed via `as`.
    ///
    /// This allows for casts from/to a variety of types.
    ///
    /// **FIXME**: Document exactly which `CastKind`s allow which types of casts. Figure out why
    /// `ArrayToPointer` and `MutToConstPointer` are special.
    Cast(CastKind, Operand, Ty),

    // FIXME link to `pointer::offset` when it hits stable.
    /// * `Offset` has the same semantics as `pointer::offset`, except that the second
    ///   parameter may be a `usize` as well.
    /// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats,
    ///   raw pointers, or function pointers and return a `bool`. The types of the operands must be
    ///   matching, up to the usual caveat of the lifetimes in function pointers.
    /// * Left and right shift operations accept signed or unsigned integers not necessarily of the
    ///   same type and return a value of the same type as their LHS. Like in Rust, the RHS is
    ///   truncated as needed.
    /// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching
    ///   types and return a value of that type.
    /// * The remaining operations accept signed integers, unsigned integers, or floats with
    ///   matching types and return a value of that type.
    //BinaryOp(BinOp, Box<(Operand, Operand)>),
    // Not supported here: unconstructible (`Infallible` payload); the checked
    // form `CheckedBinaryOp` below is used instead.
    BinaryOp(std::convert::Infallible),

    /// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition.
    ///
    /// When overflow checking is disabled and we are generating run-time code, the error condition
    /// is false. Otherwise, and always during CTFE, the error condition is determined as described
    /// below.
    ///
    /// For addition, subtraction, and multiplication on integers the error condition is set when
    /// the infinite precision result would be unequal to the actual result.
    ///
    /// For shift operations on integers the error condition is set when the value of right-hand
    /// side is greater than or equal to the number of bits in the type of the left-hand side, or
    /// when the value of right-hand side is negative.
    ///
    /// Other combinations of types and operators are unsupported.
    CheckedBinaryOp(BinOp, Operand, Operand),

    /// Computes a value as described by the operation.
    //NullaryOp(NullOp, Ty),
    // Not supported here: unconstructible (`Infallible` payload).
    NullaryOp(std::convert::Infallible),

    /// Exactly like `BinaryOp`, but less operands.
    ///
    /// Also does two's-complement arithmetic. Negation requires a signed integer or a float;
    /// bitwise not requires a signed integer, unsigned integer, or bool. Both operation kinds
    /// return a value with the same type as their operand.
    UnaryOp(UnOp, Operand),

    /// Computes the discriminant of the place, returning it as an integer of type
    /// [`discriminant_ty`]. Returns zero for types without discriminant.
    ///
    /// The validity requirements for the underlying value are undecided for this rvalue, see
    /// [#91095]. Note too that the value of the discriminant is not the same thing as the
    /// variant index; use [`discriminant_for_variant`] to convert.
    ///
    /// [`discriminant_ty`]: crate::ty::Ty::discriminant_ty
    /// [#91095]: https://github.com/rust-lang/rust/issues/91095
    /// [`discriminant_for_variant`]: crate::ty::Ty::discriminant_for_variant
    Discriminant(Place),

    /// Creates an aggregate value, like a tuple or struct.
    ///
    /// This is needed because dataflow analysis needs to distinguish
    /// `dest = Foo { x: ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case that `Foo`
    /// has a destructor.
    ///
    /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Coroutine`. After
    /// coroutine lowering, `Coroutine` aggregate kinds are disallowed too.
    Aggregate(AggregateKind, Box<[Operand]>),

    /// Transmutes a `*mut u8` into shallow-initialized `Box<T>`.
    ///
    /// This is different from a normal transmute because dataflow analysis will treat the box as
    /// initialized but its content as uninitialized. Like other pointer casts, this in general
    /// affects alias analysis.
    ShallowInitBox(Operand, Ty),

    /// NON STANDARD: allocates memory with the type's layout, and shallow init the box with the resulting pointer.
    ShallowInitBoxWithAlloc(Ty),

    /// A CopyForDeref is equivalent to a read from a place at the
    /// codegen level, but is treated specially by drop elaboration. When such a read happens, it
    /// is guaranteed (via nature of the mir_opt `Derefer` in rustc_mir_transform/src/deref_separator)
    /// that the only use of the returned value is a deref operation, immediately
    /// followed by one or more projections. Drop elaboration treats this rvalue as if the
    /// read never happened and just projects further. This allows simplifying various MIR
    /// optimizations and codegen backends that previously had to handle deref operations anywhere
    /// in a place.
    CopyForDeref(Place),
}

/// The kinds of non-terminator MIR statements.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum StatementKind {
    /// Evaluates the rvalue and writes the result into the place.
    Assign(Place, Rvalue),
    /// Marks a read of the place for the benefit of analyses (following
    /// rustc's `FakeRead`); presumably not a real runtime read — TODO confirm.
    FakeRead(Place),
    //SetDiscriminant {
    //    place: Box<Place>,
    //    variant_index: VariantIdx,
    //},
    /// Marks the place's contents as uninitialized again.
    Deinit(Place),
    /// Marks the start of the live range of the local.
    StorageLive(LocalId),
    /// Marks the end of the live range of the local.
    StorageDead(LocalId),
    //Retag(RetagKind, Box<Place>),
    //AscribeUserType(Place, UserTypeProjection, Variance),
    //Intrinsic(Box<NonDivergingIntrinsic>),
    /// Does nothing; may be left behind when passes remove statements.
    Nop,
}
impl StatementKind {
    /// Pairs this statement kind with a source span, yielding a complete
    /// [`Statement`].
    fn with_span(self, span: MirSpan) -> Statement {
        Statement { span, kind: self }
    }
}

/// A single MIR statement: what it does plus where it was lowered from.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Statement {
    /// The operation this statement performs.
    pub kind: StatementKind,
    /// The source location the statement was lowered from, for diagnostics.
    pub span: MirSpan,
}

/// A node in the MIR control-flow graph: a run of straight-line statements
/// followed by a single terminator that transfers control elsewhere.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct BasicBlock {
    /// List of statements in this block.
    pub statements: Vec<Statement>,

    /// Terminator for this block.
    ///
    /// N.B., this should generally ONLY be `None` during construction.
    /// Therefore, you should generally access it via the
    /// `terminator()` or `terminator_mut()` methods. The only
    /// exception is that certain passes, such as `simplify_cfg`, swap
    /// out the terminator temporarily with `None` while they continue
    /// to recurse over the set of basic blocks.
    pub terminator: Option<Terminator>,

    /// If true, this block lies on an unwind path. This is used
    /// during codegen where distinct kinds of basic blocks may be
    /// generated (particularly for MSVC cleanup). Unwind blocks must
    /// only branch to other unwind blocks.
    pub is_cleanup: bool,
}

/// The lowered MIR of one body (a `DefWithBodyId` such as a function).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MirBody {
    /// Interned place projections referenced by this body's `Place`s.
    pub projection_store: ProjectionStore,
    /// All basic blocks of the body; CFG edges live in each block's terminator.
    pub basic_blocks: Arena<BasicBlock>,
    /// All locals of the body.
    pub locals: Arena<Local>,
    /// The block where execution of the body starts.
    pub start_block: BasicBlockId,
    /// The item this MIR was lowered from.
    pub owner: DefWithBodyId,
    /// Maps each HIR binding to the local that backs it.
    pub binding_locals: ArenaMap<BindingId, LocalId>,
    /// The locals corresponding to the body's parameters.
    pub param_locals: Vec<LocalId>,
    /// This field stores the closures directly owned by this body. It is used
    /// in traversing every mir body.
    pub closures: Vec<ClosureId>,
}

impl MirBody {
    /// Returns the inverse of `binding_locals`: a map from each local back to
    /// the HIR binding it was created for.
    pub fn local_to_binding_map(&self) -> ArenaMap<LocalId, BindingId> {
        self.binding_locals.iter().map(|(it, y)| (*y, it)).collect()
    }

    /// Invokes `f` on every [`Place`] mentioned anywhere in the body — in
    /// statement rvalues/lvalues as well as in terminators, including places
    /// buried inside operands. The projection store is passed along so that
    /// `f` may intern new projections while rewriting places.
    fn walk_places(&mut self, mut f: impl FnMut(&mut Place, &mut ProjectionStore)) {
        // Applies `f` to the place of a `Copy`/`Move` operand; constants and
        // statics contain no place.
        fn for_operand(
            op: &mut Operand,
            f: &mut impl FnMut(&mut Place, &mut ProjectionStore),
            store: &mut ProjectionStore,
        ) {
            match op {
                Operand::Copy(p) | Operand::Move(p) => {
                    f(p, store);
                }
                Operand::Constant(_) | Operand::Static(_) => (),
            }
        }
        for (_, block) in self.basic_blocks.iter_mut() {
            for statement in &mut block.statements {
                match &mut statement.kind {
                    StatementKind::Assign(p, r) => {
                        // Destination place first, then places inside the rvalue.
                        f(p, &mut self.projection_store);
                        match r {
                            Rvalue::ShallowInitBoxWithAlloc(_) => (),
                            Rvalue::ShallowInitBox(o, _)
                            | Rvalue::UnaryOp(_, o)
                            | Rvalue::Cast(_, o, _)
                            | Rvalue::Repeat(o, _)
                            | Rvalue::Use(o) => for_operand(o, &mut f, &mut self.projection_store),
                            Rvalue::CopyForDeref(p)
                            | Rvalue::Discriminant(p)
                            | Rvalue::Len(p)
                            | Rvalue::Ref(_, p) => f(p, &mut self.projection_store),
                            Rvalue::CheckedBinaryOp(_, o1, o2) => {
                                for_operand(o1, &mut f, &mut self.projection_store);
                                for_operand(o2, &mut f, &mut self.projection_store);
                            }
                            Rvalue::Aggregate(_, ops) => {
                                for op in ops.iter_mut() {
                                    for_operand(op, &mut f, &mut self.projection_store);
                                }
                            }
                            // These variants carry an `Infallible` payload, so
                            // they can never be constructed; the empty match
                            // proves this arm unreachable.
                            Rvalue::ThreadLocalRef(n)
                            | Rvalue::AddressOf(n)
                            | Rvalue::BinaryOp(n)
                            | Rvalue::NullaryOp(n) => match *n {},
                        }
                    }
                    StatementKind::FakeRead(p) | StatementKind::Deinit(p) => {
                        f(p, &mut self.projection_store)
                    }
                    StatementKind::StorageLive(_)
                    | StatementKind::StorageDead(_)
                    | StatementKind::Nop => (),
                }
            }
            match &mut block.terminator {
                Some(x) => match &mut x.kind {
                    TerminatorKind::SwitchInt { discr, .. } => {
                        for_operand(discr, &mut f, &mut self.projection_store)
                    }
                    // Terminators that only carry target blocks have no places
                    // to visit.
                    TerminatorKind::FalseEdge { .. }
                    | TerminatorKind::FalseUnwind { .. }
                    | TerminatorKind::Goto { .. }
                    | TerminatorKind::UnwindResume
                    | TerminatorKind::CoroutineDrop
                    | TerminatorKind::Abort
                    | TerminatorKind::Return
                    | TerminatorKind::Unreachable => (),
                    TerminatorKind::Drop { place, .. } => {
                        f(place, &mut self.projection_store);
                    }
                    TerminatorKind::DropAndReplace { place, value, .. } => {
                        f(place, &mut self.projection_store);
                        for_operand(value, &mut f, &mut self.projection_store);
                    }
                    TerminatorKind::Call { func, args, destination, .. } => {
                        for_operand(func, &mut f, &mut self.projection_store);
                        args.iter_mut()
                            .for_each(|x| for_operand(x, &mut f, &mut self.projection_store));
                        f(destination, &mut self.projection_store);
                    }
                    TerminatorKind::Assert { cond, .. } => {
                        for_operand(cond, &mut f, &mut self.projection_store);
                    }
                    TerminatorKind::Yield { value, resume_arg, .. } => {
                        for_operand(value, &mut f, &mut self.projection_store);
                        f(resume_arg, &mut self.projection_store);
                    }
                },
                None => (),
            }
        }
    }

    /// Releases excess capacity in every collection owned by the body. The
    /// exhaustive destructuring makes this fail to compile when a new field
    /// is added, forcing it to be considered here.
    fn shrink_to_fit(&mut self) {
        let MirBody {
            basic_blocks,
            locals,
            start_block: _,
            owner: _,
            binding_locals,
            param_locals,
            closures,
            projection_store,
        } = self;
        projection_store.shrink_to_fit();
        basic_blocks.shrink_to_fit();
        locals.shrink_to_fit();
        binding_locals.shrink_to_fit();
        param_locals.shrink_to_fit();
        closures.shrink_to_fit();
        for (_, b) in basic_blocks.iter_mut() {
            let BasicBlock { statements, terminator: _, is_cleanup: _ } = b;
            statements.shrink_to_fit();
        }
    }
}

/// The source location a piece of MIR was lowered from, used for diagnostics.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum MirSpan {
    /// Lowered from the given expression.
    ExprId(ExprId),
    /// Lowered from the given pattern.
    PatId(PatId),
    /// Lowered from the given binding.
    BindingId(BindingId),
    /// Lowered from the implicit `self` parameter.
    SelfParam,
    /// No usable source location is available.
    Unknown,
}

impl MirSpan {
    /// Returns whether this span points at reference-introducing syntax:
    /// a `&`/`&mut` expression, or a `ref`/`ref mut` binding.
    pub fn is_ref_span(&self, body: &Body) -> bool {
        match self {
            &MirSpan::ExprId(expr) => matches!(body[expr], Expr::Ref { .. }),
            // FIXME: Figure out if this is correct wrt. match ergonomics.
            &MirSpan::BindingId(binding) => matches!(
                body.bindings[binding].mode,
                BindingAnnotation::Ref | BindingAnnotation::RefMut
            ),
            MirSpan::PatId(_) | MirSpan::SelfParam | MirSpan::Unknown => false,
        }
    }
}

// Generates `From<ExprId>` and `From<PatId>` impls for `MirSpan`, so both id
// types convert via `.into()`.
impl_from!(ExprId, PatId for MirSpan);

impl From<&ExprId> for MirSpan {
    fn from(value: &ExprId) -> Self {
        (*value).into()
    }
}