hir_ty/mir.rs
1//! MIR definitions and implementation
2
3use std::{collections::hash_map::Entry, fmt::Display, iter};
4
5use base_db::Crate;
6use either::Either;
7use hir_def::{
8 DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId,
9 expr_store::Body,
10 hir::{BindingAnnotation, BindingId, Expr, ExprId, Ordering, PatId},
11};
12use la_arena::{Arena, ArenaMap, Idx, RawIdx};
13use rustc_ast_ir::Mutability;
14use rustc_hash::FxHashMap;
15use rustc_type_ir::inherent::{GenericArgs as _, IntoKind, SliceLike, Ty as _};
16use smallvec::{SmallVec, smallvec};
17use stdx::{impl_from, never};
18
19use crate::{
20 CallableDefId, InferenceResult, MemoryMap,
21 consteval::usize_const,
22 db::{HirDatabase, InternedClosureId},
23 display::{DisplayTarget, HirDisplay},
24 infer::PointerCast,
25 next_solver::{
26 Const, DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, Ty, TyKind,
27 infer::{InferCtxt, traits::ObligationCause},
28 obligation_ctxt::ObligationCtxt,
29 },
30};
31
32mod borrowck;
33mod eval;
34mod lower;
35mod monomorphization;
36mod pretty;
37
38pub use borrowck::{BorrowckResult, MutabilityReason, borrowck_query};
39pub use eval::{
40 Evaluator, MirEvalError, VTableMap, interpret_mir, pad16, render_const_using_debug_impl,
41};
42pub use lower::{MirLowerError, lower_to_mir, mir_body_for_closure_query, mir_body_query};
43pub use monomorphization::{
44 monomorphized_mir_body_for_closure_query, monomorphized_mir_body_query,
45};
46
47pub(crate) use lower::mir_body_cycle_result;
48pub(crate) use monomorphization::monomorphized_mir_body_cycle_result;
49
50use super::consteval::try_const_usize;
51
pub type BasicBlockId<'db> = Idx<BasicBlock<'db>>;
pub type LocalId<'db> = Idx<Local<'db>>;

/// Returns the id of the local holding the body's return value.
///
/// By construction the return place is always the first allocated local
/// (raw index 0), mirroring rustc's `_0` convention.
fn return_slot<'db>() -> LocalId<'db> {
    LocalId::from_raw(RawIdx::from(0))
}

/// A local variable (including temporaries) of a MIR body.
/// Currently only its type is tracked.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Local<'db> {
    pub ty: Ty<'db>,
}
63
/// An operand in MIR represents a "value" in Rust, the definition of which is undecided and part of
/// the memory model. One proposal for a definition of values can be found [on UCG][value-def].
///
/// [value-def]: https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/value-domain.md
///
/// The most common way to create values is via loading a place. Loading a place is an operation
/// which reads the memory of the place and converts it to a value. This is a fundamentally *typed*
/// operation. The nature of the value produced depends on the type of the conversion. Furthermore,
/// there may be other effects: if the type has a validity constraint loading the place might be UB
/// if the validity constraint is not met.
///
/// **Needs clarification:** Ralf proposes that loading a place not have side-effects.
/// This is what is implemented in miri today. Are these the semantics we want for MIR? Is this
/// something we can even decide without knowing more about Rust's memory model?
///
/// **Needs clarification:** Is loading a place that has its variant index set well-formed? Miri
/// currently implements it, but it seems like this may be something to check against in the
/// validator.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Operand<'db> {
    /// What kind of value this operand produces.
    kind: OperandKind<'db>,
    /// Source location the operand originates from, when known.
    // FIXME : This should actually just be of type `MirSpan`.
    span: Option<MirSpan>,
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub enum OperandKind<'db> {
    /// Creates a value by loading the given place.
    ///
    /// Before drop elaboration, the type of the place must be `Copy`. After drop elaboration there
    /// is no such requirement.
    Copy(Place<'db>),

    /// Creates a value by performing loading the place, just like the `Copy` operand.
    ///
    /// This *may* additionally overwrite the place with `uninit` bytes, depending on how we decide
    /// in [UCG#188]. You should not emit MIR that may attempt a subsequent second load of this
    /// place without first re-initializing it.
    ///
    /// [UCG#188]: https://github.com/rust-lang/unsafe-code-guidelines/issues/188
    Move(Place<'db>),
    /// Constants are already semantically values, and remain unchanged.
    Constant { konst: Const<'db>, ty: Ty<'db> },
    /// NON STANDARD: This kind of operand returns an immutable reference to that static memory. Rustc
    /// handles it with the `Constant` variant somehow.
    Static(StaticId),
}
111
112impl<'db> Operand<'db> {
113 fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap<'db>, ty: Ty<'db>) -> Self {
114 let interner = DbInterner::conjure();
115 Operand {
116 kind: OperandKind::Constant {
117 konst: Const::new_valtree(interner, ty, data, memory_map),
118 ty,
119 },
120 span: None,
121 }
122 }
123
124 fn from_bytes(data: Box<[u8]>, ty: Ty<'db>) -> Self {
125 Operand::from_concrete_const(data, MemoryMap::default(), ty)
126 }
127
128 fn const_zst(ty: Ty<'db>) -> Operand<'db> {
129 Self::from_bytes(Box::default(), ty)
130 }
131
132 fn from_fn(
133 db: &'db dyn HirDatabase,
134 func_id: hir_def::FunctionId,
135 generic_args: GenericArgs<'db>,
136 ) -> Operand<'db> {
137 let interner = DbInterner::new_no_crate(db);
138 let ty = Ty::new_fn_def(interner, CallableDefId::FunctionId(func_id).into(), generic_args);
139 Operand::from_bytes(Box::default(), ty)
140 }
141}
142
/// One step of a place projection, generic over the operand type `V` used by
/// `Index` (for MIR places this is a `LocalId`; see `PlaceElem`).
#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)]
pub enum ProjectionElem<'db, V: PartialEq> {
    /// Dereference of a pointer, reference, or `Box`.
    Deref,
    /// Access of a field of an ADT (left) or of a tuple (right).
    Field(Either<FieldId, TupleFieldId>),
    // FIXME: get rid of this, and use FieldId for tuples and closures
    /// Access of the n-th field of a closure.
    ClosureField(usize),
    /// Indexing with a dynamic operand, e.g. `base[i]`.
    Index(#[update(unsafe(with(crate::utils::unsafe_update_eq)))] V),
    /// Indexing with a constant offset, counted from the start or from the end.
    ConstantIndex { offset: u64, from_end: bool },
    /// A subslice of an array or slice, dropping `from` elements at the front
    /// and `to` elements at the back (see `projected_ty`).
    Subslice { from: u64, to: u64 },
    //Downcast(Option<Symbol>, VariantIdx),
    /// Cast to an opaque type; currently never emitted (see `projected_ty`).
    OpaqueCast(Ty<'db>),
}
155
impl<'db, V: PartialEq> ProjectionElem<'db, V> {
    /// Computes the type obtained by applying this projection element to a
    /// place of type `base`.
    ///
    /// `closure_field` is consulted to resolve the type of a `ClosureField`
    /// projection. On any ill-typed projection this returns an error type
    /// (logging via `never!`) instead of panicking.
    pub fn projected_ty(
        &self,
        infcx: &InferCtxt<'db>,
        env: ParamEnv<'db>,
        mut base: Ty<'db>,
        closure_field: impl FnOnce(InternedClosureId, GenericArgs<'db>, usize) -> Ty<'db>,
        krate: Crate,
    ) -> Ty<'db> {
        let interner = infcx.interner;
        let db = interner.db;

        // we only bail on mir building when there are type mismatches
        // but error types may pop up resulting in us still attempting to build the mir
        // so just propagate the error type
        if base.is_ty_error() {
            return Ty::new_error(interner, ErrorGuaranteed);
        }

        // Alias types (e.g. associated types) must be normalized first so the
        // match below sees the underlying concrete type.
        if matches!(base.kind(), TyKind::Alias(..)) {
            let mut ocx = ObligationCtxt::new(infcx);
            match ocx.structurally_normalize_ty(&ObligationCause::dummy(), env, base) {
                Ok(it) => base = it,
                Err(_) => return Ty::new_error(interner, ErrorGuaranteed),
            }
        }

        match self {
            // Deref peels one level of pointer/reference, or the pointee of `Box`.
            ProjectionElem::Deref => match base.kind() {
                TyKind::RawPtr(inner, _) | TyKind::Ref(_, inner, _) => inner,
                TyKind::Adt(adt_def, subst) if adt_def.is_box() => subst.type_at(0),
                _ => {
                    never!(
                        "Overloaded deref on type {} is not a projection",
                        base.display(db, DisplayTarget::from_crate(db, krate))
                    );
                    Ty::new_error(interner, ErrorGuaranteed)
                }
            },
            // ADT field: look the field type up in the database and instantiate
            // it with the ADT's generic arguments.
            ProjectionElem::Field(Either::Left(f)) => match base.kind() {
                TyKind::Adt(_, subst) => {
                    db.field_types(f.parent)[f.local_id].instantiate(interner, subst)
                }
                ty => {
                    never!("Only adt has field, found {:?}", ty);
                    Ty::new_error(interner, ErrorGuaranteed)
                }
            },
            // Tuple field: the type is simply the n-th element of the tuple.
            ProjectionElem::Field(Either::Right(f)) => match base.kind() {
                TyKind::Tuple(subst) => {
                    subst.as_slice().get(f.index as usize).copied().unwrap_or_else(|| {
                        never!("Out of bound tuple field");
                        Ty::new_error(interner, ErrorGuaranteed)
                    })
                }
                ty => {
                    never!("Only tuple has tuple field: {:?}", ty);
                    Ty::new_error(interner, ErrorGuaranteed)
                }
            },
            // Closure captures are resolved via the caller-supplied callback.
            ProjectionElem::ClosureField(f) => match base.kind() {
                TyKind::Closure(id, subst) => closure_field(id.0, subst, *f),
                _ => {
                    never!("Only closure has closure field");
                    Ty::new_error(interner, ErrorGuaranteed)
                }
            },
            // Both indexing forms yield the element type of an array/slice.
            ProjectionElem::ConstantIndex { .. } | ProjectionElem::Index(_) => match base.kind() {
                TyKind::Array(inner, _) | TyKind::Slice(inner) => inner,
                _ => {
                    never!("Overloaded index is not a projection");
                    Ty::new_error(interner, ErrorGuaranteed)
                }
            },
            // Subslicing an array shrinks its length by `from + to`; the length
            // becomes unknown if the original length isn't a known usize.
            &ProjectionElem::Subslice { from, to } => match base.kind() {
                TyKind::Array(inner, c) => {
                    let next_c = usize_const(
                        db,
                        match try_const_usize(db, c) {
                            None => None,
                            Some(x) => x.checked_sub(u128::from(from + to)),
                        },
                        krate,
                    );
                    Ty::new_array_with_const_len(interner, inner, next_c)
                }
                // Subslicing a slice keeps the type unchanged.
                TyKind::Slice(_) => base,
                _ => {
                    never!("Subslice projection should only happen on slice and array");
                    Ty::new_error(interner, ErrorGuaranteed)
                }
            },
            ProjectionElem::OpaqueCast(_) => {
                never!("We don't emit these yet");
                Ty::new_error(interner, ErrorGuaranteed)
            }
        }
    }
}
255
/// Projection elements as used in MIR places, with locals as index operands.
type PlaceElem<'db> = ProjectionElem<'db, LocalId<'db>>;

/// Interned handle to a projection (a `[PlaceElem]` sequence) in a `ProjectionStore`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ProjectionId(u32);

/// Bidirectional interner for place projections, deduplicating `[PlaceElem]`
/// sequences behind cheap, copyable `ProjectionId`s.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ProjectionStore<'db> {
    id_to_proj: FxHashMap<ProjectionId, Box<[PlaceElem<'db>]>>,
    proj_to_id: FxHashMap<Box<[PlaceElem<'db>]>, ProjectionId>,
}
266
267impl Default for ProjectionStore<'_> {
268 fn default() -> Self {
269 let mut this = Self { id_to_proj: Default::default(), proj_to_id: Default::default() };
270 // Ensure that [] will get the id 0 which is used in `ProjectionId::Empty`
271 this.intern(Box::new([]));
272 this
273 }
274}
275
276impl<'db> ProjectionStore<'db> {
277 pub fn shrink_to_fit(&mut self) {
278 self.id_to_proj.shrink_to_fit();
279 self.proj_to_id.shrink_to_fit();
280 }
281
282 pub fn intern_if_exist(&self, projection: &[PlaceElem<'db>]) -> Option<ProjectionId> {
283 self.proj_to_id.get(projection).copied()
284 }
285
286 pub fn intern(&mut self, projection: Box<[PlaceElem<'db>]>) -> ProjectionId {
287 let new_id = ProjectionId(self.proj_to_id.len() as u32);
288 match self.proj_to_id.entry(projection) {
289 Entry::Occupied(id) => *id.get(),
290 Entry::Vacant(e) => {
291 let key_clone = e.key().clone();
292 e.insert(new_id);
293 self.id_to_proj.insert(new_id, key_clone);
294 new_id
295 }
296 }
297 }
298}
299
300impl ProjectionId {
301 pub const EMPTY: ProjectionId = ProjectionId(0);
302
303 pub fn is_empty(self) -> bool {
304 self == ProjectionId::EMPTY
305 }
306
307 pub fn lookup<'a, 'db>(self, store: &'a ProjectionStore<'db>) -> &'a [PlaceElem<'db>] {
308 store.id_to_proj.get(&self).unwrap()
309 }
310
311 pub fn project<'db>(
312 self,
313 projection: PlaceElem<'db>,
314 store: &mut ProjectionStore<'db>,
315 ) -> ProjectionId {
316 let mut current = self.lookup(store).to_vec();
317 current.push(projection);
318 store.intern(current.into())
319 }
320}
321
/// A MIR place: a base local plus an interned sequence of projections
/// (resolved through the body's `ProjectionStore`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Place<'db> {
    pub local: LocalId<'db>,
    pub projection: ProjectionId,
}
327
328impl<'db> Place<'db> {
329 fn is_parent(&self, child: &Place<'db>, store: &ProjectionStore<'db>) -> bool {
330 self.local == child.local
331 && child.projection.lookup(store).starts_with(self.projection.lookup(store))
332 }
333
334 /// The place itself is not included
335 fn iterate_over_parents<'a>(
336 &'a self,
337 store: &'a ProjectionStore<'db>,
338 ) -> impl Iterator<Item = Place<'db>> + 'a {
339 let projection = self.projection.lookup(store);
340 (0..projection.len()).map(|x| &projection[0..x]).filter_map(move |x| {
341 Some(Place { local: self.local, projection: store.intern_if_exist(x)? })
342 })
343 }
344
345 fn project(&self, projection: PlaceElem<'db>, store: &mut ProjectionStore<'db>) -> Place<'db> {
346 Place { local: self.local, projection: self.projection.project(projection, store) }
347 }
348}
349
impl<'db> From<LocalId<'db>> for Place<'db> {
    /// A bare local is a place with the empty projection.
    fn from(local: LocalId<'db>) -> Self {
        Self { local, projection: ProjectionId::EMPTY }
    }
}
355
/// The kind of an `Aggregate` rvalue, i.e. what composite value is being built.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum AggregateKind<'db> {
    /// The type is of the element
    Array(Ty<'db>),
    /// The type is of the tuple
    Tuple(Ty<'db>),
    /// An ADT variant, with the generic arguments it is instantiated with.
    Adt(VariantId, GenericArgs<'db>),
    /// A union value; the `FieldId` names the single field being written.
    Union(UnionId, FieldId),
    /// The type is of the closure itself.
    Closure(Ty<'db>),
    //Coroutine(LocalDefId, SubstsRef, Movability),
}

/// The branch table of a `SwitchInt` terminator.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct SwitchTargets<'db> {
    /// Possible values. The locations to branch to in each case
    /// are found in the corresponding indices from the `targets` vector.
    values: SmallVec<[u128; 1]>,

    /// Possible branch sites. The last element of this vector is used
    /// for the otherwise branch, so targets.len() == values.len() + 1
    /// should hold.
    //
    // This invariant is quite non-obvious and also could be improved.
    // One way to make this invariant is to have something like this instead:
    //
    // branches: Vec<(ConstInt, BasicBlock)>,
    // otherwise: Option<BasicBlock> // exhaustive if None
    //
    // However we’ve decided to keep this as-is until we figure a case
    // where some other approach seems to be strictly better than other.
    targets: SmallVec<[BasicBlockId<'db>; 2]>,
}
388
389impl<'db> SwitchTargets<'db> {
390 /// Creates switch targets from an iterator of values and target blocks.
391 ///
392 /// The iterator may be empty, in which case the `SwitchInt` instruction is equivalent to
393 /// `goto otherwise;`.
394 pub fn new(
395 targets: impl Iterator<Item = (u128, BasicBlockId<'db>)>,
396 otherwise: BasicBlockId<'db>,
397 ) -> Self {
398 let (values, mut targets): (SmallVec<_>, SmallVec<_>) = targets.unzip();
399 targets.push(otherwise);
400 Self { values, targets }
401 }
402
403 /// Builds a switch targets definition that jumps to `then` if the tested value equals `value`,
404 /// and to `else_` if not.
405 pub fn static_if(value: u128, then: BasicBlockId<'db>, else_: BasicBlockId<'db>) -> Self {
406 Self { values: smallvec![value], targets: smallvec![then, else_] }
407 }
408
409 /// Returns the fallback target that is jumped to when none of the values match the operand.
410 pub fn otherwise(&self) -> BasicBlockId<'db> {
411 *self.targets.last().unwrap()
412 }
413
414 /// Returns an iterator over the switch targets.
415 ///
416 /// The iterator will yield tuples containing the value and corresponding target to jump to, not
417 /// including the `otherwise` fallback target.
418 ///
419 /// Note that this may yield 0 elements. Only the `otherwise` branch is mandatory.
420 pub fn iter(&self) -> impl Iterator<Item = (u128, BasicBlockId<'db>)> + '_ {
421 iter::zip(&self.values, &self.targets).map(|(x, y)| (*x, *y))
422 }
423
424 /// Returns a slice with all possible jump targets (including the fallback target).
425 pub fn all_targets(&self) -> &[BasicBlockId<'db>] {
426 &self.targets
427 }
428
429 /// Finds the `BasicBlock` to which this `SwitchInt` will branch given the
430 /// specific value. This cannot fail, as it'll return the `otherwise`
431 /// branch if there's not a specific match for the value.
432 pub fn target_for_value(&self, value: u128) -> BasicBlockId<'db> {
433 self.iter().find_map(|(v, t)| (v == value).then_some(t)).unwrap_or_else(|| self.otherwise())
434 }
435}
436
/// A terminator together with the source span it was lowered from.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Terminator<'db> {
    pub span: MirSpan,
    pub kind: TerminatorKind<'db>,
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub enum TerminatorKind<'db> {
    /// Block has one successor; we continue execution there.
    Goto { target: BasicBlockId<'db> },

    /// Switches based on the computed value.
    ///
    /// First, evaluates the `discr` operand. The type of the operand must be a signed or unsigned
    /// integer, char, or bool, and must match the given type. Then, if the list of switch targets
    /// contains the computed value, continues execution at the associated basic block. Otherwise,
    /// continues execution at the "otherwise" basic block.
    ///
    /// Target values may not appear more than once.
    SwitchInt {
        /// The discriminant value being tested.
        discr: Operand<'db>,

        targets: SwitchTargets<'db>,
    },

    /// Indicates that the landing pad is finished and that the process should continue unwinding.
    ///
    /// Like a return, this marks the end of this invocation of the function.
    ///
    /// Only permitted in cleanup blocks. `Resume` is not permitted with `-C unwind=abort` after
    /// deaggregation runs.
    UnwindResume,

    /// Indicates that the landing pad is finished and that the process should abort.
    ///
    /// Used to prevent unwinding for foreign items or with `-C unwind=abort`. Only permitted in
    /// cleanup blocks.
    Abort,

    /// Returns from the function.
    ///
    /// Like function calls, the exact semantics of returns in Rust are unclear. Returning very
    /// likely at least assigns the value currently in the return place (`_0`) to the place
    /// specified in the associated `Call` terminator in the calling function, as if assigned via
    /// `dest = move _0`. It might additionally do other things, like have side-effects in the
    /// aliasing model.
    ///
    /// If the body is a coroutine body, this has slightly different semantics; it instead causes a
    /// `CoroutineState::Returned(_0)` to be created (as if by an `Aggregate` rvalue) and assigned
    /// to the return place.
    Return,

    /// Indicates a terminator that can never be reached.
    ///
    /// Executing this terminator is UB.
    Unreachable,

    /// The behavior of this statement differs significantly before and after drop elaboration.
    /// After drop elaboration, `Drop` executes the drop glue for the specified place, after which
    /// it continues execution/unwinds at the given basic blocks. It is possible that executing drop
    /// glue is special - this would be part of Rust's memory model. (**FIXME**: do we have an
    /// issue tracking if drop glue has any interesting semantics in addition to those of a function
    /// call?)
    ///
    /// `Drop` before drop elaboration is a *conditional* execution of the drop glue. Specifically, the
    /// `Drop` will be executed if...
    ///
    /// **Needs clarification**: End of that sentence. This in effect should document the exact
    /// behavior of drop elaboration. The following sounds vaguely right, but I'm not quite sure:
    ///
    /// > The drop glue is executed if, among all statements executed within this `Body`, an assignment to
    /// > the place or one of its "parents" occurred more recently than a move out of it. This does not
    /// > consider indirect assignments.
    Drop { place: Place<'db>, target: BasicBlockId<'db>, unwind: Option<BasicBlockId<'db>> },

    /// Drops the place and assigns a new value to it.
    ///
    /// This first performs the exact same operation as the pre drop-elaboration `Drop` terminator;
    /// it then additionally assigns the `value` to the `place` as if by an assignment statement.
    /// This assignment occurs both in the unwind and the regular code paths. The semantics are best
    /// explained by the elaboration:
    ///
    /// ```ignore (MIR)
    /// BB0 {
    ///   DropAndReplace(P <- V, goto BB1, unwind BB2)
    /// }
    /// ```
    ///
    /// becomes
    ///
    /// ```ignore (MIR)
    /// BB0 {
    ///   Drop(P, goto BB1, unwind BB2)
    /// }
    /// BB1 {
    ///   // P is now uninitialized
    ///   P <- V
    /// }
    /// BB2 {
    ///   // P is now uninitialized -- its dtor panicked
    ///   P <- V
    /// }
    /// ```
    ///
    /// Disallowed after drop elaboration.
    DropAndReplace {
        place: Place<'db>,
        value: Operand<'db>,
        target: BasicBlockId<'db>,
        unwind: Option<BasicBlockId<'db>>,
    },

    /// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of
    /// the referred to function. The operand types must match the argument types of the function.
    /// The return place type must match the return type. The type of the `func` operand must be
    /// callable, meaning either a function pointer, a function type, or a closure type.
    ///
    /// **Needs clarification**: The exact semantics of this. Current backends rely on `move`
    /// operands not aliasing the return place. It is unclear how this is justified in MIR, see
    /// [#71117].
    ///
    /// [#71117]: https://github.com/rust-lang/rust/issues/71117
    Call {
        /// The function that’s being called.
        func: Operand<'db>,
        /// Arguments the function is called with.
        /// These are owned by the callee, which is free to modify them.
        /// This allows the memory occupied by "by-value" arguments to be
        /// reused across function calls without duplicating the contents.
        args: Box<[Operand<'db>]>,
        /// Where the returned value will be written
        destination: Place<'db>,
        /// Where to go after this call returns. If none, the call necessarily diverges.
        target: Option<BasicBlockId<'db>>,
        /// Cleanups to be done if the call unwinds.
        cleanup: Option<BasicBlockId<'db>>,
        /// `true` if this is from a call in HIR rather than from an overloaded
        /// operator. True for overloaded function call.
        from_hir_call: bool,
        // This `Span` is the span of the function, without the dot and receiver
        // (e.g. `foo(a, b)` in `x.foo(a, b)`)
        //fn_span: Span,
    },

    /// Evaluates the operand, which must have type `bool`. If it is not equal to `expected`,
    /// initiates a panic. Initiating a panic corresponds to a `Call` terminator with some
    /// unspecified constant as the function to call, all the operands stored in the `AssertMessage`
    /// as parameters, and `None` for the destination. Keep in mind that the `cleanup` path is not
    /// necessarily executed even in the case of a panic, for example in `-C panic=abort`. If the
    /// assertion does not fail, execution continues at the specified basic block.
    Assert {
        cond: Operand<'db>,
        expected: bool,
        //msg: AssertMessage,
        target: BasicBlockId<'db>,
        cleanup: Option<BasicBlockId<'db>>,
    },

    /// Marks a suspend point.
    ///
    /// Like `Return` terminators in coroutine bodies, this computes `value` and then a
    /// `CoroutineState::Yielded(value)` as if by `Aggregate` rvalue. That value is then assigned to
    /// the return place of the function calling this one, and execution continues in the calling
    /// function. When next invoked with the same first argument, execution of this function
    /// continues at the `resume` basic block, with the second argument written to the `resume_arg`
    /// place. If the coroutine is dropped before then, the `drop` basic block is invoked.
    ///
    /// Not permitted in bodies that are not coroutine bodies, or after coroutine lowering.
    ///
    /// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`?
    Yield {
        /// The value to return.
        value: Operand<'db>,
        /// Where to resume to.
        resume: BasicBlockId<'db>,
        /// The place to store the resume argument in.
        resume_arg: Place<'db>,
        /// Cleanup to be done if the coroutine is dropped at this suspend point.
        drop: Option<BasicBlockId<'db>>,
    },

    /// Indicates the end of dropping a coroutine.
    ///
    /// Semantically just a `return` (from the coroutine's drop glue). Only permitted in the same situations
    /// as `yield`.
    ///
    /// **Needs clarification**: Is that even correct? The coroutine drop code is always confusing
    /// to me, because it's not even really in the current body.
    ///
    /// **Needs clarification**: Are there type system constraints on these terminators? Should
    /// there be a "block type" like `cleanup` blocks for them?
    CoroutineDrop,

    /// A block where control flow only ever takes one real path, but borrowck needs to be more
    /// conservative.
    ///
    /// At runtime this is semantically just a goto.
    ///
    /// Disallowed after drop elaboration.
    FalseEdge {
        /// The target normal control flow will take.
        real_target: BasicBlockId<'db>,
        /// A block control flow could conceptually jump to, but won't in
        /// practice.
        imaginary_target: BasicBlockId<'db>,
    },

    /// A terminator for blocks that only take one path in reality, but where we reserve the right
    /// to unwind in borrowck, even if it won't happen in practice. This can arise in infinite loops
    /// with no function calls for example.
    ///
    /// At runtime this is semantically just a goto.
    ///
    /// Disallowed after drop elaboration.
    FalseUnwind {
        /// The target normal control flow will take.
        real_target: BasicBlockId<'db>,
        /// The imaginary cleanup block link. This particular path will never be taken
        /// in practice, but in order to avoid fragility we want to always
        /// consider it in borrowck. We don't want to accept programs which
        /// pass borrowck only when `panic=abort` or some assertions are disabled
        /// due to release vs. debug mode builds. This needs to be an `Option` because
        /// of the `remove_noop_landing_pads` and `abort_unwinding_calls` passes.
        unwind: Option<BasicBlockId<'db>>,
    },
}
664
// Order of variants in this enum matter: they are used to compare borrow kinds.
#[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)]
pub enum BorrowKind {
    /// Data must be immutable and is aliasable.
    Shared,

    /// The immediately borrowed place must be immutable, but projections from
    /// it don't need to be. For example, a shallow borrow of `a.b` doesn't
    /// conflict with a mutable borrow of `a.b.c`.
    ///
    /// This is used when lowering matches: when matching on a place we want to
    /// ensure that place have the same value from the start of the match until
    /// an arm is selected. This prevents this code from compiling:
    /// ```compile_fail,E0510
    /// let mut x = &Some(0);
    /// match *x {
    ///     None => (),
    ///     Some(_) if { x = &None; false } => (),
    ///     Some(_) => (),
    /// }
    /// ```
    /// This can't be a shared borrow because mutably borrowing (*x as Some).0
    /// should not prevent `if let None = x { ... }`, for example, because the
    /// mutating `(*x as Some).0` can't affect the discriminant of `x`.
    /// We can also report errors with this kind of borrow differently.
    Shallow,

    /// Data is mutable and not aliasable.
    Mut { kind: MutBorrowKind },
}

// Order of variants in this enum matter: they are used to compare borrow kinds.
#[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)]
pub enum MutBorrowKind {
    /// Data must be immutable but not aliasable. This kind of borrow cannot currently
    /// be expressed by the user and is used only in implicit closure bindings.
    ClosureCapture,
    /// An ordinary `&mut` borrow.
    Default,
    /// This borrow arose from method-call auto-ref
    /// (i.e., adjustment::Adjust::Borrow).
    TwoPhasedBorrow,
}
707
708impl BorrowKind {
709 fn from_hir(m: hir_def::type_ref::Mutability) -> Self {
710 match m {
711 hir_def::type_ref::Mutability::Shared => BorrowKind::Shared,
712 hir_def::type_ref::Mutability::Mut => BorrowKind::Mut { kind: MutBorrowKind::Default },
713 }
714 }
715
716 fn from_rustc(m: rustc_ast_ir::Mutability) -> Self {
717 match m {
718 rustc_ast_ir::Mutability::Not => BorrowKind::Shared,
719 rustc_ast_ir::Mutability::Mut => BorrowKind::Mut { kind: MutBorrowKind::Default },
720 }
721 }
722}
723
/// A unary operation appearing in MIR rvalues.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum UnOp {
    /// The `!` operator for logical inversion
    Not,
    /// The `-` operator for negation
    Neg,
}
731
/// A binary operation appearing in MIR rvalues.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum BinOp {
    /// The `+` operator (addition)
    Add,
    /// The `-` operator (subtraction)
    Sub,
    /// The `*` operator (multiplication)
    Mul,
    /// The `/` operator (division)
    ///
    /// Division by zero is UB, because the compiler should have inserted checks
    /// prior to this.
    Div,
    /// The `%` operator (modulus)
    ///
    /// Using zero as the modulus (second operand) is UB, because the compiler
    /// should have inserted checks prior to this.
    Rem,
    /// The `^` operator (bitwise xor)
    BitXor,
    /// The `&` operator (bitwise and)
    BitAnd,
    /// The `|` operator (bitwise or)
    BitOr,
    /// The `<<` operator (shift left)
    ///
    /// The offset is truncated to the size of the first operand before shifting.
    Shl,
    /// The `>>` operator (shift right)
    ///
    /// The offset is truncated to the size of the first operand before shifting.
    Shr,
    /// The `==` operator (equality)
    Eq,
    /// The `<` operator (less than)
    Lt,
    /// The `<=` operator (less than or equal to)
    Le,
    /// The `!=` operator (not equal to)
    Ne,
    /// The `>=` operator (greater than or equal to)
    Ge,
    /// The `>` operator (greater than)
    Gt,
    /// The `ptr.offset` operator
    Offset,
}

impl BinOp {
    /// Evaluates this comparison operator on two values.
    ///
    /// # Panics
    ///
    /// Panics if `self` is not one of the six comparison operators
    /// (`Eq`, `Ne`, `Lt`, `Le`, `Gt`, `Ge`).
    // Note: `PartialOrd` already implies `PartialEq`, so a single bound suffices.
    fn run_compare<T: PartialOrd>(&self, l: T, r: T) -> bool {
        match self {
            BinOp::Ge => l >= r,
            BinOp::Gt => l > r,
            BinOp::Le => l <= r,
            BinOp::Lt => l < r,
            BinOp::Eq => l == r,
            BinOp::Ne => l != r,
            x => panic!("`run_compare` called on operator {x:?}"),
        }
    }
}

impl Display for BinOp {
    /// Renders the operator as its Rust surface syntax (`Offset` has none and
    /// is shown as `` `offset` ``).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            BinOp::Add => "+",
            BinOp::Sub => "-",
            BinOp::Mul => "*",
            BinOp::Div => "/",
            BinOp::Rem => "%",
            BinOp::BitXor => "^",
            BinOp::BitAnd => "&",
            BinOp::BitOr => "|",
            BinOp::Shl => "<<",
            BinOp::Shr => ">>",
            BinOp::Eq => "==",
            BinOp::Lt => "<",
            BinOp::Le => "<=",
            BinOp::Ne => "!=",
            BinOp::Ge => ">=",
            BinOp::Gt => ">",
            BinOp::Offset => "`offset`",
        })
    }
}
817
818impl From<hir_def::hir::ArithOp> for BinOp {
819 fn from(value: hir_def::hir::ArithOp) -> Self {
820 match value {
821 hir_def::hir::ArithOp::Add => BinOp::Add,
822 hir_def::hir::ArithOp::Mul => BinOp::Mul,
823 hir_def::hir::ArithOp::Sub => BinOp::Sub,
824 hir_def::hir::ArithOp::Div => BinOp::Div,
825 hir_def::hir::ArithOp::Rem => BinOp::Rem,
826 hir_def::hir::ArithOp::Shl => BinOp::Shl,
827 hir_def::hir::ArithOp::Shr => BinOp::Shr,
828 hir_def::hir::ArithOp::BitXor => BinOp::BitXor,
829 hir_def::hir::ArithOp::BitOr => BinOp::BitOr,
830 hir_def::hir::ArithOp::BitAnd => BinOp::BitAnd,
831 }
832 }
833}
834
835impl From<hir_def::hir::CmpOp> for BinOp {
836 fn from(value: hir_def::hir::CmpOp) -> Self {
837 match value {
838 hir_def::hir::CmpOp::Eq { negated: false } => BinOp::Eq,
839 hir_def::hir::CmpOp::Eq { negated: true } => BinOp::Ne,
840 hir_def::hir::CmpOp::Ord { ordering: Ordering::Greater, strict: false } => BinOp::Ge,
841 hir_def::hir::CmpOp::Ord { ordering: Ordering::Greater, strict: true } => BinOp::Gt,
842 hir_def::hir::CmpOp::Ord { ordering: Ordering::Less, strict: false } => BinOp::Le,
843 hir_def::hir::CmpOp::Ord { ordering: Ordering::Less, strict: true } => BinOp::Lt,
844 }
845 }
846}
847
impl<'db> From<Operand<'db>> for Rvalue<'db> {
    /// Wraps a bare operand in the trivial `Use` rvalue.
    fn from(x: Operand<'db>) -> Self {
        Self::Use(x)
    }
}

/// The kind of a cast, mirroring rustc's `CastKind`.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum CastKind {
    /// An exposing pointer to address cast. A cast between a pointer and an integer type, or
    /// between a function pointer and an integer type.
    /// See the docs on `expose_addr` for more details.
    PointerExposeAddress,
    /// An address-to-pointer cast that picks up an exposed provenance.
    /// See the docs on `from_exposed_addr` for more details.
    PointerFromExposedAddress,
    /// All sorts of pointer-to-pointer casts. Note that reference-to-raw-ptr casts are
    /// translated into `&raw mut/const *r`, i.e., they are not actually casts.
    PtrToPtr,
    /// Pointer related casts that are done by coercions.
    PointerCoercion(PointerCast),
    /// Cast into a dyn* object.
    DynStar,
    /// A cast between two integer types.
    IntToInt,
    /// A cast from a floating-point type to an integer type.
    FloatToInt,
    /// A cast between two floating-point types.
    FloatToFloat,
    /// A cast from an integer type to a floating-point type.
    IntToFloat,
    /// A cast from a function pointer to a raw pointer.
    FnPtrToPtr,
}
876
/// The various kinds of rvalues that can appear on the right-hand side of an assignment.
///
/// Variants holding `std::convert::Infallible` are uninhabited and therefore can never be
/// constructed; they keep the variant list parallel to rustc's `Rvalue` while statically
/// guaranteeing (via `match *n {}`) that no code has to handle them.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Rvalue<'db> {
    /// Yields the operand unchanged
    Use(Operand<'db>),

    /// Creates an array where each element is the value of the operand.
    ///
    /// Corresponds to source code like `[x; 32]`.
    Repeat(Operand<'db>, Const<'db>),

    /// Creates a reference of the indicated kind to the place.
    ///
    /// There is not much to document here, because besides the obvious parts the semantics of this
    /// are essentially entirely a part of the aliasing model. There are many UCG issues discussing
    /// exactly what the behavior of this operation should be.
    ///
    /// `Shallow` borrows are disallowed after drop lowering.
    Ref(BorrowKind, Place<'db>),

    /// Creates a pointer/reference to the given thread local.
    ///
    /// The yielded type is a `*mut T` if the static is mutable, otherwise if the static is extern a
    /// `*const T`, and if neither of those apply a `&T`.
    ///
    /// **Note:** This is a runtime operation that actually executes code and is in this sense more
    /// like a function call. Also, eliminating dead stores of this rvalue causes `fn main() {}` to
    /// SIGILL for some reason that I (JakobDegen) never got a chance to look into.
    ///
    /// **Needs clarification**: Are there weird additional semantics here related to the runtime
    /// nature of this operation?
    // ThreadLocalRef(DefId),
    // Uninhabited: never constructed by this crate's lowering.
    ThreadLocalRef(std::convert::Infallible),

    /// Creates a pointer with the indicated mutability to the place.
    ///
    /// This is generated by pointer casts like `&v as *const _` or raw address of expressions like
    /// `&raw v` or `addr_of!(v)`.
    ///
    /// Like with references, the semantics of this operation are heavily dependent on the aliasing
    /// model.
    // AddressOf(Mutability, Place),
    // Uninhabited: never constructed by this crate's lowering.
    AddressOf(std::convert::Infallible),

    /// Yields the length of the place, as a `usize`.
    ///
    /// If the type of the place is an array, this is the array length. For slices (`[T]`, not
    /// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is
    /// ill-formed for places of other types.
    Len(Place<'db>),

    /// Performs essentially all of the casts that can be performed via `as`.
    ///
    /// This allows for casts from/to a variety of types.
    ///
    /// **FIXME**: Document exactly which `CastKind`s allow which types of casts. Figure out why
    /// `ArrayToPointer` and `MutToConstPointer` are special.
    Cast(CastKind, Operand<'db>, Ty<'db>),

    // FIXME link to `pointer::offset` when it hits stable.
    /// * `Offset` has the same semantics as `pointer::offset`, except that the second
    ///   parameter may be a `usize` as well.
    /// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats,
    ///   raw pointers, or function pointers and return a `bool`. The types of the operands must be
    ///   matching, up to the usual caveat of the lifetimes in function pointers.
    /// * Left and right shift operations accept signed or unsigned integers not necessarily of the
    ///   same type and return a value of the same type as their LHS. Like in Rust, the RHS is
    ///   truncated as needed.
    /// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching
    ///   types and return a value of that type.
    /// * The remaining operations accept signed integers, unsigned integers, or floats with
    ///   matching types and return a value of that type.
    //BinaryOp(BinOp, Box<(Operand, Operand)>),
    // Uninhabited — NOTE(review): binary operations presumably always lower through
    // `CheckedBinaryOp` in this crate; confirm against the `lower` module.
    BinaryOp(std::convert::Infallible),

    /// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition.
    ///
    /// When overflow checking is disabled and we are generating run-time code, the error condition
    /// is false. Otherwise, and always during CTFE, the error condition is determined as described
    /// below.
    ///
    /// For addition, subtraction, and multiplication on integers the error condition is set when
    /// the infinite precision result would be unequal to the actual result.
    ///
    /// For shift operations on integers the error condition is set when the value of right-hand
    /// side is greater than or equal to the number of bits in the type of the left-hand side, or
    /// when the value of right-hand side is negative.
    ///
    /// Other combinations of types and operators are unsupported.
    CheckedBinaryOp(BinOp, Operand<'db>, Operand<'db>),

    /// Computes a value as described by the operation.
    //NullaryOp(NullOp, Ty),
    // Uninhabited: never constructed by this crate's lowering.
    NullaryOp(std::convert::Infallible),

    /// Exactly like `BinaryOp`, but less operands.
    ///
    /// Also does two's-complement arithmetic. Negation requires a signed integer or a float;
    /// bitwise not requires a signed integer, unsigned integer, or bool. Both operation kinds
    /// return a value with the same type as their operand.
    UnaryOp(UnOp, Operand<'db>),

    /// Computes the discriminant of the place, returning it as an integer of type
    /// `discriminant_ty`. Returns zero for types without discriminant.
    ///
    /// The validity requirements for the underlying value are undecided for this rvalue, see
    /// [#91095]. Note too that the value of the discriminant is not the same thing as the
    /// variant index; use `discriminant_for_variant` to convert.
    ///
    /// [#91095]: https://github.com/rust-lang/rust/issues/91095
    Discriminant(Place<'db>),

    /// Creates an aggregate value, like a tuple or struct.
    ///
    /// This is needed because dataflow analysis needs to distinguish
    /// `dest = Foo { x: ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case that `Foo`
    /// has a destructor.
    ///
    /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Coroutine`. After
    /// coroutine lowering, `Coroutine` aggregate kinds are disallowed too.
    Aggregate(AggregateKind<'db>, Box<[Operand<'db>]>),

    /// Transmutes a `*mut u8` into shallow-initialized `Box<T>`.
    ///
    /// This is different from a normal transmute because dataflow analysis will treat the box as
    /// initialized but its content as uninitialized. Like other pointer casts, this in general
    /// affects alias analysis.
    ShallowInitBox(Operand<'db>, Ty<'db>),

    /// NON STANDARD: allocates memory with the type's layout, and shallow init the box with the resulting pointer.
    ShallowInitBoxWithAlloc(Ty<'db>),

    /// A CopyForDeref is equivalent to a read from a place at the
    /// codegen level, but is treated specially by drop elaboration. When such a read happens, it
    /// is guaranteed (via nature of the mir_opt `Derefer` in rustc_mir_transform/src/deref_separator)
    /// that the only use of the returned value is a deref operation, immediately
    /// followed by one or more projections. Drop elaboration treats this rvalue as if the
    /// read never happened and just projects further. This allows simplifying various MIR
    /// optimizations and codegen backends that previously had to handle deref operations anywhere
    /// in a place.
    CopyForDeref(Place<'db>),
}
1018
/// The different kinds of statements that can appear in a basic block.
///
/// Commented-out variants mirror rustc's `StatementKind` but are not (yet) used by this crate.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum StatementKind<'db> {
    /// Evaluates the rvalue and writes the result into the place.
    Assign(Place<'db>, Rvalue<'db>),
    /// Marks the place as read without consuming its value.
    /// NOTE(review): presumably used so borrow checking observes the read — confirm in `borrowck`.
    FakeRead(Place<'db>),
    //SetDiscriminant {
    //    place: Box<Place>,
    //    variant_index: VariantIdx,
    //},
    /// Marks the place's memory as deinitialized.
    Deinit(Place<'db>),
    /// Begins the storage live range of the given local.
    StorageLive(LocalId<'db>),
    /// Ends the storage live range of the given local.
    StorageDead(LocalId<'db>),
    //Retag(RetagKind, Box<Place>),
    //AscribeUserType(Place, UserTypeProjection, Variance),
    //Intrinsic(Box<NonDivergingIntrinsic>),
    /// A statement with no effect.
    Nop,
}
1035impl<'db> StatementKind<'db> {
1036 fn with_span(self, span: MirSpan) -> Statement<'db> {
1037 Statement { kind: self, span }
1038 }
1039}
1040
/// A single MIR statement: a kind plus the source span it was lowered from.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Statement<'db> {
    /// What the statement does.
    pub kind: StatementKind<'db>,
    /// The source location this statement originated from.
    pub span: MirSpan,
}
1046
/// A node of the control-flow graph: a straight-line run of statements
/// followed by exactly one terminator.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct BasicBlock<'db> {
    /// List of statements in this block.
    pub statements: Vec<Statement<'db>>,

    /// Terminator for this block.
    ///
    /// N.B., this should generally ONLY be `None` during construction.
    /// Therefore, you should generally access it via the
    /// `terminator()` or `terminator_mut()` methods. The only
    /// exception is that certain passes, such as `simplify_cfg`, swap
    /// out the terminator temporarily with `None` while they continue
    /// to recurse over the set of basic blocks.
    pub terminator: Option<Terminator<'db>>,

    /// If true, this block lies on an unwind path. This is used
    /// during codegen where distinct kinds of basic blocks may be
    /// generated (particularly for MSVC cleanup). Unwind blocks must
    /// only branch to other unwind blocks.
    pub is_cleanup: bool,
}
1068
/// The lowered MIR of one body (a function, const, static, closure, ...).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MirBody<'db> {
    /// Interned storage for all place projections referenced by this body.
    pub projection_store: ProjectionStore<'db>,
    /// All basic blocks of the control-flow graph.
    pub basic_blocks: Arena<BasicBlock<'db>>,
    /// All locals. Local 0 is the return slot (see `return_slot()`).
    pub locals: Arena<Local<'db>>,
    /// The entry block of the control-flow graph.
    pub start_block: BasicBlockId<'db>,
    /// The definition this MIR was lowered from.
    pub owner: DefWithBodyId,
    /// Maps source-level bindings to the locals that hold them.
    pub binding_locals: ArenaMap<BindingId, LocalId<'db>>,
    /// The locals that hold the body's parameters.
    pub param_locals: Vec<LocalId<'db>>,
    /// This field stores the closures directly owned by this body. It is used
    /// in traversing every mir body.
    pub closures: Vec<InternedClosureId>,
}
1082
impl<'db> MirBody<'db> {
    /// Builds the inverse of `binding_locals`: a map from each local back to its
    /// source binding. Note that if several bindings mapped to one local, later
    /// entries would overwrite earlier ones.
    pub fn local_to_binding_map(&self) -> ArenaMap<LocalId<'db>, BindingId> {
        self.binding_locals.iter().map(|(it, y)| (*y, it)).collect()
    }

    /// Invokes `f` on every `Place` mentioned anywhere in this body — in every
    /// statement and every terminator of every basic block — with mutable access,
    /// so that places (and the projection store they index into) can be rewritten
    /// in-place.
    fn walk_places(&mut self, mut f: impl FnMut(&mut Place<'db>, &mut ProjectionStore<'db>)) {
        // Applies `f` to the place behind a `Copy`/`Move` operand.
        // `Constant` and `Static` operands contain no place, so they are skipped.
        fn for_operand<'db>(
            op: &mut Operand<'db>,
            f: &mut impl FnMut(&mut Place<'db>, &mut ProjectionStore<'db>),
            store: &mut ProjectionStore<'db>,
        ) {
            match &mut op.kind {
                OperandKind::Copy(p) | OperandKind::Move(p) => {
                    f(p, store);
                }
                OperandKind::Constant { .. } | OperandKind::Static(_) => (),
            }
        }
        for (_, block) in self.basic_blocks.iter_mut() {
            // First, walk every place in the block's statements.
            for statement in &mut block.statements {
                match &mut statement.kind {
                    StatementKind::Assign(p, r) => {
                        // The destination place...
                        f(p, &mut self.projection_store);
                        // ...and every place reachable through the rvalue.
                        match r {
                            Rvalue::ShallowInitBoxWithAlloc(_) => (),
                            Rvalue::ShallowInitBox(o, _)
                            | Rvalue::UnaryOp(_, o)
                            | Rvalue::Cast(_, o, _)
                            | Rvalue::Repeat(o, _)
                            | Rvalue::Use(o) => for_operand(o, &mut f, &mut self.projection_store),
                            Rvalue::CopyForDeref(p)
                            | Rvalue::Discriminant(p)
                            | Rvalue::Len(p)
                            | Rvalue::Ref(_, p) => f(p, &mut self.projection_store),
                            Rvalue::CheckedBinaryOp(_, o1, o2) => {
                                for_operand(o1, &mut f, &mut self.projection_store);
                                for_operand(o2, &mut f, &mut self.projection_store);
                            }
                            Rvalue::Aggregate(_, ops) => {
                                for op in ops.iter_mut() {
                                    for_operand(op, &mut f, &mut self.projection_store);
                                }
                            }
                            // These variants hold `Infallible` and are uninhabited;
                            // the empty match proves they need no handling.
                            Rvalue::ThreadLocalRef(n)
                            | Rvalue::AddressOf(n)
                            | Rvalue::BinaryOp(n)
                            | Rvalue::NullaryOp(n) => match *n {},
                        }
                    }
                    StatementKind::FakeRead(p) | StatementKind::Deinit(p) => {
                        f(p, &mut self.projection_store)
                    }
                    // These reference locals, not places.
                    StatementKind::StorageLive(_)
                    | StatementKind::StorageDead(_)
                    | StatementKind::Nop => (),
                }
            }
            // Then, walk every place in the block's terminator (if set).
            match &mut block.terminator {
                Some(x) => match &mut x.kind {
                    TerminatorKind::SwitchInt { discr, .. } => {
                        for_operand(discr, &mut f, &mut self.projection_store)
                    }
                    // Terminators that carry no places or operands.
                    TerminatorKind::FalseEdge { .. }
                    | TerminatorKind::FalseUnwind { .. }
                    | TerminatorKind::Goto { .. }
                    | TerminatorKind::UnwindResume
                    | TerminatorKind::CoroutineDrop
                    | TerminatorKind::Abort
                    | TerminatorKind::Return
                    | TerminatorKind::Unreachable => (),
                    TerminatorKind::Drop { place, .. } => {
                        f(place, &mut self.projection_store);
                    }
                    TerminatorKind::DropAndReplace { place, value, .. } => {
                        f(place, &mut self.projection_store);
                        for_operand(value, &mut f, &mut self.projection_store);
                    }
                    TerminatorKind::Call { func, args, destination, .. } => {
                        for_operand(func, &mut f, &mut self.projection_store);
                        args.iter_mut()
                            .for_each(|x| for_operand(x, &mut f, &mut self.projection_store));
                        f(destination, &mut self.projection_store);
                    }
                    TerminatorKind::Assert { cond, .. } => {
                        for_operand(cond, &mut f, &mut self.projection_store);
                    }
                    TerminatorKind::Yield { value, resume_arg, .. } => {
                        for_operand(value, &mut f, &mut self.projection_store);
                        f(resume_arg, &mut self.projection_store);
                    }
                },
                None => (),
            }
        }
    }

    /// Releases excess capacity in every internal collection. The exhaustive
    /// destructuring pattern ensures that adding a new field to `MirBody` causes
    /// a compile error here, so it cannot be silently forgotten.
    fn shrink_to_fit(&mut self) {
        let MirBody {
            basic_blocks,
            locals,
            start_block: _,
            owner: _,
            binding_locals,
            param_locals,
            closures,
            projection_store,
        } = self;
        projection_store.shrink_to_fit();
        basic_blocks.shrink_to_fit();
        locals.shrink_to_fit();
        binding_locals.shrink_to_fit();
        param_locals.shrink_to_fit();
        closures.shrink_to_fit();
        for (_, b) in basic_blocks.iter_mut() {
            let BasicBlock { statements, terminator: _, is_cleanup: _ } = b;
            statements.shrink_to_fit();
        }
    }
}
1202
/// The source location a piece of MIR was lowered from.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum MirSpan {
    /// Points at an expression in the source body.
    ExprId(ExprId),
    /// Points at a pattern in the source body.
    PatId(PatId),
    /// Points at a binding in the source body.
    BindingId(BindingId),
    /// Points at the implicit `self` parameter.
    SelfParam,
    /// No source location is known.
    Unknown,
}
1211
1212impl MirSpan {
1213 pub fn is_ref_span(&self, body: &Body) -> bool {
1214 match *self {
1215 MirSpan::ExprId(expr) => matches!(body[expr], Expr::Ref { .. }),
1216 // FIXME: Figure out if this is correct wrt. match ergonomics.
1217 MirSpan::BindingId(binding) => {
1218 matches!(body[binding].mode, BindingAnnotation::Ref | BindingAnnotation::RefMut)
1219 }
1220 MirSpan::PatId(_) | MirSpan::SelfParam | MirSpan::Unknown => false,
1221 }
1222 }
1223}
1224
1225impl_from!(ExprId, PatId for MirSpan);
1226
1227impl From<&ExprId> for MirSpan {
1228 fn from(value: &ExprId) -> Self {
1229 (*value).into()
1230 }
1231}