use std::{borrow::Cow, cell::RefCell, fmt::Write, iter, mem, ops::Range};

use base_db::{Crate, target::TargetLoadError};
use either::Either;
use hir_def::{
    AdtId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, HasModule, ItemContainerId,
    Lookup, StaticId, VariantId,
    expr_store::HygieneId,
    item_tree::FieldsShape,
    lang_item::LangItems,
    layout::{TagEncoding, Variants},
    resolver::{HasResolver, TypeNs, ValueNs},
    signatures::{StaticFlags, StructFlags},
};
use hir_expand::{InFile, mod_path::path, name::Name};
use intern::sym;
use la_arena::ArenaMap;
use rustc_abi::TargetDataLayout;
use rustc_apfloat::{
    Float,
    ieee::{Half as f16, Quad as f128},
};
use rustc_ast_ir::Mutability;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::{
    AliasTyKind,
    inherent::{AdtDef, IntoKind, Region as _, SliceLike, Ty as _},
};
use span::FileId;
use stdx::never;
use syntax::{SyntaxNodePtr, TextRange};
use triomphe::Arc;

use crate::{
    CallableDefId, ComplexMemoryMap, InferenceResult, MemoryMap, ParamEnvAndCrate,
    consteval::{self, ConstEvalError, try_const_usize},
    db::{HirDatabase, InternedClosure, InternedClosureId},
    display::{ClosureStyle, DisplayTarget, HirDisplay},
    infer::PointerCast,
    layout::{Layout, LayoutError, RustcEnumVariantIdx},
    method_resolution::{is_dyn_method, lookup_impl_const},
    next_solver::{
        Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, Region, Ty, TyKind,
        TypingMode, UnevaluatedConst, ValueConst,
        infer::{DbInternerInferExt, InferCtxt, traits::ObligationCause},
        obligation_ctxt::ObligationCtxt,
    },
    traits::FnTrait,
    utils::detect_variant_from_bytes,
};

use super::{
    AggregateKind, BasicBlockId, BinOp, CastKind, LocalId, MirBody, MirLowerError, MirSpan,
    Operand, OperandKind, Place, PlaceElem, ProjectionElem, ProjectionStore, Rvalue, StatementKind,
    TerminatorKind, UnOp, return_slot,
};

mod shim;
#[cfg(test)]
mod tests;

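// Reads a little-endian value of type `$ty` from a byte slice, returning an
// internal error when the slice length does not match the type's size. The
// three-argument form reconstructs a softfloat (`rustc_apfloat`) value from its
// raw little-endian bit pattern.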
macro_rules! from_bytes {
    ($ty:tt, $value:expr) => {
        ($ty::from_le_bytes(match ($value).try_into() {
            Ok(it) => it,
            Err(_) => {
                return Err(MirEvalError::InternalError(
                    stringify!(mismatched size in constructing $ty).into(),
                ))
            }
        }))
    };
    ($apfloat:tt, $bits:tt, $value:expr) => {
        $apfloat::from_bits(
            $bits::from_le_bytes(match ($value).try_into() {
                Ok(it) => it,
                Err(_) => {
                    return Err(MirEvalError::InternalError(
                        stringify!(mismatched size in constructing $apfloat).into(),
                    ))
                }
            })
            .into(),
        )
    };
}

macro_rules! not_supported {
    ($it: expr) => {
        return Err(MirEvalError::NotSupported(format!($it)))
    };
}

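// The evaluator has no real code addresses to hand out for function pointers and
// vtables. Instead it interns the underlying type here and uses the resulting id
// as the pointer value; the type is recovered from the id at the point of use.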
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct VTableMap<'db> {
    ty_to_id: FxHashMap<Ty<'db>, usize>,
    id_to_ty: Vec<Ty<'db>>,
}

impl<'db> VTableMap<'db> {
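    /// Ids start at an offset so that small integers (most importantly 0, which
    /// reads as a null pointer) can never be mistaken for a vtable id.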
    const OFFSET: usize = 1000;

    fn id(&mut self, ty: Ty<'db>) -> usize {
        if let Some(it) = self.ty_to_id.get(&ty) {
            return *it;
        }
        let id = self.id_to_ty.len() + VTableMap::OFFSET;
        self.id_to_ty.push(ty);
        self.ty_to_id.insert(ty, id);
        id
    }

    pub(crate) fn ty(&self, id: usize) -> Result<'db, Ty<'db>> {
        id.checked_sub(VTableMap::OFFSET)
            .and_then(|id| self.id_to_ty.get(id).copied())
            .ok_or(MirEvalError::InvalidVTableId(id))
    }

    fn ty_of_bytes(&self, bytes: &[u8]) -> Result<'db, Ty<'db>> {
        let id = from_bytes!(usize, bytes);
        self.ty(id)
    }

    pub fn shrink_to_fit(&mut self) {
        self.id_to_ty.shrink_to_fit();
        self.ty_to_id.shrink_to_fit();
    }

    fn is_empty(&self) -> bool {
        self.id_to_ty.is_empty() && self.ty_to_id.is_empty()
    }
}

#[derive(Debug, Default, Clone, PartialEq, Eq)]
struct TlsData {
    keys: Vec<u128>,
}

impl TlsData {
    fn create_key(&mut self) -> usize {
        self.keys.push(0);
        self.keys.len() - 1
    }

    fn get_key(&mut self, key: usize) -> Result<'static, u128> {
        let r = self.keys.get(key).ok_or_else(|| {
            MirEvalError::UndefinedBehavior(format!("Getting invalid tls key {key}"))
        })?;
        Ok(*r)
    }

    fn set_key(&mut self, key: usize, value: u128) -> Result<'static, ()> {
        let r = self.keys.get_mut(key).ok_or_else(|| {
            MirEvalError::UndefinedBehavior(format!("Setting invalid tls key {key}"))
        })?;
        *r = value;
        Ok(())
    }
}

struct StackFrame<'db> {
    locals: Locals<'db>,
    destination: Option<BasicBlockId<'db>>,
    prev_stack_ptr: usize,
    span: (MirSpan, DefWithBodyId),
}

#[derive(Clone)]
enum MirOrDynIndex<'db> {
    Mir(Arc<MirBody<'db>>),
    Dyn(usize),
}

pub struct Evaluator<'db> {
    db: &'db dyn HirDatabase,
    param_env: ParamEnvAndCrate<'db>,
    target_data_layout: Arc<TargetDataLayout>,
    stack: Vec<u8>,
    heap: Vec<u8>,
    code_stack: Vec<StackFrame<'db>>,
    /// Locations of statics that have already been evaluated, so each static is
    /// computed only once.
    static_locations: FxHashMap<StaticId, Address>,
    /// Function pointers and vtable pointers are stored as interned ids; this map
    /// recovers the pointee type from such an id at the time of use.
    vtable_map: VTableMap<'db>,
    thread_local_storage: TlsData,
    random_state: oorandom::Rand64,
    stdout: Vec<u8>,
    stderr: Vec<u8>,
    layout_cache: RefCell<FxHashMap<Ty<'db>, Arc<Layout>>>,
    projected_ty_cache: RefCell<FxHashMap<(Ty<'db>, PlaceElem<'db>), Ty<'db>>>,
    not_special_fn_cache: RefCell<FxHashSet<FunctionId>>,
    mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, GenericArgs<'db>), MirOrDynIndex<'db>>>,
    /// Locals of finished stack frames, kept around for reuse to avoid reallocation.
    unused_locals_store: RefCell<FxHashMap<DefWithBodyId, Vec<Locals<'db>>>>,
    cached_ptr_size: usize,
    cached_fn_trait_func: Option<FunctionId>,
    cached_fn_mut_trait_func: Option<FunctionId>,
    cached_fn_once_trait_func: Option<FunctionId>,
    crate_id: Crate,
    assert_placeholder_ty_is_unused: bool,
    /// Remaining number of basic blocks the evaluator may execute before giving up.
    execution_limit: usize,
    /// Remaining depth for nested `interpret_mir` activations.
    stack_depth_limit: usize,
    /// Upper bound, in bytes, on how much memory the interpreter may allocate.
    memory_limit: usize,
    infcx: InferCtxt<'db>,
}

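// A tagged address into one of the evaluator's memory regions (see
// `STACK_OFFSET`/`HEAP_OFFSET` below for how these map into a flat `usize`).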
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum Address {
    Stack(usize),
    Heap(usize),
    Invalid(usize),
}

use Address::*;

#[derive(Debug, Clone, Copy)]
struct Interval {
    addr: Address,
    size: usize,
}

#[derive(Debug, Clone)]
struct IntervalAndTy<'db> {
    interval: Interval,
    ty: Ty<'db>,
}

impl Interval {
    fn new(addr: Address, size: usize) -> Self {
        Self { addr, size }
    }

    fn get<'a, 'db>(&self, memory: &'a Evaluator<'db>) -> Result<'db, &'a [u8]> {
        memory.read_memory(self.addr, self.size)
    }

    fn write_from_bytes<'db>(&self, memory: &mut Evaluator<'db>, bytes: &[u8]) -> Result<'db, ()> {
        memory.write_memory(self.addr, bytes)
    }

    fn write_from_interval<'db>(
        &self,
        memory: &mut Evaluator<'db>,
        interval: Interval,
    ) -> Result<'db, ()> {
        memory.copy_from_interval(self.addr, interval)
    }

    fn slice(self, range: Range<usize>) -> Interval {
        Interval { addr: self.addr.offset(range.start), size: range.len() }
    }
}

impl<'db> IntervalAndTy<'db> {
    fn get<'a>(&self, memory: &'a Evaluator<'db>) -> Result<'db, &'a [u8]> {
        memory.read_memory(self.interval.addr, self.interval.size)
    }

    fn new(
        addr: Address,
        ty: Ty<'db>,
        evaluator: &Evaluator<'db>,
        locals: &Locals<'db>,
    ) -> Result<'db, IntervalAndTy<'db>> {
        let size = evaluator.size_of_sized(ty, locals, "type of interval")?;
        Ok(IntervalAndTy { interval: Interval { addr, size }, ty })
    }
}

enum IntervalOrOwned {
    Owned(Vec<u8>),
    Borrowed(Interval),
}

impl From<Interval> for IntervalOrOwned {
    fn from(it: Interval) -> IntervalOrOwned {
        IntervalOrOwned::Borrowed(it)
    }
}

impl IntervalOrOwned {
    fn get<'a, 'db>(&'a self, memory: &'a Evaluator<'db>) -> Result<'db, &'a [u8]> {
        Ok(match self {
            IntervalOrOwned::Owned(o) => o,
            IntervalOrOwned::Borrowed(b) => b.get(memory)?,
        })
    }
}

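// The stack and heap are kept in separate buffers, but addresses are exposed to
// the interpreted program as flat `usize` values. These offsets place the two
// regions in disjoint ranges so an incoming address can be mapped back to its
// region.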
#[cfg(target_pointer_width = "64")]
const STACK_OFFSET: usize = 1 << 60;
#[cfg(target_pointer_width = "64")]
const HEAP_OFFSET: usize = 1 << 59;

#[cfg(target_pointer_width = "32")]
const STACK_OFFSET: usize = 1 << 30;
#[cfg(target_pointer_width = "32")]
const HEAP_OFFSET: usize = 1 << 29;

impl Address {
    #[allow(clippy::double_parens)]
    fn from_bytes<'db>(it: &[u8]) -> Result<'db, Self> {
        Ok(Address::from_usize(from_bytes!(usize, it)))
    }

    fn from_usize(it: usize) -> Self {
        if it > STACK_OFFSET {
            Stack(it - STACK_OFFSET)
        } else if it > HEAP_OFFSET {
            Heap(it - HEAP_OFFSET)
        } else {
            Invalid(it)
        }
    }

    fn to_bytes(&self) -> [u8; size_of::<usize>()] {
        usize::to_le_bytes(self.to_usize())
    }

    fn to_usize(&self) -> usize {
        match self {
            Stack(it) => *it + STACK_OFFSET,
            Heap(it) => *it + HEAP_OFFSET,
            Invalid(it) => *it,
        }
    }

    fn map(&self, f: impl FnOnce(usize) -> usize) -> Address {
        match self {
            Stack(it) => Stack(f(*it)),
            Heap(it) => Heap(f(*it)),
            Invalid(it) => Invalid(f(*it)),
        }
    }

    fn offset(&self, offset: usize) -> Address {
        self.map(|it| it + offset)
    }
}

#[derive(Clone, PartialEq, Eq)]
pub enum MirEvalError<'db> {
    ConstEvalError(String, Box<ConstEvalError<'db>>),
    LayoutError(LayoutError, Ty<'db>),
    TargetDataLayoutNotAvailable(TargetLoadError),
    /// The interpreted code had undefined behavior that we happened to detect.
    UndefinedBehavior(String),
    Panic(String),
    MirLowerError(FunctionId, MirLowerError<'db>),
    MirLowerErrorForClosure(InternedClosureId, MirLowerError<'db>),
    TypeIsUnsized(Ty<'db>, &'static str),
    NotSupported(String),
    InvalidConst(Const<'db>),
    InFunction(
        Box<MirEvalError<'db>>,
        Vec<(Either<FunctionId, InternedClosureId>, MirSpan, DefWithBodyId)>,
    ),
    ExecutionLimitExceeded,
    StackOverflow,
    InvalidVTableId(usize),
    CoerceUnsizedError(Ty<'db>),
    /// Errors that should never occur; they indicate a bug in MIR lowering or in
    /// the evaluator itself.
    InternalError(Box<str>),
}

impl MirEvalError<'_> {
    pub fn pretty_print(
        &self,
        f: &mut String,
        db: &dyn HirDatabase,
        span_formatter: impl Fn(FileId, TextRange) -> String,
        display_target: DisplayTarget,
    ) -> std::result::Result<(), std::fmt::Error> {
        writeln!(f, "Mir eval error:")?;
        let mut err = self;
        while let MirEvalError::InFunction(e, stack) = err {
            err = e;
            for (func, span, def) in stack.iter().take(30).rev() {
                match func {
                    Either::Left(func) => {
                        let function_name = db.function_signature(*func);
                        writeln!(
                            f,
                            "In function {} ({:?})",
                            function_name.name.display(db, display_target.edition),
                            func
                        )?;
                    }
                    Either::Right(closure) => {
                        writeln!(f, "In {closure:?}")?;
                    }
                }
                let source_map = db.body_with_source_map(*def).1;
                let span: InFile<SyntaxNodePtr> = match span {
                    MirSpan::ExprId(e) => match source_map.expr_syntax(*e) {
                        Ok(s) => s.map(|it| it.into()),
                        Err(_) => continue,
                    },
                    MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
                        Ok(s) => s.map(|it| it.syntax_node_ptr()),
                        Err(_) => continue,
                    },
                    MirSpan::BindingId(b) => {
                        match source_map
                            .patterns_for_binding(*b)
                            .iter()
                            .find_map(|p| source_map.pat_syntax(*p).ok())
                        {
                            Some(s) => s.map(|it| it.syntax_node_ptr()),
                            None => continue,
                        }
                    }
                    MirSpan::SelfParam => match source_map.self_param_syntax() {
                        Some(s) => s.map(|it| it.syntax_node_ptr()),
                        None => continue,
                    },
                    MirSpan::Unknown => continue,
                };
                let file_id = span.file_id.original_file(db);
                let text_range = span.value.text_range();
                writeln!(f, "{}", span_formatter(file_id.file_id(db), text_range))?;
            }
        }
        match err {
            MirEvalError::InFunction(..) => unreachable!(),
            MirEvalError::LayoutError(err, ty) => {
                write!(
                    f,
                    "Layout for type `{}` is not available due to {err:?}",
                    ty.display(db, display_target).with_closure_style(ClosureStyle::ClosureWithId)
                )?;
            }
            MirEvalError::MirLowerError(func, err) => {
                let function_name = db.function_signature(*func);
                let self_ = match func.lookup(db).container {
                    ItemContainerId::ImplId(impl_id) => Some({
                        db.impl_self_ty(impl_id)
                            .instantiate_identity()
                            .display(db, display_target)
                            .to_string()
                    }),
                    ItemContainerId::TraitId(it) => Some(
                        db.trait_signature(it).name.display(db, display_target.edition).to_string(),
                    ),
                    _ => None,
                };
                writeln!(
                    f,
                    "MIR lowering for function `{}{}{}` ({:?}) failed due to:",
                    self_.as_deref().unwrap_or_default(),
                    if self_.is_some() { "::" } else { "" },
                    function_name.name.display(db, display_target.edition),
                    func
                )?;
                err.pretty_print(f, db, span_formatter, display_target)?;
            }
            MirEvalError::ConstEvalError(name, err) => {
                MirLowerError::ConstEvalError((**name).into(), err.clone()).pretty_print(
                    f,
                    db,
                    span_formatter,
                    display_target,
                )?;
            }
            MirEvalError::UndefinedBehavior(_)
            | MirEvalError::TargetDataLayoutNotAvailable(_)
            | MirEvalError::Panic(_)
            | MirEvalError::MirLowerErrorForClosure(_, _)
            | MirEvalError::TypeIsUnsized(_, _)
            | MirEvalError::NotSupported(_)
            | MirEvalError::InvalidConst(_)
            | MirEvalError::ExecutionLimitExceeded
            | MirEvalError::StackOverflow
            | MirEvalError::CoerceUnsizedError(_)
            | MirEvalError::InternalError(_)
            | MirEvalError::InvalidVTableId(_) => writeln!(f, "{err:?}")?,
        }
        Ok(())
    }

    pub fn is_panic(&self) -> Option<&str> {
        let mut err = self;
        while let MirEvalError::InFunction(e, _) = err {
            err = e;
        }
        match err {
            MirEvalError::Panic(msg) => Some(msg),
            _ => None,
        }
    }
}

impl std::fmt::Debug for MirEvalError<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::ConstEvalError(arg0, arg1) => {
                f.debug_tuple("ConstEvalError").field(arg0).field(arg1).finish()
            }
            Self::LayoutError(arg0, arg1) => {
                f.debug_tuple("LayoutError").field(arg0).field(arg1).finish()
            }
            Self::UndefinedBehavior(arg0) => {
                f.debug_tuple("UndefinedBehavior").field(arg0).finish()
            }
            Self::Panic(msg) => write!(f, "Panic with message:\n{msg:?}"),
            Self::TargetDataLayoutNotAvailable(arg0) => {
                f.debug_tuple("TargetDataLayoutNotAvailable").field(arg0).finish()
            }
            Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. {it} should be sized."),
            Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"),
            Self::StackOverflow => write!(f, "stack overflow"),
            Self::MirLowerError(arg0, arg1) => {
                f.debug_tuple("MirLowerError").field(arg0).field(arg1).finish()
            }
            Self::MirLowerErrorForClosure(arg0, arg1) => {
                f.debug_tuple("MirLowerErrorForClosure").field(arg0).field(arg1).finish()
            }
            Self::CoerceUnsizedError(arg0) => {
                f.debug_tuple("CoerceUnsizedError").field(arg0).finish()
            }
            Self::InternalError(arg0) => f.debug_tuple("InternalError").field(arg0).finish(),
            Self::InvalidVTableId(arg0) => f.debug_tuple("InvalidVTableId").field(arg0).finish(),
            Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(),
            Self::InvalidConst(arg0) => f.debug_tuple("InvalidConst").field(arg0).finish(),
            Self::InFunction(e, stack) => {
                f.debug_struct("WithStack").field("error", e).field("stack", &stack).finish()
            }
        }
    }
}

type Result<'db, T> = std::result::Result<T, MirEvalError<'db>>;

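/// Tracks which places currently hold a live value that still needs to be
/// dropped. Moving out of a place clears its flag.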
#[derive(Debug, Default)]
struct DropFlags<'db> {
    need_drop: FxHashSet<Place<'db>>,
}

impl<'db> DropFlags<'db> {
    fn add_place(&mut self, p: Place<'db>, store: &ProjectionStore<'db>) {
        if p.iterate_over_parents(store).any(|it| self.need_drop.contains(&it)) {
            return;
        }
        self.need_drop.retain(|it| !p.is_parent(it, store));
        self.need_drop.insert(p);
    }

    fn remove_place(&mut self, p: &Place<'db>, store: &ProjectionStore<'db>) -> bool {
        if let Some(parent) = p.iterate_over_parents(store).find(|it| self.need_drop.contains(it))
        {
            self.need_drop.remove(&parent);
            return true;
        }
        self.need_drop.remove(p)
    }

    fn clear(&mut self) {
        self.need_drop.clear();
    }
}

#[derive(Debug)]
struct Locals<'db> {
    ptr: ArenaMap<LocalId<'db>, Interval>,
    body: Arc<MirBody<'db>>,
    drop_flags: DropFlags<'db>,
}

pub struct MirOutput {
    stdout: Vec<u8>,
    stderr: Vec<u8>,
}

impl MirOutput {
    pub fn stdout(&self) -> Cow<'_, str> {
        String::from_utf8_lossy(&self.stdout)
    }
    pub fn stderr(&self) -> Cow<'_, str> {
        String::from_utf8_lossy(&self.stderr)
    }
}

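/// Interprets `body` to completion, returning the result as a constant together
/// with everything the interpreted program wrote to stdout and stderr.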
pub fn interpret_mir<'db>(
    db: &'db dyn HirDatabase,
    body: Arc<MirBody<'db>>,
    assert_placeholder_ty_is_unused: bool,
    trait_env: Option<ParamEnvAndCrate<'db>>,
) -> Result<'db, (Result<'db, Const<'db>>, MirOutput)> {
    let ty = body.locals[return_slot()].ty;
    let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env)?;
    let it: Result<'db, Const<'db>> = (|| {
        if evaluator.ptr_size() != size_of::<usize>() {
            not_supported!("targets with different pointer size from host");
        }
        let interval = evaluator.interpret_mir(body.clone(), None.into_iter())?;
        let bytes = interval.get(&evaluator)?;
        let mut memory_map = evaluator.create_memory_map(
            bytes,
            ty,
            &Locals { ptr: ArenaMap::new(), body, drop_flags: DropFlags::default() },
        )?;
        let bytes = bytes.into();
        let memory_map = if memory_map.memory.is_empty() && evaluator.vtable_map.is_empty() {
            MemoryMap::Empty
        } else {
            memory_map.vtable = mem::take(&mut evaluator.vtable_map);
            memory_map.vtable.shrink_to_fit();
            MemoryMap::Complex(Box::new(memory_map))
        };
        Ok(Const::new_valtree(evaluator.interner(), ty, bytes, memory_map))
    })();
    Ok((it, MirOutput { stdout: evaluator.stdout, stderr: evaluator.stderr }))
}

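// Tests use a much smaller execution budget so that runaway loops fail quickly.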
#[cfg(test)]
const EXECUTION_LIMIT: usize = 100_000;
#[cfg(not(test))]
const EXECUTION_LIMIT: usize = 10_000_000;

impl<'db> Evaluator<'db> {
    pub fn new(
        db: &'db dyn HirDatabase,
        owner: DefWithBodyId,
        assert_placeholder_ty_is_unused: bool,
        trait_env: Option<ParamEnvAndCrate<'db>>,
    ) -> Result<'db, Evaluator<'db>> {
        let module = owner.module(db);
        let crate_id = module.krate(db);
        let target_data_layout = match db.target_data_layout(crate_id) {
            Ok(target_data_layout) => target_data_layout,
            Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)),
        };
        let cached_ptr_size = target_data_layout.pointer_size().bytes_usize();
        let interner = DbInterner::new_with(db, crate_id);
        let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
        let lang_items = interner.lang_items();
        Ok(Evaluator {
            target_data_layout,
            stack: vec![0],
            heap: vec![0],
            code_stack: vec![],
            vtable_map: VTableMap::default(),
            thread_local_storage: TlsData::default(),
            static_locations: Default::default(),
            db,
            random_state: oorandom::Rand64::new(0),
            param_env: trait_env.unwrap_or_else(|| ParamEnvAndCrate {
                param_env: db.trait_environment_for_body(owner),
                krate: crate_id,
            }),
            crate_id,
            stdout: vec![],
            stderr: vec![],
            assert_placeholder_ty_is_unused,
            stack_depth_limit: 100,
            execution_limit: EXECUTION_LIMIT,
            memory_limit: 1_000_000_000, // approximately 1 GB
            layout_cache: RefCell::new(Default::default()),
            projected_ty_cache: RefCell::new(Default::default()),
            not_special_fn_cache: RefCell::new(Default::default()),
            mir_or_dyn_index_cache: RefCell::new(Default::default()),
            unused_locals_store: RefCell::new(Default::default()),
            cached_ptr_size,
            cached_fn_trait_func: lang_items
                .Fn
                .and_then(|x| x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call))),
            cached_fn_mut_trait_func: lang_items.FnMut.and_then(|x| {
                x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call_mut))
            }),
            cached_fn_once_trait_func: lang_items.FnOnce.and_then(|x| {
                x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call_once))
            }),
            infcx,
        })
    }

    #[inline]
    fn interner(&self) -> DbInterner<'db> {
        self.infcx.interner
    }

    #[inline]
    fn lang_items(&self) -> &'db LangItems {
        self.infcx.interner.lang_items()
    }

    fn place_addr(&self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Address> {
        Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0)
    }

    fn place_interval(&self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Interval> {
        let place_addr_and_ty = self.place_addr_and_ty_and_metadata(p, locals)?;
        Ok(Interval {
            addr: place_addr_and_ty.0,
            size: self.size_of_sized(
                place_addr_and_ty.1,
                locals,
                "type of place whose interval we need",
            )?,
        })
    }

    fn ptr_size(&self) -> usize {
        self.cached_ptr_size
    }

    fn projected_ty(&self, ty: Ty<'db>, proj: PlaceElem<'db>) -> Ty<'db> {
        let pair = (ty, proj);
        if let Some(r) = self.projected_ty_cache.borrow().get(&pair) {
            return *r;
        }
        let (ty, proj) = pair;
        let r = proj.projected_ty(
            &self.infcx,
            self.param_env.param_env,
            ty,
            |c, subst, f| {
                let InternedClosure(def, _) = self.db.lookup_intern_closure(c);
                let infer = InferenceResult::for_body(self.db, def);
                let (captures, _) = infer.closure_info(c);
                let parent_subst = subst.split_closure_args_untupled().parent_args;
                captures
                    .get(f)
                    .expect("broken closure field")
                    .ty
                    .instantiate(self.interner(), parent_subst)
            },
            self.crate_id,
        );
        self.projected_ty_cache.borrow_mut().insert((ty, proj), r);
        r
    }

    fn place_addr_and_ty_and_metadata<'a>(
        &'a self,
        p: &Place<'db>,
        locals: &'a Locals<'db>,
    ) -> Result<'db, (Address, Ty<'db>, Option<IntervalOrOwned>)> {
        let mut addr = locals.ptr[p.local].addr;
        let mut ty: Ty<'db> = locals.body.locals[p.local].ty;
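        // A local is always sized, so the place starts without metadata; metadata
        // only appears after dereferencing a pointer to an unsized pointee.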
        let mut metadata: Option<IntervalOrOwned> = None;
        for proj in p.projection.lookup(&locals.body.projection_store) {
            let prev_ty = ty;
            ty = self.projected_ty(ty, proj.clone());
            match proj {
                ProjectionElem::Deref => {
                    metadata = if self.size_align_of(ty, locals)?.is_none() {
                        Some(
                            Interval { addr: addr.offset(self.ptr_size()), size: self.ptr_size() }
                                .into(),
                        )
                    } else {
                        None
                    };
                    let it = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?);
                    addr = Address::from_usize(it);
                }
                ProjectionElem::Index(op) => {
                    let offset = from_bytes!(
                        usize,
                        self.read_memory(locals.ptr[*op].addr, self.ptr_size())?
                    );
                    // The result of an index projection is always sized.
                    metadata = None;
                    let ty_size =
                        self.size_of_sized(ty, locals, "array inner type should be sized")?;
                    addr = addr.offset(ty_size * offset);
                }
                &ProjectionElem::ConstantIndex { from_end, offset } => {
                    let offset = if from_end {
                        let len = match prev_ty.kind() {
                            TyKind::Array(_, c) => match try_const_usize(self.db, c) {
                                Some(it) => it as u64,
                                None => {
                                    not_supported!("indexing array with unknown const from end")
                                }
                            },
                            TyKind::Slice(_) => match metadata {
                                Some(it) => from_bytes!(u64, it.get(self)?),
                                None => not_supported!("slice place without metadata"),
                            },
                            _ => not_supported!("bad type for const index"),
                        };
                        (len - offset - 1) as usize
                    } else {
                        offset as usize
                    };
                    // The result of a constant-index projection is always sized.
                    metadata = None;
                    let ty_size =
                        self.size_of_sized(ty, locals, "array inner type should be sized")?;
                    addr = addr.offset(ty_size * offset);
                }
                &ProjectionElem::Subslice { from, to } => {
                    let inner_ty = match ty.kind() {
                        TyKind::Array(inner, _) | TyKind::Slice(inner) => inner,
                        _ => Ty::new_error(self.interner(), ErrorGuaranteed),
                    };
                    metadata = match metadata {
                        Some(it) => {
                            let prev_len = from_bytes!(u64, it.get(self)?);
                            Some(IntervalOrOwned::Owned(
                                (prev_len - from - to).to_le_bytes().to_vec(),
                            ))
                        }
                        None => None,
                    };
                    let ty_size =
                        self.size_of_sized(inner_ty, locals, "array inner type should be sized")?;
                    addr = addr.offset(ty_size * (from as usize));
                }
                &ProjectionElem::ClosureField(f) => {
                    let layout = self.layout(prev_ty)?;
                    let offset = layout.fields.offset(f).bytes_usize();
                    addr = addr.offset(offset);
                    metadata = None;
                }
                ProjectionElem::Field(Either::Right(f)) => {
                    let layout = self.layout(prev_ty)?;
                    let offset = layout.fields.offset(f.index as usize).bytes_usize();
                    addr = addr.offset(offset);
                    // Tuple fields are always sized.
                    metadata = None;
                }
                ProjectionElem::Field(Either::Left(f)) => {
                    let layout = self.layout(prev_ty)?;
                    let variant_layout = match &layout.variants {
                        Variants::Single { .. } | Variants::Empty => &layout,
                        Variants::Multiple { variants, .. } => {
                            &variants[match f.parent {
                                hir_def::VariantId::EnumVariantId(it) => {
                                    RustcEnumVariantIdx(it.lookup(self.db).index as usize)
                                }
                                _ => {
                                    return Err(MirEvalError::InternalError(
                                        "mismatched layout".into(),
                                    ));
                                }
                            }]
                        }
                    };
                    let offset = variant_layout
                        .fields
                        .offset(u32::from(f.local_id.into_raw()) as usize)
                        .bytes_usize();
                    addr = addr.offset(offset);
                    if self.size_align_of(ty, locals)?.is_some() {
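                        // The projected field is sized, so metadata inherited from
                        // the parent place no longer applies.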
                        metadata = None;
                    }
                }
                ProjectionElem::OpaqueCast(_) => not_supported!("opaque cast"),
            }
        }
        Ok((addr, ty, metadata))
    }

    fn layout(&self, ty: Ty<'db>) -> Result<'db, Arc<Layout>> {
        if let Some(x) = self.layout_cache.borrow().get(&ty) {
            return Ok(x.clone());
        }
        let r = self
            .db
            .layout_of_ty(ty, self.param_env)
            .map_err(|e| MirEvalError::LayoutError(e, ty))?;
        self.layout_cache.borrow_mut().insert(ty, r.clone());
        Ok(r)
    }

    fn layout_adt(&self, adt: AdtId, subst: GenericArgs<'db>) -> Result<'db, Arc<Layout>> {
        self.layout(Ty::new_adt(self.interner(), adt, subst))
    }

    fn place_ty<'a>(&'a self, p: &Place<'db>, locals: &'a Locals<'db>) -> Result<'db, Ty<'db>> {
        Ok(self.place_addr_and_ty_and_metadata(p, locals)?.1)
    }

    fn operand_ty(&self, o: &Operand<'db>, locals: &Locals<'db>) -> Result<'db, Ty<'db>> {
        Ok(match &o.kind {
            OperandKind::Copy(p) | OperandKind::Move(p) => self.place_ty(p, locals)?,
            OperandKind::Constant { konst: _, ty } => *ty,
            &OperandKind::Static(s) => {
                let ty =
                    InferenceResult::for_body(self.db, s.into())[self.db.body(s.into()).body_expr];
                Ty::new_ref(
                    self.interner(),
                    Region::new_static(self.interner()),
                    ty,
                    Mutability::Not,
                )
            }
        })
    }

    fn operand_ty_and_eval(
        &mut self,
        o: &Operand<'db>,
        locals: &mut Locals<'db>,
    ) -> Result<'db, IntervalAndTy<'db>> {
        Ok(IntervalAndTy {
            interval: self.eval_operand(o, locals)?,
            ty: self.operand_ty(o, locals)?,
        })
    }

    fn interpret_mir(
        &mut self,
        body: Arc<MirBody<'db>>,
        args: impl Iterator<Item = IntervalOrOwned>,
    ) -> Result<'db, Interval> {
        if let Some(it) = self.stack_depth_limit.checked_sub(1) {
            self.stack_depth_limit = it;
        } else {
            return Err(MirEvalError::StackOverflow);
        }
        let mut current_block_idx = body.start_block;
        let (mut locals, prev_stack_ptr) = self.create_locals_for_body(&body, None)?;
        self.fill_locals_for_body(&body, &mut locals, args)?;
        let prev_code_stack = mem::take(&mut self.code_stack);
        let span = (MirSpan::Unknown, body.owner);
        self.code_stack.push(StackFrame { locals, destination: None, prev_stack_ptr, span });
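        // The interpreter loop is a trampoline: a `Call` terminator pushes a new
        // frame onto `code_stack` and control returns here instead of recursing,
        // so interpreted call depth does not consume host stack.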
        'stack: loop {
            let Some(mut my_stack_frame) = self.code_stack.pop() else {
                not_supported!("missing stack frame");
            };
            let e = (|| {
                let locals = &mut my_stack_frame.locals;
                let body = locals.body.clone();
                loop {
                    let current_block = &body.basic_blocks[current_block_idx];
                    if let Some(it) = self.execution_limit.checked_sub(1) {
                        self.execution_limit = it;
                    } else {
                        return Err(MirEvalError::ExecutionLimitExceeded);
                    }
                    for statement in &current_block.statements {
                        match &statement.kind {
                            StatementKind::Assign(l, r) => {
                                let addr = self.place_addr(l, locals)?;
                                let result = self.eval_rvalue(r, locals)?;
                                self.copy_from_interval_or_owned(addr, result)?;
                                locals.drop_flags.add_place(*l, &locals.body.projection_store);
                            }
                            StatementKind::Deinit(_) => not_supported!("de-init statement"),
                            StatementKind::StorageLive(_)
                            | StatementKind::FakeRead(_)
                            | StatementKind::StorageDead(_)
                            | StatementKind::Nop => (),
                        }
                    }
                    let Some(terminator) = current_block.terminator.as_ref() else {
                        not_supported!("block without terminator");
                    };
                    match &terminator.kind {
                        TerminatorKind::Goto { target } => {
                            current_block_idx = *target;
                        }
                        TerminatorKind::Call {
                            func,
                            args,
                            destination,
                            target,
                            cleanup: _,
                            from_hir_call: _,
                        } => {
                            let destination_interval = self.place_interval(destination, locals)?;
                            let fn_ty = self.operand_ty(func, locals)?;
                            let args = args
                                .iter()
                                .map(|it| self.operand_ty_and_eval(it, locals))
                                .collect::<Result<'db, Vec<_>>>()?;
                            let stack_frame = match fn_ty.kind() {
                                TyKind::FnPtr(..) => {
                                    let bytes = self.eval_operand(func, locals)?;
                                    self.exec_fn_pointer(
                                        bytes,
                                        destination_interval,
                                        &args,
                                        locals,
                                        *target,
                                        terminator.span,
                                    )?
                                }
                                TyKind::FnDef(def, generic_args) => self.exec_fn_def(
                                    def.0,
                                    generic_args,
                                    destination_interval,
                                    &args,
                                    locals,
                                    *target,
                                    terminator.span,
                                )?,
                                it => not_supported!("unknown function type {it:?}"),
                            };
                            locals
                                .drop_flags
                                .add_place(*destination, &locals.body.projection_store);
                            if let Some(stack_frame) = stack_frame {
                                self.code_stack.push(my_stack_frame);
                                current_block_idx = stack_frame.locals.body.start_block;
                                self.code_stack.push(stack_frame);
                                return Ok(None);
                            } else {
                                current_block_idx =
                                    target.ok_or(MirEvalError::UndefinedBehavior(
                                        "Diverging function returned".to_owned(),
                                    ))?;
                            }
                        }
                        TerminatorKind::SwitchInt { discr, targets } => {
                            let val = u128::from_le_bytes(pad16(
                                self.eval_operand(discr, locals)?.get(self)?,
                                false,
                            ));
                            current_block_idx = targets.target_for_value(val);
                        }
                        TerminatorKind::Return => {
                            break;
                        }
                        TerminatorKind::Unreachable => {
                            return Err(MirEvalError::UndefinedBehavior(
                                "unreachable executed".to_owned(),
                            ));
                        }
                        TerminatorKind::Drop { place, target, unwind: _ } => {
                            self.drop_place(place, locals, terminator.span)?;
                            current_block_idx = *target;
                        }
                        _ => not_supported!("unknown terminator"),
                    }
                }
                Ok(Some(my_stack_frame))
            })();
            let my_stack_frame = match e {
                Ok(None) => continue 'stack,
                Ok(Some(x)) => x,
                Err(e) => {
                    let my_code_stack = mem::replace(&mut self.code_stack, prev_code_stack);
                    let mut error_stack = vec![];
                    for frame in my_code_stack.into_iter().rev() {
                        if let DefWithBodyId::FunctionId(f) = frame.locals.body.owner {
                            error_stack.push((Either::Left(f), frame.span.0, frame.span.1));
                        }
                    }
                    return Err(MirEvalError::InFunction(Box::new(e), error_stack));
                }
            };
            let return_interval = my_stack_frame.locals.ptr[return_slot()];
            self.unused_locals_store
                .borrow_mut()
                .entry(my_stack_frame.locals.body.owner)
                .or_default()
                .push(my_stack_frame.locals);
            match my_stack_frame.destination {
                None => {
                    self.code_stack = prev_code_stack;
                    self.stack_depth_limit += 1;
                    return Ok(return_interval);
                }
                Some(bb) => {
                    // Note: the stack is not shrunk back to `prev_stack_ptr` here;
                    // the frame's memory may still be referenced elsewhere.
                    let _ = my_stack_frame.prev_stack_ptr;
                    current_block_idx = bb;
                }
            }
        }
    }

    fn fill_locals_for_body(
        &mut self,
        body: &MirBody<'db>,
        locals: &mut Locals<'db>,
        args: impl Iterator<Item = IntervalOrOwned>,
    ) -> Result<'db, ()> {
        let mut remain_args = body.param_locals.len();
        for ((l, interval), value) in locals.ptr.iter().skip(1).zip(args) {
            locals.drop_flags.add_place(l.into(), &locals.body.projection_store);
            match value {
                IntervalOrOwned::Owned(value) => interval.write_from_bytes(self, &value)?,
                IntervalOrOwned::Borrowed(value) => interval.write_from_interval(self, value)?,
            }
            if remain_args == 0 {
                return Err(MirEvalError::InternalError("too many arguments".into()));
            }
            remain_args -= 1;
        }
        if remain_args > 0 {
            return Err(MirEvalError::InternalError("too few arguments".into()));
        }
        Ok(())
    }

    fn create_locals_for_body(
        &mut self,
        body: &Arc<MirBody<'db>>,
        destination: Option<Interval>,
    ) -> Result<'db, (Locals<'db>, usize)> {
        let mut locals =
            match self.unused_locals_store.borrow_mut().entry(body.owner).or_default().pop() {
                None => Locals {
                    ptr: ArenaMap::new(),
                    body: body.clone(),
                    drop_flags: DropFlags::default(),
                },
                Some(mut l) => {
                    l.drop_flags.clear();
                    l.body = body.clone();
                    l
                }
            };
        let stack_size = {
            let mut stack_ptr = self.stack.len();
            for (id, it) in body.locals.iter() {
                if id == return_slot()
                    && let Some(destination) = destination
                {
                    locals.ptr.insert(id, destination);
                    continue;
                }
                let (size, align) = self.size_align_of_sized(
                    it.ty,
                    &locals,
                    "no unsized local in extending stack",
                )?;
                while !stack_ptr.is_multiple_of(align) {
                    stack_ptr += 1;
                }
                let my_ptr = stack_ptr;
                stack_ptr += size;
                locals.ptr.insert(id, Interval { addr: Stack(my_ptr), size });
            }
            stack_ptr - self.stack.len()
        };
        let prev_stack_pointer = self.stack.len();
        if stack_size > self.memory_limit {
            return Err(MirEvalError::Panic(format!(
                "Stack overflow. Tried to grow stack to {stack_size} bytes"
            )));
        }
        self.stack.extend(std::iter::repeat_n(0, stack_size));
        Ok((locals, prev_stack_pointer))
    }

    fn eval_rvalue(
        &mut self,
        r: &Rvalue<'db>,
        locals: &mut Locals<'db>,
    ) -> Result<'db, IntervalOrOwned> {
        use IntervalOrOwned::*;
        Ok(match r {
            Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?),
            Rvalue::Ref(_, p) => {
                let (addr, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?;
                let mut r = addr.to_bytes().to_vec();
                if let Some(metadata) = metadata {
                    r.extend(metadata.get(self)?);
                }
                Owned(r)
            }
            Rvalue::Len(p) => {
                let (_, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?;
                match metadata {
                    Some(m) => m,
                    None => {
                        return Err(MirEvalError::InternalError(
                            "type without metadata is used for Rvalue::Len".into(),
                        ));
                    }
                }
            }
            Rvalue::UnaryOp(op, val) => {
                let mut c = self.eval_operand(val, locals)?.get(self)?;
                let mut ty = self.operand_ty(val, locals)?;
                while let TyKind::Ref(_, z, _) = ty.kind() {
                    ty = z;
                    let size = self.size_of_sized(ty, locals, "operand of unary op")?;
                    c = self.read_memory(Address::from_bytes(c)?, size)?;
                }
                if let TyKind::Float(f) = ty.kind() {
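                    // `Not` is not defined for floats, so only `Neg` can reach
                    // this branch.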
                    match f {
                        rustc_type_ir::FloatTy::F16 => {
                            let c = -from_bytes!(f16, u16, c);
                            Owned(u16::try_from(c.to_bits()).unwrap().to_le_bytes().into())
                        }
                        rustc_type_ir::FloatTy::F32 => {
                            let c = -from_bytes!(f32, c);
                            Owned(c.to_le_bytes().into())
                        }
                        rustc_type_ir::FloatTy::F64 => {
                            let c = -from_bytes!(f64, c);
                            Owned(c.to_le_bytes().into())
                        }
                        rustc_type_ir::FloatTy::F128 => {
                            let c = -from_bytes!(f128, u128, c);
                            Owned(c.to_bits().to_le_bytes().into())
                        }
                    }
                } else {
                    let mut c = c.to_vec();
                    if matches!(ty.kind(), TyKind::Bool) {
                        c[0] = 1 - c[0];
                    } else {
                        match op {
                            UnOp::Not => c.iter_mut().for_each(|it| *it = !*it),
                            UnOp::Neg => {
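                                // Two's complement negation on the little-endian
                                // bytes: flip every bit, then add one with carry.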
                                c.iter_mut().for_each(|it| *it = !*it);
                                for k in c.iter_mut() {
                                    let o;
                                    (*k, o) = k.overflowing_add(1);
                                    if !o {
                                        break;
                                    }
                                }
                            }
                        }
                    }
                    Owned(c)
                }
            }
            Rvalue::CheckedBinaryOp(op, lhs, rhs) => 'binary_op: {
                let lc = self.eval_operand(lhs, locals)?;
                let rc = self.eval_operand(rhs, locals)?;
                let mut lc = lc.get(self)?;
                let mut rc = rc.get(self)?;
                let mut ty = self.operand_ty(lhs, locals)?;
                while let TyKind::Ref(_, z, _) = ty.kind() {
                    ty = z;
                    let size = if ty.is_str() {
                        if *op != BinOp::Eq {
                            never!("Only eq is builtin for `str`");
                        }
                        let ls = from_bytes!(usize, &lc[self.ptr_size()..self.ptr_size() * 2]);
                        let rs = from_bytes!(usize, &rc[self.ptr_size()..self.ptr_size() * 2]);
                        if ls != rs {
                            break 'binary_op Owned(vec![0]);
                        }
                        lc = &lc[..self.ptr_size()];
                        rc = &rc[..self.ptr_size()];
                        lc = self.read_memory(Address::from_bytes(lc)?, ls)?;
                        rc = self.read_memory(Address::from_bytes(rc)?, ls)?;
                        break 'binary_op Owned(vec![u8::from(lc == rc)]);
                    } else {
                        self.size_of_sized(ty, locals, "operand of binary op")?
                    };
                    lc = self.read_memory(Address::from_bytes(lc)?, size)?;
                    rc = self.read_memory(Address::from_bytes(rc)?, size)?;
                }
                if let TyKind::Float(f) = ty.kind() {
                    match f {
                        rustc_type_ir::FloatTy::F16 => {
                            let l = from_bytes!(f16, u16, lc);
                            let r = from_bytes!(f16, u16, rc);
                            match op {
                                BinOp::Ge
                                | BinOp::Gt
                                | BinOp::Le
                                | BinOp::Lt
                                | BinOp::Eq
                                | BinOp::Ne => {
                                    let r = op.run_compare(l, r) as u8;
                                    Owned(vec![r])
                                }
                                BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {
                                    let r = match op {
                                        BinOp::Add => l + r,
                                        BinOp::Sub => l - r,
                                        BinOp::Mul => l * r,
                                        BinOp::Div => l / r,
                                        _ => unreachable!(),
                                    };
                                    Owned(
                                        u16::try_from(r.value.to_bits())
                                            .unwrap()
                                            .to_le_bytes()
                                            .into(),
                                    )
                                }
                                it => not_supported!(
                                    "invalid binop {it:?} on floating point operands"
                                ),
                            }
                        }
                        rustc_type_ir::FloatTy::F32 => {
                            let l = from_bytes!(f32, lc);
                            let r = from_bytes!(f32, rc);
                            match op {
                                BinOp::Ge
                                | BinOp::Gt
                                | BinOp::Le
                                | BinOp::Lt
                                | BinOp::Eq
                                | BinOp::Ne => {
                                    let r = op.run_compare(l, r) as u8;
                                    Owned(vec![r])
                                }
                                BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {
                                    let r = match op {
                                        BinOp::Add => l + r,
                                        BinOp::Sub => l - r,
                                        BinOp::Mul => l * r,
                                        BinOp::Div => l / r,
                                        _ => unreachable!(),
                                    };
                                    Owned(r.to_le_bytes().into())
                                }
                                it => not_supported!(
                                    "invalid binop {it:?} on floating point operands"
                                ),
                            }
                        }
                        rustc_type_ir::FloatTy::F64 => {
                            let l = from_bytes!(f64, lc);
                            let r = from_bytes!(f64, rc);
                            match op {
                                BinOp::Ge
                                | BinOp::Gt
                                | BinOp::Le
                                | BinOp::Lt
                                | BinOp::Eq
                                | BinOp::Ne => {
                                    let r = op.run_compare(l, r) as u8;
                                    Owned(vec![r])
                                }
                                BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {
                                    let r = match op {
                                        BinOp::Add => l + r,
                                        BinOp::Sub => l - r,
                                        BinOp::Mul => l * r,
                                        BinOp::Div => l / r,
                                        _ => unreachable!(),
                                    };
                                    Owned(r.to_le_bytes().into())
                                }
                                it => not_supported!(
                                    "invalid binop {it:?} on floating point operands"
                                ),
                            }
                        }
                        rustc_type_ir::FloatTy::F128 => {
                            let l = from_bytes!(f128, u128, lc);
                            let r = from_bytes!(f128, u128, rc);
                            match op {
                                BinOp::Ge
                                | BinOp::Gt
                                | BinOp::Le
                                | BinOp::Lt
                                | BinOp::Eq
                                | BinOp::Ne => {
                                    let r = op.run_compare(l, r) as u8;
                                    Owned(vec![r])
                                }
                                BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {
                                    let r = match op {
                                        BinOp::Add => l + r,
                                        BinOp::Sub => l - r,
                                        BinOp::Mul => l * r,
                                        BinOp::Div => l / r,
                                        _ => unreachable!(),
                                    };
                                    Owned(r.value.to_bits().to_le_bytes().into())
                                }
                                it => not_supported!(
                                    "invalid binop {it:?} on floating point operands"
                                ),
                            }
                        }
                    }
                } else {
                    let is_signed = matches!(ty.kind(), TyKind::Int(_));
                    let l128 = IntValue::from_bytes(lc, is_signed);
                    let r128 = IntValue::from_bytes(rc, is_signed);
                    match op {
                        BinOp::Ge | BinOp::Gt | BinOp::Le | BinOp::Lt | BinOp::Eq | BinOp::Ne => {
                            let r = op.run_compare(l128, r128) as u8;
                            Owned(vec![r])
                        }
                        BinOp::BitAnd
                        | BinOp::BitOr
                        | BinOp::BitXor
                        | BinOp::Add
                        | BinOp::Mul
                        | BinOp::Div
                        | BinOp::Rem
                        | BinOp::Sub => {
                            let r = match op {
                                BinOp::Add => l128.checked_add(r128).ok_or_else(|| {
                                    MirEvalError::Panic(format!("Overflow in {op:?}"))
                                })?,
                                BinOp::Mul => l128.checked_mul(r128).ok_or_else(|| {
                                    MirEvalError::Panic(format!("Overflow in {op:?}"))
                                })?,
                                BinOp::Div => l128.checked_div(r128).ok_or_else(|| {
                                    MirEvalError::Panic(format!("Overflow in {op:?}"))
                                })?,
                                BinOp::Rem => l128.checked_rem(r128).ok_or_else(|| {
                                    MirEvalError::Panic(format!("Overflow in {op:?}"))
                                })?,
                                BinOp::Sub => l128.checked_sub(r128).ok_or_else(|| {
                                    MirEvalError::Panic(format!("Overflow in {op:?}"))
                                })?,
                                BinOp::BitAnd => l128 & r128,
                                BinOp::BitOr => l128 | r128,
                                BinOp::BitXor => l128 ^ r128,
                                _ => unreachable!(),
                            };
                            Owned(r.to_bytes())
                        }
                        BinOp::Shl | BinOp::Shr => {
                            let r = 'b: {
                                if let Some(shift_amount) = r128.as_u32() {
                                    let r = match op {
                                        BinOp::Shl => l128.checked_shl(shift_amount),
                                        BinOp::Shr => l128.checked_shr(shift_amount),
                                        _ => unreachable!(),
                                    };
                                    // Shifting by the operand's width or more is an
                                    // overflow, even when it fits the 128-bit
                                    // working value.
                                    if shift_amount as usize >= lc.len() * 8 {
                                        return Err(MirEvalError::Panic(format!(
                                            "Overflow in {op:?}"
                                        )));
                                    }
                                    if let Some(r) = r {
                                        break 'b r;
                                    }
                                }
                                return Err(MirEvalError::Panic(format!("Overflow in {op:?}")));
                            };
                            Owned(r.to_bytes())
                        }
                        BinOp::Offset => not_supported!("offset binop"),
                    }
                }
            }
            Rvalue::Discriminant(p) => {
                let ty = self.place_ty(p, locals)?;
                let bytes = self.eval_place(p, locals)?.get(self)?;
                let result = self.compute_discriminant(ty, bytes)?;
                Owned(result.to_le_bytes().to_vec())
            }
            Rvalue::Repeat(it, len) => {
                let len = match try_const_usize(self.db, *len) {
                    Some(it) => it as usize,
                    None => not_supported!("non-evaluatable array len in repeat Rvalue"),
                };
                let val = self.eval_operand(it, locals)?.get(self)?;
                let size = len * val.len();
                Owned(val.iter().copied().cycle().take(size).collect())
            }
            Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"),
            Rvalue::ShallowInitBoxWithAlloc(ty) => {
                let Some((size, align)) = self.size_align_of(*ty, locals)? else {
                    not_supported!("unsized box initialization");
                };
                let addr = self.heap_allocate(size, align)?;
                Owned(addr.to_bytes().to_vec())
            }
            Rvalue::CopyForDeref(_) => not_supported!("copy for deref"),
            Rvalue::Aggregate(kind, values) => {
                let values = values
                    .iter()
                    .map(|it| self.eval_operand(it, locals))
                    .collect::<Result<'db, Vec<_>>>()?;
                match kind {
                    AggregateKind::Array(_) => {
                        let mut r = vec![];
                        for it in values {
                            let value = it.get(self)?;
                            r.extend(value);
                        }
                        Owned(r)
                    }
                    AggregateKind::Tuple(ty) => {
                        let layout = self.layout(*ty)?;
                        Owned(self.construct_with_layout(
                            layout.size.bytes_usize(),
                            &layout,
                            None,
                            values.iter().map(|&it| it.into()),
                        )?)
                    }
                    AggregateKind::Union(it, f) => {
                        let layout = self.layout_adt(
                            (*it).into(),
                            GenericArgs::new_from_iter(self.interner(), []),
                        )?;
                        let offset = layout
                            .fields
                            .offset(u32::from(f.local_id.into_raw()) as usize)
                            .bytes_usize();
                        let op = values[0].get(self)?;
                        let mut result = vec![0; layout.size.bytes_usize()];
                        result[offset..offset + op.len()].copy_from_slice(op);
                        Owned(result)
                    }
                    AggregateKind::Adt(it, subst) => {
                        let (size, variant_layout, tag) =
                            self.layout_of_variant(*it, *subst, locals)?;
                        Owned(self.construct_with_layout(
                            size,
                            &variant_layout,
                            tag,
                            values.iter().map(|&it| it.into()),
                        )?)
                    }
                    AggregateKind::Closure(ty) => {
                        let layout = self.layout(*ty)?;
                        Owned(self.construct_with_layout(
                            layout.size.bytes_usize(),
                            &layout,
                            None,
                            values.iter().map(|&it| it.into()),
                        )?)
                    }
                }
            }
            Rvalue::Cast(kind, operand, target_ty) => match kind {
                CastKind::PointerCoercion(cast) => match cast {
                    PointerCast::ReifyFnPointer | PointerCast::ClosureFnPointer(_) => {
                        let current_ty = self.operand_ty(operand, locals)?;
                        if let TyKind::FnDef(_, _) | TyKind::Closure(_, _) = current_ty.kind() {
                            let id = self.vtable_map.id(current_ty);
                            let ptr_size = self.ptr_size();
                            Owned(id.to_le_bytes()[0..ptr_size].to_vec())
                        } else {
                            not_supported!(
                                "creating a fn pointer from a non FnDef or Closure type"
                            );
                        }
                    }
                    PointerCast::Unsize => {
                        let current_ty = self.operand_ty(operand, locals)?;
                        let addr = self.eval_operand(operand, locals)?;
                        self.coerce_unsized(addr, current_ty, *target_ty)?
                    }
                    PointerCast::MutToConstPointer | PointerCast::UnsafeFnPointer => {
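                        // These casts do not change the byte representation.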
                        Borrowed(self.eval_operand(operand, locals)?)
                    }
                    PointerCast::ArrayToPointer => {
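                        // Keep only the leading pointer-sized data, discarding any
                        // metadata bytes.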
                        Borrowed(self.eval_operand(operand, locals)?.slice(0..self.ptr_size()))
                    }
                },
                CastKind::DynStar => not_supported!("dyn star cast"),
                CastKind::IntToInt
                | CastKind::PtrToPtr
                | CastKind::PointerExposeAddress
                | CastKind::PointerFromExposedAddress => {
                    let current_ty = self.operand_ty(operand, locals)?;
                    let is_signed = matches!(current_ty.kind(), TyKind::Int(_));
                    let current = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed);
                    let dest_size =
                        self.size_of_sized(*target_ty, locals, "destination of int to int cast")?;
                    Owned(current[0..dest_size].to_vec())
                }
                CastKind::FloatToInt => {
                    let ty = self.operand_ty(operand, locals)?;
                    let TyKind::Float(ty) = ty.kind() else {
                        not_supported!("invalid float to int cast");
                    };
                    let value = self.eval_operand(operand, locals)?.get(self)?;
                    let value = match ty {
                        rustc_type_ir::FloatTy::F32 => {
                            let value = value.try_into().unwrap();
                            f32::from_le_bytes(value) as f64
                        }
                        rustc_type_ir::FloatTy::F64 => {
                            let value = value.try_into().unwrap();
                            f64::from_le_bytes(value)
                        }
                        rustc_type_ir::FloatTy::F16 | rustc_type_ir::FloatTy::F128 => {
                            not_supported!("unstable floating point type f16 and f128");
                        }
                    };
                    let is_signed = matches!(target_ty.kind(), TyKind::Int(_));
                    let dest_size =
                        self.size_of_sized(*target_ty, locals, "destination of float to int cast")?;
                    let dest_bits = dest_size * 8;
                    // Saturate to the destination type's value range before
                    // truncating to its byte width.
                    let (max, min) = if dest_bits == 128 {
                        (i128::MAX, i128::MIN)
                    } else if is_signed {
                        let max = 1i128 << (dest_bits - 1);
                        (max - 1, -max)
                    } else {
                        ((1i128 << dest_bits) - 1, 0)
                    };
                    let value = (value as i128).min(max).max(min);
                    let result = value.to_le_bytes();
                    Owned(result[0..dest_size].to_vec())
                }
                CastKind::FloatToFloat => {
                    let ty = self.operand_ty(operand, locals)?;
                    let TyKind::Float(ty) = ty.kind() else {
                        not_supported!("invalid float to float cast");
                    };
                    let value = self.eval_operand(operand, locals)?.get(self)?;
                    let value = match ty {
                        rustc_type_ir::FloatTy::F32 => {
                            let value = value.try_into().unwrap();
                            f32::from_le_bytes(value) as f64
                        }
                        rustc_type_ir::FloatTy::F64 => {
                            let value = value.try_into().unwrap();
                            f64::from_le_bytes(value)
                        }
                        rustc_type_ir::FloatTy::F16 | rustc_type_ir::FloatTy::F128 => {
                            not_supported!("unstable floating point type f16 and f128");
                        }
                    };
                    let TyKind::Float(target_ty) = target_ty.kind() else {
                        not_supported!("invalid float to float cast");
                    };
                    match target_ty {
                        rustc_type_ir::FloatTy::F32 => Owned((value as f32).to_le_bytes().to_vec()),
                        rustc_type_ir::FloatTy::F64 => Owned((value as f64).to_le_bytes().to_vec()),
                        rustc_type_ir::FloatTy::F16 | rustc_type_ir::FloatTy::F128 => {
                            not_supported!("unstable floating point type f16 and f128");
                        }
                    }
                }
                CastKind::IntToFloat => {
                    let current_ty = self.operand_ty(operand, locals)?;
                    let is_signed = matches!(current_ty.kind(), TyKind::Int(_));
                    let value = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed);
                    let value = i128::from_le_bytes(value);
                    let TyKind::Float(target_ty) = target_ty.kind() else {
                        not_supported!("invalid int to float cast");
                    };
                    match target_ty {
                        rustc_type_ir::FloatTy::F32 => Owned((value as f32).to_le_bytes().to_vec()),
                        rustc_type_ir::FloatTy::F64 => Owned((value as f64).to_le_bytes().to_vec()),
                        rustc_type_ir::FloatTy::F16 | rustc_type_ir::FloatTy::F128 => {
                            not_supported!("unstable floating point type f16 and f128");
                        }
                    }
                }
                CastKind::FnPtrToPtr => not_supported!("fn ptr to ptr cast"),
            },
            Rvalue::ThreadLocalRef(n)
            | Rvalue::AddressOf(n)
            | Rvalue::BinaryOp(n)
            | Rvalue::NullaryOp(n) => match *n {},
        })
    }

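    /// Decodes the discriminant of an enum value from its raw bytes, handling
    /// both direct tags and niche encodings.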
    fn compute_discriminant(&self, ty: Ty<'db>, bytes: &[u8]) -> Result<'db, i128> {
        let layout = self.layout(ty)?;
        let TyKind::Adt(adt_def, _) = ty.kind() else {
            return Ok(0);
        };
        let AdtId::EnumId(e) = adt_def.def_id().0 else {
            return Ok(0);
        };
        match &layout.variants {
            Variants::Empty => unreachable!(),
            Variants::Single { index } => {
                let r =
                    self.const_eval_discriminant(e.enum_variants(self.db).variants[index.0].0)?;
                Ok(r)
            }
            Variants::Multiple { tag, tag_encoding, variants, .. } => {
                let size = tag.size(&*self.target_data_layout).bytes_usize();
                let offset = layout.fields.offset(0).bytes_usize();
                let is_signed = tag.is_signed();
                match tag_encoding {
                    TagEncoding::Direct => {
                        let tag = &bytes[offset..offset + size];
                        Ok(i128::from_le_bytes(pad16(tag, is_signed)))
                    }
                    TagEncoding::Niche { untagged_variant, niche_start, .. } => {
                        let tag = &bytes[offset..offset + size];
                        let candidate_tag = i128::from_le_bytes(pad16(tag, is_signed))
                            .wrapping_sub(*niche_start as i128)
                            as usize;
                        let idx = variants
                            .iter_enumerated()
                            .map(|(it, _)| it)
                            .filter(|it| it != untagged_variant)
                            .nth(candidate_tag)
                            .unwrap_or(*untagged_variant)
                            .0;
                        let result =
                            self.const_eval_discriminant(e.enum_variants(self.db).variants[idx].0)?;
                        Ok(result)
                    }
                }
            }
        }
    }

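    /// An unsizing coercion may go through a wrapper struct (e.g. a `Box<[i32; 1]>`
    /// to `Box<[i32]>` coercion); recursively follow the last field until the
    /// pointer or reference being coerced is found.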
    fn coerce_unsized_look_through_fields<T>(
        &self,
        ty: Ty<'db>,
        goal: impl Fn(TyKind<'db>) -> Option<T>,
    ) -> Result<'db, T> {
        let kind = ty.kind();
        if let Some(it) = goal(kind) {
            return Ok(it);
        }
        if let TyKind::Adt(adt_def, subst) = kind
            && let AdtId::StructId(struct_id) = adt_def.def_id().0
        {
            let field_types = self.db.field_types(struct_id.into());
            if let Some(ty) =
                field_types.iter().last().map(|it| it.1.instantiate(self.interner(), subst))
            {
                return self.coerce_unsized_look_through_fields(ty, goal);
            }
        }
        Err(MirEvalError::CoerceUnsizedError(ty))
    }

    fn coerce_unsized(
        &mut self,
        addr: Interval,
        current_ty: Ty<'db>,
        target_ty: Ty<'db>,
    ) -> Result<'db, IntervalOrOwned> {
        fn for_ptr<'db>(it: TyKind<'db>) -> Option<Ty<'db>> {
            match it {
                TyKind::RawPtr(ty, _) | TyKind::Ref(_, ty, _) => Some(ty),
                _ => None,
            }
        }
        let target_ty = self.coerce_unsized_look_through_fields(target_ty, for_ptr)?;
        let current_ty = self.coerce_unsized_look_through_fields(current_ty, for_ptr)?;

        self.unsizing_ptr_from_addr(target_ty, current_ty, addr)
    }

    /// Constructs the bytes of a (possibly fat) pointer to `target_ty` from the
    /// pointer to `current_ty` stored at `addr`.
    fn unsizing_ptr_from_addr(
        &mut self,
        target_ty: Ty<'db>,
        current_ty: Ty<'db>,
        addr: Interval,
    ) -> Result<'db, IntervalOrOwned> {
        use IntervalOrOwned::*;
        Ok(match &target_ty.kind() {
            TyKind::Slice(_) => match &current_ty.kind() {
                TyKind::Array(_, size) => {
                    let len = match try_const_usize(self.db, *size) {
                        None => {
                            not_supported!("unevaluatable len of array in coerce unsized")
                        }
                        Some(it) => it as usize,
                    };
                    let mut r = Vec::with_capacity(16);
                    let addr = addr.get(self)?;
                    r.extend(addr.iter().copied());
                    r.extend(len.to_le_bytes());
                    Owned(r)
                }
                t => {
                    not_supported!("slice unsizing from non array type {t:?}")
                }
            },
            TyKind::Dynamic(..) => {
                let vtable = self.vtable_map.id(current_ty);
                let mut r = Vec::with_capacity(16);
                let addr = addr.get(self)?;
                r.extend(addr.iter().copied());
                r.extend(vtable.to_le_bytes());
                Owned(r)
            }
            TyKind::Adt(adt_def, target_subst) => match &current_ty.kind() {
                TyKind::Adt(current_adt_def, current_subst) => {
                    let id = adt_def.def_id().0;
                    let current_id = current_adt_def.def_id().0;
                    if id != current_id {
                        not_supported!("unsizing struct with different type");
                    }
                    let id = match id {
                        AdtId::StructId(s) => s,
                        AdtId::UnionId(_) => not_supported!("unsizing unions"),
                        AdtId::EnumId(_) => not_supported!("unsizing enums"),
                    };
                    let Some((last_field, _)) = id.fields(self.db).fields().iter().next_back()
                    else {
                        not_supported!("unsizing struct without field");
                    };
                    let target_last_field = self.db.field_types(id.into())[last_field]
                        .instantiate(self.interner(), target_subst);
                    let current_last_field = self.db.field_types(id.into())[last_field]
                        .instantiate(self.interner(), current_subst);
                    return self.unsizing_ptr_from_addr(
                        target_last_field,
                        current_last_field,
                        addr,
                    );
                }
                _ => not_supported!("unsizing struct with non adt type"),
            },
            _ => not_supported!("unknown unsized cast"),
        })
    }

    fn layout_of_variant(
        &mut self,
        it: VariantId,
        subst: GenericArgs<'db>,
        locals: &Locals<'db>,
    ) -> Result<'db, (usize, Arc<Layout>, Option<(usize, usize, i128)>)> {
        let adt = it.adt_id(self.db);
        if let DefWithBodyId::VariantId(f) = locals.body.owner
            && let VariantId::EnumVariantId(it) = it
            && let AdtId::EnumId(e) = adt
            && f.lookup(self.db).parent == e
        {
            let i = self.const_eval_discriminant(it)?;
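            // We are evaluating the body of one of this enum's own discriminants;
            // asking for the enum's real layout here could cycle, so return a
            // dummy layout with a 16-byte tag instead.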
            return Ok((16, self.layout(Ty::new_empty_tuple(self.interner()))?, Some((0, 16, i))));
        }
        let layout = self.layout_adt(adt, subst)?;
        Ok(match &layout.variants {
            Variants::Single { .. } | Variants::Empty => (layout.size.bytes_usize(), layout, None),
            Variants::Multiple { variants, tag, tag_encoding, .. } => {
                let enum_variant_id = match it {
                    VariantId::EnumVariantId(it) => it,
                    _ => not_supported!("multi variant layout for non-enums"),
                };
                let mut discriminant = self.const_eval_discriminant(enum_variant_id)?;
                let lookup = enum_variant_id.lookup(self.db);
                let rustc_enum_variant_idx = RustcEnumVariantIdx(lookup.index as usize);
                let variant_layout = variants[rustc_enum_variant_idx].clone();
                let have_tag = match tag_encoding {
                    TagEncoding::Direct => true,
                    TagEncoding::Niche { untagged_variant, niche_variants: _, niche_start } => {
                        if *untagged_variant == rustc_enum_variant_idx {
                            false
                        } else {
                            discriminant = (variants
                                .iter_enumerated()
                                .filter(|(it, _)| it != untagged_variant)
                                .position(|(it, _)| it == rustc_enum_variant_idx)
                                .unwrap() as i128)
                                .wrapping_add(*niche_start as i128);
                            true
                        }
                    }
                };
                (
                    layout.size.bytes_usize(),
                    Arc::new(variant_layout),
                    if have_tag {
                        Some((
                            layout.fields.offset(0).bytes_usize(),
                            tag.size(&*self.target_data_layout).bytes_usize(),
                            discriminant,
                        ))
                    } else {
                        None
                    },
                )
            }
        })
    }

    fn construct_with_layout(
        &mut self,
        size: usize,
        variant_layout: &Layout,
        tag: Option<(usize, usize, i128)>,
        values: impl Iterator<Item = IntervalOrOwned>,
    ) -> Result<'db, Vec<u8>> {
        let mut result = vec![0; size];
        if let Some((offset, size, value)) = tag {
            match result.get_mut(offset..offset + size) {
                Some(it) => it.copy_from_slice(&value.to_le_bytes()[0..size]),
                None => {
                    return Err(MirEvalError::InternalError(
                        format!(
                            "encoded tag ({offset}, {size}, {value}) is out of bounds 0..{}",
                            result.len()
                        )
                        .into(),
                    ));
                }
            }
        }
        for (i, op) in values.enumerate() {
            let offset = variant_layout.fields.offset(i).bytes_usize();
            let op = op.get(self)?;
            match result.get_mut(offset..offset + op.len()) {
                Some(it) => it.copy_from_slice(op),
                None => {
                    return Err(MirEvalError::InternalError(
                        format!("field offset ({offset}) is out of bounds 0..{size}").into(),
                    ));
                }
            }
        }
        Ok(result)
    }

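    /// Evaluates an operand to the memory interval holding its value, clearing the drop
    /// flag of a copied or moved place.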
    fn eval_operand(
        &mut self,
        it: &Operand<'db>,
        locals: &mut Locals<'db>,
    ) -> Result<'db, Interval> {
        Ok(match &it.kind {
            OperandKind::Copy(p) | OperandKind::Move(p) => {
                locals.drop_flags.remove_place(p, &locals.body.projection_store);
                self.eval_place(p, locals)?
            }
            OperandKind::Static(st) => {
                let addr = self.eval_static(*st, locals)?;
                Interval::new(addr, self.ptr_size())
            }
            OperandKind::Constant { konst, .. } => self.allocate_const_in_heap(locals, *konst)?,
        })
    }

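    /// Evaluates `konst` if it is still unevaluated and copies its bytes into the
    /// evaluator's heap, relocating any addresses recorded in the constant's memory map.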
    #[allow(clippy::double_parens)]
    fn allocate_const_in_heap(
        &mut self,
        locals: &Locals<'db>,
        konst: Const<'db>,
    ) -> Result<'db, Interval> {
        let result_owner;
        let value = match konst.kind() {
            ConstKind::Value(value) => value,
            ConstKind::Unevaluated(UnevaluatedConst { def: const_id, args: subst }) => 'b: {
                let mut id = const_id.0;
                let mut subst = subst;
                if let hir_def::GeneralConstId::ConstId(c) = id {
                    let (c, s) = lookup_impl_const(&self.infcx, self.param_env.param_env, c, subst);
                    id = hir_def::GeneralConstId::ConstId(c);
                    subst = s;
                }
                result_owner = match id {
                    GeneralConstId::ConstId(const_id) => {
                        self.db.const_eval(const_id, subst, Some(self.param_env)).map_err(|e| {
                            let name = id.name(self.db);
                            MirEvalError::ConstEvalError(name, Box::new(e))
                        })?
                    }
                    GeneralConstId::StaticId(static_id) => {
                        self.db.const_eval_static(static_id).map_err(|e| {
                            let name = id.name(self.db);
                            MirEvalError::ConstEvalError(name, Box::new(e))
                        })?
                    }
                };
                if let ConstKind::Value(value) = result_owner.kind() {
                    break 'b value;
                }
                not_supported!("unevaluatable constant");
            }
            _ => not_supported!("evaluating unknown const"),
        };
        let ValueConst { ty, value } = value;
        let ConstBytes { memory: v, memory_map } = value.inner();
        let patch_map = memory_map.transform_addresses(|b, align| {
            let addr = self.heap_allocate(b.len(), align)?;
            self.write_memory(addr, b)?;
            Ok(addr.to_usize())
        })?;
        let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1));
        let v: Cow<'_, [u8]> = if size != v.len() {
            if size == 16 && v.len() < 16 {
                Cow::Owned(pad16(v, false).to_vec())
            } else if size < 16 && v.len() == 16 {
                Cow::Borrowed(&v[0..size])
            } else {
                return Err(MirEvalError::InvalidConst(konst));
            }
        } else {
            Cow::Borrowed(v)
        };
        let addr = self.heap_allocate(size, align)?;
        self.write_memory(addr, &v)?;
        self.patch_addresses(
            &patch_map,
            |bytes| match memory_map {
                MemoryMap::Empty | MemoryMap::Simple(_) => {
                    Err(MirEvalError::InvalidVTableId(from_bytes!(usize, bytes)))
                }
                MemoryMap::Complex(cm) => cm.vtable.ty_of_bytes(bytes),
            },
            addr,
            ty,
            locals,
        )?;
        Ok(Interval::new(addr, size))
    }

    fn eval_place(&mut self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Interval> {
        let addr = self.place_addr(p, locals)?;
        Ok(Interval::new(
            addr,
            self.size_of_sized(self.place_ty(p, locals)?, locals, "type of this place")?,
        ))
    }

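    /// Borrows `size` bytes at `addr` from the emulated stack or heap, rejecting invalid
    /// and out-of-bounds addresses.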
    fn read_memory(&self, addr: Address, size: usize) -> Result<'db, &[u8]> {
        if size == 0 {
            return Ok(&[]);
        }
        let (mem, pos) = match addr {
            Stack(it) => (&self.stack, it),
            Heap(it) => (&self.heap, it),
            Invalid(it) => {
                return Err(MirEvalError::UndefinedBehavior(format!(
                    "read invalid memory address {it} with size {size}"
                )));
            }
        };
        mem.get(pos..pos + size)
            .ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory read".to_owned()))
    }

    fn write_memory_using_ref(&mut self, addr: Address, size: usize) -> Result<'db, &mut [u8]> {
        let (mem, pos) = match addr {
            Stack(it) => (&mut self.stack, it),
            Heap(it) => (&mut self.heap, it),
            Invalid(it) => {
                return Err(MirEvalError::UndefinedBehavior(format!(
                    "write invalid memory address {it} with size {size}"
                )));
            }
        };
        mem.get_mut(pos..pos + size)
            .ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory write".to_owned()))
    }

    fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<'db, ()> {
        if r.is_empty() {
            return Ok(());
        }
        self.write_memory_using_ref(addr, r.len())?.copy_from_slice(r);
        Ok(())
    }

    fn copy_from_interval_or_owned(
        &mut self,
        addr: Address,
        r: IntervalOrOwned,
    ) -> Result<'db, ()> {
        match r {
            IntervalOrOwned::Borrowed(r) => self.copy_from_interval(addr, r),
            IntervalOrOwned::Owned(r) => self.write_memory(addr, &r),
        }
    }

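    /// Copies `r.size` bytes from the interval `r` to `addr`, handling all four
    /// stack/heap source and destination combinations.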
    fn copy_from_interval(&mut self, addr: Address, r: Interval) -> Result<'db, ()> {
        if r.size == 0 {
            return Ok(());
        }

        let oob = || MirEvalError::UndefinedBehavior("out of bounds memory write".to_owned());

        match (addr, r.addr) {
            (Stack(dst), Stack(src)) => {
                if self.stack.len() < src + r.size || self.stack.len() < dst + r.size {
                    return Err(oob());
                }
                self.stack.copy_within(src..src + r.size, dst)
            }
            (Heap(dst), Heap(src)) => {
                if self.heap.len() < src + r.size || self.heap.len() < dst + r.size {
                    return Err(oob());
                }
                self.heap.copy_within(src..src + r.size, dst)
            }
            (Stack(dst), Heap(src)) => {
                self.stack
                    .get_mut(dst..dst + r.size)
                    .ok_or_else(oob)?
                    .copy_from_slice(self.heap.get(src..src + r.size).ok_or_else(oob)?);
            }
            (Heap(dst), Stack(src)) => {
                self.heap
                    .get_mut(dst..dst + r.size)
                    .ok_or_else(oob)?
                    .copy_from_slice(self.stack.get(src..src + r.size).ok_or_else(oob)?);
            }
            _ => {
                return Err(MirEvalError::UndefinedBehavior(format!(
                    "invalid memory write at address {addr:?}"
                )));
            }
        }

        Ok(())
    }

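    /// Returns `Some((size, align))` for `ty`, or `None` if the type is unsized.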
    fn size_align_of(
        &self,
        ty: Ty<'db>,
        locals: &Locals<'db>,
    ) -> Result<'db, Option<(usize, usize)>> {
        if let Some(layout) = self.layout_cache.borrow().get(&ty) {
            return Ok(layout
                .is_sized()
                .then(|| (layout.size.bytes_usize(), layout.align.bytes() as usize)));
        }
        if let DefWithBodyId::VariantId(f) = locals.body.owner
            && let Some((AdtId::EnumId(e), _)) = ty.as_adt()
            && f.lookup(self.db).parent == e
        {
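            // As in `layout_of_variant`: this enum's layout depends on the discriminant
            // we are currently evaluating, so report a dummy size and alignment instead.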
            return Ok(Some((16, 16)));
        }
        let layout = self.layout(ty);
        if self.assert_placeholder_ty_is_unused
            && matches!(layout, Err(MirEvalError::LayoutError(LayoutError::HasPlaceholder, _)))
        {
            return Ok(Some((0, 1)));
        }
        let layout = layout?;
        Ok(layout.is_sized().then(|| (layout.size.bytes_usize(), layout.align.bytes() as usize)))
    }

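    /// Like `size_align_of`, but returns only the size and errors if `ty` is unsized;
    /// `what` describes the value for the error message.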
    fn size_of_sized(
        &self,
        ty: Ty<'db>,
        locals: &Locals<'db>,
        what: &'static str,
    ) -> Result<'db, usize> {
        match self.size_align_of(ty, locals)? {
            Some(it) => Ok(it.0),
            None => Err(MirEvalError::TypeIsUnsized(ty, what)),
        }
    }

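    /// Like `size_align_of`, but errors if `ty` is unsized; `what` describes the value
    /// for the error message.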
    fn size_align_of_sized(
        &self,
        ty: Ty<'db>,
        locals: &Locals<'db>,
        what: &'static str,
    ) -> Result<'db, (usize, usize)> {
        match self.size_align_of(ty, locals)? {
            Some(it) => Ok(it),
            None => Err(MirEvalError::TypeIsUnsized(ty, what)),
        }
    }

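    /// Bump-allocates `size` bytes on the emulated heap (which only ever grows), padding
    /// to `align` first and enforcing the configured memory limit.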
    fn heap_allocate(&mut self, size: usize, align: usize) -> Result<'db, Address> {
        if !align.is_power_of_two() || align > 10000 {
            return Err(MirEvalError::UndefinedBehavior(format!("Alignment {align} is invalid")));
        }
        while !self.heap.len().is_multiple_of(align) {
            self.heap.push(0);
        }
        if size.checked_add(self.heap.len()).is_none_or(|x| x > self.memory_limit) {
            return Err(MirEvalError::Panic(format!("Memory allocation of {size} bytes failed")));
        }
        let pos = self.heap.len();
        self.heap.extend(std::iter::repeat_n(0, size));
        Ok(Address::Heap(pos))
    }

    fn detect_fn_trait(&self, def: FunctionId) -> Option<FnTrait> {
        let def = Some(def);
        if def == self.cached_fn_trait_func {
            Some(FnTrait::Fn)
        } else if def == self.cached_fn_mut_trait_func {
            Some(FnTrait::FnMut)
        } else if def == self.cached_fn_once_trait_func {
            Some(FnTrait::FnOnce)
        } else {
            None
        }
    }

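    /// Builds a memory map for a value of type `ty` stored at `bytes` by recursively
    /// following references and recording the pointed-to allocations, so that the value
    /// can later be relocated into a different memory space.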
    fn create_memory_map(
        &self,
        bytes: &[u8],
        ty: Ty<'db>,
        locals: &Locals<'db>,
    ) -> Result<'db, ComplexMemoryMap<'db>> {
        fn rec<'db>(
            this: &Evaluator<'db>,
            bytes: &[u8],
            ty: Ty<'db>,
            locals: &Locals<'db>,
            mm: &mut ComplexMemoryMap<'db>,
            stack_depth_limit: usize,
        ) -> Result<'db, ()> {
            if stack_depth_limit.checked_sub(1).is_none() {
                return Err(MirEvalError::StackOverflow);
            }
            match ty.kind() {
                TyKind::Ref(_, t, _) => {
                    let size = this.size_align_of(t, locals)?;
                    match size {
                        Some((size, _)) => {
                            let addr_usize = from_bytes!(usize, bytes);
                            mm.insert(
                                addr_usize,
                                this.read_memory(Address::from_usize(addr_usize), size)?.into(),
                            )
                        }
                        None => {
                            let mut check_inner = None;
                            let (addr, meta) = bytes.split_at(bytes.len() / 2);
                            let element_size = match t.kind() {
                                TyKind::Str => 1,
                                TyKind::Slice(t) => {
                                    check_inner = Some(t);
                                    this.size_of_sized(t, locals, "slice inner type")?
                                }
                                TyKind::Dynamic(..) => {
                                    let t = this.vtable_map.ty_of_bytes(meta)?;
                                    check_inner = Some(t);
                                    this.size_of_sized(t, locals, "dyn concrete type")?
                                }
                                _ => return Ok(()),
                            };
                            let count = match t.kind() {
                                TyKind::Dynamic(..) => 1,
                                _ => from_bytes!(usize, meta),
                            };
                            let size = element_size * count;
                            let addr = Address::from_bytes(addr)?;
                            let b = this.read_memory(addr, size)?;
                            mm.insert(addr.to_usize(), b.into());
                            if let Some(ty) = check_inner {
                                for i in 0..count {
                                    let offset = element_size * i;
                                    rec(
                                        this,
                                        &b[offset..offset + element_size],
                                        ty,
                                        locals,
                                        mm,
                                        stack_depth_limit - 1,
                                    )?;
                                }
                            }
                        }
                    }
                }
                TyKind::Array(inner, len) => {
                    let len = match try_const_usize(this.db, len) {
                        Some(it) => it as usize,
                        None => not_supported!("non evaluatable array len in patching addresses"),
                    };
                    let size = this.size_of_sized(inner, locals, "inner of array")?;
                    for i in 0..len {
                        let offset = i * size;
                        rec(
                            this,
                            &bytes[offset..offset + size],
                            inner,
                            locals,
                            mm,
                            stack_depth_limit - 1,
                        )?;
                    }
                }
                TyKind::Tuple(subst) => {
                    let layout = this.layout(ty)?;
                    for (id, ty) in subst.iter().enumerate() {
                        let offset = layout.fields.offset(id).bytes_usize();
                        let size = this.layout(ty)?.size.bytes_usize();
                        rec(
                            this,
                            &bytes[offset..offset + size],
                            ty,
                            locals,
                            mm,
                            stack_depth_limit - 1,
                        )?;
                    }
                }
                TyKind::Adt(adt, subst) => match adt.def_id().0 {
                    AdtId::StructId(s) => {
                        let data = s.fields(this.db);
                        let layout = this.layout(ty)?;
                        let field_types = this.db.field_types(s.into());
                        for (f, _) in data.fields().iter() {
                            let offset = layout
                                .fields
                                .offset(u32::from(f.into_raw()) as usize)
                                .bytes_usize();
                            let ty = field_types[f].instantiate(this.interner(), subst);
                            let size = this.layout(ty)?.size.bytes_usize();
                            rec(
                                this,
                                &bytes[offset..offset + size],
                                ty,
                                locals,
                                mm,
                                stack_depth_limit - 1,
                            )?;
                        }
                    }
                    AdtId::EnumId(e) => {
                        let layout = this.layout(ty)?;
                        if let Some((v, l)) = detect_variant_from_bytes(
                            &layout,
                            this.db,
                            &this.target_data_layout,
                            bytes,
                            e,
                        ) {
                            let data = v.fields(this.db);
                            let field_types = this.db.field_types(v.into());
                            for (f, _) in data.fields().iter() {
                                let offset =
                                    l.fields.offset(u32::from(f.into_raw()) as usize).bytes_usize();
                                let ty = field_types[f].instantiate(this.interner(), subst);
                                let size = this.layout(ty)?.size.bytes_usize();
                                rec(
                                    this,
                                    &bytes[offset..offset + size],
                                    ty,
                                    locals,
                                    mm,
                                    stack_depth_limit - 1,
                                )?;
                            }
                        }
                    }
                    AdtId::UnionId(_) => (),
                },
                TyKind::Alias(AliasTyKind::Projection, _) => {
                    let mut ocx = ObligationCtxt::new(&this.infcx);
                    let ty = ocx
                        .structurally_normalize_ty(
                            &ObligationCause::dummy(),
                            this.param_env.param_env,
                            ty,
                        )
                        .map_err(|_| MirEvalError::NotSupported("couldn't normalize".to_owned()))?;

                    rec(this, bytes, ty, locals, mm, stack_depth_limit - 1)?;
                }
                _ => (),
            }
            Ok(())
        }
        let mut mm = ComplexMemoryMap::default();
        rec(self, bytes, ty, locals, &mut mm, self.stack_depth_limit - 1)?;
        Ok(mm)
    }

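    /// Rewrites the addresses embedded in the value at `addr` according to `patch_map`,
    /// and re-registers any function pointers in this evaluator's vtable map.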
    fn patch_addresses(
        &mut self,
        patch_map: &FxHashMap<usize, usize>,
        ty_of_bytes: impl Fn(&[u8]) -> Result<'db, Ty<'db>> + Copy,
        addr: Address,
        ty: Ty<'db>,
        locals: &Locals<'db>,
    ) -> Result<'db, ()> {
        let layout = self.layout(ty)?;
        let my_size = self.size_of_sized(ty, locals, "value to patch address")?;
        use rustc_type_ir::TyKind;
        match ty.kind() {
            TyKind::Ref(_, t, _) => {
                let size = self.size_align_of(t, locals)?;
                match size {
                    Some(_) => {
                        let current = from_bytes!(usize, self.read_memory(addr, my_size)?);
                        if let Some(it) = patch_map.get(&current) {
                            self.write_memory(addr, &it.to_le_bytes())?;
                        }
                    }
                    None => {
                        let current = from_bytes!(usize, self.read_memory(addr, my_size / 2)?);
                        if let Some(it) = patch_map.get(&current) {
                            self.write_memory(addr, &it.to_le_bytes())?;
                        }
                    }
                }
            }
            TyKind::FnPtr(_, _) => {
                let ty = ty_of_bytes(self.read_memory(addr, my_size)?)?;
                let new_id = self.vtable_map.id(ty);
                self.write_memory(addr, &new_id.to_le_bytes())?;
            }
            TyKind::Adt(id, args) => match id.def_id().0 {
                AdtId::StructId(s) => {
                    for (i, (_, ty)) in self.db.field_types(s.into()).iter().enumerate() {
                        let offset = layout.fields.offset(i).bytes_usize();
                        let ty = ty.instantiate(self.interner(), args);
                        self.patch_addresses(
                            patch_map,
                            ty_of_bytes,
                            addr.offset(offset),
                            ty,
                            locals,
                        )?;
                    }
                }
                AdtId::UnionId(_) => (),
                AdtId::EnumId(e) => {
                    if let Some((ev, layout)) = detect_variant_from_bytes(
                        &layout,
                        self.db,
                        &self.target_data_layout,
                        self.read_memory(addr, layout.size.bytes_usize())?,
                        e,
                    ) {
                        for (i, (_, ty)) in self.db.field_types(ev.into()).iter().enumerate() {
                            let offset = layout.fields.offset(i).bytes_usize();
                            let ty = ty.instantiate(self.interner(), args);
                            self.patch_addresses(
                                patch_map,
                                ty_of_bytes,
                                addr.offset(offset),
                                ty,
                                locals,
                            )?;
                        }
                    }
                }
            },
            TyKind::Tuple(tys) => {
                for (id, ty) in tys.iter().enumerate() {
                    let offset = layout.fields.offset(id).bytes_usize();
                    self.patch_addresses(patch_map, ty_of_bytes, addr.offset(offset), ty, locals)?;
                }
            }
            TyKind::Array(inner, len) => {
                let len = match consteval::try_const_usize(self.db, len) {
                    Some(it) => it as usize,
                    None => not_supported!("non evaluatable array len in patching addresses"),
                };
                let size = self.size_of_sized(inner, locals, "inner of array")?;
                for i in 0..len {
                    self.patch_addresses(
                        patch_map,
                        ty_of_bytes,
                        addr.offset(i * size),
                        inner,
                        locals,
                    )?;
                }
            }
            TyKind::Bool
            | TyKind::Char
            | TyKind::Int(_)
            | TyKind::Uint(_)
            | TyKind::Float(_)
            | TyKind::Slice(_)
            | TyKind::RawPtr(_, _)
            | TyKind::FnDef(_, _)
            | TyKind::Str
            | TyKind::Never
            | TyKind::Closure(_, _)
            | TyKind::Coroutine(_, _)
            | TyKind::CoroutineWitness(_, _)
            | TyKind::Foreign(_)
            | TyKind::Error(_)
            | TyKind::Placeholder(_)
            | TyKind::Dynamic(_, _)
            | TyKind::Alias(_, _)
            | TyKind::Bound(_, _)
            | TyKind::Infer(_)
            | TyKind::Pat(_, _)
            | TyKind::Param(_)
            | TyKind::UnsafeBinder(_)
            | TyKind::CoroutineClosure(_, _) => (),
        }
        Ok(())
    }

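    /// Calls the function whose vtable id is stored in `bytes`; the id must resolve to
    /// an `FnDef` or a closure type.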
    fn exec_fn_pointer(
        &mut self,
        bytes: Interval,
        destination: Interval,
        args: &[IntervalAndTy<'db>],
        locals: &Locals<'db>,
        target_bb: Option<BasicBlockId<'db>>,
        span: MirSpan,
    ) -> Result<'db, Option<StackFrame<'db>>> {
        let id = from_bytes!(usize, bytes.get(self)?);
        let next_ty = self.vtable_map.ty(id)?;
        use rustc_type_ir::TyKind;
        match next_ty.kind() {
            TyKind::FnDef(def, generic_args) => {
                self.exec_fn_def(def.0, generic_args, destination, args, locals, target_bb, span)
            }
            TyKind::Closure(id, generic_args) => self.exec_closure(
                id.0,
                bytes.slice(0..0),
                generic_args,
                destination,
                args,
                locals,
                span,
            ),
            _ => Err(MirEvalError::InternalError("function pointer to non function".into())),
        }
    }

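    /// Executes a closure body, passing the captures by reference or by value depending
    /// on how the lowered MIR expects its first parameter.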
    fn exec_closure(
        &mut self,
        closure: InternedClosureId,
        closure_data: Interval,
        generic_args: GenericArgs<'db>,
        destination: Interval,
        args: &[IntervalAndTy<'db>],
        locals: &Locals<'db>,
        span: MirSpan,
    ) -> Result<'db, Option<StackFrame<'db>>> {
        let mir_body = self
            .db
            .monomorphized_mir_body_for_closure(closure, generic_args, self.param_env)
            .map_err(|it| MirEvalError::MirLowerErrorForClosure(closure, it))?;
        let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some()
        {
            closure_data.addr.to_bytes().to_vec()
        } else {
            closure_data.get(self)?.to_owned()
        };
        let arg_bytes = iter::once(Ok(closure_data))
            .chain(args.iter().map(|it| Ok(it.get(self)?.to_owned())))
            .collect::<Result<'db, Vec<_>>>()?;
        let interval = self
            .interpret_mir(mir_body, arg_bytes.into_iter().map(IntervalOrOwned::Owned))
            .map_err(|e| {
                MirEvalError::InFunction(
                    Box::new(e),
                    vec![(Either::Right(closure), span, locals.body.owner)],
                )
            })?;
        destination.write_from_interval(self, interval)?;
        Ok(None)
    }

    fn exec_fn_def(
        &mut self,
        def: CallableDefId,
        generic_args: GenericArgs<'db>,
        destination: Interval,
        args: &[IntervalAndTy<'db>],
        locals: &Locals<'db>,
        target_bb: Option<BasicBlockId<'db>>,
        span: MirSpan,
    ) -> Result<'db, Option<StackFrame<'db>>> {
        match def {
            CallableDefId::FunctionId(def) => {
                if self.detect_fn_trait(def).is_some() {
                    return self.exec_fn_trait(
                        def,
                        args,
                        generic_args,
                        locals,
                        destination,
                        target_bb,
                        span,
                    );
                }
                self.exec_fn_with_args(
                    def,
                    args,
                    generic_args,
                    locals,
                    destination,
                    target_bb,
                    span,
                )
            }
            CallableDefId::StructId(id) => {
                let (size, variant_layout, tag) =
                    self.layout_of_variant(id.into(), generic_args, locals)?;
                let result = self.construct_with_layout(
                    size,
                    &variant_layout,
                    tag,
                    args.iter().map(|it| it.interval.into()),
                )?;
                destination.write_from_bytes(self, &result)?;
                Ok(None)
            }
            CallableDefId::EnumVariantId(id) => {
                let (size, variant_layout, tag) =
                    self.layout_of_variant(id.into(), generic_args, locals)?;
                let result = self.construct_with_layout(
                    size,
                    &variant_layout,
                    tag,
                    args.iter().map(|it| it.interval.into()),
                )?;
                destination.write_from_bytes(self, &result)?;
                Ok(None)
            }
        }
    }

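    /// Resolves a call to `def` either to the monomorphized MIR of the concrete impl or,
    /// for `dyn` methods, to the index of the self type within the generic arguments.
    /// Results are cached per `(def, generic_args)` pair.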
    fn get_mir_or_dyn_index(
        &self,
        def: FunctionId,
        generic_args: GenericArgs<'db>,
        locals: &Locals<'db>,
        span: MirSpan,
    ) -> Result<'db, MirOrDynIndex<'db>> {
        let pair = (def, generic_args);
        if let Some(r) = self.mir_or_dyn_index_cache.borrow().get(&pair) {
            return Ok(r.clone());
        }
        let (def, generic_args) = pair;
        let r = if let Some(self_ty_idx) =
            is_dyn_method(self.interner(), self.param_env.param_env, def, generic_args)
        {
            MirOrDynIndex::Dyn(self_ty_idx)
        } else {
            let (imp, generic_args) = self.db.lookup_impl_method(
                ParamEnvAndCrate { param_env: self.param_env.param_env, krate: self.crate_id },
                def,
                generic_args,
            );

            let mir_body = self
                .db
                .monomorphized_mir_body(imp.into(), generic_args, self.param_env)
                .map_err(|e| {
                    MirEvalError::InFunction(
                        Box::new(MirEvalError::MirLowerError(imp, e)),
                        vec![(Either::Left(imp), span, locals.body.owner)],
                    )
                })?;
            MirOrDynIndex::Mir(mir_body)
        };
        self.mir_or_dyn_index_cache.borrow_mut().insert((def, generic_args), r.clone());
        Ok(r)
    }

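    /// Executes a function call: first intercepts known special functions, then either
    /// devirtualizes a `dyn` receiver and retries, or runs the looked-up MIR body.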
    fn exec_fn_with_args(
        &mut self,
        mut def: FunctionId,
        args: &[IntervalAndTy<'db>],
        generic_args: GenericArgs<'db>,
        locals: &Locals<'db>,
        destination: Interval,
        target_bb: Option<BasicBlockId<'db>>,
        span: MirSpan,
    ) -> Result<'db, Option<StackFrame<'db>>> {
        if self.detect_and_exec_special_function(
            def,
            args,
            generic_args,
            locals,
            destination,
            span,
        )? {
            return Ok(None);
        }
        if let Some(redirect_def) = self.detect_and_redirect_special_function(def)? {
            def = redirect_def;
        }
        let arg_bytes = args.iter().map(|it| IntervalOrOwned::Borrowed(it.interval));
        match self.get_mir_or_dyn_index(def, generic_args, locals, span)? {
            MirOrDynIndex::Dyn(self_ty_idx) => {
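                // For every receiver shape supported here (for example `&dyn Trait`,
                // `&mut dyn Trait`, or `Box<dyn Trait>`), the vtable pointer sits in the
                // second pointer-sized slot of the fat pointer, so it can be read
                // without branching on the receiver type.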
                let first_arg = arg_bytes.clone().next().unwrap();
                let first_arg = first_arg.get(self)?;
                let ty = self
                    .vtable_map
                    .ty_of_bytes(&first_arg[self.ptr_size()..self.ptr_size() * 2])?;
                let mut args_for_target = args.to_vec();
                args_for_target[0] = IntervalAndTy {
                    interval: args_for_target[0].interval.slice(0..self.ptr_size()),
                    ty,
                };
                let generics_for_target = GenericArgs::new_from_iter(
                    self.interner(),
                    generic_args
                        .iter()
                        .enumerate()
                        .map(|(i, it)| if i == self_ty_idx { ty.into() } else { it }),
                );
                self.exec_fn_with_args(
                    def,
                    &args_for_target,
                    generics_for_target,
                    locals,
                    destination,
                    target_bb,
                    span,
                )
            }
            MirOrDynIndex::Mir(body) => self.exec_looked_up_function(
                body,
                locals,
                def,
                arg_bytes,
                span,
                destination,
                target_bb,
            ),
        }
    }

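    /// Runs an already-resolved MIR body: if the caller supplied a continuation block,
    /// pushes a new stack frame; otherwise interprets the body to completion and writes
    /// the returned bytes into `destination`.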
    fn exec_looked_up_function(
        &mut self,
        mir_body: Arc<MirBody<'db>>,
        locals: &Locals<'db>,
        def: FunctionId,
        arg_bytes: impl Iterator<Item = IntervalOrOwned>,
        span: MirSpan,
        destination: Interval,
        target_bb: Option<BasicBlockId<'db>>,
    ) -> Result<'db, Option<StackFrame<'db>>> {
        Ok(if let Some(target_bb) = target_bb {
            let (mut locals, prev_stack_ptr) =
                self.create_locals_for_body(&mir_body, Some(destination))?;
            self.fill_locals_for_body(&mir_body, &mut locals, arg_bytes.into_iter())?;
            let span = (span, locals.body.owner);
            Some(StackFrame { locals, destination: Some(target_bb), prev_stack_ptr, span })
        } else {
            let result = self.interpret_mir(mir_body, arg_bytes).map_err(|e| {
                MirEvalError::InFunction(
                    Box::new(e),
                    vec![(Either::Left(def), span, locals.body.owner)],
                )
            })?;
            destination.write_from_interval(self, result)?;
            None
        })
    }

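    /// Executes a call through `Fn`/`FnMut`/`FnOnce`: dereferences the callee (resolving
    /// `dyn` vtables along the way) and dispatches to the underlying fn def, fn pointer,
    /// or closure.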
    fn exec_fn_trait(
        &mut self,
        def: FunctionId,
        args: &[IntervalAndTy<'db>],
        generic_args: GenericArgs<'db>,
        locals: &Locals<'db>,
        destination: Interval,
        target_bb: Option<BasicBlockId<'db>>,
        span: MirSpan,
    ) -> Result<'db, Option<StackFrame<'db>>> {
        let func = args
            .first()
            .ok_or_else(|| MirEvalError::InternalError("fn trait with no arg".into()))?;
        let mut func_ty = func.ty;
        let mut func_data = func.interval;
        while let TyKind::Ref(_, z, _) = func_ty.kind() {
            func_ty = z;
            if matches!(func_ty.kind(), TyKind::Dynamic(..)) {
                let id =
                    from_bytes!(usize, &func_data.get(self)?[self.ptr_size()..self.ptr_size() * 2]);
                func_data = func_data.slice(0..self.ptr_size());
                func_ty = self.vtable_map.ty(id)?;
            }
            let size = self.size_of_sized(func_ty, locals, "self type of fn trait")?;
            func_data = Interval { addr: Address::from_bytes(func_data.get(self)?)?, size };
        }
        match func_ty.kind() {
            TyKind::FnDef(def, subst) => {
                self.exec_fn_def(def.0, subst, destination, &args[1..], locals, target_bb, span)
            }
            TyKind::FnPtr(..) => {
                self.exec_fn_pointer(func_data, destination, &args[1..], locals, target_bb, span)
            }
            TyKind::Closure(closure, subst) => self.exec_closure(
                closure.0,
                func_data,
                subst.split_closure_args_untupled().parent_args,
                destination,
                &args[1..],
                locals,
                span,
            ),
            _ => {
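                // Fall back to an ordinary call for types with a manual `Fn*`
                // implementation, packing the remaining arguments into a tuple as the
                // second argument.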
                let arg0 = func;
                let args = &args[1..];
                let arg1 = {
                    let ty = Ty::new_tup_from_iter(self.interner(), args.iter().map(|it| it.ty));
                    let layout = self.layout(ty)?;
                    let result = self.construct_with_layout(
                        layout.size.bytes_usize(),
                        &layout,
                        None,
                        args.iter().map(|it| IntervalOrOwned::Borrowed(it.interval)),
                    )?;
                    let size = layout.size.bytes_usize();
                    let addr = self.heap_allocate(size, layout.align.bytes() as usize)?;
                    self.write_memory(addr, &result)?;
                    IntervalAndTy { interval: Interval { addr, size }, ty }
                };
                self.exec_fn_with_args(
                    def,
                    &[arg0.clone(), arg1],
                    generic_args,
                    locals,
                    destination,
                    target_bb,
                    span,
                )
            }
        }
    }

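    /// Returns the address of the pointer slot created for static `st`, evaluating and
    /// allocating the static on first use; extern statics just get zeroed storage.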
    fn eval_static(&mut self, st: StaticId, locals: &Locals<'db>) -> Result<'db, Address> {
        if let Some(o) = self.static_locations.get(&st) {
            return Ok(*o);
        };
        let static_data = self.db.static_signature(st);
        let result = if !static_data.flags.contains(StaticFlags::EXTERN) {
            let konst = self.db.const_eval_static(st).map_err(|e| {
                MirEvalError::ConstEvalError(static_data.name.as_str().to_owned(), Box::new(e))
            })?;
            self.allocate_const_in_heap(locals, konst)?
        } else {
            let ty =
                InferenceResult::for_body(self.db, st.into())[self.db.body(st.into()).body_expr];
            let Some((size, align)) = self.size_align_of(ty, locals)? else {
                not_supported!("unsized extern static");
            };
            let addr = self.heap_allocate(size, align)?;
            Interval::new(addr, size)
        };
        let addr = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
        self.write_memory(addr, &result.addr.to_bytes())?;
        self.static_locations.insert(st, addr);
        Ok(addr)
    }

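    /// Evaluates the discriminant of `variant`, attaching the variant's qualified name
    /// to any const-eval error for better diagnostics.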
    fn const_eval_discriminant(&self, variant: EnumVariantId) -> Result<'db, i128> {
        let r = self.db.const_eval_discriminant(variant);
        match r {
            Ok(r) => Ok(r),
            Err(e) => {
                let db = self.db;
                let loc = variant.lookup(db);
                let edition = self.crate_id.data(self.db).edition;
                let name = format!(
                    "{}::{}",
                    self.db.enum_signature(loc.parent).name.display(db, edition),
                    loc.parent
                        .enum_variants(self.db)
                        .variant_name_by_id(variant)
                        .unwrap()
                        .display(db, edition),
                );
                Err(MirEvalError::ConstEvalError(name, Box::new(e)))
            }
        }
    }

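    /// Runs drop glue for `place` if its drop flag is still set (clearing the flag), and
    /// does nothing otherwise.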
    fn drop_place(
        &mut self,
        place: &Place<'db>,
        locals: &mut Locals<'db>,
        span: MirSpan,
    ) -> Result<'db, ()> {
        let (addr, ty, metadata) = self.place_addr_and_ty_and_metadata(place, locals)?;
        if !locals.drop_flags.remove_place(place, &locals.body.projection_store) {
            return Ok(());
        }
        let metadata = match metadata {
            Some(it) => it.get(self)?.to_vec(),
            None => vec![],
        };
        self.run_drop_glue_deep(ty, locals, addr, &metadata, span)
    }

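    /// Invokes `Drop::drop` for `ty` when an impl exists, then recurses into the fields
    /// of struct values (skipping `ManuallyDrop`); enum and union fields are currently
    /// left alone.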
    fn run_drop_glue_deep(
        &mut self,
        ty: Ty<'db>,
        locals: &Locals<'db>,
        addr: Address,
        _metadata: &[u8],
        span: MirSpan,
    ) -> Result<'db, ()> {
        let Some(drop_fn) = (|| {
            let drop_trait = self.lang_items().Drop?;
            drop_trait.trait_items(self.db).method_by_name(&Name::new_symbol_root(sym::drop))
        })() else {
            return Ok(());
        };

        let generic_args = GenericArgs::new_from_iter(self.interner(), [ty.into()]);
        if let Ok(MirOrDynIndex::Mir(body)) =
            self.get_mir_or_dyn_index(drop_fn, generic_args, locals, span)
        {
            self.exec_looked_up_function(
                body,
                locals,
                drop_fn,
                iter::once(IntervalOrOwned::Owned(addr.to_bytes().to_vec())),
                span,
                Interval { addr: Address::Invalid(0), size: 0 },
                None,
            )?;
        }
        match ty.kind() {
            TyKind::Adt(adt_def, subst) => {
                let id = adt_def.def_id().0;
                match id {
                    AdtId::StructId(s) => {
                        let data = self.db.struct_signature(s);
                        if data.flags.contains(StructFlags::IS_MANUALLY_DROP) {
                            return Ok(());
                        }
                        let layout = self.layout_adt(id, subst)?;
                        let variant_fields = s.fields(self.db);
                        match variant_fields.shape {
                            FieldsShape::Record | FieldsShape::Tuple => {
                                let field_types = self.db.field_types(s.into());
                                for (field, _) in variant_fields.fields().iter() {
                                    let offset = layout
                                        .fields
                                        .offset(u32::from(field.into_raw()) as usize)
                                        .bytes_usize();
                                    let addr = addr.offset(offset);
                                    let ty = field_types[field].instantiate(self.interner(), subst);
                                    self.run_drop_glue_deep(ty, locals, addr, &[], span)?;
                                }
                            }
                            FieldsShape::Unit => (),
                        }
                    }
                    AdtId::UnionId(_) => (),
                    AdtId::EnumId(_) => (),
                }
            }
            TyKind::Bool
            | TyKind::Char
            | TyKind::Int(_)
            | TyKind::Uint(_)
            | TyKind::Float(_)
            | TyKind::Tuple(_)
            | TyKind::Array(_, _)
            | TyKind::Slice(_)
            | TyKind::RawPtr(_, _)
            | TyKind::Ref(_, _, _)
            | TyKind::Alias(..)
            | TyKind::FnDef(_, _)
            | TyKind::Str
            | TyKind::Never
            | TyKind::Closure(_, _)
            | TyKind::Coroutine(_, _)
            | TyKind::CoroutineClosure(..)
            | TyKind::CoroutineWitness(_, _)
            | TyKind::Foreign(_)
            | TyKind::Error(_)
            | TyKind::Param(_)
            | TyKind::Placeholder(_)
            | TyKind::Dynamic(..)
            | TyKind::FnPtr(..)
            | TyKind::Bound(..)
            | TyKind::Infer(..)
            | TyKind::Pat(..)
            | TyKind::UnsafeBinder(..) => (),
        };
        Ok(())
    }

    fn write_to_stdout(&mut self, interval: Interval) -> Result<'db, ()> {
        self.stdout.extend(interval.get(self)?.to_vec());
        Ok(())
    }

    fn write_to_stderr(&mut self, interval: Interval) -> Result<'db, ()> {
        self.stderr.extend(interval.get(self)?.to_vec());
        Ok(())
    }
}

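/// Renders a constant as text by evaluating, inside the interpreter, a call to
/// `std::fmt::format` on a hand-assembled `core::fmt::Arguments` that routes through the
/// constant's `Debug::fmt` implementation.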
pub fn render_const_using_debug_impl<'db>(
    db: &'db dyn HirDatabase,
    owner: DefWithBodyId,
    c: Const<'db>,
    ty: Ty<'db>,
) -> Result<'db, String> {
    let mut evaluator = Evaluator::new(db, owner, false, None)?;
    let locals = &Locals {
        ptr: ArenaMap::new(),
        body: db
            .mir_body(owner)
            .map_err(|_| MirEvalError::NotSupported("unreachable".to_owned()))?,
        drop_flags: DropFlags::default(),
    };
    let data = evaluator.allocate_const_in_heap(locals, c)?;
    let resolver = owner.resolver(db);
    let Some(TypeNs::TraitId(debug_trait)) = resolver.resolve_path_in_type_ns_fully(
        db,
        &hir_def::expr_store::path::Path::from_known_path_with_no_generic(path![core::fmt::Debug]),
    ) else {
        not_supported!("core::fmt::Debug not found");
    };
    let Some(debug_fmt_fn) =
        debug_trait.trait_items(db).method_by_name(&Name::new_symbol_root(sym::fmt))
    else {
        not_supported!("core::fmt::Debug::fmt not found");
    };
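    // Hand-assemble the `fmt::Arguments` input (this assumes its standard three-slice
    // layout): `a1` holds a single empty string piece, `a2` holds one formatting
    // argument (a pointer to the value plus the vtable id of its `Debug::fmt`), and
    // `a3` is the six-pointer-sized `Arguments` value wiring the two together.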
    let a1 = evaluator.heap_allocate(evaluator.ptr_size() * 2, evaluator.ptr_size())?;
    let a2 = evaluator.heap_allocate(evaluator.ptr_size() * 2, evaluator.ptr_size())?;
    evaluator.write_memory(a2, &data.addr.to_bytes())?;
    let debug_fmt_fn_ptr = evaluator.vtable_map.id(Ty::new_fn_def(
        evaluator.interner(),
        CallableDefId::FunctionId(debug_fmt_fn).into(),
        GenericArgs::new_from_iter(evaluator.interner(), [ty.into()]),
    ));
    evaluator.write_memory(a2.offset(evaluator.ptr_size()), &debug_fmt_fn_ptr.to_le_bytes())?;
    let a3 = evaluator.heap_allocate(evaluator.ptr_size() * 6, evaluator.ptr_size())?;
    evaluator.write_memory(a3, &a1.to_bytes())?;
    evaluator.write_memory(a3.offset(evaluator.ptr_size()), &[1])?;
    evaluator.write_memory(a3.offset(2 * evaluator.ptr_size()), &a2.to_bytes())?;
    evaluator.write_memory(a3.offset(3 * evaluator.ptr_size()), &[1])?;
    let Some(ValueNs::FunctionId(format_fn)) = resolver.resolve_path_in_value_ns_fully(
        db,
        &hir_def::expr_store::path::Path::from_known_path_with_no_generic(path![std::fmt::format]),
        HygieneId::ROOT,
    ) else {
        not_supported!("std::fmt::format not found");
    };
    let interval = evaluator.interpret_mir(
        db.mir_body(format_fn.into()).map_err(|e| MirEvalError::MirLowerError(format_fn, e))?,
        [IntervalOrOwned::Borrowed(Interval { addr: a3, size: evaluator.ptr_size() * 6 })]
            .into_iter(),
    )?;
    let message_string = interval.get(&evaluator)?;
    let addr =
        Address::from_bytes(&message_string[evaluator.ptr_size()..2 * evaluator.ptr_size()])?;
    let size = from_bytes!(usize, message_string[2 * evaluator.ptr_size()..]);
    Ok(std::string::String::from_utf8_lossy(evaluator.read_memory(addr, size)?).into_owned())
}

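/// Zero- or sign-extends a little-endian integer to 16 bytes. For example,
/// `pad16(&[0xFF], true)` yields the bytes of `-1_i128`, while `pad16(&[0xFF], false)`
/// yields the bytes of `255_u128`.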
pub fn pad16(it: &[u8], is_signed: bool) -> [u8; 16] {
    let is_negative = is_signed && it.last().unwrap_or(&0) > &127;
    let mut res = [if is_negative { 255 } else { 0 }; 16];
    res[..it.len()].copy_from_slice(it);
    res
}

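/// Invokes `$call_macro` with `$args` followed by the list of all `IntValue` variants.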
macro_rules! for_each_int_type {
    ($call_macro:path, $args:tt) => {
        $call_macro! {
            $args
            I8
            U8
            I16
            U16
            I32
            U32
            I64
            U64
            I128
            U128
        }
    };
}

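/// An integer of any primitive width, tagged with its type, so that arithmetic on raw
/// bytes is performed with the correct size and signedness.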
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum IntValue {
    I8(i8),
    U8(u8),
    I16(i16),
    U16(u16),
    I32(i32),
    U32(u32),
    I64(i64),
    U64(u64),
    I128(i128),
    U128(u128),
}

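// Each of the macros below expands, via `for_each_int_type!`, into a method that
// matches on the variant(s) and applies the corresponding primitive operation.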
macro_rules! checked_int_op {
    ( [ $op:ident ] $( $int_ty:ident )+ ) => {
        fn $op(self, other: Self) -> Option<Self> {
            match (self, other) {
                $( (Self::$int_ty(a), Self::$int_ty(b)) => a.$op(b).map(Self::$int_ty), )+
                _ => panic!("incompatible integer types"),
            }
        }
    };
}

macro_rules! int_bit_shifts {
    ( [ $op:ident ] $( $int_ty:ident )+ ) => {
        fn $op(self, amount: u32) -> Option<Self> {
            match self {
                $( Self::$int_ty(this) => this.$op(amount).map(Self::$int_ty), )+
            }
        }
    };
}

macro_rules! unchecked_int_op {
    ( [ $name:ident, $op:tt ] $( $int_ty:ident )+ ) => {
        fn $name(self, other: Self) -> Self {
            match (self, other) {
                $( (Self::$int_ty(a), Self::$int_ty(b)) => Self::$int_ty(a $op b), )+
                _ => panic!("incompatible integer types"),
            }
        }
    };
}

impl IntValue {
    fn from_bytes(bytes: &[u8], is_signed: bool) -> Self {
        match (bytes.len(), is_signed) {
            (1, false) => Self::U8(u8::from_le_bytes(bytes.try_into().unwrap())),
            (1, true) => Self::I8(i8::from_le_bytes(bytes.try_into().unwrap())),
            (2, false) => Self::U16(u16::from_le_bytes(bytes.try_into().unwrap())),
            (2, true) => Self::I16(i16::from_le_bytes(bytes.try_into().unwrap())),
            (4, false) => Self::U32(u32::from_le_bytes(bytes.try_into().unwrap())),
            (4, true) => Self::I32(i32::from_le_bytes(bytes.try_into().unwrap())),
            (8, false) => Self::U64(u64::from_le_bytes(bytes.try_into().unwrap())),
            (8, true) => Self::I64(i64::from_le_bytes(bytes.try_into().unwrap())),
            (16, false) => Self::U128(u128::from_le_bytes(bytes.try_into().unwrap())),
            (16, true) => Self::I128(i128::from_le_bytes(bytes.try_into().unwrap())),
            (len, is_signed) => {
                never!("invalid integer size: {len}, signed: {is_signed}");
                Self::I32(0)
            }
        }
    }

    fn to_bytes(self) -> Vec<u8> {
        macro_rules! m {
            ( [] $( $int_ty:ident )+ ) => {
                match self {
                    $( Self::$int_ty(v) => v.to_le_bytes().to_vec() ),+
                }
            };
        }
        for_each_int_type! { m, [] }
    }

    fn as_u32(self) -> Option<u32> {
        macro_rules! m {
            ( [] $( $int_ty:ident )+ ) => {
                match self {
                    $( Self::$int_ty(v) => v.try_into().ok() ),+
                }
            };
        }
        for_each_int_type! { m, [] }
    }

    for_each_int_type!(checked_int_op, [checked_add]);
    for_each_int_type!(checked_int_op, [checked_sub]);
    for_each_int_type!(checked_int_op, [checked_div]);
    for_each_int_type!(checked_int_op, [checked_rem]);
    for_each_int_type!(checked_int_op, [checked_mul]);

    for_each_int_type!(int_bit_shifts, [checked_shl]);
    for_each_int_type!(int_bit_shifts, [checked_shr]);
}

impl std::ops::BitAnd for IntValue {
    type Output = Self;
    for_each_int_type!(unchecked_int_op, [bitand, &]);
}
impl std::ops::BitOr for IntValue {
    type Output = Self;
    for_each_int_type!(unchecked_int_op, [bitor, |]);
}
impl std::ops::BitXor for IntValue {
    type Output = Self;
    for_each_int_type!(unchecked_int_op, [bitxor, ^]);
}