use std::{borrow::Cow, cell::RefCell, fmt::Write, iter, mem, ops::Range};

use base_db::Crate;
use chalk_ir::{Mutability, cast::Cast};
use either::Either;
use hir_def::{
    AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId,
    VariantId,
    builtin_type::BuiltinType,
    expr_store::HygieneId,
    item_tree::FieldsShape,
    lang_item::LangItem,
    layout::{TagEncoding, Variants},
    resolver::{HasResolver, TypeNs, ValueNs},
    signatures::{StaticFlags, StructFlags},
};
use hir_expand::{InFile, mod_path::path, name::Name};
use intern::sym;
use la_arena::ArenaMap;
use rustc_abi::TargetDataLayout;
use rustc_apfloat::{
    Float,
    ieee::{Half as f16, Quad as f128},
};
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike};
use span::FileId;
use stdx::never;
use syntax::{SyntaxNodePtr, TextRange};
use triomphe::Arc;

use crate::{
    AliasTy, CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, Interner,
    MemoryMap, Substitution, ToChalk, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
    consteval::{ConstEvalError, intern_const_scalar, try_const_usize},
    consteval_nextsolver,
    db::{HirDatabase, InternedClosure},
    display::{ClosureStyle, DisplayTarget, HirDisplay},
    infer::PointerCast,
    layout::{Layout, LayoutError, RustcEnumVariantIdx},
    method_resolution::{is_dyn_method, lookup_impl_const},
    next_solver::{
        Ctor, DbInterner, SolverDefId,
        mapping::{ChalkToNextSolver, convert_args_for_result, convert_ty_for_result},
    },
    static_lifetime,
    traits::FnTrait,
    utils::{ClosureSubst, detect_variant_from_bytes},
};

use super::{
    AggregateKind, BasicBlockId, BinOp, CastKind, LocalId, MirBody, MirLowerError, MirSpan,
    Operand, OperandKind, Place, PlaceElem, ProjectionElem, ProjectionStore, Rvalue, StatementKind,
    TerminatorKind, UnOp, return_slot,
};

mod shim;
#[cfg(test)]
mod tests;

macro_rules! from_bytes {
    ($ty:tt, $value:expr) => {
        ($ty::from_le_bytes(match ($value).try_into() {
            Ok(it) => it,
            Err(_) => return Err(MirEvalError::InternalError(stringify!(mismatched size in constructing $ty).into())),
        }))
    };
    ($apfloat:tt, $bits:tt, $value:expr) => {
        $apfloat::from_bits($bits::from_le_bytes(match ($value).try_into() {
            Ok(it) => it,
            Err(_) => return Err(MirEvalError::InternalError(stringify!(mismatched size in constructing $apfloat).into())),
        }).into())
    };
}

macro_rules! not_supported {
    ($it: expr) => {
        return Err(MirEvalError::NotSupported(format!($it)))
    };
}

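/// Interns types so that a plain `usize` id can stand in for code and vtable
/// pointers in interpreter memory; `ty` recovers the type at the use site.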
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct VTableMap<'db> {
    ty_to_id: FxHashMap<crate::next_solver::Ty<'db>, usize>,
    id_to_ty: Vec<crate::next_solver::Ty<'db>>,
}

impl<'db> VTableMap<'db> {
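    // Ids start at `OFFSET` so that small integers read from memory (e.g. 0
    // or 1) can never alias a valid entry; `ty` rejects anything below it.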
    const OFFSET: usize = 1000;

    fn id(&mut self, ty: crate::next_solver::Ty<'db>) -> usize {
        if let Some(it) = self.ty_to_id.get(&ty) {
            return *it;
        }
        let id = self.id_to_ty.len() + VTableMap::OFFSET;
        self.id_to_ty.push(ty);
        self.ty_to_id.insert(ty, id);
        id
    }

    pub(crate) fn ty(&self, id: usize) -> Result<crate::next_solver::Ty<'db>> {
        id.checked_sub(VTableMap::OFFSET)
            .and_then(|id| self.id_to_ty.get(id).copied())
            .ok_or(MirEvalError::InvalidVTableId(id))
    }

    fn ty_of_bytes(&self, bytes: &[u8]) -> Result<crate::next_solver::Ty<'db>> {
        let id = from_bytes!(usize, bytes);
        self.ty(id)
    }

    pub fn shrink_to_fit(&mut self) {
        self.id_to_ty.shrink_to_fit();
        self.ty_to_id.shrink_to_fit();
    }

    fn is_empty(&self) -> bool {
        self.id_to_ty.is_empty() && self.ty_to_id.is_empty()
    }
}

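/// A minimal model of thread-local storage: a flat list of keys, each holding
/// a single `u128`, which is enough for the TLS shims the evaluator provides.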
#[derive(Debug, Default, Clone, PartialEq, Eq)]
struct TlsData {
    keys: Vec<u128>,
}

impl TlsData {
    fn create_key(&mut self) -> usize {
        self.keys.push(0);
        self.keys.len() - 1
    }

    fn get_key(&mut self, key: usize) -> Result<u128> {
        let r = self.keys.get(key).ok_or_else(|| {
            MirEvalError::UndefinedBehavior(format!("Getting invalid tls key {key}"))
        })?;
        Ok(*r)
    }

    fn set_key(&mut self, key: usize, value: u128) -> Result<()> {
        let r = self.keys.get_mut(key).ok_or_else(|| {
            MirEvalError::UndefinedBehavior(format!("Setting invalid tls key {key}"))
        })?;
        *r = value;
        Ok(())
    }
}

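/// One frame of the interpreted call stack: the frame's locals, the caller
/// block to resume at (`None` for the entry frame), the stack pointer to
/// restore, and the call-site span used for error reporting.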
struct StackFrame {
    locals: Locals,
    destination: Option<BasicBlockId>,
    prev_stack_ptr: usize,
    span: (MirSpan, DefWithBodyId),
}

#[derive(Clone)]
enum MirOrDynIndex {
    Mir(Arc<MirBody>),
    Dyn(usize),
}

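/// The MIR interpreter. Memory is modeled as two flat byte vectors (`stack`
/// and `heap`) addressed through `Address`, and execution is bounded by the
/// limits stored below.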
pub struct Evaluator<'a> {
    db: &'a dyn HirDatabase,
    trait_env: Arc<TraitEnvironment>,
    target_data_layout: Arc<TargetDataLayout>,
    stack: Vec<u8>,
    heap: Vec<u8>,
    code_stack: Vec<StackFrame>,
    static_locations: FxHashMap<StaticId, Address>,
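    /// There are no real pointers to code here; function and vtable pointers
    /// are represented by ids interned in this map, and the pointee type is
    /// looked up again when the pointer is used.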
    vtable_map: VTableMap<'a>,
    thread_local_storage: TlsData,
    random_state: oorandom::Rand64,
    stdout: Vec<u8>,
    stderr: Vec<u8>,
    layout_cache: RefCell<FxHashMap<crate::next_solver::Ty<'a>, Arc<Layout>>>,
    projected_ty_cache: RefCell<FxHashMap<(Ty, PlaceElem), Ty>>,
    not_special_fn_cache: RefCell<FxHashSet<FunctionId>>,
    mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, Substitution), MirOrDynIndex>>,
    unused_locals_store: RefCell<FxHashMap<DefWithBodyId, Vec<Locals>>>,
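    /// Values cached once per evaluator: the target pointer size and the
    /// `Fn`/`FnMut`/`FnOnce` call methods.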
    cached_ptr_size: usize,
    cached_fn_trait_func: Option<FunctionId>,
    cached_fn_mut_trait_func: Option<FunctionId>,
    cached_fn_once_trait_func: Option<FunctionId>,
    crate_id: Crate,
    assert_placeholder_ty_is_unused: bool,
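    /// A limit on the number of executed statements, so that non-terminating
    /// programs cannot hang the process.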
    execution_limit: usize,
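    /// A limit on the depth of interpreted calls, independent of the host
    /// stack.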
    stack_depth_limit: usize,
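    /// A limit, in bytes, on the memory the interpreted program may occupy.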
    memory_limit: usize,
}

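/// A tagged interpreter address. `to_usize`/`from_usize` encode the tag by
/// offsetting each region, so e.g. `Stack(8)` round-trips as
/// `STACK_OFFSET + 8`.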
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum Address {
    Stack(usize),
    Heap(usize),
    Invalid(usize),
}

use Address::*;

#[derive(Debug, Clone, Copy)]
struct Interval {
    addr: Address,
    size: usize,
}

#[derive(Debug, Clone)]
struct IntervalAndTy {
    interval: Interval,
    ty: Ty,
}

impl Interval {
    fn new(addr: Address, size: usize) -> Self {
        Self { addr, size }
    }

    fn get<'a, 'db>(&self, memory: &'a Evaluator<'db>) -> Result<&'a [u8]> {
        memory.read_memory(self.addr, self.size)
    }

    fn write_from_bytes(&self, memory: &mut Evaluator<'_>, bytes: &[u8]) -> Result<()> {
        memory.write_memory(self.addr, bytes)
    }

    fn write_from_interval(&self, memory: &mut Evaluator<'_>, interval: Interval) -> Result<()> {
        memory.copy_from_interval(self.addr, interval)
    }

    fn slice(self, range: Range<usize>) -> Interval {
        Interval { addr: self.addr.offset(range.start), size: range.len() }
    }
}

impl IntervalAndTy {
    fn get<'a, 'db>(&self, memory: &'a Evaluator<'db>) -> Result<&'a [u8]> {
        memory.read_memory(self.interval.addr, self.interval.size)
    }

    fn new(
        addr: Address,
        ty: Ty,
        evaluator: &Evaluator<'_>,
        locals: &Locals,
    ) -> Result<IntervalAndTy> {
        let size = evaluator.size_of_sized(&ty, locals, "type of interval")?;
        Ok(IntervalAndTy { interval: Interval { addr, size }, ty })
    }
}

enum IntervalOrOwned {
    Owned(Vec<u8>),
    Borrowed(Interval),
}

impl From<Interval> for IntervalOrOwned {
    fn from(it: Interval) -> IntervalOrOwned {
        IntervalOrOwned::Borrowed(it)
    }
}

impl IntervalOrOwned {
    fn get<'a, 'db>(&'a self, memory: &'a Evaluator<'db>) -> Result<&'a [u8]> {
        Ok(match self {
            IntervalOrOwned::Owned(o) => o,
            IntervalOrOwned::Borrowed(b) => b.get(memory)?,
        })
    }
}

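// Region bases for the numeric address encoding: stack addresses live above
// `STACK_OFFSET` and heap addresses above `HEAP_OFFSET`, so `from_usize` can
// classify any value, and anything below both is `Invalid`.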
#[cfg(target_pointer_width = "64")]
const STACK_OFFSET: usize = 1 << 60;
#[cfg(target_pointer_width = "64")]
const HEAP_OFFSET: usize = 1 << 59;

#[cfg(target_pointer_width = "32")]
const STACK_OFFSET: usize = 1 << 30;
#[cfg(target_pointer_width = "32")]
const HEAP_OFFSET: usize = 1 << 29;

impl Address {
    #[allow(clippy::double_parens)]
    fn from_bytes(it: &[u8]) -> Result<Self> {
        Ok(Address::from_usize(from_bytes!(usize, it)))
    }

    fn from_usize(it: usize) -> Self {
        if it > STACK_OFFSET {
            Stack(it - STACK_OFFSET)
        } else if it > HEAP_OFFSET {
            Heap(it - HEAP_OFFSET)
        } else {
            Invalid(it)
        }
    }

    fn to_bytes(&self) -> [u8; size_of::<usize>()] {
        usize::to_le_bytes(self.to_usize())
    }

    fn to_usize(&self) -> usize {
        match self {
            Stack(it) => *it + STACK_OFFSET,
            Heap(it) => *it + HEAP_OFFSET,
            Invalid(it) => *it,
        }
    }

    fn map(&self, f: impl FnOnce(usize) -> usize) -> Address {
        match self {
            Stack(it) => Stack(f(*it)),
            Heap(it) => Heap(f(*it)),
            Invalid(it) => Invalid(f(*it)),
        }
    }

    fn offset(&self, offset: usize) -> Address {
        self.map(|it| it + offset)
    }
}

#[derive(Clone, PartialEq, Eq)]
pub enum MirEvalError {
    ConstEvalError(String, Box<ConstEvalError>),
    LayoutError(LayoutError, Ty),
    TargetDataLayoutNotAvailable(Arc<str>),
    UndefinedBehavior(String),
    Panic(String),
    MirLowerError(FunctionId, MirLowerError),
    MirLowerErrorForClosure(ClosureId, MirLowerError),
    TypeIsUnsized(Ty, &'static str),
    NotSupported(String),
    InvalidConst(Const),
    InFunction(Box<MirEvalError>, Vec<(Either<FunctionId, ClosureId>, MirSpan, DefWithBodyId)>),
    ExecutionLimitExceeded,
    StackOverflow,
    InvalidVTableId(usize),
    CoerceUnsizedError(Ty),
    InternalError(Box<str>),
}

impl MirEvalError {
    pub fn pretty_print(
        &self,
        f: &mut String,
        db: &dyn HirDatabase,
        span_formatter: impl Fn(FileId, TextRange) -> String,
        display_target: DisplayTarget,
    ) -> std::result::Result<(), std::fmt::Error> {
        writeln!(f, "Mir eval error:")?;
        let mut err = self;
        while let MirEvalError::InFunction(e, stack) = err {
            err = e;
            for (func, span, def) in stack.iter().take(30).rev() {
                match func {
                    Either::Left(func) => {
                        let function_name = db.function_signature(*func);
                        writeln!(
                            f,
                            "In function {} ({:?})",
                            function_name.name.display(db, display_target.edition),
                            func
                        )?;
                    }
                    Either::Right(closure) => {
                        writeln!(f, "In {closure:?}")?;
                    }
                }
                let source_map = db.body_with_source_map(*def).1;
                let span: InFile<SyntaxNodePtr> = match span {
                    MirSpan::ExprId(e) => match source_map.expr_syntax(*e) {
                        Ok(s) => s.map(|it| it.into()),
                        Err(_) => continue,
                    },
                    MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
                        Ok(s) => s.map(|it| it.syntax_node_ptr()),
                        Err(_) => continue,
                    },
                    MirSpan::BindingId(b) => {
                        match source_map
                            .patterns_for_binding(*b)
                            .iter()
                            .find_map(|p| source_map.pat_syntax(*p).ok())
                        {
                            Some(s) => s.map(|it| it.syntax_node_ptr()),
                            None => continue,
                        }
                    }
                    MirSpan::SelfParam => match source_map.self_param_syntax() {
                        Some(s) => s.map(|it| it.syntax_node_ptr()),
                        None => continue,
                    },
                    MirSpan::Unknown => continue,
                };
                let file_id = span.file_id.original_file(db);
                let text_range = span.value.text_range();
                writeln!(f, "{}", span_formatter(file_id.file_id(db), text_range))?;
            }
        }
        match err {
            MirEvalError::InFunction(..) => unreachable!(),
            MirEvalError::LayoutError(err, ty) => {
                write!(
                    f,
                    "Layout for type `{}` is not available due to {err:?}",
                    ty.display(db, display_target).with_closure_style(ClosureStyle::ClosureWithId)
                )?;
            }
            MirEvalError::MirLowerError(func, err) => {
                let function_name = db.function_signature(*func);
                let self_ = match func.lookup(db).container {
                    ItemContainerId::ImplId(impl_id) => Some({
                        let generics = crate::generics::generics(db, impl_id.into());
                        let substs = generics.placeholder_subst(db);
                        db.impl_self_ty(impl_id)
                            .substitute(Interner, &substs)
                            .display(db, display_target)
                            .to_string()
                    }),
                    ItemContainerId::TraitId(it) => Some(
                        db.trait_signature(it).name.display(db, display_target.edition).to_string(),
                    ),
                    _ => None,
                };
                writeln!(
                    f,
                    "MIR lowering for function `{}{}{}` ({:?}) failed due to:",
                    self_.as_deref().unwrap_or_default(),
                    if self_.is_some() { "::" } else { "" },
                    function_name.name.display(db, display_target.edition),
                    func
                )?;
                err.pretty_print(f, db, span_formatter, display_target)?;
            }
            MirEvalError::ConstEvalError(name, err) => {
                MirLowerError::ConstEvalError((**name).into(), err.clone()).pretty_print(
                    f,
                    db,
                    span_formatter,
                    display_target,
                )?;
            }
            MirEvalError::UndefinedBehavior(_)
            | MirEvalError::TargetDataLayoutNotAvailable(_)
            | MirEvalError::Panic(_)
            | MirEvalError::MirLowerErrorForClosure(_, _)
            | MirEvalError::TypeIsUnsized(_, _)
            | MirEvalError::NotSupported(_)
            | MirEvalError::InvalidConst(_)
            | MirEvalError::ExecutionLimitExceeded
            | MirEvalError::StackOverflow
            | MirEvalError::CoerceUnsizedError(_)
            | MirEvalError::InternalError(_)
            | MirEvalError::InvalidVTableId(_) => writeln!(f, "{err:?}")?,
        }
        Ok(())
    }

    pub fn is_panic(&self) -> Option<&str> {
        let mut err = self;
        while let MirEvalError::InFunction(e, _) = err {
            err = e;
        }
        match err {
            MirEvalError::Panic(msg) => Some(msg),
            _ => None,
        }
    }
}

impl std::fmt::Debug for MirEvalError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::ConstEvalError(arg0, arg1) => {
                f.debug_tuple("ConstEvalError").field(arg0).field(arg1).finish()
            }
            Self::LayoutError(arg0, arg1) => {
                f.debug_tuple("LayoutError").field(arg0).field(arg1).finish()
            }
            Self::UndefinedBehavior(arg0) => {
                f.debug_tuple("UndefinedBehavior").field(arg0).finish()
            }
            Self::Panic(msg) => write!(f, "Panic with message:\n{msg:?}"),
            Self::TargetDataLayoutNotAvailable(arg0) => {
                f.debug_tuple("TargetDataLayoutNotAvailable").field(arg0).finish()
            }
            Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. {it} should be sized."),
            Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"),
            Self::StackOverflow => write!(f, "stack overflow"),
            Self::MirLowerError(arg0, arg1) => {
                f.debug_tuple("MirLowerError").field(arg0).field(arg1).finish()
            }
            Self::MirLowerErrorForClosure(arg0, arg1) => {
                f.debug_tuple("MirLowerErrorForClosure").field(arg0).field(arg1).finish()
            }
            Self::CoerceUnsizedError(arg0) => {
                f.debug_tuple("CoerceUnsizedError").field(arg0).finish()
            }
            Self::InternalError(arg0) => f.debug_tuple("InternalError").field(arg0).finish(),
            Self::InvalidVTableId(arg0) => f.debug_tuple("InvalidVTableId").field(arg0).finish(),
            Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(),
            Self::InvalidConst(arg0) => {
                let data = &arg0.data(Interner);
                f.debug_struct("InvalidConst").field("ty", &data.ty).field("value", &arg0).finish()
            }
            Self::InFunction(e, stack) => {
                f.debug_struct("WithStack").field("error", e).field("stack", &stack).finish()
            }
        }
    }
}

type Result<T> = std::result::Result<T, MirEvalError>;

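/// Tracks which places currently hold a value that still needs dropping.
/// Parent and child places are kept disjoint: adding a place evicts any
/// children it covers, and removing a place also checks its parents.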
#[derive(Debug, Default)]
struct DropFlags {
    need_drop: FxHashSet<Place>,
}

impl DropFlags {
    fn add_place(&mut self, p: Place, store: &ProjectionStore) {
        if p.iterate_over_parents(store).any(|it| self.need_drop.contains(&it)) {
            return;
        }
        self.need_drop.retain(|it| !p.is_parent(it, store));
        self.need_drop.insert(p);
    }

    fn remove_place(&mut self, p: &Place, store: &ProjectionStore) -> bool {
        if let Some(parent) =
            p.iterate_over_parents(store).find(|it| self.need_drop.contains(it))
        {
            self.need_drop.remove(&parent);
            return true;
        }
        self.need_drop.remove(p)
    }

    fn clear(&mut self) {
        self.need_drop.clear();
    }
}

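/// Per-frame state: the memory interval of each MIR local, the body being
/// executed, and the frame's drop flags.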
#[derive(Debug)]
struct Locals {
    ptr: ArenaMap<LocalId, Interval>,
    body: Arc<MirBody>,
    drop_flags: DropFlags,
}

pub struct MirOutput {
    stdout: Vec<u8>,
    stderr: Vec<u8>,
}

impl MirOutput {
    pub fn stdout(&self) -> Cow<'_, str> {
        String::from_utf8_lossy(&self.stdout)
    }
    pub fn stderr(&self) -> Cow<'_, str> {
        String::from_utf8_lossy(&self.stderr)
    }
}

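/// Entry point of the evaluator: interprets `body` to completion and returns
/// the result as a `Const`, together with everything the program wrote to
/// stdout and stderr.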
pub fn interpret_mir(
    db: &dyn HirDatabase,
    body: Arc<MirBody>,
    assert_placeholder_ty_is_unused: bool,
    trait_env: Option<Arc<TraitEnvironment>>,
) -> Result<(Result<Const>, MirOutput)> {
    let ty = body.locals[return_slot()].ty.clone();
    let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env)?;
    let it: Result<Const> = (|| {
        if evaluator.ptr_size() != size_of::<usize>() {
            not_supported!("targets with different pointer size from host");
        }
        let interval = evaluator.interpret_mir(body.clone(), None.into_iter())?;
        let bytes = interval.get(&evaluator)?;
        let mut memory_map = evaluator.create_memory_map(
            bytes,
            &ty,
            &Locals { ptr: ArenaMap::new(), body, drop_flags: DropFlags::default() },
        )?;
        let bytes = bytes.into();
        let memory_map = if memory_map.memory.is_empty() && evaluator.vtable_map.is_empty() {
            MemoryMap::Empty
        } else {
            memory_map.vtable = mem::take(&mut evaluator.vtable_map);
            memory_map.vtable.shrink_to_fit();
            MemoryMap::Complex(Box::new(memory_map))
        };
        Ok(intern_const_scalar(
            ConstScalar::Bytes(bytes, unsafe {
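                // Erase the `'db` lifetime of the memory map so it can be
                // stored inside the interned constant.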
                std::mem::transmute::<MemoryMap<'_>, MemoryMap<'static>>(memory_map)
            }),
            ty,
        ))
    })();
    Ok((it, MirOutput { stdout: evaluator.stdout, stderr: evaluator.stderr }))
}

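// Use a much lower limit under `cfg(test)` so that looping programs fail fast.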
#[cfg(test)]
const EXECUTION_LIMIT: usize = 100_000;
#[cfg(not(test))]
const EXECUTION_LIMIT: usize = 10_000_000;

impl<'db> Evaluator<'db> {
    pub fn new(
        db: &dyn HirDatabase,
        owner: DefWithBodyId,
        assert_placeholder_ty_is_unused: bool,
        trait_env: Option<Arc<TraitEnvironment>>,
    ) -> Result<Evaluator<'_>> {
        let crate_id = owner.module(db).krate();
        let target_data_layout = match db.target_data_layout(crate_id) {
            Ok(target_data_layout) => target_data_layout,
            Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)),
        };
        let cached_ptr_size = target_data_layout.pointer_size().bytes_usize();
        Ok(Evaluator {
            target_data_layout,
            stack: vec![0],
            heap: vec![0],
            code_stack: vec![],
            vtable_map: VTableMap::default(),
            thread_local_storage: TlsData::default(),
            static_locations: Default::default(),
            db,
            random_state: oorandom::Rand64::new(0),
            trait_env: trait_env.unwrap_or_else(|| db.trait_environment_for_body(owner)),
            crate_id,
            stdout: vec![],
            stderr: vec![],
            assert_placeholder_ty_is_unused,
            stack_depth_limit: 100,
            execution_limit: EXECUTION_LIMIT,
            memory_limit: 1_000_000_000,
            layout_cache: RefCell::new(Default::default()),
            projected_ty_cache: RefCell::new(Default::default()),
            not_special_fn_cache: RefCell::new(Default::default()),
            mir_or_dyn_index_cache: RefCell::new(Default::default()),
            unused_locals_store: RefCell::new(Default::default()),
            cached_ptr_size,
            cached_fn_trait_func: LangItem::Fn
                .resolve_trait(db, crate_id)
                .and_then(|x| x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call))),
            cached_fn_mut_trait_func: LangItem::FnMut.resolve_trait(db, crate_id).and_then(|x| {
                x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call_mut))
            }),
            cached_fn_once_trait_func: LangItem::FnOnce.resolve_trait(db, crate_id).and_then(|x| {
                x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call_once))
            }),
        })
    }

    fn place_addr(&self, p: &Place, locals: &Locals) -> Result<Address> {
        Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0)
    }

    fn place_interval(&self, p: &Place, locals: &Locals) -> Result<Interval> {
        let place_addr_and_ty = self.place_addr_and_ty_and_metadata(p, locals)?;
        Ok(Interval {
            addr: place_addr_and_ty.0,
            size: self.size_of_sized(
                &place_addr_and_ty.1,
                locals,
                "type of place whose interval we need",
            )?,
        })
    }

    fn ptr_size(&self) -> usize {
        self.cached_ptr_size
    }

    fn projected_ty(&self, ty: Ty, proj: PlaceElem) -> Ty {
        let pair = (ty, proj);
        if let Some(r) = self.projected_ty_cache.borrow().get(&pair) {
            return r.clone();
        }
        let (ty, proj) = pair;
        let r = proj.projected_ty(
            ty.clone(),
            self.db,
            |c, subst, f| {
                let InternedClosure(def, _) = self.db.lookup_intern_closure(c.into());
                let infer = self.db.infer(def);
                let (captures, _) = infer.closure_info(&c);
                let parent_subst = ClosureSubst(subst).parent_subst();
                captures
                    .get(f)
                    .expect("broken closure field")
                    .ty
                    .clone()
                    .substitute(Interner, parent_subst)
            },
            self.crate_id,
        );
        self.projected_ty_cache.borrow_mut().insert((ty, proj), r.clone());
        r
    }

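    /// Resolves a `Place` to its address and type by walking the projection
    /// chain; for unsized places the wide-pointer metadata (slice length or
    /// vtable id) is returned as well.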
    fn place_addr_and_ty_and_metadata<'a>(
        &'a self,
        p: &Place,
        locals: &'a Locals,
    ) -> Result<(Address, Ty, Option<IntervalOrOwned>)> {
        let interner = DbInterner::new_with(self.db, None, None);
        let mut addr = locals.ptr[p.local].addr;
        let mut ty: Ty = locals.body.locals[p.local].ty.clone();
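        // Locals are always sized, so a place starts out with no metadata.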
        let mut metadata: Option<IntervalOrOwned> = None;
        for proj in p.projection.lookup(&locals.body.projection_store) {
            let prev_ty = ty.clone();
            ty = self.projected_ty(ty, proj.clone());
            match proj {
                ProjectionElem::Deref => {
                    metadata = if self.size_align_of(&ty, locals)?.is_none() {
                        Some(
                            Interval { addr: addr.offset(self.ptr_size()), size: self.ptr_size() }
                                .into(),
                        )
                    } else {
                        None
                    };
                    let it = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?);
                    addr = Address::from_usize(it);
                }
                ProjectionElem::Index(op) => {
                    let offset = from_bytes!(
                        usize,
                        self.read_memory(locals.ptr[*op].addr, self.ptr_size())?
                    );
                    metadata = None;
                    let ty_size =
                        self.size_of_sized(&ty, locals, "array inner type should be sized")?;
                    addr = addr.offset(ty_size * offset);
                }
                &ProjectionElem::ConstantIndex { from_end, offset } => {
                    let offset = if from_end {
                        let len = match prev_ty.kind(Interner) {
                            TyKind::Array(_, c) => match try_const_usize(self.db, c) {
                                Some(it) => it as u64,
                                None => {
                                    not_supported!("indexing array with unknown const from end")
                                }
                            },
                            TyKind::Slice(_) => match metadata {
                                Some(it) => from_bytes!(u64, it.get(self)?),
                                None => not_supported!("slice place without metadata"),
                            },
                            _ => not_supported!("bad type for const index"),
                        };
                        (len - offset - 1) as usize
                    } else {
                        offset as usize
                    };
                    metadata = None;
                    let ty_size =
                        self.size_of_sized(&ty, locals, "array inner type should be sized")?;
                    addr = addr.offset(ty_size * offset);
                }
                &ProjectionElem::Subslice { from, to } => {
                    let inner_ty = match &ty.kind(Interner) {
                        TyKind::Array(inner, _) | TyKind::Slice(inner) => inner.clone(),
                        _ => TyKind::Error.intern(Interner),
                    };
                    metadata = match metadata {
                        Some(it) => {
                            let prev_len = from_bytes!(u64, it.get(self)?);
                            Some(IntervalOrOwned::Owned(
                                (prev_len - from - to).to_le_bytes().to_vec(),
                            ))
                        }
                        None => None,
                    };
                    let ty_size =
                        self.size_of_sized(&inner_ty, locals, "array inner type should be sized")?;
                    addr = addr.offset(ty_size * (from as usize));
                }
                &ProjectionElem::ClosureField(f) => {
                    let layout = self.layout(prev_ty.to_nextsolver(interner))?;
                    let offset = layout.fields.offset(f).bytes_usize();
                    addr = addr.offset(offset);
                    metadata = None;
                }
                ProjectionElem::Field(Either::Right(f)) => {
                    let layout = self.layout(prev_ty.to_nextsolver(interner))?;
                    let offset = layout.fields.offset(f.index as usize).bytes_usize();
                    addr = addr.offset(offset);
                    metadata = None;
                }
                ProjectionElem::Field(Either::Left(f)) => {
                    let layout = self.layout(prev_ty.to_nextsolver(interner))?;
                    let variant_layout = match &layout.variants {
                        Variants::Single { .. } | Variants::Empty => &layout,
                        Variants::Multiple { variants, .. } => {
                            &variants[match f.parent {
                                hir_def::VariantId::EnumVariantId(it) => {
                                    RustcEnumVariantIdx(it.lookup(self.db).index as usize)
                                }
                                _ => {
                                    return Err(MirEvalError::InternalError(
                                        "mismatched layout".into(),
                                    ));
                                }
                            }]
                        }
                    };
                    let offset = variant_layout
                        .fields
                        .offset(u32::from(f.local_id.into_raw()) as usize)
                        .bytes_usize();
                    addr = addr.offset(offset);
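                    // Keep the metadata only if the projected field is itself
                    // unsized.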
                    if self.size_align_of(&ty, locals)?.is_some() {
                        metadata = None;
                    }
                }
                ProjectionElem::OpaqueCast(_) => not_supported!("opaque cast"),
            }
        }
        Ok((addr, ty, metadata))
    }

    fn layout(&self, ty: crate::next_solver::Ty<'db>) -> Result<Arc<Layout>> {
        if let Some(x) = self.layout_cache.borrow().get(&ty) {
            return Ok(x.clone());
        }
        let interner = DbInterner::new_with(self.db, None, None);
        let r = self
            .db
            .layout_of_ty(ty, self.trait_env.clone())
            .map_err(|e| MirEvalError::LayoutError(e, convert_ty_for_result(interner, ty)))?;
        self.layout_cache.borrow_mut().insert(ty, r.clone());
        Ok(r)
    }

    fn layout_adt(&self, adt: AdtId, subst: Substitution) -> Result<Arc<Layout>> {
        let interner = DbInterner::new_with(self.db, None, None);
        self.layout(crate::next_solver::Ty::new(
            interner,
            rustc_type_ir::TyKind::Adt(
                crate::next_solver::AdtDef::new(adt, interner),
                subst.to_nextsolver(interner),
            ),
        ))
    }

    fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals) -> Result<Ty> {
        Ok(self.place_addr_and_ty_and_metadata(p, locals)?.1)
    }

    fn operand_ty(&self, o: &Operand, locals: &Locals) -> Result<Ty> {
        Ok(match &o.kind {
            OperandKind::Copy(p) | OperandKind::Move(p) => self.place_ty(p, locals)?,
            OperandKind::Constant(c) => c.data(Interner).ty.clone(),
            &OperandKind::Static(s) => {
                let ty = self.db.infer(s.into())[self.db.body(s.into()).body_expr].clone();
                TyKind::Ref(Mutability::Not, static_lifetime(), ty).intern(Interner)
            }
        })
    }

    fn operand_ty_and_eval(&mut self, o: &Operand, locals: &mut Locals) -> Result<IntervalAndTy> {
        Ok(IntervalAndTy {
            interval: self.eval_operand(o, locals)?,
            ty: self.operand_ty(o, locals)?,
        })
    }

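    /// The interpreter main loop. Call frames live on the explicit
    /// `code_stack`, so deep interpreted recursion cannot overflow the host
    /// stack.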
    fn interpret_mir(
        &mut self,
        body: Arc<MirBody>,
        args: impl Iterator<Item = IntervalOrOwned>,
    ) -> Result<Interval> {
        if let Some(it) = self.stack_depth_limit.checked_sub(1) {
            self.stack_depth_limit = it;
        } else {
            return Err(MirEvalError::StackOverflow);
        }
        let mut current_block_idx = body.start_block;
        let (mut locals, prev_stack_ptr) = self.create_locals_for_body(&body, None)?;
        self.fill_locals_for_body(&body, &mut locals, args)?;
        let prev_code_stack = mem::take(&mut self.code_stack);
        let span = (MirSpan::Unknown, body.owner);
        self.code_stack.push(StackFrame { locals, destination: None, prev_stack_ptr, span });
        'stack: loop {
            let Some(mut my_stack_frame) = self.code_stack.pop() else {
                not_supported!("missing stack frame");
            };
            let e = (|| {
                let locals = &mut my_stack_frame.locals;
                let body = locals.body.clone();
                loop {
                    let current_block = &body.basic_blocks[current_block_idx];
                    if let Some(it) = self.execution_limit.checked_sub(1) {
                        self.execution_limit = it;
                    } else {
                        return Err(MirEvalError::ExecutionLimitExceeded);
                    }
                    for statement in &current_block.statements {
                        match &statement.kind {
                            StatementKind::Assign(l, r) => {
                                let addr = self.place_addr(l, locals)?;
                                let result = self.eval_rvalue(r, locals)?;
                                self.copy_from_interval_or_owned(addr, result)?;
                                locals.drop_flags.add_place(*l, &locals.body.projection_store);
                            }
                            StatementKind::Deinit(_) => not_supported!("de-init statement"),
                            StatementKind::StorageLive(_)
                            | StatementKind::FakeRead(_)
                            | StatementKind::StorageDead(_)
                            | StatementKind::Nop => (),
                        }
                    }
                    let Some(terminator) = current_block.terminator.as_ref() else {
                        not_supported!("block without terminator");
                    };
                    match &terminator.kind {
                        TerminatorKind::Goto { target } => {
                            current_block_idx = *target;
                        }
                        TerminatorKind::Call {
                            func,
                            args,
                            destination,
                            target,
                            cleanup: _,
                            from_hir_call: _,
                        } => {
                            let destination_interval = self.place_interval(destination, locals)?;
                            let fn_ty = self.operand_ty(func, locals)?;
                            let args = args
                                .iter()
                                .map(|it| self.operand_ty_and_eval(it, locals))
                                .collect::<Result<Vec<_>>>()?;
                            let stack_frame = match &fn_ty.kind(Interner) {
                                TyKind::Function(_) => {
                                    let bytes = self.eval_operand(func, locals)?;
                                    self.exec_fn_pointer(
                                        bytes,
                                        destination_interval,
                                        &args,
                                        locals,
                                        *target,
                                        terminator.span,
                                    )?
                                }
                                TyKind::FnDef(def, generic_args) => self.exec_fn_def(
                                    CallableDefId::from_chalk(self.db, *def),
                                    generic_args,
                                    destination_interval,
                                    &args,
                                    locals,
                                    *target,
                                    terminator.span,
                                )?,
                                it => not_supported!("unknown function type {it:?}"),
                            };
                            locals
                                .drop_flags
                                .add_place(*destination, &locals.body.projection_store);
                            if let Some(stack_frame) = stack_frame {
                                self.code_stack.push(my_stack_frame);
                                current_block_idx = stack_frame.locals.body.start_block;
                                self.code_stack.push(stack_frame);
                                return Ok(None);
                            } else {
                                current_block_idx =
                                    target.ok_or(MirEvalError::UndefinedBehavior(
                                        "Diverging function returned".to_owned(),
                                    ))?;
                            }
                        }
                        TerminatorKind::SwitchInt { discr, targets } => {
                            let val = u128::from_le_bytes(pad16(
                                self.eval_operand(discr, locals)?.get(self)?,
                                false,
                            ));
                            current_block_idx = targets.target_for_value(val);
                        }
                        TerminatorKind::Return => {
                            break;
                        }
                        TerminatorKind::Unreachable => {
                            return Err(MirEvalError::UndefinedBehavior(
                                "unreachable executed".to_owned(),
                            ));
                        }
                        TerminatorKind::Drop { place, target, unwind: _ } => {
                            self.drop_place(place, locals, terminator.span)?;
                            current_block_idx = *target;
                        }
                        _ => not_supported!("unknown terminator"),
                    }
                }
                Ok(Some(my_stack_frame))
            })();
            let my_stack_frame = match e {
                Ok(None) => continue 'stack,
                Ok(Some(x)) => x,
                Err(e) => {
                    let my_code_stack = mem::replace(&mut self.code_stack, prev_code_stack);
                    let mut error_stack = vec![];
                    for frame in my_code_stack.into_iter().rev() {
                        if let DefWithBodyId::FunctionId(f) = frame.locals.body.owner {
                            error_stack.push((Either::Left(f), frame.span.0, frame.span.1));
                        }
                    }
                    return Err(MirEvalError::InFunction(Box::new(e), error_stack));
                }
            };
            let return_interval = my_stack_frame.locals.ptr[return_slot()];
            self.unused_locals_store
                .borrow_mut()
                .entry(my_stack_frame.locals.body.owner)
                .or_default()
                .push(my_stack_frame.locals);
            match my_stack_frame.destination {
                None => {
                    self.code_stack = prev_code_stack;
                    self.stack_depth_limit += 1;
                    return Ok(return_interval);
                }
                Some(bb) => {
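                    // The interpreter stack is deliberately not shrunk on
                    // return; `prev_stack_ptr` is kept but unused for now.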
                    let _ = my_stack_frame.prev_stack_ptr;
                    current_block_idx = bb;
                }
            }
        }
    }

    fn fill_locals_for_body(
        &mut self,
        body: &MirBody,
        locals: &mut Locals,
        args: impl Iterator<Item = IntervalOrOwned>,
    ) -> Result<()> {
        let mut remain_args = body.param_locals.len();
        for ((l, interval), value) in locals.ptr.iter().skip(1).zip(args) {
            locals.drop_flags.add_place(l.into(), &locals.body.projection_store);
            match value {
                IntervalOrOwned::Owned(value) => interval.write_from_bytes(self, &value)?,
                IntervalOrOwned::Borrowed(value) => interval.write_from_interval(self, value)?,
            }
            if remain_args == 0 {
                return Err(MirEvalError::InternalError("too many arguments".into()));
            }
            remain_args -= 1;
        }
        if remain_args > 0 {
            return Err(MirEvalError::InternalError("too few arguments".into()));
        }
        Ok(())
    }

    fn create_locals_for_body(
        &mut self,
        body: &Arc<MirBody>,
        destination: Option<Interval>,
    ) -> Result<(Locals, usize)> {
        let mut locals =
            match self.unused_locals_store.borrow_mut().entry(body.owner).or_default().pop() {
                None => Locals {
                    ptr: ArenaMap::new(),
                    body: body.clone(),
                    drop_flags: DropFlags::default(),
                },
                Some(mut l) => {
                    l.drop_flags.clear();
                    l.body = body.clone();
                    l
                }
            };
        let stack_size = {
            let mut stack_ptr = self.stack.len();
            for (id, it) in body.locals.iter() {
                if id == return_slot()
                    && let Some(destination) = destination
                {
                    locals.ptr.insert(id, destination);
                    continue;
                }
                let (size, align) = self.size_align_of_sized(
                    &it.ty,
                    &locals,
                    "no unsized local in extending stack",
                )?;
                while !stack_ptr.is_multiple_of(align) {
                    stack_ptr += 1;
                }
                let my_ptr = stack_ptr;
                stack_ptr += size;
                locals.ptr.insert(id, Interval { addr: Stack(my_ptr), size });
            }
            stack_ptr - self.stack.len()
        };
        let prev_stack_pointer = self.stack.len();
        if stack_size > self.memory_limit {
            return Err(MirEvalError::Panic(format!(
                "Stack overflow. Tried to grow stack to {stack_size} bytes"
            )));
        }
        self.stack.extend(std::iter::repeat_n(0, stack_size));
        Ok((locals, prev_stack_pointer))
    }

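    /// Evaluates an rvalue, either borrowing an interval of existing memory
    /// or producing freshly computed bytes.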
    fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals) -> Result<IntervalOrOwned> {
        let interner = DbInterner::new_with(self.db, None, None);
        use IntervalOrOwned::*;
        Ok(match r {
            Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?),
            Rvalue::Ref(_, p) => {
                let (addr, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?;
                let mut r = addr.to_bytes().to_vec();
                if let Some(metadata) = metadata {
                    r.extend(metadata.get(self)?);
                }
                Owned(r)
            }
            Rvalue::Len(p) => {
                let (_, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?;
                match metadata {
                    Some(m) => m,
                    None => {
                        return Err(MirEvalError::InternalError(
                            "type without metadata is used for Rvalue::Len".into(),
                        ));
                    }
                }
            }
            Rvalue::UnaryOp(op, val) => {
                let mut c = self.eval_operand(val, locals)?.get(self)?;
                let mut ty = self.operand_ty(val, locals)?;
                while let TyKind::Ref(_, _, z) = ty.kind(Interner) {
                    ty = z.clone();
                    let size = self.size_of_sized(&ty, locals, "operand of unary op")?;
                    c = self.read_memory(Address::from_bytes(c)?, size)?;
                }
                if let TyKind::Scalar(chalk_ir::Scalar::Float(f)) = ty.kind(Interner) {
                    match f {
                        chalk_ir::FloatTy::F16 => {
                            let c = -from_bytes!(f16, u16, c);
                            Owned(u16::try_from(c.to_bits()).unwrap().to_le_bytes().into())
                        }
                        chalk_ir::FloatTy::F32 => {
                            let c = -from_bytes!(f32, c);
                            Owned(c.to_le_bytes().into())
                        }
                        chalk_ir::FloatTy::F64 => {
                            let c = -from_bytes!(f64, c);
                            Owned(c.to_le_bytes().into())
                        }
                        chalk_ir::FloatTy::F128 => {
                            let c = -from_bytes!(f128, u128, c);
                            Owned(c.to_bits().to_le_bytes().into())
                        }
                    }
                } else {
                    let mut c = c.to_vec();
                    if ty.as_builtin() == Some(BuiltinType::Bool) {
                        c[0] = 1 - c[0];
                    } else {
                        match op {
                            UnOp::Not => c.iter_mut().for_each(|it| *it = !*it),
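                            // Two's complement negation: invert every byte,
                            // then add one, propagating the carry
                            // little-endian.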
                            UnOp::Neg => {
                                c.iter_mut().for_each(|it| *it = !*it);
                                for k in c.iter_mut() {
                                    let o;
                                    (*k, o) = k.overflowing_add(1);
                                    if !o {
                                        break;
                                    }
                                }
                            }
                        }
                    }
                    Owned(c)
                }
            }
            Rvalue::CheckedBinaryOp(op, lhs, rhs) => 'binary_op: {
                let lc = self.eval_operand(lhs, locals)?;
                let rc = self.eval_operand(rhs, locals)?;
                let mut lc = lc.get(self)?;
                let mut rc = rc.get(self)?;
                let mut ty = self.operand_ty(lhs, locals)?;
                while let TyKind::Ref(_, _, z) = ty.kind(Interner) {
                    ty = z.clone();
                    let size = if ty.is_str() {
                        if *op != BinOp::Eq {
                            never!("Only eq is builtin for `str`");
                        }
                        let ls = from_bytes!(usize, &lc[self.ptr_size()..self.ptr_size() * 2]);
                        let rs = from_bytes!(usize, &rc[self.ptr_size()..self.ptr_size() * 2]);
                        if ls != rs {
                            break 'binary_op Owned(vec![0]);
                        }
                        lc = &lc[..self.ptr_size()];
                        rc = &rc[..self.ptr_size()];
                        lc = self.read_memory(Address::from_bytes(lc)?, ls)?;
                        rc = self.read_memory(Address::from_bytes(rc)?, ls)?;
                        break 'binary_op Owned(vec![u8::from(lc == rc)]);
                    } else {
                        self.size_of_sized(&ty, locals, "operand of binary op")?
                    };
                    lc = self.read_memory(Address::from_bytes(lc)?, size)?;
                    rc = self.read_memory(Address::from_bytes(rc)?, size)?;
                }
                if let TyKind::Scalar(chalk_ir::Scalar::Float(f)) = ty.kind(Interner) {
                    match f {
                        chalk_ir::FloatTy::F16 => {
                            let l = from_bytes!(f16, u16, lc);
                            let r = from_bytes!(f16, u16, rc);
                            match op {
                                BinOp::Ge
                                | BinOp::Gt
                                | BinOp::Le
                                | BinOp::Lt
                                | BinOp::Eq
                                | BinOp::Ne => {
                                    let r = op.run_compare(l, r) as u8;
                                    Owned(vec![r])
                                }
                                BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {
                                    let r = match op {
                                        BinOp::Add => l + r,
                                        BinOp::Sub => l - r,
                                        BinOp::Mul => l * r,
                                        BinOp::Div => l / r,
                                        _ => unreachable!(),
                                    };
                                    Owned(
                                        u16::try_from(r.value.to_bits())
                                            .unwrap()
                                            .to_le_bytes()
                                            .into(),
                                    )
                                }
                                it => not_supported!(
                                    "invalid binop {it:?} on floating point operators"
                                ),
                            }
                        }
                        chalk_ir::FloatTy::F32 => {
                            let l = from_bytes!(f32, lc);
                            let r = from_bytes!(f32, rc);
                            match op {
                                BinOp::Ge
                                | BinOp::Gt
                                | BinOp::Le
                                | BinOp::Lt
                                | BinOp::Eq
                                | BinOp::Ne => {
                                    let r = op.run_compare(l, r) as u8;
                                    Owned(vec![r])
                                }
                                BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {
                                    let r = match op {
                                        BinOp::Add => l + r,
                                        BinOp::Sub => l - r,
                                        BinOp::Mul => l * r,
                                        BinOp::Div => l / r,
                                        _ => unreachable!(),
                                    };
                                    Owned(r.to_le_bytes().into())
                                }
                                it => not_supported!(
                                    "invalid binop {it:?} on floating point operators"
                                ),
                            }
                        }
                        chalk_ir::FloatTy::F64 => {
                            let l = from_bytes!(f64, lc);
                            let r = from_bytes!(f64, rc);
                            match op {
                                BinOp::Ge
                                | BinOp::Gt
                                | BinOp::Le
                                | BinOp::Lt
                                | BinOp::Eq
                                | BinOp::Ne => {
                                    let r = op.run_compare(l, r) as u8;
                                    Owned(vec![r])
                                }
                                BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {
                                    let r = match op {
                                        BinOp::Add => l + r,
                                        BinOp::Sub => l - r,
                                        BinOp::Mul => l * r,
                                        BinOp::Div => l / r,
                                        _ => unreachable!(),
                                    };
                                    Owned(r.to_le_bytes().into())
                                }
                                it => not_supported!(
                                    "invalid binop {it:?} on floating point operators"
                                ),
                            }
                        }
                        chalk_ir::FloatTy::F128 => {
                            let l = from_bytes!(f128, u128, lc);
                            let r = from_bytes!(f128, u128, rc);
                            match op {
                                BinOp::Ge
                                | BinOp::Gt
                                | BinOp::Le
                                | BinOp::Lt
                                | BinOp::Eq
                                | BinOp::Ne => {
                                    let r = op.run_compare(l, r) as u8;
                                    Owned(vec![r])
                                }
                                BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {
                                    let r = match op {
                                        BinOp::Add => l + r,
                                        BinOp::Sub => l - r,
                                        BinOp::Mul => l * r,
                                        BinOp::Div => l / r,
                                        _ => unreachable!(),
                                    };
                                    Owned(r.value.to_bits().to_le_bytes().into())
                                }
                                it => not_supported!(
                                    "invalid binop {it:?} on floating point operators"
                                ),
                            }
                        }
                    }
                } else {
                    let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_)));
                    let l128 = IntValue::from_bytes(lc, is_signed);
                    let r128 = IntValue::from_bytes(rc, is_signed);
                    match op {
                        BinOp::Ge | BinOp::Gt | BinOp::Le | BinOp::Lt | BinOp::Eq | BinOp::Ne => {
                            let r = op.run_compare(l128, r128) as u8;
                            Owned(vec![r])
                        }
                        BinOp::BitAnd
                        | BinOp::BitOr
                        | BinOp::BitXor
                        | BinOp::Add
                        | BinOp::Mul
                        | BinOp::Div
                        | BinOp::Rem
                        | BinOp::Sub => {
                            let r = match op {
                                BinOp::Add => l128.checked_add(r128).ok_or_else(|| {
                                    MirEvalError::Panic(format!("Overflow in {op:?}"))
                                })?,
                                BinOp::Mul => l128.checked_mul(r128).ok_or_else(|| {
                                    MirEvalError::Panic(format!("Overflow in {op:?}"))
                                })?,
                                BinOp::Div => l128.checked_div(r128).ok_or_else(|| {
                                    MirEvalError::Panic(format!("Overflow in {op:?}"))
                                })?,
                                BinOp::Rem => l128.checked_rem(r128).ok_or_else(|| {
                                    MirEvalError::Panic(format!("Overflow in {op:?}"))
                                })?,
                                BinOp::Sub => l128.checked_sub(r128).ok_or_else(|| {
                                    MirEvalError::Panic(format!("Overflow in {op:?}"))
                                })?,
                                BinOp::BitAnd => l128 & r128,
                                BinOp::BitOr => l128 | r128,
                                BinOp::BitXor => l128 ^ r128,
                                _ => unreachable!(),
                            };
                            Owned(r.to_bytes())
                        }
                        BinOp::Shl | BinOp::Shr => {
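                            // A shift by at least the bit width of the type
                            // overflows and is reported as a panic.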
                            let r = 'b: {
                                if let Some(shift_amount) = r128.as_u32() {
                                    let r = match op {
                                        BinOp::Shl => l128.checked_shl(shift_amount),
                                        BinOp::Shr => l128.checked_shr(shift_amount),
                                        _ => unreachable!(),
                                    };
                                    if shift_amount as usize >= lc.len() * 8 {
                                        return Err(MirEvalError::Panic(format!(
                                            "Overflow in {op:?}"
                                        )));
                                    }
                                    if let Some(r) = r {
                                        break 'b r;
                                    }
                                };
                                return Err(MirEvalError::Panic(format!("Overflow in {op:?}")));
                            };
                            Owned(r.to_bytes())
                        }
                        BinOp::Offset => not_supported!("offset binop"),
                    }
                }
            }
            Rvalue::Discriminant(p) => {
                let ty = self.place_ty(p, locals)?;
                let bytes = self.eval_place(p, locals)?.get(self)?;
                let result = self.compute_discriminant(ty, bytes)?;
                Owned(result.to_le_bytes().to_vec())
            }
            Rvalue::Repeat(it, len) => {
                let len = match try_const_usize(self.db, len) {
                    Some(it) => it as usize,
                    None => not_supported!("non evaluatable array len in repeat Rvalue"),
                };
                let val = self.eval_operand(it, locals)?.get(self)?;
                let size = len * val.len();
                Owned(val.iter().copied().cycle().take(size).collect())
            }
            Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"),
            Rvalue::ShallowInitBoxWithAlloc(ty) => {
                let Some((size, align)) = self.size_align_of(ty, locals)? else {
                    not_supported!("unsized box initialization");
                };
                let addr = self.heap_allocate(size, align)?;
                Owned(addr.to_bytes().to_vec())
            }
            Rvalue::CopyForDeref(_) => not_supported!("copy for deref"),
            Rvalue::Aggregate(kind, values) => {
                let values = values
                    .iter()
                    .map(|it| self.eval_operand(it, locals))
                    .collect::<Result<Vec<_>>>()?;
                match kind {
                    AggregateKind::Array(_) => {
                        let mut r = vec![];
                        for it in values {
                            let value = it.get(self)?;
                            r.extend(value);
                        }
                        Owned(r)
                    }
                    AggregateKind::Tuple(ty) => {
                        let layout = self.layout(ty.to_nextsolver(interner))?;
                        Owned(self.construct_with_layout(
                            layout.size.bytes_usize(),
                            &layout,
                            None,
                            values.iter().map(|&it| it.into()),
                        )?)
                    }
                    AggregateKind::Union(it, f) => {
                        let layout =
                            self.layout_adt((*it).into(), Substitution::empty(Interner))?;
                        let offset = layout
                            .fields
                            .offset(u32::from(f.local_id.into_raw()) as usize)
                            .bytes_usize();
                        let op = values[0].get(self)?;
                        let mut result = vec![0; layout.size.bytes_usize()];
                        result[offset..offset + op.len()].copy_from_slice(op);
                        Owned(result)
                    }
                    AggregateKind::Adt(it, subst) => {
                        let (size, variant_layout, tag) =
                            self.layout_of_variant(*it, subst.clone(), locals)?;
                        Owned(self.construct_with_layout(
                            size,
                            &variant_layout,
                            tag,
                            values.iter().map(|&it| it.into()),
                        )?)
                    }
                    AggregateKind::Closure(ty) => {
                        let layout = self.layout(ty.to_nextsolver(interner))?;
                        Owned(self.construct_with_layout(
                            layout.size.bytes_usize(),
                            &layout,
                            None,
                            values.iter().map(|&it| it.into()),
                        )?)
                    }
                }
            }
            Rvalue::Cast(kind, operand, target_ty) => match kind {
                CastKind::PointerCoercion(cast) => match cast {
                    PointerCast::ReifyFnPointer | PointerCast::ClosureFnPointer(_) => {
                        let current_ty = self.operand_ty(operand, locals)?;
                        if let TyKind::FnDef(_, _) | TyKind::Closure(_, _) =
                            &current_ty.kind(Interner)
                        {
                            let interner = DbInterner::new_with(self.db, None, None);
                            let current_ty = current_ty.to_nextsolver(interner);
                            let id = self.vtable_map.id(current_ty);
                            let ptr_size = self.ptr_size();
                            Owned(id.to_le_bytes()[0..ptr_size].to_vec())
                        } else {
                            not_supported!(
                                "creating a fn pointer from a non FnDef or Closure type"
                            );
                        }
                    }
                    PointerCast::Unsize => {
                        let current_ty = self.operand_ty(operand, locals)?;
                        let addr = self.eval_operand(operand, locals)?;
                        self.coerce_unsized(addr, &current_ty, target_ty)?
                    }
                    PointerCast::MutToConstPointer | PointerCast::UnsafeFnPointer => {
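                        // These casts change only the type; the bytes are
                        // reused unchanged.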
                        Borrowed(self.eval_operand(operand, locals)?)
                    }
                    PointerCast::ArrayToPointer => {
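                        // Keep only the data pointer, truncating away any
                        // metadata that may be present.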
                        Borrowed(self.eval_operand(operand, locals)?.slice(0..self.ptr_size()))
                    }
                },
                CastKind::DynStar => not_supported!("dyn star cast"),
                CastKind::IntToInt
                | CastKind::PtrToPtr
                | CastKind::PointerExposeAddress
                | CastKind::PointerFromExposedAddress => {
                    let current_ty = self.operand_ty(operand, locals)?;
                    let is_signed = matches!(
                        current_ty.kind(Interner),
                        TyKind::Scalar(chalk_ir::Scalar::Int(_))
                    );
                    let current = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed);
                    let dest_size =
                        self.size_of_sized(target_ty, locals, "destination of int to int cast")?;
                    Owned(current[0..dest_size].to_vec())
                }
                CastKind::FloatToInt => {
                    let ty = self.operand_ty(operand, locals)?;
                    let TyKind::Scalar(chalk_ir::Scalar::Float(ty)) = ty.kind(Interner) else {
                        not_supported!("invalid float to int cast");
                    };
                    let value = self.eval_operand(operand, locals)?.get(self)?;
                    let value = match ty {
                        chalk_ir::FloatTy::F32 => {
                            let value = value.try_into().unwrap();
                            f32::from_le_bytes(value) as f64
                        }
                        chalk_ir::FloatTy::F64 => {
                            let value = value.try_into().unwrap();
                            f64::from_le_bytes(value)
                        }
                        chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => {
                            not_supported!("unstable floating point types f16 and f128");
                        }
                    };
                    let is_signed = matches!(
                        target_ty.kind(Interner),
                        TyKind::Scalar(chalk_ir::Scalar::Int(_))
                    );
                    let dest_size =
                        self.size_of_sized(target_ty, locals, "destination of float to int cast")?;
                    let dest_bits = dest_size * 8;
                    let (max, min) = if dest_bits == 128 {
                        (i128::MAX, i128::MIN)
                    } else if is_signed {
                        let max = 1i128 << (dest_bits - 1);
                        (max - 1, -max)
                    } else {
                        (1i128 << dest_bits, 0)
                    };
                    let value = (value as i128).min(max).max(min);
                    let result = value.to_le_bytes();
                    Owned(result[0..dest_size].to_vec())
                }
                CastKind::FloatToFloat => {
                    let ty = self.operand_ty(operand, locals)?;
                    let TyKind::Scalar(chalk_ir::Scalar::Float(ty)) = ty.kind(Interner) else {
                        not_supported!("invalid float to float cast");
                    };
                    let value = self.eval_operand(operand, locals)?.get(self)?;
                    let value = match ty {
                        chalk_ir::FloatTy::F32 => {
                            let value = value.try_into().unwrap();
                            f32::from_le_bytes(value) as f64
                        }
                        chalk_ir::FloatTy::F64 => {
                            let value = value.try_into().unwrap();
                            f64::from_le_bytes(value)
                        }
                        chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => {
                            not_supported!("unstable floating point types f16 and f128");
                        }
                    };
                    let TyKind::Scalar(chalk_ir::Scalar::Float(target_ty)) =
                        target_ty.kind(Interner)
                    else {
                        not_supported!("invalid float to float cast");
                    };
                    match target_ty {
                        chalk_ir::FloatTy::F32 => Owned((value as f32).to_le_bytes().to_vec()),
                        chalk_ir::FloatTy::F64 => Owned((value as f64).to_le_bytes().to_vec()),
                        chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => {
                            not_supported!("unstable floating point types f16 and f128");
                        }
                    }
                }
                CastKind::IntToFloat => {
                    let current_ty = self.operand_ty(operand, locals)?;
                    let is_signed = matches!(
                        current_ty.kind(Interner),
                        TyKind::Scalar(chalk_ir::Scalar::Int(_))
                    );
                    let value = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed);
                    let value = i128::from_le_bytes(value);
                    let TyKind::Scalar(chalk_ir::Scalar::Float(target_ty)) =
                        target_ty.kind(Interner)
                    else {
                        not_supported!("invalid int to float cast");
                    };
                    match target_ty {
                        chalk_ir::FloatTy::F32 => Owned((value as f32).to_le_bytes().to_vec()),
                        chalk_ir::FloatTy::F64 => Owned((value as f64).to_le_bytes().to_vec()),
                        chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => {
                            not_supported!("unstable floating point types f16 and f128");
                        }
                    }
                }
                CastKind::FnPtrToPtr => not_supported!("fn ptr to ptr cast"),
            },
            Rvalue::ThreadLocalRef(n)
            | Rvalue::AddressOf(n)
            | Rvalue::BinaryOp(n)
            | Rvalue::NullaryOp(n) => match *n {},
        })
    }

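    /// Reads the discriminant of an enum value from its raw bytes, handling
    /// both direct tags and niche encodings.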
    fn compute_discriminant(&self, ty: Ty, bytes: &[u8]) -> Result<i128> {
        let interner = DbInterner::new_with(self.db, None, None);
        let layout = self.layout(ty.to_nextsolver(interner))?;
        let &TyKind::Adt(chalk_ir::AdtId(AdtId::EnumId(e)), _) = ty.kind(Interner) else {
            return Ok(0);
        };
        match &layout.variants {
            Variants::Empty => unreachable!(),
            Variants::Single { index } => {
                let r =
                    self.const_eval_discriminant(e.enum_variants(self.db).variants[index.0].0)?;
                Ok(r)
            }
            Variants::Multiple { tag, tag_encoding, variants, .. } => {
                let size = tag.size(&*self.target_data_layout).bytes_usize();
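                // The tag is the only top-level field of a multi-variant
                // layout.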
                let offset = layout.fields.offset(0).bytes_usize();
                let is_signed = tag.is_signed();
                match tag_encoding {
                    TagEncoding::Direct => {
                        let tag = &bytes[offset..offset + size];
                        Ok(i128::from_le_bytes(pad16(tag, is_signed)))
                    }
                    TagEncoding::Niche { untagged_variant, niche_start, .. } => {
                        let tag = &bytes[offset..offset + size];
                        let candidate_tag = i128::from_le_bytes(pad16(tag, is_signed))
                            .wrapping_sub(*niche_start as i128)
                            as usize;
                        let idx = variants
                            .iter_enumerated()
                            .map(|(it, _)| it)
                            .filter(|it| it != untagged_variant)
                            .nth(candidate_tag)
                            .unwrap_or(*untagged_variant)
                            .0;
                        let result =
                            self.const_eval_discriminant(e.enum_variants(self.db).variants[idx].0)?;
                        Ok(result)
                    }
                }
            }
        }
    }

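    /// An unsize coercion can be buried behind struct wrappers (e.g.
    /// `Box<[T]>`); keep looking through the last field until `goal` matches.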
    fn coerce_unsized_look_through_fields<T>(
        &self,
        ty: &Ty,
        goal: impl Fn(&TyKind) -> Option<T>,
    ) -> Result<T> {
        let kind = ty.kind(Interner);
        if let Some(it) = goal(kind) {
            return Ok(it);
        }
        if let TyKind::Adt(id, subst) = kind
            && let AdtId::StructId(struct_id) = id.0
        {
            let field_types = self.db.field_types(struct_id.into());
            if let Some(ty) =
                field_types.iter().last().map(|it| it.1.clone().substitute(Interner, subst))
            {
                return self.coerce_unsized_look_through_fields(&ty, goal);
            }
        }
        Err(MirEvalError::CoerceUnsizedError(ty.clone()))
    }

    fn coerce_unsized(
        &mut self,
        addr: Interval,
        current_ty: &Ty,
        target_ty: &Ty,
    ) -> Result<IntervalOrOwned> {
        fn for_ptr(it: &TyKind) -> Option<Ty> {
            match it {
                TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => Some(ty.clone()),
                _ => None,
            }
        }
        let target_ty = self.coerce_unsized_look_through_fields(target_ty, for_ptr)?;
        let current_ty = self.coerce_unsized_look_through_fields(current_ty, for_ptr)?;

        self.unsizing_ptr_from_addr(target_ty, current_ty, addr)
    }

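    /// Builds the wide pointer produced by an unsizing cast: array-to-slice
    /// appends the length, concrete-to-`dyn` appends a vtable id, and ADTs
    /// recurse into their last field.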
    fn unsizing_ptr_from_addr(
        &mut self,
        target_ty: Ty,
        current_ty: Ty,
        addr: Interval,
    ) -> Result<IntervalOrOwned> {
        use IntervalOrOwned::*;
        Ok(match &target_ty.kind(Interner) {
            TyKind::Slice(_) => match &current_ty.kind(Interner) {
                TyKind::Array(_, size) => {
                    let len = match try_const_usize(self.db, size) {
                        None => {
                            not_supported!("unevaluatable len of array in coerce unsized")
                        }
                        Some(it) => it as usize,
                    };
                    let mut r = Vec::with_capacity(16);
                    let addr = addr.get(self)?;
                    r.extend(addr.iter().copied());
                    r.extend(len.to_le_bytes());
                    Owned(r)
                }
                t => {
                    not_supported!("slice unsizing from non array type {t:?}")
                }
            },
            TyKind::Dyn(_) => {
                let interner = DbInterner::new_with(self.db, None, None);
                let current_ty = current_ty.to_nextsolver(interner);
                let vtable = self.vtable_map.id(current_ty);
                let mut r = Vec::with_capacity(16);
                let addr = addr.get(self)?;
                r.extend(addr.iter().copied());
                r.extend(vtable.to_le_bytes());
                Owned(r)
            }
            TyKind::Adt(id, target_subst) => match &current_ty.kind(Interner) {
                TyKind::Adt(current_id, current_subst) => {
                    if id != current_id {
                        not_supported!("unsizing struct with different type");
                    }
                    let id = match id.0 {
                        AdtId::StructId(s) => s,
                        AdtId::UnionId(_) => not_supported!("unsizing unions"),
                        AdtId::EnumId(_) => not_supported!("unsizing enums"),
                    };
                    let Some((last_field, _)) = id.fields(self.db).fields().iter().next_back()
                    else {
                        not_supported!("unsizing struct without field");
                    };
                    let target_last_field = self.db.field_types(id.into())[last_field]
                        .clone()
                        .substitute(Interner, target_subst);
                    let current_last_field = self.db.field_types(id.into())[last_field]
                        .clone()
                        .substitute(Interner, current_subst);
                    return self.unsizing_ptr_from_addr(
                        target_last_field,
                        current_last_field,
                        addr,
                    );
                }
                _ => not_supported!("unsizing struct with non adt type"),
            },
            _ => not_supported!("unknown unsized cast"),
        })
    }

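    /// Returns the overall size of the value, the layout of the requested
    /// variant, and, if a tag has to be written, its `(offset, size, value)`.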
    fn layout_of_variant(
        &mut self,
        it: VariantId,
        subst: Substitution,
        locals: &Locals,
    ) -> Result<(usize, Arc<Layout>, Option<(usize, usize, i128)>)> {
        let interner = DbInterner::new_with(self.db, None, None);
        let adt = it.adt_id(self.db);
        if let DefWithBodyId::VariantId(f) = locals.body.owner
            && let VariantId::EnumVariantId(it) = it
            && let AdtId::EnumId(e) = adt
            && f.lookup(self.db).parent == e
        {
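            // We are evaluating the body of a variant of this very enum (its
            // discriminant initializer); computing the real layout here could
            // recurse, so use a dummy 16-byte layout instead.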
1813 let i = self.const_eval_discriminant(it)?;
1816 return Ok((
1817 16,
1818 self.layout(crate::next_solver::Ty::new_empty_tuple(interner))?,
1819 Some((0, 16, i)),
1820 ));
1821 }
1822 let layout = self.layout_adt(adt, subst)?;
1823 Ok(match &layout.variants {
1824 Variants::Single { .. } | Variants::Empty => (layout.size.bytes_usize(), layout, None),
1825 Variants::Multiple { variants, tag, tag_encoding, .. } => {
1826 let enum_variant_id = match it {
1827 VariantId::EnumVariantId(it) => it,
1828 _ => not_supported!("multi variant layout for non-enums"),
1829 };
1830 let mut discriminant = self.const_eval_discriminant(enum_variant_id)?;
1831 let lookup = enum_variant_id.lookup(self.db);
1832 let rustc_enum_variant_idx = RustcEnumVariantIdx(lookup.index as usize);
1833 let variant_layout = variants[rustc_enum_variant_idx].clone();
1834 let have_tag = match tag_encoding {
1835 TagEncoding::Direct => true,
1836 TagEncoding::Niche { untagged_variant, niche_variants: _, niche_start } => {
1837 if *untagged_variant == rustc_enum_variant_idx {
1838 false
1839 } else {
1840 discriminant = (variants
1841 .iter_enumerated()
1842 .filter(|(it, _)| it != untagged_variant)
1843 .position(|(it, _)| it == rustc_enum_variant_idx)
1844 .unwrap() as i128)
1845 .wrapping_add(*niche_start as i128);
1846 true
1847 }
1848 }
1849 };
1850 (
1851 layout.size.bytes_usize(),
1852 Arc::new(variant_layout),
1853 if have_tag {
1854 Some((
1855 layout.fields.offset(0).bytes_usize(),
1856 tag.size(&*self.target_data_layout).bytes_usize(),
1857 discriminant,
1858 ))
1859 } else {
1860 None
1861 },
1862 )
1863 }
1864 })
1865 }
1866
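    /// Builds the raw bytes of a value from its fields: zero-initializes a buffer of
    /// `size` bytes, writes the tag (if any), then copies every field to its offset
    /// in `variant_layout`.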
    fn construct_with_layout(
        &mut self,
        size: usize,
        variant_layout: &Layout,
        tag: Option<(usize, usize, i128)>,
        values: impl Iterator<Item = IntervalOrOwned>,
    ) -> Result<Vec<u8>> {
        let mut result = vec![0; size];
        if let Some((offset, size, value)) = tag {
            match result.get_mut(offset..offset + size) {
                Some(it) => it.copy_from_slice(&value.to_le_bytes()[0..size]),
                None => {
                    return Err(MirEvalError::InternalError(
                        format!(
                            "encoded tag ({offset}, {size}, {value}) is out of bounds 0..{}",
                            result.len()
                        )
                        .into(),
                    ));
                }
            }
        }
        for (i, op) in values.enumerate() {
            let offset = variant_layout.fields.offset(i).bytes_usize();
            let op = op.get(self)?;
            match result.get_mut(offset..offset + op.len()) {
                Some(it) => it.copy_from_slice(op),
                None => {
                    return Err(MirEvalError::InternalError(
                        format!("field offset ({offset}) is out of bounds 0..{size}").into(),
                    ));
                }
            }
        }
        Ok(result)
    }

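    /// Evaluates an operand to the memory interval holding its value. `Copy` and
    /// `Move` are treated identically; both clear the source place's drop flag.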
    fn eval_operand(&mut self, it: &Operand, locals: &mut Locals) -> Result<Interval> {
        Ok(match &it.kind {
            OperandKind::Copy(p) | OperandKind::Move(p) => {
                locals.drop_flags.remove_place(p, &locals.body.projection_store);
                self.eval_place(p, locals)?
            }
            OperandKind::Static(st) => {
                let addr = self.eval_static(*st, locals)?;
                Interval::new(addr, self.ptr_size())
            }
            OperandKind::Constant(konst) => self.allocate_const_in_heap(locals, konst)?,
        })
    }

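    /// Evaluates a constant (if needed) and copies its value into a fresh heap
    /// allocation, rewriting any addresses embedded in its memory map so they point
    /// into this evaluator's heap.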
    #[allow(clippy::double_parens)]
    fn allocate_const_in_heap(&mut self, locals: &Locals, konst: &Const) -> Result<Interval> {
        let interner = DbInterner::new_with(self.db, None, None);
        let ConstData { ty, value: chalk_ir::ConstValue::Concrete(c) } = &konst.data(Interner)
        else {
            not_supported!("evaluating non concrete constant");
        };
        let result_owner;
        let (v, memory_map) = match &c.interned {
            ConstScalar::Bytes(v, mm) => (v, mm),
            ConstScalar::UnevaluatedConst(const_id, subst) => 'b: {
                let mut const_id = *const_id;
                let mut subst = subst.clone();
                if let hir_def::GeneralConstId::ConstId(c) = const_id {
                    let (c, s) = lookup_impl_const(self.db, self.trait_env.clone(), c, subst);
                    const_id = hir_def::GeneralConstId::ConstId(c);
                    subst = s;
                }
                result_owner = self
                    .db
                    .const_eval(const_id, subst, Some(self.trait_env.clone()))
                    .map_err(|e| {
                        let name = const_id.name(self.db);
                        MirEvalError::ConstEvalError(name, Box::new(e))
                    })?;
                if let chalk_ir::ConstValue::Concrete(c) = &result_owner.data(Interner).value
                    && let ConstScalar::Bytes(v, mm) = &c.interned
                {
                    break 'b (v, mm);
                }
                not_supported!("unevaluatable constant");
            }
            ConstScalar::Unknown => not_supported!("evaluating unknown const"),
        };
        let patch_map = memory_map.transform_addresses(|b, align| {
            let addr = self.heap_allocate(b.len(), align)?;
            self.write_memory(addr, b)?;
            Ok(addr.to_usize())
        })?;
        let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1));
        let v: Cow<'_, [u8]> = if size != v.len() {
            // Sizes can mismatch when the dummy 16-byte enum-discriminant layout is
            // involved: pad or truncate to the expected size.
            if size == 16 && v.len() < 16 {
                Cow::Owned(pad16(v, false).to_vec())
            } else if size < 16 && v.len() == 16 {
                Cow::Borrowed(&v[0..size])
            } else {
                return Err(MirEvalError::InvalidConst(konst.clone()));
            }
        } else {
            Cow::Borrowed(v)
        };
        let addr = self.heap_allocate(size, align)?;
        self.write_memory(addr, &v)?;
        self.patch_addresses(
            &patch_map,
            |bytes| match memory_map {
                MemoryMap::Empty | MemoryMap::Simple(_) => {
                    Err(MirEvalError::InvalidVTableId(from_bytes!(usize, bytes)))
                }
                MemoryMap::Complex(cm) => cm.vtable.ty_of_bytes(bytes),
            },
            addr,
            ty.to_nextsolver(interner),
            locals,
        )?;
        Ok(Interval::new(addr, size))
    }

    fn eval_place(&mut self, p: &Place, locals: &Locals) -> Result<Interval> {
        let addr = self.place_addr(p, locals)?;
        Ok(Interval::new(
            addr,
            self.size_of_sized(&self.place_ty(p, locals)?, locals, "type of this place")?,
        ))
    }

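    /// Resolves an address to a slice of the stack or the heap. Reads of invalid or
    /// out-of-bounds addresses are reported as undefined behavior.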
    fn read_memory(&self, addr: Address, size: usize) -> Result<&[u8]> {
        if size == 0 {
            return Ok(&[]);
        }
        let (mem, pos) = match addr {
            Stack(it) => (&self.stack, it),
            Heap(it) => (&self.heap, it),
            Invalid(it) => {
                return Err(MirEvalError::UndefinedBehavior(format!(
                    "read invalid memory address {it} with size {size}"
                )));
            }
        };
        mem.get(pos..pos + size)
            .ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory read".to_owned()))
    }

    fn write_memory_using_ref(&mut self, addr: Address, size: usize) -> Result<&mut [u8]> {
        let (mem, pos) = match addr {
            Stack(it) => (&mut self.stack, it),
            Heap(it) => (&mut self.heap, it),
            Invalid(it) => {
                return Err(MirEvalError::UndefinedBehavior(format!(
                    "write invalid memory address {it} with size {size}"
                )));
            }
        };
        mem.get_mut(pos..pos + size)
            .ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory write".to_owned()))
    }

    fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> {
        if r.is_empty() {
            return Ok(());
        }
        self.write_memory_using_ref(addr, r.len())?.copy_from_slice(r);
        Ok(())
    }

    fn copy_from_interval_or_owned(&mut self, addr: Address, r: IntervalOrOwned) -> Result<()> {
        match r {
            IntervalOrOwned::Borrowed(r) => self.copy_from_interval(addr, r),
            IntervalOrOwned::Owned(r) => self.write_memory(addr, &r),
        }
    }

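    /// Copies `r.size` bytes from one interval to another, handling all four
    /// stack/heap source and destination combinations.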
    fn copy_from_interval(&mut self, addr: Address, r: Interval) -> Result<()> {
        if r.size == 0 {
            return Ok(());
        }

        let oob = || MirEvalError::UndefinedBehavior("out of bounds memory write".to_owned());

        match (addr, r.addr) {
            (Stack(dst), Stack(src)) => {
                if self.stack.len() < src + r.size || self.stack.len() < dst + r.size {
                    return Err(oob());
                }
                self.stack.copy_within(src..src + r.size, dst)
            }
            (Heap(dst), Heap(src)) => {
                // Note: bounds must be checked against the heap here, not the stack.
                if self.heap.len() < src + r.size || self.heap.len() < dst + r.size {
                    return Err(oob());
                }
                self.heap.copy_within(src..src + r.size, dst)
            }
            (Stack(dst), Heap(src)) => {
                self.stack
                    .get_mut(dst..dst + r.size)
                    .ok_or_else(oob)?
                    .copy_from_slice(self.heap.get(src..src + r.size).ok_or_else(oob)?);
            }
            (Heap(dst), Stack(src)) => {
                self.heap
                    .get_mut(dst..dst + r.size)
                    .ok_or_else(oob)?
                    .copy_from_slice(self.stack.get(src..src + r.size).ok_or_else(oob)?);
            }
            _ => {
                return Err(MirEvalError::UndefinedBehavior(format!(
                    "invalid memory write at address {addr:?}"
                )));
            }
        }

        Ok(())
    }

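    /// Returns the size and alignment of a type, or `None` if it is unsized.
    /// Results come from the layout cache when available.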
    fn size_align_of(&self, ty: &Ty, locals: &Locals) -> Result<Option<(usize, usize)>> {
        let interner = DbInterner::new_with(self.db, None, None);
        if let Some(layout) = self.layout_cache.borrow().get(&ty.to_nextsolver(interner)) {
            return Ok(layout
                .is_sized()
                .then(|| (layout.size.bytes_usize(), layout.align.abi.bytes() as usize)));
        }
        if let DefWithBodyId::VariantId(f) = locals.body.owner
            && let Some((AdtId::EnumId(e), _)) = ty.as_adt()
            && f.lookup(self.db).parent == e
        {
            // Computing the exact size of an enum requires evaluating its discriminants;
            // while inside one of this enum's discriminant bodies, use a dummy size to
            // prevent infinite recursion.
            return Ok(Some((16, 16)));
        }
        let layout = self.layout(ty.to_nextsolver(interner));
        if self.assert_placeholder_ty_is_unused
            && matches!(layout, Err(MirEvalError::LayoutError(LayoutError::HasPlaceholder, _)))
        {
            return Ok(Some((0, 1)));
        }
        let layout = layout?;
        Ok(layout
            .is_sized()
            .then(|| (layout.size.bytes_usize(), layout.align.abi.bytes() as usize)))
    }

    /// A version of `size_align_of` that returns an error if the type is unsized.
    /// The `what` argument describes the value whose size was requested, for diagnostics.
    fn size_of_sized(&self, ty: &Ty, locals: &Locals, what: &'static str) -> Result<usize> {
        match self.size_align_of(ty, locals)? {
            Some(it) => Ok(it.0),
            None => Err(MirEvalError::TypeIsUnsized(ty.clone(), what)),
        }
    }

    /// Like `size_of_sized`, but also returns the alignment.
    fn size_align_of_sized(
        &self,
        ty: &Ty,
        locals: &Locals,
        what: &'static str,
    ) -> Result<(usize, usize)> {
        match self.size_align_of(ty, locals)? {
            Some(it) => Ok(it),
            None => Err(MirEvalError::TypeIsUnsized(ty.clone(), what)),
        }
    }

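    /// Bump-allocates `size` bytes on the evaluator's heap, padding with zero bytes
    /// first so the returned address is a multiple of `align`.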
    fn heap_allocate(&mut self, size: usize, align: usize) -> Result<Address> {
        if !align.is_power_of_two() || align > 10000 {
            return Err(MirEvalError::UndefinedBehavior(format!("Alignment {align} is invalid")));
        }
        while !self.heap.len().is_multiple_of(align) {
            self.heap.push(0);
        }
        if size.checked_add(self.heap.len()).is_none_or(|x| x > self.memory_limit) {
            return Err(MirEvalError::Panic(format!("Memory allocation of {size} bytes failed")));
        }
        let pos = self.heap.len();
        self.heap.extend(std::iter::repeat_n(0, size));
        Ok(Address::Heap(pos))
    }

    fn detect_fn_trait(&self, def: FunctionId) -> Option<FnTrait> {
        let def = Some(def);
        if def == self.cached_fn_trait_func {
            Some(FnTrait::Fn)
        } else if def == self.cached_fn_mut_trait_func {
            Some(FnTrait::FnMut)
        } else if def == self.cached_fn_once_trait_func {
            Some(FnTrait::FnOnce)
        } else {
            None
        }
    }

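    /// Walks a value's bytes according to its type and records every allocation it
    /// (transitively) points to, so the value can later be reconstructed in another
    /// memory space.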
    fn create_memory_map(
        &self,
        bytes: &[u8],
        ty: &Ty,
        locals: &Locals,
    ) -> Result<ComplexMemoryMap<'db>> {
        fn rec<'db>(
            this: &Evaluator<'db>,
            bytes: &[u8],
            ty: &Ty,
            locals: &Locals,
            mm: &mut ComplexMemoryMap<'db>,
            stack_depth_limit: usize,
        ) -> Result<()> {
            let interner = DbInterner::new_with(this.db, None, None);
            if stack_depth_limit.checked_sub(1).is_none() {
                return Err(MirEvalError::StackOverflow);
            }
            match ty.kind(Interner) {
                TyKind::Ref(_, _, t) => {
                    let size = this.size_align_of(t, locals)?;
                    match size {
                        Some((size, _)) => {
                            let addr_usize = from_bytes!(usize, bytes);
                            mm.insert(
                                addr_usize,
                                this.read_memory(Address::from_usize(addr_usize), size)?.into(),
                            )
                        }
                        None => {
                            let mut check_inner = None;
                            // A fat pointer: the first half is the address, the second the metadata.
                            let (addr, meta) = bytes.split_at(bytes.len() / 2);
                            let element_size = match t.kind(Interner) {
                                TyKind::Str => 1,
                                TyKind::Slice(t) => {
                                    check_inner = Some(t.clone());
                                    this.size_of_sized(t, locals, "slice inner type")?
                                }
                                TyKind::Dyn(_) => {
                                    let t = this.vtable_map.ty_of_bytes(meta)?;
                                    let t = convert_ty_for_result(interner, t);
                                    check_inner = Some(t.clone());
                                    this.size_of_sized(&t, locals, "dyn concrete type")?
                                }
                                _ => return Ok(()),
                            };
                            let count = match t.kind(Interner) {
                                TyKind::Dyn(_) => 1,
                                _ => from_bytes!(usize, meta),
                            };
                            let size = element_size * count;
                            let addr = Address::from_bytes(addr)?;
                            let b = this.read_memory(addr, size)?;
                            mm.insert(addr.to_usize(), b.into());
                            if let Some(ty) = &check_inner {
                                for i in 0..count {
                                    let offset = element_size * i;
                                    rec(
                                        this,
                                        &b[offset..offset + element_size],
                                        ty,
                                        locals,
                                        mm,
                                        stack_depth_limit - 1,
                                    )?;
                                }
                            }
                        }
                    }
                }
                TyKind::Array(inner, len) => {
                    let len = match try_const_usize(this.db, len) {
                        Some(it) => it as usize,
                        None => not_supported!("non evaluatable array len in patching addresses"),
                    };
                    let size = this.size_of_sized(inner, locals, "inner of array")?;
                    for i in 0..len {
                        let offset = i * size;
                        rec(
                            this,
                            &bytes[offset..offset + size],
                            inner,
                            locals,
                            mm,
                            stack_depth_limit - 1,
                        )?;
                    }
                }
                TyKind::Tuple(_, subst) => {
                    let layout = this.layout(ty.to_nextsolver(interner))?;
                    for (id, ty) in subst.iter(Interner).enumerate() {
                        let ty = ty.assert_ty_ref(Interner); // Tuple only has type parameters.
                        let offset = layout.fields.offset(id).bytes_usize();
                        let size = this.layout(ty.to_nextsolver(interner))?.size.bytes_usize();
                        rec(
                            this,
                            &bytes[offset..offset + size],
                            ty,
                            locals,
                            mm,
                            stack_depth_limit - 1,
                        )?;
                    }
                }
                TyKind::Adt(adt, subst) => match adt.0 {
                    AdtId::StructId(s) => {
                        let data = s.fields(this.db);
                        let layout = this.layout(ty.to_nextsolver(interner))?;
                        let field_types = this.db.field_types(s.into());
                        for (f, _) in data.fields().iter() {
                            let offset = layout
                                .fields
                                .offset(u32::from(f.into_raw()) as usize)
                                .bytes_usize();
                            let ty = &field_types[f].clone().substitute(Interner, subst);
                            let size = this.layout(ty.to_nextsolver(interner))?.size.bytes_usize();
                            rec(
                                this,
                                &bytes[offset..offset + size],
                                ty,
                                locals,
                                mm,
                                stack_depth_limit - 1,
                            )?;
                        }
                    }
                    AdtId::EnumId(e) => {
                        let layout = this.layout(ty.to_nextsolver(interner))?;
                        if let Some((v, l)) = detect_variant_from_bytes(
                            &layout,
                            this.db,
                            &this.target_data_layout,
                            bytes,
                            e,
                        ) {
                            let data = v.fields(this.db);
                            let field_types = this.db.field_types(v.into());
                            for (f, _) in data.fields().iter() {
                                let offset =
                                    l.fields.offset(u32::from(f.into_raw()) as usize).bytes_usize();
                                let ty = &field_types[f].clone().substitute(Interner, subst);
                                let size =
                                    this.layout(ty.to_nextsolver(interner))?.size.bytes_usize();
                                rec(
                                    this,
                                    &bytes[offset..offset + size],
                                    ty,
                                    locals,
                                    mm,
                                    stack_depth_limit - 1,
                                )?;
                            }
                        }
                    }
                    AdtId::UnionId(_) => (),
                },
                TyKind::Alias(AliasTy::Projection(proj)) => {
                    let ty = this.db.normalize_projection(proj.clone(), this.trait_env.clone());
                    rec(this, bytes, &ty, locals, mm, stack_depth_limit - 1)?;
                }
                _ => (),
            }
            Ok(())
        }
        let mut mm = ComplexMemoryMap::default();
        rec(self, bytes, ty, locals, &mut mm, self.stack_depth_limit - 1)?;
        Ok(mm)
    }

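    /// The inverse of `create_memory_map`: walks a freshly copied value and rewrites
    /// every pointer it contains using `patch_map` (old address to new address), and
    /// re-registers function pointers in this evaluator's vtable map.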
    fn patch_addresses(
        &mut self,
        patch_map: &FxHashMap<usize, usize>,
        ty_of_bytes: impl Fn(&[u8]) -> Result<crate::next_solver::Ty<'db>> + Copy,
        addr: Address,
        ty: crate::next_solver::Ty<'db>,
        locals: &Locals,
    ) -> Result<()> {
        let interner = DbInterner::new_with(self.db, None, None);
        let layout = self.layout(ty)?;
        let my_size = self.size_of_sized(
            &convert_ty_for_result(interner, ty),
            locals,
            "value to patch address",
        )?;
        use rustc_type_ir::TyKind;
        match ty.kind() {
            TyKind::Ref(_, t, _) => {
                let size = self.size_align_of(&convert_ty_for_result(interner, t), locals)?;
                match size {
                    Some(_) => {
                        let current = from_bytes!(usize, self.read_memory(addr, my_size)?);
                        if let Some(it) = patch_map.get(&current) {
                            self.write_memory(addr, &it.to_le_bytes())?;
                        }
                    }
                    None => {
                        let current = from_bytes!(usize, self.read_memory(addr, my_size / 2)?);
                        if let Some(it) = patch_map.get(&current) {
                            self.write_memory(addr, &it.to_le_bytes())?;
                        }
                    }
                }
            }
            TyKind::FnPtr(_, _) => {
                let ty = ty_of_bytes(self.read_memory(addr, my_size)?)?;
                let new_id = self.vtable_map.id(ty);
                self.write_memory(addr, &new_id.to_le_bytes())?;
            }
            TyKind::Adt(id, args) => match id.def_id() {
                SolverDefId::AdtId(AdtId::StructId(s)) => {
                    for (i, (_, ty)) in self.db.field_types_ns(s.into()).iter().enumerate() {
                        let offset = layout.fields.offset(i).bytes_usize();
                        let ty = ty.instantiate(interner, args);
                        self.patch_addresses(
                            patch_map,
                            ty_of_bytes,
                            addr.offset(offset),
                            ty,
                            locals,
                        )?;
                    }
                }
                SolverDefId::AdtId(AdtId::UnionId(_)) => (),
                SolverDefId::AdtId(AdtId::EnumId(e)) => {
                    if let Some((ev, layout)) = detect_variant_from_bytes(
                        &layout,
                        self.db,
                        &self.target_data_layout,
                        self.read_memory(addr, layout.size.bytes_usize())?,
                        e,
                    ) {
                        for (i, (_, ty)) in self.db.field_types_ns(ev.into()).iter().enumerate() {
                            let offset = layout.fields.offset(i).bytes_usize();
                            let ty = ty.instantiate(interner, args);
                            self.patch_addresses(
                                patch_map,
                                ty_of_bytes,
                                addr.offset(offset),
                                ty,
                                locals,
                            )?;
                        }
                    }
                }
                _ => unreachable!(),
            },
            TyKind::Tuple(tys) => {
                for (id, ty) in tys.iter().enumerate() {
                    let offset = layout.fields.offset(id).bytes_usize();
                    self.patch_addresses(patch_map, ty_of_bytes, addr.offset(offset), ty, locals)?;
                }
            }
            TyKind::Array(inner, len) => {
                let len = match consteval_nextsolver::try_const_usize(self.db, len) {
                    Some(it) => it as usize,
                    None => not_supported!("non evaluatable array len in patching addresses"),
                };
                let size = self.size_of_sized(
                    &convert_ty_for_result(interner, inner),
                    locals,
                    "inner of array",
                )?;
                for i in 0..len {
                    self.patch_addresses(
                        patch_map,
                        ty_of_bytes,
                        addr.offset(i * size),
                        inner,
                        locals,
                    )?;
                }
            }
            TyKind::Bool
            | TyKind::Char
            | TyKind::Int(_)
            | TyKind::Uint(_)
            | TyKind::Float(_)
            | TyKind::Slice(_)
            | TyKind::RawPtr(_, _)
            | TyKind::FnDef(_, _)
            | TyKind::Str
            | TyKind::Never
            | TyKind::Closure(_, _)
            | TyKind::Coroutine(_, _)
            | TyKind::CoroutineWitness(_, _)
            | TyKind::Foreign(_)
            | TyKind::Error(_)
            | TyKind::Placeholder(_)
            | TyKind::Dynamic(_, _, _)
            | TyKind::Alias(_, _)
            | TyKind::Bound(_, _)
            | TyKind::Infer(_)
            | TyKind::Pat(_, _)
            | TyKind::Param(_)
            | TyKind::UnsafeBinder(_)
            | TyKind::CoroutineClosure(_, _) => (),
        }
        Ok(())
    }

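    /// Calls the function or closure identified by a function "pointer", which the
    /// evaluator represents as an id in the vtable map rather than a real address.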
    fn exec_fn_pointer(
        &mut self,
        bytes: Interval,
        destination: Interval,
        args: &[IntervalAndTy],
        locals: &Locals,
        target_bb: Option<BasicBlockId>,
        span: MirSpan,
    ) -> Result<Option<StackFrame>> {
        let id = from_bytes!(usize, bytes.get(self)?);
        let next_ty = self.vtable_map.ty(id)?;
        let interner = DbInterner::new_with(self.db, None, None);
        use rustc_type_ir::TyKind;
        match next_ty.kind() {
            TyKind::FnDef(def, generic_args) => {
                let def = match def {
                    SolverDefId::FunctionId(id) => CallableDefId::FunctionId(id),
                    SolverDefId::Ctor(Ctor::Struct(s)) => CallableDefId::StructId(s),
                    SolverDefId::Ctor(Ctor::Enum(e)) => CallableDefId::EnumVariantId(e),
                    _ => unreachable!(),
                };
                self.exec_fn_def(
                    def,
                    &convert_args_for_result(interner, generic_args.as_slice()),
                    destination,
                    args,
                    locals,
                    target_bb,
                    span,
                )
            }
            TyKind::Closure(id, generic_args) => {
                let id = match id {
                    SolverDefId::InternedClosureId(id) => id,
                    _ => unreachable!(),
                };
                self.exec_closure(
                    id.into(),
                    bytes.slice(0..0),
                    &convert_args_for_result(interner, generic_args.as_slice()),
                    destination,
                    args,
                    locals,
                    span,
                )
            }
            _ => Err(MirEvalError::InternalError("function pointer to non function".into())),
        }
    }

    fn exec_closure(
        &mut self,
        closure: ClosureId,
        closure_data: Interval,
        generic_args: &Substitution,
        destination: Interval,
        args: &[IntervalAndTy],
        locals: &Locals,
        span: MirSpan,
    ) -> Result<Option<StackFrame>> {
        let mir_body = self
            .db
            .monomorphized_mir_body_for_closure(
                closure.into(),
                generic_args.clone(),
                self.trait_env.clone(),
            )
            .map_err(|it| MirEvalError::MirLowerErrorForClosure(closure, it))?;
        let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some()
        {
            closure_data.addr.to_bytes().to_vec()
        } else {
            closure_data.get(self)?.to_owned()
        };
        let arg_bytes = iter::once(Ok(closure_data))
            .chain(args.iter().map(|it| Ok(it.get(self)?.to_owned())))
            .collect::<Result<Vec<_>>>()?;
        let interval = self
            .interpret_mir(mir_body, arg_bytes.into_iter().map(IntervalOrOwned::Owned))
            .map_err(|e| {
                MirEvalError::InFunction(
                    Box::new(e),
                    vec![(Either::Right(closure), span, locals.body.owner)],
                )
            })?;
        destination.write_from_interval(self, interval)?;
        Ok(None)
    }

    fn exec_fn_def(
        &mut self,
        def: CallableDefId,
        generic_args: &Substitution,
        destination: Interval,
        args: &[IntervalAndTy],
        locals: &Locals,
        target_bb: Option<BasicBlockId>,
        span: MirSpan,
    ) -> Result<Option<StackFrame>> {
        let generic_args = generic_args.clone();
        match def {
            CallableDefId::FunctionId(def) => {
                if self.detect_fn_trait(def).is_some() {
                    return self.exec_fn_trait(
                        def,
                        args,
                        generic_args,
                        locals,
                        destination,
                        target_bb,
                        span,
                    );
                }
                self.exec_fn_with_args(
                    def,
                    args,
                    generic_args,
                    locals,
                    destination,
                    target_bb,
                    span,
                )
            }
            CallableDefId::StructId(id) => {
                // Struct and enum-variant constructors behave like functions that build
                // the value from their arguments using the variant's layout.
                let (size, variant_layout, tag) =
                    self.layout_of_variant(id.into(), generic_args, locals)?;
                let result = self.construct_with_layout(
                    size,
                    &variant_layout,
                    tag,
                    args.iter().map(|it| it.interval.into()),
                )?;
                destination.write_from_bytes(self, &result)?;
                Ok(None)
            }
            CallableDefId::EnumVariantId(id) => {
                let (size, variant_layout, tag) =
                    self.layout_of_variant(id.into(), generic_args, locals)?;
                let result = self.construct_with_layout(
                    size,
                    &variant_layout,
                    tag,
                    args.iter().map(|it| it.interval.into()),
                )?;
                destination.write_from_bytes(self, &result)?;
                Ok(None)
            }
        }
    }

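    /// Resolves a function call to either a monomorphized MIR body or, for dynamic
    /// dispatch, the index of the `Self` type in the substitution. Results are cached.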
    fn get_mir_or_dyn_index(
        &self,
        def: FunctionId,
        generic_args: Substitution,
        locals: &Locals,
        span: MirSpan,
    ) -> Result<MirOrDynIndex> {
        let pair = (def, generic_args);
        if let Some(r) = self.mir_or_dyn_index_cache.borrow().get(&pair) {
            return Ok(r.clone());
        }
        let (def, generic_args) = pair;
        let r = if let Some(self_ty_idx) =
            is_dyn_method(self.db, self.trait_env.clone(), def, generic_args.clone())
        {
            MirOrDynIndex::Dyn(self_ty_idx)
        } else {
            let (imp, generic_args) =
                self.db.lookup_impl_method(self.trait_env.clone(), def, generic_args.clone());

            let mir_body = self
                .db
                .monomorphized_mir_body(imp.into(), generic_args, self.trait_env.clone())
                .map_err(|e| {
                    MirEvalError::InFunction(
                        Box::new(MirEvalError::MirLowerError(imp, e)),
                        vec![(Either::Left(imp), span, locals.body.owner)],
                    )
                })?;
            MirOrDynIndex::Mir(mir_body)
        };
        self.mir_or_dyn_index_cache.borrow_mut().insert((def, generic_args), r.clone());
        Ok(r)
    }

    fn exec_fn_with_args(
        &mut self,
        mut def: FunctionId,
        args: &[IntervalAndTy],
        generic_args: Substitution,
        locals: &Locals,
        destination: Interval,
        target_bb: Option<BasicBlockId>,
        span: MirSpan,
    ) -> Result<Option<StackFrame>> {
        let interner = DbInterner::new_with(self.db, None, None);
        if self.detect_and_exec_special_function(
            def,
            args,
            &generic_args,
            locals,
            destination,
            span,
        )? {
            return Ok(None);
        }
        if let Some(redirect_def) = self.detect_and_redirect_special_function(def)? {
            def = redirect_def;
        }
        let arg_bytes = args.iter().map(|it| IntervalOrOwned::Borrowed(it.interval));
        match self.get_mir_or_dyn_index(def, generic_args.clone(), locals, span)? {
            MirOrDynIndex::Dyn(self_ty_idx) => {
                // In the layout of every possible receiver (`&T`, `&mut T`, `Box<T>`,
                // `Rc<T>`, `Arc<T>`, and `Pin<P>` of one of these), the vtable pointer
                // sits at bytes `[ptr_size..2 * ptr_size]`, so it can be read without
                // branching on the receiver type.
                let first_arg = arg_bytes.clone().next().unwrap();
                let first_arg = first_arg.get(self)?;
                let ty = self
                    .vtable_map
                    .ty_of_bytes(&first_arg[self.ptr_size()..self.ptr_size() * 2])?;
                let mut args_for_target = args.to_vec();
                let ty = convert_ty_for_result(interner, ty);
                args_for_target[0] = IntervalAndTy {
                    interval: args_for_target[0].interval.slice(0..self.ptr_size()),
                    ty: ty.clone(),
                };
                let ty = ty.clone().cast(Interner);
                let generics_for_target = Substitution::from_iter(
                    Interner,
                    generic_args
                        .iter(Interner)
                        .enumerate()
                        .map(|(i, it)| if i == self_ty_idx { &ty } else { it }),
                );
                self.exec_fn_with_args(
                    def,
                    &args_for_target,
                    generics_for_target,
                    locals,
                    destination,
                    target_bb,
                    span,
                )
            }
            MirOrDynIndex::Mir(body) => self.exec_looked_up_function(
                body,
                locals,
                def,
                arg_bytes,
                span,
                destination,
                target_bb,
            ),
        }
    }

    fn exec_looked_up_function(
        &mut self,
        mir_body: Arc<MirBody>,
        locals: &Locals,
        def: FunctionId,
        arg_bytes: impl Iterator<Item = IntervalOrOwned>,
        span: MirSpan,
        destination: Interval,
        target_bb: Option<BasicBlockId>,
    ) -> Result<Option<StackFrame>> {
        Ok(if let Some(target_bb) = target_bb {
            let (mut locals, prev_stack_ptr) =
                self.create_locals_for_body(&mir_body, Some(destination))?;
            self.fill_locals_for_body(&mir_body, &mut locals, arg_bytes.into_iter())?;
            let span = (span, locals.body.owner);
            Some(StackFrame { locals, destination: Some(target_bb), prev_stack_ptr, span })
        } else {
            let result = self.interpret_mir(mir_body, arg_bytes).map_err(|e| {
                MirEvalError::InFunction(
                    Box::new(e),
                    vec![(Either::Left(def), span, locals.body.owner)],
                )
            })?;
            destination.write_from_interval(self, result)?;
            None
        })
    }

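    /// Executes a call to one of the `Fn`/`FnMut`/`FnOnce` trait methods by peeling
    /// references off the callee until a callable type is found.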
    fn exec_fn_trait(
        &mut self,
        def: FunctionId,
        args: &[IntervalAndTy],
        generic_args: Substitution,
        locals: &Locals,
        destination: Interval,
        target_bb: Option<BasicBlockId>,
        span: MirSpan,
    ) -> Result<Option<StackFrame>> {
        let interner = DbInterner::new_with(self.db, None, None);
        let func = args
            .first()
            .ok_or_else(|| MirEvalError::InternalError("fn trait with no arg".into()))?;
        let mut func_ty = func.ty.clone();
        let mut func_data = func.interval;
        while let TyKind::Ref(_, _, z) = func_ty.kind(Interner) {
            func_ty = z.clone();
            if matches!(func_ty.kind(Interner), TyKind::Dyn(_)) {
                let id =
                    from_bytes!(usize, &func_data.get(self)?[self.ptr_size()..self.ptr_size() * 2]);
                func_data = func_data.slice(0..self.ptr_size());
                func_ty = convert_ty_for_result(interner, self.vtable_map.ty(id)?);
            }
            let size = self.size_of_sized(&func_ty, locals, "self type of fn trait")?;
            func_data = Interval { addr: Address::from_bytes(func_data.get(self)?)?, size };
        }
        match &func_ty.kind(Interner) {
            TyKind::FnDef(def, subst) => self.exec_fn_def(
                CallableDefId::from_chalk(self.db, *def),
                subst,
                destination,
                &args[1..],
                locals,
                target_bb,
                span,
            ),
            TyKind::Function(_) => {
                self.exec_fn_pointer(func_data, destination, &args[1..], locals, target_bb, span)
            }
            TyKind::Closure(closure, subst) => self.exec_closure(
                *closure,
                func_data,
                &Substitution::from_iter(Interner, ClosureSubst(subst).parent_subst()),
                destination,
                &args[1..],
                locals,
                span,
            ),
            _ => {
                // Try to execute a manual impl of a `Fn` trait for a struct (a nightly
                // feature used in the standard library): repack the remaining arguments
                // into a tuple and call the trait method directly.
                let arg0 = func;
                let args = &args[1..];
                let arg1 = {
                    let ty = TyKind::Tuple(
                        args.len(),
                        Substitution::from_iter(Interner, args.iter().map(|it| it.ty.clone())),
                    )
                    .intern(Interner);
                    let layout = self.layout(ty.to_nextsolver(interner))?;
                    let result = self.construct_with_layout(
                        layout.size.bytes_usize(),
                        &layout,
                        None,
                        args.iter().map(|it| IntervalOrOwned::Borrowed(it.interval)),
                    )?;
                    let size = layout.size.bytes_usize();
                    let addr = self.heap_allocate(size, layout.align.abi.bytes() as usize)?;
                    self.write_memory(addr, &result)?;
                    IntervalAndTy { interval: Interval { addr, size }, ty }
                };
                self.exec_fn_with_args(
                    def,
                    &[arg0.clone(), arg1],
                    generic_args,
                    locals,
                    destination,
                    target_bb,
                    span,
                )
            }
        }
    }

    fn eval_static(&mut self, st: StaticId, locals: &Locals) -> Result<Address> {
        if let Some(o) = self.static_locations.get(&st) {
            return Ok(*o);
        };
        let static_data = self.db.static_signature(st);
        let result = if !static_data.flags.contains(StaticFlags::EXTERN) {
            let konst = self.db.const_eval_static(st).map_err(|e| {
                MirEvalError::ConstEvalError(static_data.name.as_str().to_owned(), Box::new(e))
            })?;
            self.allocate_const_in_heap(locals, &konst)?
        } else {
            let ty = &self.db.infer(st.into())[self.db.body(st.into()).body_expr];
            let Some((size, align)) = self.size_align_of(ty, locals)? else {
                not_supported!("unsized extern static");
            };
            let addr = self.heap_allocate(size, align)?;
            Interval::new(addr, size)
        };
        let addr = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
        self.write_memory(addr, &result.addr.to_bytes())?;
        self.static_locations.insert(st, addr);
        Ok(addr)
    }

    fn const_eval_discriminant(&self, variant: EnumVariantId) -> Result<i128> {
        let r = self.db.const_eval_discriminant(variant);
        match r {
            Ok(r) => Ok(r),
            Err(e) => {
                let db = self.db;
                let loc = variant.lookup(db);
                let edition = self.crate_id.data(self.db).edition;
                let name = format!(
                    "{}::{}",
                    self.db.enum_signature(loc.parent).name.display(db, edition),
                    loc.parent
                        .enum_variants(self.db)
                        .variant_name_by_id(variant)
                        .unwrap()
                        .display(db, edition),
                );
                Err(MirEvalError::ConstEvalError(name, Box::new(e)))
            }
        }
    }

    fn drop_place(&mut self, place: &Place, locals: &mut Locals, span: MirSpan) -> Result<()> {
        let (addr, ty, metadata) = self.place_addr_and_ty_and_metadata(place, locals)?;
        if !locals.drop_flags.remove_place(place, &locals.body.projection_store) {
            return Ok(());
        }
        let metadata = match metadata {
            Some(it) => it.get(self)?.to_vec(),
            None => vec![],
        };
        self.run_drop_glue_deep(ty, locals, addr, &metadata, span)
    }

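    /// Runs `Drop::drop` for the value if it has an implementation, then recursively
    /// drops struct fields. Enum and union fields are currently not dropped.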
    fn run_drop_glue_deep(
        &mut self,
        ty: Ty,
        locals: &Locals,
        addr: Address,
        _metadata: &[u8],
        span: MirSpan,
    ) -> Result<()> {
        let Some(drop_fn) = (|| {
            let drop_trait = LangItem::Drop.resolve_trait(self.db, self.crate_id)?;
            drop_trait.trait_items(self.db).method_by_name(&Name::new_symbol_root(sym::drop))
        })() else {
            // In some tests minicore has no `Drop` trait, and we can ignore drop there.
            return Ok(());
        };

        let generic_args = Substitution::from1(Interner, ty.clone());
        if let Ok(MirOrDynIndex::Mir(body)) =
            self.get_mir_or_dyn_index(drop_fn, generic_args, locals, span)
        {
            self.exec_looked_up_function(
                body,
                locals,
                drop_fn,
                iter::once(IntervalOrOwned::Owned(addr.to_bytes().to_vec())),
                span,
                Interval { addr: Address::Invalid(0), size: 0 },
                None,
            )?;
        }
        match ty.kind(Interner) {
            TyKind::Adt(id, subst) => {
                match id.0 {
                    AdtId::StructId(s) => {
                        let data = self.db.struct_signature(s);
                        if data.flags.contains(StructFlags::IS_MANUALLY_DROP) {
                            return Ok(());
                        }
                        let layout = self.layout_adt(id.0, subst.clone())?;
                        let variant_fields = s.fields(self.db);
                        match variant_fields.shape {
                            FieldsShape::Record | FieldsShape::Tuple => {
                                let field_types = self.db.field_types(s.into());
                                for (field, _) in variant_fields.fields().iter() {
                                    let offset = layout
                                        .fields
                                        .offset(u32::from(field.into_raw()) as usize)
                                        .bytes_usize();
                                    let addr = addr.offset(offset);
                                    let ty = field_types[field].clone().substitute(Interner, subst);
                                    self.run_drop_glue_deep(ty, locals, addr, &[], span)?;
                                }
                            }
                            FieldsShape::Unit => (),
                        }
                    }
                    AdtId::UnionId(_) => (),
                    AdtId::EnumId(_) => (),
                }
            }
            TyKind::AssociatedType(_, _)
            | TyKind::Scalar(_)
            | TyKind::Tuple(_, _)
            | TyKind::Array(_, _)
            | TyKind::Slice(_)
            | TyKind::Raw(_, _)
            | TyKind::Ref(_, _, _)
            | TyKind::OpaqueType(_, _)
            | TyKind::FnDef(_, _)
            | TyKind::Str
            | TyKind::Never
            | TyKind::Closure(_, _)
            | TyKind::Coroutine(_, _)
            | TyKind::CoroutineWitness(_, _)
            | TyKind::Foreign(_)
            | TyKind::Error
            | TyKind::Placeholder(_)
            | TyKind::Dyn(_)
            | TyKind::Alias(_)
            | TyKind::Function(_)
            | TyKind::BoundVar(_)
            | TyKind::InferenceVar(_, _) => (),
        };
        Ok(())
    }

    fn write_to_stdout(&mut self, interval: Interval) -> Result<()> {
        self.stdout.extend(interval.get(self)?.to_vec());
        Ok(())
    }

    fn write_to_stderr(&mut self, interval: Interval) -> Result<()> {
        self.stderr.extend(interval.get(self)?.to_vec());
        Ok(())
    }
}

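/// Renders a constant by building a `core::fmt::Arguments` value by hand and
/// invoking `std::fmt::format` inside the interpreter, with the constant's
/// `Debug` implementation as the sole format argument.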
pub fn render_const_using_debug_impl(
    db: &dyn HirDatabase,
    owner: DefWithBodyId,
    c: &Const,
) -> Result<String> {
    let interner = DbInterner::new_with(db, None, None);
    let mut evaluator = Evaluator::new(db, owner, false, None)?;
    let locals = &Locals {
        ptr: ArenaMap::new(),
        body: db
            .mir_body(owner)
            .map_err(|_| MirEvalError::NotSupported("unreachable".to_owned()))?,
        drop_flags: DropFlags::default(),
    };
    let data = evaluator.allocate_const_in_heap(locals, c)?;
    let resolver = owner.resolver(db);
    let Some(TypeNs::TraitId(debug_trait)) = resolver.resolve_path_in_type_ns_fully(
        db,
        &hir_def::expr_store::path::Path::from_known_path_with_no_generic(path![core::fmt::Debug]),
    ) else {
        not_supported!("core::fmt::Debug not found");
    };
    let Some(debug_fmt_fn) =
        debug_trait.trait_items(db).method_by_name(&Name::new_symbol_root(sym::fmt))
    else {
        not_supported!("core::fmt::Debug::fmt not found");
    };
    // a1 holds the (empty) format string pieces.
    let a1 = evaluator.heap_allocate(evaluator.ptr_size() * 2, evaluator.ptr_size())?;
    // a2 holds the single format argument: a pointer to the value, paired with the
    // vtable id of its `Debug::fmt` instantiation.
    let a2 = evaluator.heap_allocate(evaluator.ptr_size() * 2, evaluator.ptr_size())?;
    evaluator.write_memory(a2, &data.addr.to_bytes())?;
    let debug_fmt_fn_ptr = evaluator.vtable_map.id(TyKind::FnDef(
        CallableDefId::FunctionId(debug_fmt_fn).to_chalk(db),
        Substitution::from1(Interner, c.data(Interner).ty.clone()),
    )
    .intern(Interner)
    .to_nextsolver(interner));
    evaluator.write_memory(a2.offset(evaluator.ptr_size()), &debug_fmt_fn_ptr.to_le_bytes())?;
    // a3 is a `core::fmt::Arguments`-shaped value referencing a1 and a2.
    let a3 = evaluator.heap_allocate(evaluator.ptr_size() * 6, evaluator.ptr_size())?;
    evaluator.write_memory(a3, &a1.to_bytes())?;
    evaluator.write_memory(a3.offset(evaluator.ptr_size()), &[1])?;
    evaluator.write_memory(a3.offset(2 * evaluator.ptr_size()), &a2.to_bytes())?;
    evaluator.write_memory(a3.offset(3 * evaluator.ptr_size()), &[1])?;
    let Some(ValueNs::FunctionId(format_fn)) = resolver.resolve_path_in_value_ns_fully(
        db,
        &hir_def::expr_store::path::Path::from_known_path_with_no_generic(path![std::fmt::format]),
        HygieneId::ROOT,
    ) else {
        not_supported!("std::fmt::format not found");
    };
    let interval = evaluator.interpret_mir(
        db.mir_body(format_fn.into()).map_err(|e| MirEvalError::MirLowerError(format_fn, e))?,
        [IntervalOrOwned::Borrowed(Interval { addr: a3, size: evaluator.ptr_size() * 6 })]
            .into_iter(),
    )?;
    let message_string = interval.get(&evaluator)?;
    let addr =
        Address::from_bytes(&message_string[evaluator.ptr_size()..2 * evaluator.ptr_size()])?;
    let size = from_bytes!(usize, message_string[2 * evaluator.ptr_size()..]);
    Ok(std::string::String::from_utf8_lossy(evaluator.read_memory(addr, size)?).into_owned())
}

3053
3054pub fn pad16(it: &[u8], is_signed: bool) -> [u8; 16] {
3055 let is_negative = is_signed && it.last().unwrap_or(&0) > &127;
3056 let mut res = [if is_negative { 255 } else { 0 }; 16];
3057 res[..it.len()].copy_from_slice(it);
3058 res
3059}
3060
3061macro_rules! for_each_int_type {
3062 ($call_macro:path, $args:tt) => {
3063 $call_macro! {
3064 $args
3065 I8
3066 U8
3067 I16
3068 U16
3069 I32
3070 U32
3071 I64
3072 U64
3073 I128
3074 U128
3075 }
3076 };
3077}
3078
3079#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
3080enum IntValue {
3081 I8(i8),
3082 U8(u8),
3083 I16(i16),
3084 U16(u16),
3085 I32(i32),
3086 U32(u32),
3087 I64(i64),
3088 U64(u64),
3089 I128(i128),
3090 U128(u128),
3091}
3092
3093macro_rules! checked_int_op {
3094 ( [ $op:ident ] $( $int_ty:ident )+ ) => {
3095 fn $op(self, other: Self) -> Option<Self> {
3096 match (self, other) {
3097 $( (Self::$int_ty(a), Self::$int_ty(b)) => a.$op(b).map(Self::$int_ty), )+
3098 _ => panic!("incompatible integer types"),
3099 }
3100 }
3101 };
3102}
3103
3104macro_rules! int_bit_shifts {
3105 ( [ $op:ident ] $( $int_ty:ident )+ ) => {
3106 fn $op(self, amount: u32) -> Option<Self> {
3107 match self {
3108 $( Self::$int_ty(this) => this.$op(amount).map(Self::$int_ty), )+
3109 }
3110 }
3111 };
3112}
3113
3114macro_rules! unchecked_int_op {
3115 ( [ $name:ident, $op:tt ] $( $int_ty:ident )+ ) => {
3116 fn $name(self, other: Self) -> Self {
3117 match (self, other) {
3118 $( (Self::$int_ty(a), Self::$int_ty(b)) => Self::$int_ty(a $op b), )+
3119 _ => panic!("incompatible integer types"),
3120 }
3121 }
3122 };
3123}
3124
3125impl IntValue {
3126 fn from_bytes(bytes: &[u8], is_signed: bool) -> Self {
3127 match (bytes.len(), is_signed) {
3128 (1, false) => Self::U8(u8::from_le_bytes(bytes.try_into().unwrap())),
3129 (1, true) => Self::I8(i8::from_le_bytes(bytes.try_into().unwrap())),
3130 (2, false) => Self::U16(u16::from_le_bytes(bytes.try_into().unwrap())),
3131 (2, true) => Self::I16(i16::from_le_bytes(bytes.try_into().unwrap())),
3132 (4, false) => Self::U32(u32::from_le_bytes(bytes.try_into().unwrap())),
3133 (4, true) => Self::I32(i32::from_le_bytes(bytes.try_into().unwrap())),
3134 (8, false) => Self::U64(u64::from_le_bytes(bytes.try_into().unwrap())),
3135 (8, true) => Self::I64(i64::from_le_bytes(bytes.try_into().unwrap())),
3136 (16, false) => Self::U128(u128::from_le_bytes(bytes.try_into().unwrap())),
3137 (16, true) => Self::I128(i128::from_le_bytes(bytes.try_into().unwrap())),
3138 (len, is_signed) => {
3139 never!("invalid integer size: {len}, signed: {is_signed}");
3140 Self::I32(0)
3141 }
3142 }
3143 }
3144
3145 fn to_bytes(self) -> Vec<u8> {
3146 macro_rules! m {
3147 ( [] $( $int_ty:ident )+ ) => {
3148 match self {
3149 $( Self::$int_ty(v) => v.to_le_bytes().to_vec() ),+
3150 }
3151 };
3152 }
3153 for_each_int_type! { m, [] }
3154 }
3155
3156 fn as_u32(self) -> Option<u32> {
3157 macro_rules! m {
3158 ( [] $( $int_ty:ident )+ ) => {
3159 match self {
3160 $( Self::$int_ty(v) => v.try_into().ok() ),+
3161 }
3162 };
3163 }
3164 for_each_int_type! { m, [] }
3165 }
3166
3167 for_each_int_type!(checked_int_op, [checked_add]);
3168 for_each_int_type!(checked_int_op, [checked_sub]);
3169 for_each_int_type!(checked_int_op, [checked_div]);
3170 for_each_int_type!(checked_int_op, [checked_rem]);
3171 for_each_int_type!(checked_int_op, [checked_mul]);
3172
3173 for_each_int_type!(int_bit_shifts, [checked_shl]);
3174 for_each_int_type!(int_bit_shifts, [checked_shr]);
3175}
3176
3177impl std::ops::BitAnd for IntValue {
3178 type Output = Self;
3179 for_each_int_type!(unchecked_int_op, [bitand, &]);
3180}
3181impl std::ops::BitOr for IntValue {
3182 type Output = Self;
3183 for_each_int_type!(unchecked_int_op, [bitor, |]);
3184}
3185impl std::ops::BitXor for IntValue {
3186 type Output = Self;
3187 for_each_int_type!(unchecked_int_op, [bitxor, ^]);
3188}