use std::cmp::{self, Ordering};

use hir_def::{attrs::AttrFlags, signatures::FunctionSignature};
use hir_expand::name::Name;
use intern::sym;
use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike, Ty as _};
use stdx::never;

use crate::{
    InferenceResult,
    display::DisplayTarget,
    drop::{DropGlue, has_drop_glue},
    mir::eval::{
        Address, AdtId, Arc, Evaluator, FunctionId, GenericArgs, HasModule, HirDisplay,
        InternedClosure, Interval, IntervalAndTy, IntervalOrOwned, ItemContainerId, Layout, Locals,
        Lookup, MirEvalError, MirSpan, Mutability, Result, Ty, TyKind, pad16,
    },
    next_solver::Region,
};

mod simd;

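/// Converts a little-endian byte slice into the given numeric type, returning an
/// internal error when the slice length does not match the type's size.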
macro_rules! from_bytes {
    ($ty:tt, $value:expr) => {
        ($ty::from_le_bytes(match ($value).try_into() {
            Ok(it) => it,
            #[allow(unreachable_patterns)]
            Err(_) => return Err(MirEvalError::InternalError("mismatched size".into())),
        }))
    };
}

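/// Returns early from the enclosing function with `MirEvalError::NotSupported`,
/// formatting the given message.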
macro_rules! not_supported {
    ($it: expr) => {
        return Err(MirEvalError::NotSupported(format!($it)))
    };
}

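/// Lang items that receive special handling during MIR evaluation instead of being
/// executed as ordinary functions.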
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum EvalLangItem {
    BeginPanic,
    SliceLen,
    DropInPlace,
}

impl<'db> Evaluator<'db> {
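    /// Checks whether `def` needs special handling (intrinsics, well-known `extern "C"`
    /// functions, allocator functions, lang items, or `Clone` for function pointers,
    /// tuples and closures) and executes it directly if so. Returns `Ok(true)` when the
    /// call was handled here and `Ok(false)` when the caller should run the function's
    /// own MIR body.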
    pub(super) fn detect_and_exec_special_function(
        &mut self,
        def: FunctionId,
        args: &[IntervalAndTy<'db>],
        generic_args: GenericArgs<'db>,
        locals: &Locals<'db>,
        destination: Interval,
        span: MirSpan,
    ) -> Result<'db, bool> {
        if self.not_special_fn_cache.borrow().contains(&def) {
            return Ok(false);
        }

        let function_data = self.db.function_signature(def);
        let attrs = AttrFlags::query(self.db, def.into());
        let is_intrinsic = FunctionSignature::is_intrinsic(self.db, def);

        if is_intrinsic {
            return self.exec_intrinsic(
                function_data.name.as_str(),
                args,
                generic_args,
                destination,
                locals,
                span,
                !function_data.has_body()
                    || attrs.contains(AttrFlags::RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN),
            );
        }
        let is_extern_c = match def.lookup(self.db).container {
            hir_def::ItemContainerId::ExternBlockId(block) => block.abi(self.db) == Some(sym::C),
            _ => false,
        };
        if is_extern_c {
            return self
                .exec_extern_c(
                    function_data.name.as_str(),
                    args,
                    generic_args,
                    destination,
                    locals,
                    span,
                )
                .map(|()| true);
        }

        if attrs.intersects(
            AttrFlags::RUSTC_ALLOCATOR
                | AttrFlags::RUSTC_DEALLOCATOR
                | AttrFlags::RUSTC_REALLOCATOR
                | AttrFlags::RUSTC_ALLOCATOR_ZEROED,
        ) {
            self.exec_alloc_fn(attrs, args, destination)?;
            return Ok(true);
        }
        if let Some(it) = self.detect_lang_function(def) {
            let result = self.exec_lang_item(it, generic_args, args, locals, span)?;
            destination.write_from_bytes(self, &result)?;
            return Ok(true);
        }
        if let ItemContainerId::TraitId(t) = def.lookup(self.db).container
            && Some(t) == self.lang_items().Clone
        {
            let [self_ty] = generic_args.as_slice() else {
                not_supported!("wrong generic arg count for clone");
            };
            let Some(self_ty) = self_ty.ty() else {
                not_supported!("wrong generic arg kind for clone");
            };
            if matches!(
                self_ty.kind(),
                TyKind::FnPtr(..) | TyKind::Tuple(..) | TyKind::Closure(..)
            ) {
                self.exec_clone(def, args, self_ty, locals, destination, span)?;
                return Ok(true);
            }
            return Ok(false);
        }
        self.not_special_fn_cache.borrow_mut().insert(def);
        Ok(false)
    }

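    /// Redirects calls to certain functions to a different implementation. Currently this
    /// maps the `panic_fmt` lang item to `const_panic_fmt`, which can be evaluated at
    /// compile time.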
    pub(super) fn detect_and_redirect_special_function(
        &mut self,
        def: FunctionId,
    ) -> Result<'db, Option<FunctionId>> {
        if Some(def) == self.lang_items().PanicFmt {
            let Some(const_panic_fmt) = self.lang_items().ConstPanicFmt else {
                not_supported!("const_panic_fmt lang item not found or not a function");
            };
            return Ok(Some(const_panic_fmt));
        }
        Ok(None)
    }

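    /// Implements `Clone::clone` for the shapes the compiler derives it for: function
    /// pointers are copied bitwise, while tuples and closures clone each field in turn.
    /// Any other self type falls back to executing the function through
    /// `exec_fn_with_args`.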
    fn exec_clone(
        &mut self,
        def: FunctionId,
        args: &[IntervalAndTy<'db>],
        self_ty: Ty<'db>,
        locals: &Locals<'db>,
        destination: Interval,
        span: MirSpan,
    ) -> Result<'db, ()> {
        match self_ty.kind() {
            TyKind::FnPtr(..) => {
                let [arg] = args else {
                    not_supported!("wrong arg count for clone");
                };
                let addr = Address::from_bytes(arg.get(self)?)?;
                return destination
                    .write_from_interval(self, Interval { addr, size: destination.size });
            }
            TyKind::Closure(id, subst) => {
                let [arg] = args else {
                    not_supported!("wrong arg count for clone");
                };
                let addr = Address::from_bytes(arg.get(self)?)?;
                let InternedClosure(closure_owner, _) = self.db.lookup_intern_closure(id.0);
                let infer = InferenceResult::for_body(self.db, closure_owner);
                let (captures, _) = infer.closure_info(id.0);
                let layout = self.layout(self_ty)?;
                let db = self.db;
                let ty_iter = captures.iter().map(|c| c.ty(db, subst));
                self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?;
            }
            TyKind::Tuple(subst) => {
                let [arg] = args else {
                    not_supported!("wrong arg count for clone");
                };
                let addr = Address::from_bytes(arg.get(self)?)?;
                let layout = self.layout(self_ty)?;
                self.exec_clone_for_fields(
                    subst.iter(),
                    layout,
                    addr,
                    def,
                    locals,
                    destination,
                    span,
                )?;
            }
            _ => {
                self.exec_fn_with_args(
                    def,
                    args,
                    GenericArgs::new_from_iter(self.interner(), [self_ty.into()]),
                    locals,
                    destination,
                    None,
                    span,
                )?;
            }
        }
        Ok(())
    }

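    /// Clones every field yielded by `ty_iter`: each field is read from its offset within
    /// `addr` (computed from `layout`) and cloned into the matching slice of `destination`.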
    fn exec_clone_for_fields(
        &mut self,
        ty_iter: impl Iterator<Item = Ty<'db>>,
        layout: Arc<Layout>,
        addr: Address,
        def: FunctionId,
        locals: &Locals<'db>,
        destination: Interval,
        span: MirSpan,
    ) -> Result<'db, ()> {
        for (i, ty) in ty_iter.enumerate() {
            let size = self.layout(ty)?.size.bytes_usize();
            let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
            let arg = IntervalAndTy {
                interval: Interval { addr: tmp, size: self.ptr_size() },
                ty: Ty::new_ref(
                    self.interner(),
                    Region::error(self.interner()),
                    ty,
                    Mutability::Not,
                ),
            };
            let offset = layout.fields.offset(i).bytes_usize();
            self.write_memory(tmp, &addr.offset(offset).to_bytes())?;
            self.exec_clone(
                def,
                &[arg],
                ty,
                locals,
                destination.slice(offset..offset + size),
                span,
            )?;
        }
        Ok(())
    }

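    /// Emulates the `#[rustc_allocator]` family of functions on the evaluator's own heap:
    /// allocation (zeroed or not), deallocation (a no-op here), and reallocation.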
    fn exec_alloc_fn(
        &mut self,
        alloc_fn: AttrFlags,
        args: &[IntervalAndTy<'db>],
        destination: Interval,
    ) -> Result<'db, ()> {
        match alloc_fn {
            _ if alloc_fn
                .intersects(AttrFlags::RUSTC_ALLOCATOR_ZEROED | AttrFlags::RUSTC_ALLOCATOR) =>
            {
                let [size, align] = args else {
                    return Err(MirEvalError::InternalError(
                        "rustc_allocator args are not provided".into(),
                    ));
                };
                let size = from_bytes!(usize, size.get(self)?);
                let align = from_bytes!(usize, align.get(self)?);
                let result = self.heap_allocate(size, align)?;
                destination.write_from_bytes(self, &result.to_bytes())?;
            }
            _ if alloc_fn.contains(AttrFlags::RUSTC_DEALLOCATOR) => {}
            _ if alloc_fn.contains(AttrFlags::RUSTC_REALLOCATOR) => {
                let [ptr, old_size, align, new_size] = args else {
                    return Err(MirEvalError::InternalError(
                        "rustc_reallocator args are not provided".into(),
                    ));
                };
                let old_size = from_bytes!(usize, old_size.get(self)?);
                let new_size = from_bytes!(usize, new_size.get(self)?);
                if old_size >= new_size {
                    destination.write_from_interval(self, ptr.interval)?;
                } else {
                    let ptr = Address::from_bytes(ptr.get(self)?)?;
                    let align = from_bytes!(usize, align.get(self)?);
                    let result = self.heap_allocate(new_size, align)?;
                    Interval { addr: result, size: old_size }
                        .write_from_interval(self, Interval { addr: ptr, size: old_size })?;
                    destination.write_from_bytes(self, &result.to_bytes())?;
                }
            }
            _ => not_supported!("unknown alloc function"),
        }
        Ok(())
    }

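    /// Maps `def` to the `EvalLangItem` it implements, if any. Functions flagged with
    /// `RUSTC_CONST_PANIC_STR` are treated like `begin_panic`.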
    fn detect_lang_function(&self, def: FunctionId) -> Option<EvalLangItem> {
        use EvalLangItem::*;
        let lang_items = self.lang_items();
        let attrs = AttrFlags::query(self.db, def.into());

        if attrs.contains(AttrFlags::RUSTC_CONST_PANIC_STR) {
            return Some(BeginPanic);
        }

        [
            (lang_items.BeginPanic, BeginPanic),
            (lang_items.SliceLen, SliceLen),
            (lang_items.DropInPlace, DropInPlace),
        ]
        .into_iter()
        .find_map(|(lang_item, candidate)| (lang_item == Some(def)).then_some(candidate))
    }

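    /// Executes one of the specially handled lang items and returns the bytes of its
    /// result. `begin_panic` is converted into `MirEvalError::Panic`, rendering the
    /// payload as a string where possible.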
    fn exec_lang_item(
        &mut self,
        it: EvalLangItem,
        generic_args: GenericArgs<'db>,
        args: &[IntervalAndTy<'db>],
        locals: &Locals<'db>,
        span: MirSpan,
    ) -> Result<'db, Vec<u8>> {
        use EvalLangItem::*;
        let mut args = args.iter();
        match it {
            BeginPanic => {
                let mut arg = args
                    .next()
                    .ok_or(MirEvalError::InternalError(
                        "argument of BeginPanic is not provided".into(),
                    ))?
                    .clone();
                while let TyKind::Ref(_, ty, _) = arg.ty.kind() {
                    if ty.is_str() {
                        let (pointee, metadata) = arg.interval.get(self)?.split_at(self.ptr_size());
                        let len = from_bytes!(usize, metadata);

                        return Err(MirEvalError::Panic(
                            std::str::from_utf8(
                                self.read_memory(Address::from_bytes(pointee)?, len)?,
                            )
                            .unwrap()
                            .to_owned(),
                        ));
                    }
                    let size = self.size_of_sized(ty, locals, "begin panic arg")?;
                    let pointee = arg.interval.get(self)?;
                    arg = IntervalAndTy {
                        interval: Interval::new(Address::from_bytes(pointee)?, size),
                        ty,
                    };
                }
                Err(MirEvalError::Panic(format!("unknown-panic-payload: {:?}", arg.ty.kind())))
            }
            SliceLen => {
                let arg = args.next().ok_or(MirEvalError::InternalError(
                    "argument of <[T]>::len() is not provided".into(),
                ))?;
                let arg = arg.get(self)?;
                let ptr_size = arg.len() / 2;
                Ok(arg[ptr_size..].into())
            }
            DropInPlace => {
                let ty = generic_args.as_slice().first().and_then(|it| it.ty()).ok_or(
                    MirEvalError::InternalError(
                        "generic argument of drop_in_place is not provided".into(),
                    ),
                )?;
                let arg = args.next().ok_or(MirEvalError::InternalError(
                    "argument of drop_in_place is not provided".into(),
                ))?;
                let arg = arg.interval.get(self)?.to_owned();
                self.run_drop_glue_deep(
                    ty,
                    locals,
                    Address::from_bytes(&arg[0..self.ptr_size()])?,
                    &arg[self.ptr_size()..],
                    span,
                )?;
                Ok(vec![])
            }
        }
    }

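    /// Emulates a Linux syscall invoked through `libc::syscall`. Only `SYS_getrandom`
    /// (318 on x86_64) is currently supported; it fills the buffer from the evaluator's
    /// random state.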
    fn exec_syscall(
        &mut self,
        id: i64,
        args: &[IntervalAndTy<'db>],
        destination: Interval,
        _locals: &Locals<'db>,
        _span: MirSpan,
    ) -> Result<'db, ()> {
        match id {
            318 => {
                let [buf, len, _flags] = args else {
                    return Err(MirEvalError::InternalError(
                        "SYS_getrandom args are not provided".into(),
                    ));
                };
                let addr = Address::from_bytes(buf.get(self)?)?;
                let size = from_bytes!(usize, len.get(self)?);
                for i in 0..size {
                    let rand_byte = self.random_state.rand_u64() as u8;
                    self.write_memory(addr.offset(i), &[rand_byte])?;
                }
                destination.write_from_interval(self, len.interval)
            }
            _ => {
                not_supported!("Unknown syscall id {id:?}")
            }
        }
    }

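    /// Emulates well-known `extern "C"` functions: `memcmp`, `write` (stdout and stderr
    /// only), the `pthread_*` thread-local-storage helpers, `syscall`, `sched_getaffinity`,
    /// and `getenv`.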
    fn exec_extern_c(
        &mut self,
        as_str: &str,
        args: &[IntervalAndTy<'db>],
        _generic_args: GenericArgs<'db>,
        destination: Interval,
        locals: &Locals<'db>,
        span: MirSpan,
    ) -> Result<'db, ()> {
        match as_str {
            "memcmp" => {
                let [ptr1, ptr2, size] = args else {
                    return Err(MirEvalError::InternalError("memcmp args are not provided".into()));
                };
                let addr1 = Address::from_bytes(ptr1.get(self)?)?;
                let addr2 = Address::from_bytes(ptr2.get(self)?)?;
                let size = from_bytes!(usize, size.get(self)?);
                let slice1 = self.read_memory(addr1, size)?;
                let slice2 = self.read_memory(addr2, size)?;
                let r: i128 = match slice1.cmp(slice2) {
                    cmp::Ordering::Less => -1,
                    cmp::Ordering::Equal => 0,
                    cmp::Ordering::Greater => 1,
                };
                destination.write_from_bytes(self, &r.to_le_bytes()[..destination.size])
            }
            "write" => {
                let [fd, ptr, len] = args else {
                    return Err(MirEvalError::InternalError(
                        "libc::write args are not provided".into(),
                    ));
                };
                let fd = u128::from_le_bytes(pad16(fd.get(self)?, false));
                let interval = Interval {
                    addr: Address::from_bytes(ptr.get(self)?)?,
                    size: from_bytes!(usize, len.get(self)?),
                };
                match fd {
                    1 => {
                        self.write_to_stdout(interval)?;
                    }
                    2 => {
                        self.write_to_stderr(interval)?;
                    }
                    _ => not_supported!("write to arbitrary file descriptor"),
                }
                destination.write_from_interval(self, len.interval)?;
                Ok(())
            }
            "pthread_key_create" => {
                let key = self.thread_local_storage.create_key();
                let Some(arg0) = args.first() else {
                    return Err(MirEvalError::InternalError(
                        "pthread_key_create arg0 is not provided".into(),
                    ));
                };
                let arg0_addr = Address::from_bytes(arg0.get(self)?)?;
                let key_ty = if let Some((ty, ..)) = arg0.ty.as_reference_or_ptr() {
                    ty
                } else {
                    return Err(MirEvalError::InternalError(
                        "pthread_key_create arg0 is not a pointer".into(),
                    ));
                };
                let arg0_interval = Interval::new(
                    arg0_addr,
                    self.size_of_sized(key_ty, locals, "pthread_key_create key arg")?,
                );
                arg0_interval.write_from_bytes(self, &key.to_le_bytes()[0..arg0_interval.size])?;
                destination.write_from_bytes(self, &0u64.to_le_bytes()[0..destination.size])?;
                Ok(())
            }
            "pthread_getspecific" => {
                let Some(arg0) = args.first() else {
                    return Err(MirEvalError::InternalError(
                        "pthread_getspecific arg0 is not provided".into(),
                    ));
                };
                let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
                let value = self.thread_local_storage.get_key(key)?;
                destination.write_from_bytes(self, &value.to_le_bytes()[0..destination.size])?;
                Ok(())
            }
            "pthread_setspecific" => {
                let Some(arg0) = args.first() else {
                    return Err(MirEvalError::InternalError(
                        "pthread_setspecific arg0 is not provided".into(),
                    ));
                };
                let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
                let Some(arg1) = args.get(1) else {
                    return Err(MirEvalError::InternalError(
                        "pthread_setspecific arg1 is not provided".into(),
                    ));
                };
                let value = from_bytes!(u128, pad16(arg1.get(self)?, false));
                self.thread_local_storage.set_key(key, value)?;
                destination.write_from_bytes(self, &0u64.to_le_bytes()[0..destination.size])?;
                Ok(())
            }
            "pthread_key_delete" => {
                destination.write_from_bytes(self, &0u64.to_le_bytes()[0..destination.size])?;
                Ok(())
            }
            "syscall" => {
                let Some((id, rest)) = args.split_first() else {
                    return Err(MirEvalError::InternalError("syscall arg1 is not provided".into()));
                };
                let id = from_bytes!(i64, id.get(self)?);
                self.exec_syscall(id, rest, destination, locals, span)
            }
            "sched_getaffinity" => {
                let [_pid, _set_size, set] = args else {
                    return Err(MirEvalError::InternalError(
                        "sched_getaffinity args are not provided".into(),
                    ));
                };
                let set = Address::from_bytes(set.get(self)?)?;
                self.write_memory(set, &[1])?;
                self.write_memory_using_ref(destination.addr, destination.size)?.fill(0);
                Ok(())
            }
            "getenv" => {
                let [name] = args else {
                    return Err(MirEvalError::InternalError(
                        "getenv args are not provided".into(),
                    ));
                };
                let mut name_buf = vec![];
                let name = {
                    let mut index = Address::from_bytes(name.get(self)?)?;
                    loop {
                        let byte = self.read_memory(index, 1)?[0];
                        index = index.offset(1);
                        if byte == 0 {
                            break;
                        }
                        name_buf.push(byte);
                    }
                    String::from_utf8_lossy(&name_buf)
                };
                let value = self.crate_id.env(self.db).get(&name);
                match value {
                    None => {
                        self.write_memory_using_ref(destination.addr, destination.size)?.fill(0);
                    }
                    Some(mut value) => {
                        value.push('\0');
                        let addr = self.heap_allocate(value.len(), 1)?;
                        self.write_memory(addr, value.as_bytes())?;
                        self.write_memory(destination.addr, &addr.to_bytes())?;
                    }
                }
                Ok(())
            }
            _ => not_supported!("unknown external function {as_str}"),
        }
    }

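    /// Executes a compiler intrinsic. `needs_override` is true when the intrinsic has no
    /// fallback body (or must be overridden), in which case an unknown name is an error;
    /// otherwise `Ok(false)` is returned so the caller can evaluate the fallback body.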
    fn exec_intrinsic(
        &mut self,
        name: &str,
        args: &[IntervalAndTy<'db>],
        generic_args: GenericArgs<'db>,
        destination: Interval,
        locals: &Locals<'db>,
        span: MirSpan,
        needs_override: bool,
    ) -> Result<'db, bool> {
        if let Some(name) = name.strip_prefix("atomic_") {
            return self
                .exec_atomic_intrinsic(name, args, generic_args, destination, locals, span)
                .map(|()| true);
        }
        if let Some(name) = name.strip_prefix("simd_") {
            return self
                .exec_simd_intrinsic(name, args, generic_args, destination, locals, span)
                .map(|()| true);
        }
        if let Some(name) = name.strip_suffix("f64") {
            let result = match name {
                "sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
                | "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
                    let [arg] = args else {
                        return Err(MirEvalError::InternalError(
                            "f64 intrinsic signature doesn't match fn (f64) -> f64".into(),
                        ));
                    };
                    let arg = from_bytes!(f64, arg.get(self)?);
                    match name {
                        "sqrt" => arg.sqrt(),
                        "sin" => arg.sin(),
                        "cos" => arg.cos(),
                        "exp" => arg.exp(),
                        "exp2" => arg.exp2(),
                        "log" => arg.ln(),
                        "log10" => arg.log10(),
                        "log2" => arg.log2(),
                        "fabs" => arg.abs(),
                        "floor" => arg.floor(),
                        "ceil" => arg.ceil(),
                        "trunc" => arg.trunc(),
                        "rint" => arg.round(),
                        "nearbyint" => arg.round(),
                        "round" => arg.round(),
                        "roundeven" => arg.round(),
                        _ => unreachable!(),
                    }
                }
                "pow" | "minnum" | "maxnum" | "copysign" => {
                    let [arg1, arg2] = args else {
                        return Err(MirEvalError::InternalError(
                            "f64 intrinsic signature doesn't match fn (f64, f64) -> f64".into(),
                        ));
                    };
                    let arg1 = from_bytes!(f64, arg1.get(self)?);
                    let arg2 = from_bytes!(f64, arg2.get(self)?);
                    match name {
                        "pow" => arg1.powf(arg2),
                        "minnum" => arg1.min(arg2),
                        "maxnum" => arg1.max(arg2),
                        "copysign" => arg1.copysign(arg2),
                        _ => unreachable!(),
                    }
                }
                "powi" => {
                    let [arg1, arg2] = args else {
                        return Err(MirEvalError::InternalError(
                            "powif64 signature doesn't match fn (f64, i32) -> f64".into(),
                        ));
                    };
                    let arg1 = from_bytes!(f64, arg1.get(self)?);
                    let arg2 = from_bytes!(i32, arg2.get(self)?);
                    arg1.powi(arg2)
                }
                "fma" => {
                    let [arg1, arg2, arg3] = args else {
                        return Err(MirEvalError::InternalError(
                            "fmaf64 signature doesn't match fn (f64, f64, f64) -> f64".into(),
                        ));
                    };
                    let arg1 = from_bytes!(f64, arg1.get(self)?);
                    let arg2 = from_bytes!(f64, arg2.get(self)?);
                    let arg3 = from_bytes!(f64, arg3.get(self)?);
                    arg1.mul_add(arg2, arg3)
                }
                _ => not_supported!("unknown f64 intrinsic {name}"),
            };
            return destination.write_from_bytes(self, &result.to_le_bytes()).map(|()| true);
        }
        if let Some(name) = name.strip_suffix("f32") {
            let result = match name {
                "sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
                | "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
                    let [arg] = args else {
                        return Err(MirEvalError::InternalError(
                            "f32 intrinsic signature doesn't match fn (f32) -> f32".into(),
                        ));
                    };
                    let arg = from_bytes!(f32, arg.get(self)?);
                    match name {
                        "sqrt" => arg.sqrt(),
                        "sin" => arg.sin(),
                        "cos" => arg.cos(),
                        "exp" => arg.exp(),
                        "exp2" => arg.exp2(),
                        "log" => arg.ln(),
                        "log10" => arg.log10(),
                        "log2" => arg.log2(),
                        "fabs" => arg.abs(),
                        "floor" => arg.floor(),
                        "ceil" => arg.ceil(),
                        "trunc" => arg.trunc(),
                        "rint" => arg.round(),
                        "nearbyint" => arg.round(),
                        "round" => arg.round(),
                        "roundeven" => arg.round(),
                        _ => unreachable!(),
                    }
                }
                "pow" | "minnum" | "maxnum" | "copysign" => {
                    let [arg1, arg2] = args else {
                        return Err(MirEvalError::InternalError(
                            "f32 intrinsic signature doesn't match fn (f32, f32) -> f32".into(),
                        ));
                    };
                    let arg1 = from_bytes!(f32, arg1.get(self)?);
                    let arg2 = from_bytes!(f32, arg2.get(self)?);
                    match name {
                        "pow" => arg1.powf(arg2),
                        "minnum" => arg1.min(arg2),
                        "maxnum" => arg1.max(arg2),
                        "copysign" => arg1.copysign(arg2),
                        _ => unreachable!(),
                    }
                }
                "powi" => {
                    let [arg1, arg2] = args else {
                        return Err(MirEvalError::InternalError(
                            "powif32 signature doesn't match fn (f32, i32) -> f32".into(),
                        ));
                    };
                    let arg1 = from_bytes!(f32, arg1.get(self)?);
                    let arg2 = from_bytes!(i32, arg2.get(self)?);
                    arg1.powi(arg2)
                }
                "fma" => {
                    let [arg1, arg2, arg3] = args else {
                        return Err(MirEvalError::InternalError(
                            "fmaf32 signature doesn't match fn (f32, f32, f32) -> f32".into(),
                        ));
                    };
                    let arg1 = from_bytes!(f32, arg1.get(self)?);
                    let arg2 = from_bytes!(f32, arg2.get(self)?);
                    let arg3 = from_bytes!(f32, arg3.get(self)?);
                    arg1.mul_add(arg2, arg3)
                }
                _ => not_supported!("unknown f32 intrinsic {name}"),
            };
            return destination.write_from_bytes(self, &result.to_le_bytes()).map(|()| true);
        }
        match name {
            "size_of" => {
                let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
                    return Err(MirEvalError::InternalError(
                        "size_of generic arg is not provided".into(),
                    ));
                };
                let size = self.size_of_sized(ty, locals, "size_of arg")?;
                destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size])
            }
            "min_align_of" | "align_of" => {
                let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
                    return Err(MirEvalError::InternalError(
                        "align_of generic arg is not provided".into(),
                    ));
                };
                let align = self.layout(ty)?.align.bytes();
                destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
            }
            "size_of_val" => {
                let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
                    return Err(MirEvalError::InternalError(
                        "size_of_val generic arg is not provided".into(),
                    ));
                };
                let [arg] = args else {
                    return Err(MirEvalError::InternalError(
                        "size_of_val args are not provided".into(),
                    ));
                };
                if let Some((size, _)) = self.size_align_of(ty, locals)? {
                    destination.write_from_bytes(self, &size.to_le_bytes())
                } else {
                    let metadata = arg.interval.slice(self.ptr_size()..self.ptr_size() * 2);
                    let (size, _) = self.size_align_of_unsized(ty, metadata, locals)?;
                    destination.write_from_bytes(self, &size.to_le_bytes())
                }
            }
            "min_align_of_val" | "align_of_val" => {
                let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
                    return Err(MirEvalError::InternalError(
                        "align_of_val generic arg is not provided".into(),
                    ));
                };
                let [arg] = args else {
                    return Err(MirEvalError::InternalError(
                        "align_of_val args are not provided".into(),
                    ));
                };
                if let Some((_, align)) = self.size_align_of(ty, locals)? {
                    destination.write_from_bytes(self, &align.to_le_bytes())
                } else {
                    let metadata = arg.interval.slice(self.ptr_size()..self.ptr_size() * 2);
                    let (_, align) = self.size_align_of_unsized(ty, metadata, locals)?;
                    destination.write_from_bytes(self, &align.to_le_bytes())
                }
            }
            "type_name" => {
                let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
                    return Err(MirEvalError::InternalError(
                        "type_name generic arg is not provided".into(),
                    ));
                };
                let ty_name = match ty.display_source_code(
                    self.db,
                    locals.body.owner.module(self.db),
                    true,
                ) {
                    Ok(ty_name) => ty_name,
                    Err(_) => {
                        let krate = locals.body.owner.krate(self.db);
                        ty.display(self.db, DisplayTarget::from_crate(self.db, krate)).to_string()
                    }
                };
                let len = ty_name.len();
                let addr = self.heap_allocate(len, 1)?;
                self.write_memory(addr, ty_name.as_bytes())?;
                destination.slice(0..self.ptr_size()).write_from_bytes(self, &addr.to_bytes())?;
                destination
                    .slice(self.ptr_size()..2 * self.ptr_size())
                    .write_from_bytes(self, &len.to_le_bytes())
            }
            "needs_drop" => {
                let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
                    return Err(MirEvalError::InternalError(
                        "needs_drop generic arg is not provided".into(),
                    ));
                };
                let result = match has_drop_glue(&self.infcx, ty, self.param_env.param_env) {
                    DropGlue::HasDropGlue => true,
                    DropGlue::None => false,
                    DropGlue::DependOnParams => {
                        never!("should be fully monomorphized now");
                        true
                    }
                };
                destination.write_from_bytes(self, &[u8::from(result)])
            }
853 "ptr_guaranteed_cmp" => {
854 let [lhs, rhs] = args else {
857 return Err(MirEvalError::InternalError(
858 "wrapping_add args are not provided".into(),
859 ));
860 };
861 let ans = lhs.get(self)? == rhs.get(self)?;
862 destination.write_from_bytes(self, &[u8::from(ans)])
863 }
864 "saturating_add" | "saturating_sub" => {
865 let [lhs, rhs] = args else {
866 return Err(MirEvalError::InternalError(
867 "saturating_add args are not provided".into(),
868 ));
869 };
870 let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
871 let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
872 let ans = match name {
873 "saturating_add" => lhs.saturating_add(rhs),
874 "saturating_sub" => lhs.saturating_sub(rhs),
875 _ => unreachable!(),
876 };
877 let bits = destination.size * 8;
878 let is_signed = false;
880 let mx: u128 = if is_signed { (1 << (bits - 1)) - 1 } else { (1 << bits) - 1 };
881 let mn: u128 = 0;
883 let ans = cmp::min(mx, cmp::max(mn, ans));
884 destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
885 }
886 "wrapping_add" | "unchecked_add" => {
887 let [lhs, rhs] = args else {
888 return Err(MirEvalError::InternalError(
889 "wrapping_add args are not provided".into(),
890 ));
891 };
892 let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
893 let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
894 let ans = lhs.wrapping_add(rhs);
895 destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
896 }
897 "ptr_offset_from_unsigned" | "ptr_offset_from" => {
898 let [lhs, rhs] = args else {
899 return Err(MirEvalError::InternalError(
900 "wrapping_sub args are not provided".into(),
901 ));
902 };
903 let lhs = i128::from_le_bytes(pad16(lhs.get(self)?, false));
904 let rhs = i128::from_le_bytes(pad16(rhs.get(self)?, false));
905 let ans = lhs.wrapping_sub(rhs);
906 let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
907 return Err(MirEvalError::InternalError(
908 "ptr_offset_from generic arg is not provided".into(),
909 ));
910 };
911 let size = self.size_of_sized(ty, locals, "ptr_offset_from arg")? as i128;
912 let ans = ans / size;
913 destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
914 }
915 "wrapping_sub" | "unchecked_sub" => {
916 let [lhs, rhs] = args else {
917 return Err(MirEvalError::InternalError(
918 "wrapping_sub args are not provided".into(),
919 ));
920 };
921 let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
922 let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
923 let ans = lhs.wrapping_sub(rhs);
924 destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
925 }
926 "wrapping_mul" | "unchecked_mul" => {
927 let [lhs, rhs] = args else {
928 return Err(MirEvalError::InternalError(
929 "wrapping_mul args are not provided".into(),
930 ));
931 };
932 let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
933 let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
934 let ans = lhs.wrapping_mul(rhs);
935 destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
936 }
937 "wrapping_shl" | "unchecked_shl" => {
938 let [lhs, rhs] = args else {
940 return Err(MirEvalError::InternalError(
941 "unchecked_shl args are not provided".into(),
942 ));
943 };
944 let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
945 let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
946 let ans = lhs.wrapping_shl(rhs as u32);
947 destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
948 }
949 "wrapping_shr" | "unchecked_shr" => {
950 let [lhs, rhs] = args else {
952 return Err(MirEvalError::InternalError(
953 "unchecked_shr args are not provided".into(),
954 ));
955 };
956 let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
957 let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
958 let ans = lhs.wrapping_shr(rhs as u32);
959 destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
960 }
961 "unchecked_rem" => {
962 let [lhs, rhs] = args else {
964 return Err(MirEvalError::InternalError(
965 "unchecked_rem args are not provided".into(),
966 ));
967 };
968 let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
969 let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
970 let ans = lhs.checked_rem(rhs).ok_or_else(|| {
971 MirEvalError::UndefinedBehavior("unchecked_rem with bad inputs".to_owned())
972 })?;
973 destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
974 }
975 "unchecked_div" | "exact_div" => {
976 let [lhs, rhs] = args else {
978 return Err(MirEvalError::InternalError(
979 "unchecked_div args are not provided".into(),
980 ));
981 };
982 let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
983 let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
984 let ans = lhs.checked_div(rhs).ok_or_else(|| {
985 MirEvalError::UndefinedBehavior("unchecked_rem with bad inputs".to_owned())
986 })?;
987 destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
988 }
989 "add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" => {
990 let [lhs, rhs] = args else {
991 return Err(MirEvalError::InternalError(
992 "const_eval_select args are not provided".into(),
993 ));
994 };
995 let result_ty = Ty::new_tup_from_iter(
996 self.interner(),
997 [lhs.ty, Ty::new_bool(self.interner())].into_iter(),
998 );
999 let op_size = self.size_of_sized(lhs.ty, locals, "operand of add_with_overflow")?;
1000 let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
1001 let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
1002 let (ans, u128overflow) = match name {
1003 "add_with_overflow" => lhs.overflowing_add(rhs),
1004 "sub_with_overflow" => lhs.overflowing_sub(rhs),
1005 "mul_with_overflow" => lhs.overflowing_mul(rhs),
1006 _ => unreachable!(),
1007 };
1008 let is_overflow = u128overflow
1009 || ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255);
1010 let is_overflow = vec![u8::from(is_overflow)];
1011 let layout = self.layout(result_ty)?;
1012 let result = self.construct_with_layout(
1013 layout.size.bytes_usize(),
1014 &layout,
1015 None,
1016 [ans.to_le_bytes()[0..op_size].to_vec(), is_overflow]
1017 .into_iter()
1018 .map(IntervalOrOwned::Owned),
1019 )?;
1020 destination.write_from_bytes(self, &result)
1021 }
1022 "copy" | "copy_nonoverlapping" => {
1023 let [src, dst, offset] = args else {
1024 return Err(MirEvalError::InternalError(
1025 "copy_nonoverlapping args are not provided".into(),
1026 ));
1027 };
1028 let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
1029 return Err(MirEvalError::InternalError(
1030 "copy_nonoverlapping generic arg is not provided".into(),
1031 ));
1032 };
1033 let src = Address::from_bytes(src.get(self)?)?;
1034 let dst = Address::from_bytes(dst.get(self)?)?;
1035 let offset = from_bytes!(usize, offset.get(self)?);
1036 let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?;
1037 let size = offset * size;
1038 let src = Interval { addr: src, size };
1039 let dst = Interval { addr: dst, size };
1040 dst.write_from_interval(self, src)
1041 }
1042 "offset" | "arith_offset" => {
1043 let [ptr, offset] = args else {
1044 return Err(MirEvalError::InternalError("offset args are not provided".into()));
1045 };
1046 let ty = if name == "offset" {
1047 let Some(ty0) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
1048 return Err(MirEvalError::InternalError(
1049 "offset generic arg is not provided".into(),
1050 ));
1051 };
1052 let Some(ty1) = generic_args.as_slice().get(1).and_then(|it| it.ty()) else {
1053 return Err(MirEvalError::InternalError(
1054 "offset generic arg is not provided".into(),
1055 ));
1056 };
1057 if !matches!(
1058 ty1.kind(),
1059 TyKind::Int(rustc_type_ir::IntTy::Isize)
1060 | TyKind::Uint(rustc_type_ir::UintTy::Usize)
1061 ) {
1062 return Err(MirEvalError::InternalError(
1063 "offset generic arg is not usize or isize".into(),
1064 ));
1065 }
1066 match ty0.kind() {
1067 TyKind::RawPtr(ty, _) => ty,
1068 _ => {
1069 return Err(MirEvalError::InternalError(
1070 "offset generic arg is not a raw pointer".into(),
1071 ));
1072 }
1073 }
1074 } else {
1075 let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
1076 return Err(MirEvalError::InternalError(
1077 "arith_offset generic arg is not provided".into(),
1078 ));
1079 };
1080 ty
1081 };
1082 let ptr = u128::from_le_bytes(pad16(ptr.get(self)?, false));
1083 let offset = u128::from_le_bytes(pad16(offset.get(self)?, false));
1084 let size = self.size_of_sized(ty, locals, "offset ptr type")? as u128;
1085 let ans = ptr + offset * size;
1086 destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
1087 }
1088 "assert_inhabited" | "assert_zero_valid" | "assert_uninit_valid" | "assume" => {
1089 Ok(())
1091 }
1092 "forget" => {
1093 Ok(())
1095 }
1096 "transmute" | "transmute_unchecked" => {
1097 let [arg] = args else {
1098 return Err(MirEvalError::InternalError(
1099 "transmute arg is not provided".into(),
1100 ));
1101 };
1102 destination.write_from_interval(self, arg.interval)
1103 }
1104 "ctpop" => {
1105 let [arg] = args else {
1106 return Err(MirEvalError::InternalError("ctpop arg is not provided".into()));
1107 };
1108 let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).count_ones();
1109 destination
1110 .write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
1111 }
1112 "ctlz" | "ctlz_nonzero" => {
1113 let [arg] = args else {
1114 return Err(MirEvalError::InternalError("ctlz arg is not provided".into()));
1115 };
1116 let result =
1117 u128::from_le_bytes(pad16(arg.get(self)?, false)).leading_zeros() as usize;
1118 let result = result - (128 - arg.interval.size * 8);
1119 destination
1120 .write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
1121 }
1122 "cttz" | "cttz_nonzero" => {
1123 let [arg] = args else {
1124 return Err(MirEvalError::InternalError("cttz arg is not provided".into()));
1125 };
1126 let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).trailing_zeros();
1127 destination
1128 .write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
1129 }
1130 "rotate_left" => {
1131 let [lhs, rhs] = args else {
1132 return Err(MirEvalError::InternalError(
1133 "rotate_left args are not provided".into(),
1134 ));
1135 };
1136 let lhs = &lhs.get(self)?[0..destination.size];
1137 let rhs = rhs.get(self)?[0] as u32;
1138 match destination.size {
1139 1 => {
1140 let r = from_bytes!(u8, lhs).rotate_left(rhs);
1141 destination.write_from_bytes(self, &r.to_le_bytes())
1142 }
1143 2 => {
1144 let r = from_bytes!(u16, lhs).rotate_left(rhs);
1145 destination.write_from_bytes(self, &r.to_le_bytes())
1146 }
1147 4 => {
1148 let r = from_bytes!(u32, lhs).rotate_left(rhs);
1149 destination.write_from_bytes(self, &r.to_le_bytes())
1150 }
1151 8 => {
1152 let r = from_bytes!(u64, lhs).rotate_left(rhs);
1153 destination.write_from_bytes(self, &r.to_le_bytes())
1154 }
1155 16 => {
1156 let r = from_bytes!(u128, lhs).rotate_left(rhs);
1157 destination.write_from_bytes(self, &r.to_le_bytes())
1158 }
1159 s => not_supported!("destination with size {s} for rotate_left"),
1160 }
1161 }
1162 "rotate_right" => {
1163 let [lhs, rhs] = args else {
1164 return Err(MirEvalError::InternalError(
1165 "rotate_right args are not provided".into(),
1166 ));
1167 };
1168 let lhs = &lhs.get(self)?[0..destination.size];
1169 let rhs = rhs.get(self)?[0] as u32;
1170 match destination.size {
1171 1 => {
1172 let r = from_bytes!(u8, lhs).rotate_right(rhs);
1173 destination.write_from_bytes(self, &r.to_le_bytes())
1174 }
1175 2 => {
1176 let r = from_bytes!(u16, lhs).rotate_right(rhs);
1177 destination.write_from_bytes(self, &r.to_le_bytes())
1178 }
1179 4 => {
1180 let r = from_bytes!(u32, lhs).rotate_right(rhs);
1181 destination.write_from_bytes(self, &r.to_le_bytes())
1182 }
1183 8 => {
1184 let r = from_bytes!(u64, lhs).rotate_right(rhs);
1185 destination.write_from_bytes(self, &r.to_le_bytes())
1186 }
1187 16 => {
1188 let r = from_bytes!(u128, lhs).rotate_right(rhs);
1189 destination.write_from_bytes(self, &r.to_le_bytes())
1190 }
1191 s => not_supported!("destination with size {s} for rotate_right"),
1192 }
1193 }
1194 "discriminant_value" => {
1195 let [arg] = args else {
1196 return Err(MirEvalError::InternalError(
1197 "discriminant_value arg is not provided".into(),
1198 ));
1199 };
1200 let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
1201 return Err(MirEvalError::InternalError(
1202 "discriminant_value generic arg is not provided".into(),
1203 ));
1204 };
1205 let addr = Address::from_bytes(arg.get(self)?)?;
1206 let size = self.size_of_sized(ty, locals, "discriminant_value ptr type")?;
1207 let interval = Interval { addr, size };
1208 let r = self.compute_discriminant(ty, interval.get(self)?)?;
1209 destination.write_from_bytes(self, &r.to_le_bytes()[0..destination.size])
1210 }
1211 "const_eval_select" => {
1212 let [tuple, const_fn, _] = args else {
1213 return Err(MirEvalError::InternalError(
1214 "const_eval_select args are not provided".into(),
1215 ));
1216 };
1217 let mut args = vec![const_fn.clone()];
1218 let TyKind::Tuple(fields) = tuple.ty.kind() else {
1219 return Err(MirEvalError::InternalError(
1220 "const_eval_select arg[0] is not a tuple".into(),
1221 ));
1222 };
1223 let layout = self.layout(tuple.ty)?;
1224 for (i, field) in fields.iter().enumerate() {
1225 let offset = layout.fields.offset(i).bytes_usize();
1226 let addr = tuple.interval.addr.offset(offset);
1227 args.push(IntervalAndTy::new(addr, field, self, locals)?);
1228 }
1229 if let Some(target) = self.lang_items().FnOnce
1230 && let Some(def) = target
1231 .trait_items(self.db)
1232 .method_by_name(&Name::new_symbol_root(sym::call_once))
1233 {
1234 self.exec_fn_trait(
1235 def,
1236 &args,
1237 GenericArgs::new_from_iter(self.interner(), []),
1239 locals,
1240 destination,
1241 None,
1242 span,
1243 )?;
1244 return Ok(true);
1245 }
1246 not_supported!("FnOnce was not available for executing const_eval_select");
1247 }
1248 "read_via_copy" | "volatile_load" => {
1249 let [arg] = args else {
1250 return Err(MirEvalError::InternalError(
1251 "read_via_copy args are not provided".into(),
1252 ));
1253 };
1254 let addr = Address::from_bytes(arg.interval.get(self)?)?;
1255 destination.write_from_interval(self, Interval { addr, size: destination.size })
1256 }
1257 "write_via_move" => {
1258 let [ptr, val] = args else {
1259 return Err(MirEvalError::InternalError(
1260 "write_via_move args are not provided".into(),
1261 ));
1262 };
1263 let dst = Address::from_bytes(ptr.get(self)?)?;
1264 let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
1265 return Err(MirEvalError::InternalError(
1266 "write_via_copy generic arg is not provided".into(),
1267 ));
1268 };
1269 let size = self.size_of_sized(ty, locals, "write_via_move ptr type")?;
1270 Interval { addr: dst, size }.write_from_interval(self, val.interval)?;
1271 Ok(())
1272 }
1273 "write_bytes" => {
1274 let [dst, val, count] = args else {
1275 return Err(MirEvalError::InternalError(
1276 "write_bytes args are not provided".into(),
1277 ));
1278 };
1279 let count = from_bytes!(usize, count.get(self)?);
1280 let val = from_bytes!(u8, val.get(self)?);
1281 let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
1282 return Err(MirEvalError::InternalError(
1283 "write_bytes generic arg is not provided".into(),
1284 ));
1285 };
1286 let dst = Address::from_bytes(dst.get(self)?)?;
1287 let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?;
1288 let size = count * size;
1289 self.write_memory_using_ref(dst, size)?.fill(val);
1290 Ok(())
1291 }
1292 "ptr_metadata" => {
1293 let [ptr] = args else {
1294 return Err(MirEvalError::InternalError(
1295 "ptr_metadata args are not provided".into(),
1296 ));
1297 };
1298 let arg = ptr.interval.get(self)?.to_owned();
1299 let metadata = &arg[self.ptr_size()..];
1300 destination.write_from_bytes(self, metadata)?;
1301 Ok(())
1302 }
1303 "three_way_compare" => {
1304 let [lhs, rhs] = args else {
1305 return Err(MirEvalError::InternalError(
1306 "three_way_compare args are not provided".into(),
1307 ));
1308 };
1309 let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
1310 return Err(MirEvalError::InternalError(
1311 "three_way_compare generic arg is not provided".into(),
1312 ));
1313 };
1314 let signed = match ty.kind() {
1315 TyKind::Int(_) => true,
1316 TyKind::Uint(_) => false,
1317 _ => {
1318 return Err(MirEvalError::InternalError(
1319 "three_way_compare expects an integral type".into(),
1320 ));
1321 }
1322 };
1323 let rhs = rhs.get(self)?;
1324 let lhs = lhs.get(self)?;
1325 let mut result = Ordering::Equal;
1326 for (l, r) in lhs.iter().zip(rhs).rev() {
1327 let it = l.cmp(r);
1328 if it != Ordering::Equal {
1329 result = it;
1330 break;
1331 }
1332 }
1333 if signed
1334 && let Some((&l, &r)) = lhs.iter().zip(rhs).next_back()
1335 && l != r
1336 {
1337 result = (l as i8).cmp(&(r as i8));
1338 }
1339 if let Some(e) = self.lang_items().Ordering {
1340 let ty = self.db.ty(e.into()).skip_binder();
1341 let r = self.compute_discriminant(ty, &[result as i8 as u8])?;
1342 destination.write_from_bytes(self, &r.to_le_bytes()[0..destination.size])?;
1343 Ok(())
1344 } else {
1345 Err(MirEvalError::InternalError("Ordering enum not found".into()))
1346 }
1347 }
1348 "aggregate_raw_ptr" => {
1349 let [data, meta] = args else {
1350 return Err(MirEvalError::InternalError(
1351 "aggregate_raw_ptr args are not provided".into(),
1352 ));
1353 };
1354 destination.write_from_interval(self, data.interval)?;
1355 Interval {
1356 addr: destination.addr.offset(data.interval.size),
1357 size: destination.size - data.interval.size,
1358 }
1359 .write_from_interval(self, meta.interval)?;
1360 Ok(())
1361 }
1362 _ if needs_override => not_supported!("intrinsic {name} is not implemented"),
1363 _ => return Ok(false),
1364 }
1365 .map(|()| true)
1366 }
1367
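    /// Computes the size and alignment of an unsized value (`str`, slices, trait objects,
    /// and structs whose last field is unsized) from its type and pointer metadata.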
    fn size_align_of_unsized(
        &mut self,
        ty: Ty<'db>,
        metadata: Interval,
        locals: &Locals<'db>,
    ) -> Result<'db, (usize, usize)> {
        Ok(match ty.kind() {
            TyKind::Str => (from_bytes!(usize, metadata.get(self)?), 1),
            TyKind::Slice(inner) => {
                let len = from_bytes!(usize, metadata.get(self)?);
                let (size, align) = self.size_align_of_sized(inner, locals, "slice inner type")?;
                (size * len, align)
            }
            TyKind::Dynamic(..) => self.size_align_of_sized(
                self.vtable_map.ty_of_bytes(metadata.get(self)?)?,
                locals,
                "dyn concrete type",
            )?,
            TyKind::Adt(adt_def, subst) => {
                let id = adt_def.def_id().0;
                let layout = self.layout_adt(id, subst)?;
                let id = match id {
                    AdtId::StructId(s) => s,
                    _ => not_supported!("unsized enum or union"),
                };
                let field_types = self.db.field_types(id.into());
                let last_field_ty =
                    field_types.iter().next_back().unwrap().1.instantiate(self.interner(), subst);
                let sized_part_size =
                    layout.fields.offset(field_types.iter().count() - 1).bytes_usize();
                let sized_part_align = layout.align.bytes() as usize;
                let (unsized_part_size, unsized_part_align) =
                    self.size_align_of_unsized(last_field_ty, metadata, locals)?;
                let align = sized_part_align.max(unsized_part_align) as isize;
                let size = (sized_part_size + unsized_part_size) as isize;
                let size = (size + (align - 1)) & (-align);
                (size as usize, align as usize)
            }
            _ => not_supported!("unsized type other than str, slice, struct and dyn"),
        })
    }

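    /// Executes `atomic_*` intrinsics. The evaluator is single-threaded, so atomics are
    /// modelled as plain loads, stores and read-modify-write operations; memory orderings
    /// and fences are ignored.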
    fn exec_atomic_intrinsic(
        &mut self,
        name: &str,
        args: &[IntervalAndTy<'db>],
        generic_args: GenericArgs<'db>,
        destination: Interval,
        locals: &Locals<'db>,
        _span: MirSpan,
    ) -> Result<'db, ()> {
        if name.starts_with("singlethreadfence_") || name.starts_with("fence_") {
            return Ok(());
        }

        let Some(ty) = generic_args.as_slice().first().and_then(|it| it.ty()) else {
            return Err(MirEvalError::InternalError(
                "atomic intrinsic generic arg is not provided".into(),
            ));
        };
        let Some(arg0) = args.first() else {
            return Err(MirEvalError::InternalError(
                "atomic intrinsic arg0 is not provided".into(),
            ));
        };
        let arg0_addr = Address::from_bytes(arg0.get(self)?)?;
        let arg0_interval =
            Interval::new(arg0_addr, self.size_of_sized(ty, locals, "atomic intrinsic type arg")?);
        if name.starts_with("load_") {
            return destination.write_from_interval(self, arg0_interval);
        }
        let Some(arg1) = args.get(1) else {
            return Err(MirEvalError::InternalError(
                "atomic intrinsic arg1 is not provided".into(),
            ));
        };
        if name.starts_with("store_") {
            return arg0_interval.write_from_interval(self, arg1.interval);
        }
        if name.starts_with("xchg_") {
            destination.write_from_interval(self, arg0_interval)?;
            return arg0_interval.write_from_interval(self, arg1.interval);
        }
        if name.starts_with("xadd_") {
            destination.write_from_interval(self, arg0_interval)?;
            let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
            let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
            let ans = lhs.wrapping_add(rhs);
            return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
        }
        if name.starts_with("xsub_") {
            destination.write_from_interval(self, arg0_interval)?;
            let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
            let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
            let ans = lhs.wrapping_sub(rhs);
            return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
        }
        if name.starts_with("and_") {
            destination.write_from_interval(self, arg0_interval)?;
            let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
            let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
            let ans = lhs & rhs;
            return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
        }
        if name.starts_with("or_") {
            destination.write_from_interval(self, arg0_interval)?;
            let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
            let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
            let ans = lhs | rhs;
            return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
        }
        if name.starts_with("xor_") {
            destination.write_from_interval(self, arg0_interval)?;
            let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
            let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
            let ans = lhs ^ rhs;
            return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
        }
        if name.starts_with("nand_") {
            destination.write_from_interval(self, arg0_interval)?;
            let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
            let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
            let ans = !(lhs & rhs);
            return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
        }
        let Some(arg2) = args.get(2) else {
            return Err(MirEvalError::InternalError(
                "atomic intrinsic arg2 is not provided".into(),
            ));
        };
        if name.starts_with("cxchg_") || name.starts_with("cxchgweak_") {
            let dest = if arg1.get(self)? == arg0_interval.get(self)? {
                arg0_interval.write_from_interval(self, arg2.interval)?;
                (arg1.interval, true)
            } else {
                (arg0_interval, false)
            };
            let result_ty = Ty::new_tup_from_iter(
                self.interner(),
                [ty, Ty::new_bool(self.interner())].into_iter(),
            );
            let layout = self.layout(result_ty)?;
            let result = self.construct_with_layout(
                layout.size.bytes_usize(),
                &layout,
                None,
                [IntervalOrOwned::Borrowed(dest.0), IntervalOrOwned::Owned(vec![u8::from(dest.1)])]
                    .into_iter(),
            )?;
            return destination.write_from_bytes(self, &result);
        }
        not_supported!("unknown atomic intrinsic {name}");
    }
}