1mod child_by_source;
4mod source_to_def;
5
6use std::{
7 cell::RefCell,
8 convert::Infallible,
9 fmt, iter, mem,
10 ops::{self, ControlFlow, Not},
11};
12
13use either::Either;
14use hir_def::{
15 DefWithBodyId, FunctionId, MacroId, StructId, TraitId, VariantId,
16 expr_store::{Body, ExprOrPatSource, path::Path},
17 hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
18 nameres::{ModuleOrigin, crate_def_map},
19 resolver::{self, HasResolver, Resolver, TypeNs},
20 type_ref::Mutability,
21};
22use hir_expand::{
23 EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
24 attrs::collect_attrs,
25 builtin::{BuiltinFnLikeExpander, EagerExpander},
26 db::ExpandDatabase,
27 files::{FileRangeWrapper, HirFileRange, InRealFile},
28 mod_path::{ModPath, PathKind},
29 name::AsName,
30};
31use hir_ty::diagnostics::{unsafe_operations, unsafe_operations_for_body};
32use intern::{Interned, Symbol, sym};
33use itertools::Itertools;
34use rustc_hash::{FxHashMap, FxHashSet};
35use smallvec::{SmallVec, smallvec};
36use span::{Edition, FileId, SyntaxContext};
37use stdx::{TupleExt, always};
38use syntax::{
39 AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
40 TextSize,
41 algo::skip_trivia_token,
42 ast::{self, HasAttrs as _, HasGenericParams},
43};
44
45use crate::{
46 Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam,
47 Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource, Impl,
48 InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
49 Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait, TupleField, Type,
50 TypeAlias, TypeParam, Union, Variant, VariantDef,
51 db::HirDatabase,
52 semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
53 source_analyzer::{SourceAnalyzer, name_hygiene, resolve_hir_path},
54};
55
56const CONTINUE_NO_BREAKS: ControlFlow<Infallible, ()> = ControlFlow::Continue(());
57
58#[derive(Debug, Copy, Clone, PartialEq, Eq)]
59pub enum PathResolution {
60 Def(ModuleDef),
62 Local(Local),
64 TypeParam(TypeParam),
66 ConstParam(ConstParam),
68 SelfType(Impl),
69 BuiltinAttr(BuiltinAttr),
70 ToolModule(ToolModule),
71 DeriveHelper(DeriveHelper),
72}
73
74impl PathResolution {
75 pub(crate) fn in_type_ns(&self) -> Option<TypeNs> {
76 match self {
77 PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())),
78 PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
79 Some(TypeNs::BuiltinType((*builtin).into()))
80 }
81 PathResolution::Def(
82 ModuleDef::Const(_)
83 | ModuleDef::Variant(_)
84 | ModuleDef::Macro(_)
85 | ModuleDef::Function(_)
86 | ModuleDef::Module(_)
87 | ModuleDef::Static(_)
88 | ModuleDef::Trait(_),
89 ) => None,
90 PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
91 Some(TypeNs::TypeAliasId((*alias).into()))
92 }
93 PathResolution::BuiltinAttr(_)
94 | PathResolution::ToolModule(_)
95 | PathResolution::Local(_)
96 | PathResolution::DeriveHelper(_)
97 | PathResolution::ConstParam(_) => None,
98 PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
99 PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
100 }
101 }
102}
103
104#[derive(Debug, Copy, Clone, PartialEq, Eq)]
105pub struct PathResolutionPerNs {
106 pub type_ns: Option<PathResolution>,
107 pub value_ns: Option<PathResolution>,
108 pub macro_ns: Option<PathResolution>,
109}
110
111impl PathResolutionPerNs {
112 pub fn new(
113 type_ns: Option<PathResolution>,
114 value_ns: Option<PathResolution>,
115 macro_ns: Option<PathResolution>,
116 ) -> Self {
117 PathResolutionPerNs { type_ns, value_ns, macro_ns }
118 }
119 pub fn any(&self) -> Option<PathResolution> {
120 self.type_ns.or(self.value_ns).or(self.macro_ns)
121 }
122}
123
124#[derive(Debug)]
125pub struct TypeInfo<'db> {
126 pub original: Type<'db>,
128 pub adjusted: Option<Type<'db>>,
130}
131
132impl<'db> TypeInfo<'db> {
133 pub fn original(self) -> Type<'db> {
134 self.original
135 }
136
137 pub fn has_adjustment(&self) -> bool {
138 self.adjusted.is_some()
139 }
140
141 pub fn adjusted(self) -> Type<'db> {
143 self.adjusted.unwrap_or(self.original)
144 }
145}
146
147pub struct Semantics<'db, DB: ?Sized> {
149 pub db: &'db DB,
150 imp: SemanticsImpl<'db>,
151}
152
153pub struct SemanticsImpl<'db> {
154 pub db: &'db dyn HirDatabase,
155 s2d_cache: RefCell<SourceToDefCache>,
156 macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroCallId>>,
158}
159
160impl<DB: ?Sized> fmt::Debug for Semantics<'_, DB> {
161 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
162 write!(f, "Semantics {{ ... }}")
163 }
164}
165
166impl<'db, DB: ?Sized> ops::Deref for Semantics<'db, DB> {
167 type Target = SemanticsImpl<'db>;
168
169 fn deref(&self) -> &Self::Target {
170 &self.imp
171 }
172}
173
174impl Semantics<'_, dyn HirDatabase> {
178 pub fn new_dyn(db: &'_ dyn HirDatabase) -> Semantics<'_, dyn HirDatabase> {
180 let impl_ = SemanticsImpl::new(db);
181 Semantics { db, imp: impl_ }
182 }
183}
184
185impl<DB: HirDatabase> Semantics<'_, DB> {
186 pub fn new(db: &DB) -> Semantics<'_, DB> {
188 let impl_ = SemanticsImpl::new(db);
189 Semantics { db, imp: impl_ }
190 }
191}
192
193impl<DB: HirDatabase + ?Sized> Semantics<'_, DB> {
196 pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
197 self.imp.find_file(syntax_node).file_id
198 }
199
200 pub fn token_ancestors_with_macros(
201 &self,
202 token: SyntaxToken,
203 ) -> impl Iterator<Item = SyntaxNode> + '_ {
204 token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
205 }
206
207 pub fn find_node_at_offset_with_macros<N: AstNode>(
210 &self,
211 node: &SyntaxNode,
212 offset: TextSize,
213 ) -> Option<N> {
214 self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
215 }
216
217 pub fn find_node_at_offset_with_descend<N: AstNode>(
221 &self,
222 node: &SyntaxNode,
223 offset: TextSize,
224 ) -> Option<N> {
225 self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
226 }
227
228 pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
232 &'slf self,
233 node: &SyntaxNode,
234 offset: TextSize,
235 ) -> impl Iterator<Item = N> + 'slf {
236 self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
237 }
238
239 pub fn find_namelike_at_offset_with_descend<'slf>(
241 &'slf self,
242 node: &SyntaxNode,
243 offset: TextSize,
244 ) -> impl Iterator<Item = ast::NameLike> + 'slf {
245 node.token_at_offset(offset)
246 .map(move |token| self.descend_into_macros_no_opaque(token, true))
247 .map(|descendants| descendants.into_iter().filter_map(move |it| it.value.parent()))
248 .kmerge_by(|left, right| left.text_range().len().lt(&right.text_range().len()))
251 .filter_map(ast::NameLike::cast)
252 }
253
254 pub fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<Struct> {
255 self.imp.resolve_range_pat(range_pat).map(Struct::from)
256 }
257
258 pub fn resolve_range_expr(&self, range_expr: &ast::RangeExpr) -> Option<Struct> {
259 self.imp.resolve_range_expr(range_expr).map(Struct::from)
260 }
261
262 pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
263 self.imp.resolve_await_to_poll(await_expr).map(Function::from)
264 }
265
266 pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
267 self.imp.resolve_prefix_expr(prefix_expr).map(Function::from)
268 }
269
270 pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
271 self.imp.resolve_index_expr(index_expr).map(Function::from)
272 }
273
274 pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
275 self.imp.resolve_bin_expr(bin_expr).map(Function::from)
276 }
277
278 pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
279 self.imp.resolve_try_expr(try_expr).map(Function::from)
280 }
281
282 pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
283 self.imp.resolve_variant(record_lit).map(VariantDef::from)
284 }
285
286 pub fn file_to_module_def(&self, file: impl Into<FileId>) -> Option<Module> {
287 self.imp.file_to_module_defs(file.into()).next()
288 }
289
290 pub fn file_to_module_defs(&self, file: impl Into<FileId>) -> impl Iterator<Item = Module> {
291 self.imp.file_to_module_defs(file.into())
292 }
293
294 pub fn hir_file_to_module_def(&self, file: impl Into<HirFileId>) -> Option<Module> {
295 self.imp.hir_file_to_module_defs(file.into()).next()
296 }
297
298 pub fn hir_file_to_module_defs(
299 &self,
300 file: impl Into<HirFileId>,
301 ) -> impl Iterator<Item = Module> {
302 self.imp.hir_file_to_module_defs(file.into())
303 }
304
305 pub fn to_adt_def(&self, a: &ast::Adt) -> Option<Adt> {
306 self.imp.to_def(a)
307 }
308
309 pub fn to_const_def(&self, c: &ast::Const) -> Option<Const> {
310 self.imp.to_def(c)
311 }
312
313 pub fn to_enum_def(&self, e: &ast::Enum) -> Option<Enum> {
314 self.imp.to_def(e)
315 }
316
317 pub fn to_enum_variant_def(&self, v: &ast::Variant) -> Option<Variant> {
318 self.imp.to_def(v)
319 }
320
321 pub fn to_fn_def(&self, f: &ast::Fn) -> Option<Function> {
322 self.imp.to_def(f)
323 }
324
325 pub fn to_impl_def(&self, i: &ast::Impl) -> Option<Impl> {
326 self.imp.to_def(i)
327 }
328
329 pub fn to_macro_def(&self, m: &ast::Macro) -> Option<Macro> {
330 self.imp.to_def(m)
331 }
332
333 pub fn to_module_def(&self, m: &ast::Module) -> Option<Module> {
334 self.imp.to_def(m)
335 }
336
337 pub fn to_static_def(&self, s: &ast::Static) -> Option<Static> {
338 self.imp.to_def(s)
339 }
340
341 pub fn to_struct_def(&self, s: &ast::Struct) -> Option<Struct> {
342 self.imp.to_def(s)
343 }
344
345 pub fn to_trait_def(&self, t: &ast::Trait) -> Option<Trait> {
346 self.imp.to_def(t)
347 }
348
349 pub fn to_type_alias_def(&self, t: &ast::TypeAlias) -> Option<TypeAlias> {
350 self.imp.to_def(t)
351 }
352
353 pub fn to_union_def(&self, u: &ast::Union) -> Option<Union> {
354 self.imp.to_def(u)
355 }
356}
357
358impl<'db> SemanticsImpl<'db> {
359 fn new(db: &'db dyn HirDatabase) -> Self {
360 SemanticsImpl { db, s2d_cache: Default::default(), macro_call_cache: Default::default() }
361 }
362
363 pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
364 let hir_file_id = file_id.into();
365 let tree = self.db.parse(file_id).tree();
366 self.cache(tree.syntax().clone(), hir_file_id);
367 tree
368 }
369
370 pub fn first_crate(&self, file: FileId) -> Option<Crate> {
372 match self.file_to_module_defs(file).next() {
373 Some(module) => Some(module.krate()),
374 None => self.db.all_crates().last().copied().map(Into::into),
375 }
376 }
377
378 pub fn attach_first_edition(&self, file: FileId) -> Option<EditionedFileId> {
379 Some(EditionedFileId::new(
380 self.db,
381 file,
382 self.file_to_module_defs(file).next()?.krate().edition(self.db),
383 ))
384 }
385
386 pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
387 let file_id = self
388 .attach_first_edition(file_id)
389 .unwrap_or_else(|| EditionedFileId::new(self.db, file_id, Edition::CURRENT));
390
391 let tree = self.db.parse(file_id).tree();
392 self.cache(tree.syntax().clone(), file_id.into());
393 tree
394 }
395
396 pub fn adjust_edition(&self, file_id: HirFileId) -> HirFileId {
397 if let Some(editioned_file_id) = file_id.file_id() {
398 self.attach_first_edition(editioned_file_id.file_id(self.db))
399 .map_or(file_id, Into::into)
400 } else {
401 file_id
402 }
403 }
404
405 pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
406 match file_id {
407 HirFileId::FileId(file_id) => {
408 let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?;
409 let def_map = crate_def_map(self.db, module.krate().id);
410 match def_map[module.id.local_id].origin {
411 ModuleOrigin::CrateRoot { .. } => None,
412 ModuleOrigin::File { declaration, declaration_tree_id, .. } => {
413 let file_id = declaration_tree_id.file_id();
414 let in_file = InFile::new(file_id, declaration);
415 let node = in_file.to_node(self.db);
416 let root = find_root(node.syntax());
417 self.cache(root, file_id);
418 Some(in_file.with_value(node.syntax().clone()))
419 }
420 _ => unreachable!("FileId can only belong to a file module"),
421 }
422 }
423 HirFileId::MacroFile(macro_file) => {
424 let node = self.db.lookup_intern_macro_call(macro_file).to_node(self.db);
425 let root = find_root(&node.value);
426 self.cache(root, node.file_id);
427 Some(node)
428 }
429 }
430 }
431
432 pub fn module_definition_node(&self, module: Module) -> InFile<SyntaxNode> {
435 let def_map = module.id.def_map(self.db);
436 let definition = def_map[module.id.local_id].origin.definition_source(self.db);
437 let definition = definition.map(|it| it.node());
438 let root_node = find_root(&definition.value);
439 self.cache(root_node, definition.file_id);
440 definition
441 }
442
443 pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
444 let node = self.db.parse_or_expand(file_id);
445 self.cache(node.clone(), file_id);
446 node
447 }
448
449 pub fn expand(&self, file_id: MacroCallId) -> ExpandResult<SyntaxNode> {
450 let res = self.db.parse_macro_expansion(file_id).map(|it| it.0.syntax_node());
451 self.cache(res.value.clone(), file_id.into());
452 res
453 }
454
455 pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<InFile<SyntaxNode>> {
456 let file_id = self.to_def(macro_call)?;
457 let node = self.parse_or_expand(file_id.into());
458 Some(InFile::new(file_id.into(), node))
459 }
460
461 pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
462 let file_id = self.find_file(attr.syntax()).file_id;
463 let krate = match file_id {
464 HirFileId::FileId(file_id) => {
465 self.file_to_module_defs(file_id.file_id(self.db)).next()?.krate().id
466 }
467 HirFileId::MacroFile(macro_file) => self.db.lookup_intern_macro_call(macro_file).krate,
468 };
469 hir_expand::check_cfg_attr_value(self.db, attr, krate)
470 }
471
472 pub fn expand_allowed_builtins(
475 &self,
476 macro_call: &ast::MacroCall,
477 ) -> Option<ExpandResult<SyntaxNode>> {
478 let file_id = self.to_def(macro_call)?;
479 let macro_call = self.db.lookup_intern_macro_call(file_id);
480
481 let skip = matches!(
482 macro_call.def.kind,
483 hir_expand::MacroDefKind::BuiltIn(
484 _,
485 BuiltinFnLikeExpander::Column
486 | BuiltinFnLikeExpander::File
487 | BuiltinFnLikeExpander::ModulePath
488 | BuiltinFnLikeExpander::Asm
489 | BuiltinFnLikeExpander::GlobalAsm
490 | BuiltinFnLikeExpander::NakedAsm
491 | BuiltinFnLikeExpander::LogSyntax
492 | BuiltinFnLikeExpander::TraceMacros
493 | BuiltinFnLikeExpander::FormatArgs
494 | BuiltinFnLikeExpander::FormatArgsNl
495 | BuiltinFnLikeExpander::ConstFormatArgs,
496 ) | hir_expand::MacroDefKind::BuiltInEager(_, EagerExpander::CompileError)
497 );
498 if skip {
499 return None;
502 }
503
504 let node = self.expand(file_id);
505 Some(node)
506 }
507
508 pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<InFile<SyntaxNode>>> {
510 let src = self.wrap_node_infile(item.clone());
511 let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
512 Some(self.expand(macro_call_id).map(|it| InFile::new(macro_call_id.into(), it)))
513 }
514
515 pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
516 let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
517 let src = self.wrap_node_infile(attr.clone());
518 let call_id = self.with_ctx(|ctx| {
519 ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
520 })?;
521 Some(self.parse_or_expand(call_id.into()))
522 }
523
524 pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
525 let calls = self.derive_macro_calls(attr)?;
526 self.with_ctx(|ctx| {
527 Some(
528 calls
529 .into_iter()
530 .map(|call| macro_call_to_macro_id(ctx, call?).map(|id| Macro { id }))
531 .collect(),
532 )
533 })
534 }
535
536 pub fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<ExpandResult<SyntaxNode>>> {
537 let res: Vec<_> = self
538 .derive_macro_calls(attr)?
539 .into_iter()
540 .flat_map(|call| {
541 let file_id = call?;
542 let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
543 let root_node = value.0.syntax_node();
544 self.cache(root_node.clone(), file_id.into());
545 Some(ExpandResult { value: root_node, err })
546 })
547 .collect();
548 Some(res)
549 }
550
551 fn derive_macro_calls(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroCallId>>> {
552 let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
553 let file_id = self.find_file(adt.syntax()).file_id;
554 let adt = InFile::new(file_id, &adt);
555 let src = InFile::new(file_id, attr.clone());
556 self.with_ctx(|ctx| {
557 let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?;
558 Some(res.to_vec())
559 })
560 }
561
562 pub fn is_derive_annotated(&self, adt: InFile<&ast::Adt>) -> bool {
563 self.with_ctx(|ctx| ctx.file_of_adt_has_derives(adt))
564 }
565
566 pub fn derive_helpers_in_scope(&self, adt: &ast::Adt) -> Option<Vec<(Symbol, Symbol)>> {
567 let sa = self.analyze_no_infer(adt.syntax())?;
568 let id = self.db.ast_id_map(sa.file_id).ast_id(adt);
569 let result = sa
570 .resolver
571 .def_map()
572 .derive_helpers_in_scope(InFile::new(sa.file_id, id))?
573 .iter()
574 .map(|(name, macro_, _)| {
575 let macro_name = Macro::from(*macro_).name(self.db).symbol().clone();
576 (name.symbol().clone(), macro_name)
577 })
578 .collect();
579 Some(result)
580 }
581
582 pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroCallId)>> {
583 let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
584 ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
585 ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
586 ast::Item::Union(it) => Some(ast::Adt::Union(it)),
587 _ => None,
588 })?;
589 let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
590 let sa = self.analyze_no_infer(adt.syntax())?;
591 let id = self.db.ast_id_map(sa.file_id).ast_id(&adt);
592 let res: Vec<_> = sa
593 .resolver
594 .def_map()
595 .derive_helpers_in_scope(InFile::new(sa.file_id, id))?
596 .iter()
597 .filter(|&(name, _, _)| *name == attr_name)
598 .map(|&(_, macro_, call)| (macro_.into(), call))
599 .collect();
600 res.is_empty().not().then_some(res)
601 }
602
603 pub fn is_attr_macro_call(&self, item: InFile<&ast::Item>) -> bool {
604 self.with_ctx(|ctx| ctx.item_to_macro_call(item).is_some())
605 }
606
607 pub fn speculative_expand_macro_call(
610 &self,
611 actual_macro_call: &ast::MacroCall,
612 speculative_args: &ast::TokenTree,
613 token_to_map: SyntaxToken,
614 ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
615 let macro_file = self.to_def(actual_macro_call)?;
616 hir_expand::db::expand_speculative(
617 self.db,
618 macro_file,
619 speculative_args.syntax(),
620 token_to_map,
621 )
622 }
623
624 pub fn speculative_expand_raw(
625 &self,
626 macro_file: MacroCallId,
627 speculative_args: &SyntaxNode,
628 token_to_map: SyntaxToken,
629 ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
630 hir_expand::db::expand_speculative(self.db, macro_file, speculative_args, token_to_map)
631 }
632
633 pub fn speculative_expand_attr_macro(
636 &self,
637 actual_macro_call: &ast::Item,
638 speculative_args: &ast::Item,
639 token_to_map: SyntaxToken,
640 ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
641 let macro_call = self.wrap_node_infile(actual_macro_call.clone());
642 let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?;
643 hir_expand::db::expand_speculative(
644 self.db,
645 macro_call_id,
646 speculative_args.syntax(),
647 token_to_map,
648 )
649 }
650
651 pub fn speculative_expand_derive_as_pseudo_attr_macro(
652 &self,
653 actual_macro_call: &ast::Attr,
654 speculative_args: &ast::Attr,
655 token_to_map: SyntaxToken,
656 ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
657 let attr = self.wrap_node_infile(actual_macro_call.clone());
658 let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
659 let macro_call_id = self.with_ctx(|ctx| {
660 ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
661 })?;
662 hir_expand::db::expand_speculative(
663 self.db,
664 macro_call_id,
665 speculative_args.syntax(),
666 token_to_map,
667 )
668 }
669
670 pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &Name) -> Vec<Local> {
673 let body = self.db.body(to_be_renamed.parent);
674 let resolver = to_be_renamed.parent.resolver(self.db);
675 let starting_expr = body.binding_owner(to_be_renamed.binding_id).unwrap_or(body.body_expr);
676 let mut visitor = RenameConflictsVisitor {
677 body: &body,
678 conflicts: FxHashSet::default(),
679 db: self.db,
680 new_name: new_name.symbol().clone(),
681 old_name: to_be_renamed.name(self.db).symbol().clone(),
682 owner: to_be_renamed.parent,
683 to_be_renamed: to_be_renamed.binding_id,
684 resolver,
685 };
686 visitor.rename_conflicts(starting_expr);
687 visitor
688 .conflicts
689 .into_iter()
690 .map(|binding_id| Local { parent: to_be_renamed.parent, binding_id })
691 .collect()
692 }
693
694 pub fn as_format_args_parts(
696 &self,
697 string: &ast::String,
698 ) -> Option<Vec<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)>> {
699 let string_start = string.syntax().text_range().start();
700 let token = self.wrap_token_infile(string.syntax().clone());
701 self.descend_into_macros_breakable(token, |token, _| {
702 (|| {
703 let token = token.value;
704 let string = ast::String::cast(token)?;
705 let literal =
706 string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
707 let parent = literal.parent()?;
708 if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
709 let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
710 let format_args = self.wrap_node_infile(format_args);
711 let res = source_analyzer
712 .as_format_args_parts(self.db, format_args.as_ref())?
713 .map(|(range, res)| (range + string_start, res.map(Either::Left)))
714 .collect();
715 Some(res)
716 } else {
717 let asm = ast::AsmExpr::cast(parent)?;
718 let source_analyzer = self.analyze_no_infer(asm.syntax())?;
719 let line = asm.template().position(|it| *it.syntax() == literal)?;
720 let asm = self.wrap_node_infile(asm);
721 let (owner, (expr, asm_parts)) = source_analyzer.as_asm_parts(asm.as_ref())?;
722 let res = asm_parts
723 .get(line)?
724 .iter()
725 .map(|&(range, index)| {
726 (
727 range + string_start,
728 Some(Either::Right(InlineAsmOperand { owner, expr, index })),
729 )
730 })
731 .collect();
732 Some(res)
733 }
734 })()
735 .map_or(ControlFlow::Continue(()), ControlFlow::Break)
736 })
737 }
738
739 pub fn check_for_format_args_template(
748 &self,
749 original_token: SyntaxToken,
750 offset: TextSize,
751 ) -> Option<(
752 TextRange,
753 HirFileRange,
754 ast::String,
755 Option<Either<PathResolution, InlineAsmOperand>>,
756 )> {
757 let original_token =
758 self.wrap_token_infile(original_token).map(ast::String::cast).transpose()?;
759 self.check_for_format_args_template_with_file(original_token, offset)
760 }
761
762 pub fn check_for_format_args_template_with_file(
770 &self,
771 original_token: InFile<ast::String>,
772 offset: TextSize,
773 ) -> Option<(
774 TextRange,
775 HirFileRange,
776 ast::String,
777 Option<Either<PathResolution, InlineAsmOperand>>,
778 )> {
779 let relative_offset =
780 offset.checked_sub(original_token.value.syntax().text_range().start())?;
781 self.descend_into_macros_breakable(
782 original_token.as_ref().map(|it| it.syntax().clone()),
783 |token, _| {
784 (|| {
785 let token = token.map(ast::String::cast).transpose()?;
786 self.resolve_offset_in_format_args(token.as_ref(), relative_offset).map(
787 |(range, res)| {
788 (
789 range + original_token.value.syntax().text_range().start(),
790 HirFileRange {
791 file_id: token.file_id,
792 range: range + token.value.syntax().text_range().start(),
793 },
794 token.value,
795 res,
796 )
797 },
798 )
799 })()
800 .map_or(ControlFlow::Continue(()), ControlFlow::Break)
801 },
802 )
803 }
804
805 fn resolve_offset_in_format_args(
806 &self,
807 InFile { value: string, file_id }: InFile<&ast::String>,
808 offset: TextSize,
809 ) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> {
810 debug_assert!(offset <= string.syntax().text_range().len());
811 let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
812 let parent = literal.parent()?;
813 if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
814 let source_analyzer =
815 &self.analyze_impl(InFile::new(file_id, format_args.syntax()), None, false)?;
816 source_analyzer
817 .resolve_offset_in_format_args(self.db, InFile::new(file_id, &format_args), offset)
818 .map(|(range, res)| (range, res.map(Either::Left)))
819 } else {
820 let asm = ast::AsmExpr::cast(parent)?;
821 let source_analyzer =
822 self.analyze_impl(InFile::new(file_id, asm.syntax()), None, false)?;
823 let line = asm.template().position(|it| *it.syntax() == literal)?;
824 source_analyzer
825 .resolve_offset_in_asm_template(InFile::new(file_id, &asm), line, offset)
826 .map(|(owner, (expr, range, index))| {
827 (range, Some(Either::Right(InlineAsmOperand { owner, expr, index })))
828 })
829 }
830 }
831
832 pub fn debug_hir_at(&self, token: SyntaxToken) -> Option<String> {
833 self.analyze_no_infer(&token.parent()?).and_then(|it| {
834 Some(match it.body_or_sig.as_ref()? {
835 crate::source_analyzer::BodyOrSig::Body { def, body, .. } => {
836 hir_def::expr_store::pretty::print_body_hir(
837 self.db,
838 body,
839 *def,
840 it.file_id.edition(self.db),
841 )
842 }
843 &crate::source_analyzer::BodyOrSig::VariantFields { def, .. } => {
844 hir_def::expr_store::pretty::print_variant_body_hir(
845 self.db,
846 def,
847 it.file_id.edition(self.db),
848 )
849 }
850 &crate::source_analyzer::BodyOrSig::Sig { def, .. } => {
851 hir_def::expr_store::pretty::print_signature(
852 self.db,
853 def,
854 it.file_id.edition(self.db),
855 )
856 }
857 })
858 })
859 }
860
861 pub fn descend_token_into_include_expansion(
863 &self,
864 tok: InRealFile<SyntaxToken>,
865 ) -> InFile<SyntaxToken> {
866 let Some(include) =
867 self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, tok.file_id)
868 else {
869 return tok.into();
870 };
871 let span = self.db.real_span_map(tok.file_id).span_for_range(tok.value.text_range());
872 let Some(InMacroFile { file_id, value: mut mapped_tokens }) = self.with_ctx(|ctx| {
873 Some(
874 ctx.cache
875 .get_or_insert_expansion(ctx.db, include)
876 .map_range_down(span)?
877 .map(SmallVec::<[_; 2]>::from_iter),
878 )
879 }) else {
880 return tok.into();
881 };
882 mapped_tokens.pop().map_or_else(|| tok.into(), |(tok, _)| InFile::new(file_id.into(), tok))
884 }
885
886 pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
888 let mut res = smallvec![];
890 let tokens = (|| {
891 let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
893 let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
894 Some((first, last))
895 })();
896 let (first, last) = match tokens {
897 Some(it) => it,
898 None => return res,
899 };
900 let file = self.find_file(node.syntax());
901
902 if first == last {
903 self.descend_into_macros_all(
905 InFile::new(file.file_id, first),
906 false,
907 &mut |InFile { value, .. }, _ctx| {
908 if let Some(node) = value
909 .parent_ancestors()
910 .take_while(|it| it.text_range() == value.text_range())
911 .find_map(N::cast)
912 {
913 res.push(node)
914 }
915 },
916 );
917 } else {
918 let mut scratch: SmallVec<[_; 1]> = smallvec![];
920 self.descend_into_macros_all(
921 InFile::new(file.file_id, first),
922 false,
923 &mut |token, _ctx| scratch.push(token),
924 );
925
926 let mut scratch = scratch.into_iter();
927 self.descend_into_macros_all(
928 InFile::new(file.file_id, last),
929 false,
930 &mut |InFile { value: last, file_id: last_fid }, _ctx| {
931 if let Some(InFile { value: first, file_id: first_fid }) = scratch.next()
932 && first_fid == last_fid
933 && let Some(p) = first.parent()
934 {
935 let range = first.text_range().cover(last.text_range());
936 let node = find_root(&p)
937 .covering_element(range)
938 .ancestors()
939 .take_while(|it| it.text_range() == range)
940 .find_map(N::cast);
941 if let Some(node) = node {
942 res.push(node);
943 }
944 }
945 },
946 );
947 }
948 res
949 }
950
951 pub fn is_inside_macro_call(&self, token @ InFile { value, .. }: InFile<&SyntaxToken>) -> bool {
956 value.parent_ancestors().any(|ancestor| {
957 if ast::MacroCall::can_cast(ancestor.kind()) {
958 return true;
959 }
960
961 let Some(item) = ast::Item::cast(ancestor) else {
962 return false;
963 };
964 self.with_ctx(|ctx| {
965 if ctx.item_to_macro_call(token.with_value(&item)).is_some() {
966 return true;
967 }
968 let adt = match item {
969 ast::Item::Struct(it) => it.into(),
970 ast::Item::Enum(it) => it.into(),
971 ast::Item::Union(it) => it.into(),
972 _ => return false,
973 };
974 ctx.file_of_adt_has_derives(token.with_value(&adt))
975 })
976 })
977 }
978
979 pub fn descend_into_macros_cb(
980 &self,
981 token: SyntaxToken,
982 mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext),
983 ) {
984 self.descend_into_macros_all(self.wrap_token_infile(token), false, &mut |t, ctx| {
985 cb(t, ctx)
986 });
987 }
988
989 pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
990 let mut res = smallvec![];
991 self.descend_into_macros_all(
992 self.wrap_token_infile(token.clone()),
993 false,
994 &mut |t, _ctx| res.push(t.value),
995 );
996 if res.is_empty() {
997 res.push(token);
998 }
999 res
1000 }
1001
1002 pub fn descend_into_macros_no_opaque(
1003 &self,
1004 token: SyntaxToken,
1005 always_descend_into_derives: bool,
1006 ) -> SmallVec<[InFile<SyntaxToken>; 1]> {
1007 let mut res = smallvec![];
1008 let token = self.wrap_token_infile(token);
1009 self.descend_into_macros_all(token.clone(), always_descend_into_derives, &mut |t, ctx| {
1010 if !ctx.is_opaque(self.db) {
1011 res.push(t);
1013 }
1014 });
1015 if res.is_empty() {
1016 res.push(token);
1017 }
1018 res
1019 }
1020
1021 pub fn descend_into_macros_breakable<T>(
1022 &self,
1023 token: InFile<SyntaxToken>,
1024 mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
1025 ) -> Option<T> {
1026 self.descend_into_macros_impl(token, false, &mut cb)
1027 }
1028
1029 pub fn descend_into_macros_exact(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
1032 let mut r = smallvec![];
1033 let text = token.text();
1034 let kind = token.kind();
1035
1036 self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }, ctx| {
1037 let mapped_kind = value.kind();
1038 let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
1039 let matches = (kind == mapped_kind || any_ident_match())
1040 && text == value.text()
1041 && !ctx.is_opaque(self.db);
1042 if matches {
1043 r.push(value);
1044 }
1045 });
1046 if r.is_empty() {
1047 r.push(token);
1048 }
1049 r
1050 }
1051
1052 pub fn descend_into_macros_exact_with_file(
1055 &self,
1056 token: SyntaxToken,
1057 ) -> SmallVec<[InFile<SyntaxToken>; 1]> {
1058 let mut r = smallvec![];
1059 let text = token.text();
1060 let kind = token.kind();
1061
1062 self.descend_into_macros_cb(token.clone(), |InFile { value, file_id }, ctx| {
1063 let mapped_kind = value.kind();
1064 let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
1065 let matches = (kind == mapped_kind || any_ident_match())
1066 && text == value.text()
1067 && !ctx.is_opaque(self.db);
1068 if matches {
1069 r.push(InFile { value, file_id });
1070 }
1071 });
1072 if r.is_empty() {
1073 r.push(self.wrap_token_infile(token));
1074 }
1075 r
1076 }
1077
1078 pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken {
1081 let text = token.text();
1082 let kind = token.kind();
1083 self.descend_into_macros_breakable(
1084 self.wrap_token_infile(token.clone()),
1085 |InFile { value, file_id: _ }, _ctx| {
1086 let mapped_kind = value.kind();
1087 let any_ident_match =
1088 || kind.is_any_identifier() && value.kind().is_any_identifier();
1089 let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
1090 if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
1091 },
1092 )
1093 .unwrap_or(token)
1094 }
1095
1096 fn descend_into_macros_all(
1097 &self,
1098 token: InFile<SyntaxToken>,
1099 always_descend_into_derives: bool,
1100 f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext),
1101 ) {
1102 self.descend_into_macros_impl(token, always_descend_into_derives, &mut |tok, ctx| {
1103 f(tok, ctx);
1104 CONTINUE_NO_BREAKS
1105 });
1106 }
1107
1108 fn descend_into_macros_impl<T>(
1109 &self,
1110 InFile { value: token, file_id }: InFile<SyntaxToken>,
1111 always_descend_into_derives: bool,
1112 f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
1113 ) -> Option<T> {
1114 let _p = tracing::info_span!("descend_into_macros_impl").entered();
1115
1116 let db = self.db;
1117 let span = db.span_map(file_id).span_for_range(token.text_range());
1118
1119 let process_expansion_for_token =
1121 |ctx: &mut SourceToDefCtx<'_, '_>, stack: &mut Vec<_>, macro_file| {
1122 let InMacroFile { file_id, value: mapped_tokens } = ctx
1123 .cache
1124 .get_or_insert_expansion(ctx.db, macro_file)
1125 .map_range_down(span)?
1126 .map(SmallVec::<[_; 2]>::from_iter);
1127 let res = mapped_tokens.is_empty().not().then_some(());
1129 stack.push((HirFileId::from(file_id), mapped_tokens));
1131 res
1132 };
1133
1134 let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![];
1139 let include = file_id
1140 .file_id()
1141 .and_then(|file_id| self.s2d_cache.borrow_mut().get_or_insert_include_for(db, file_id));
1142 match include {
1143 Some(include) => {
1144 self.with_ctx(|ctx| process_expansion_for_token(ctx, &mut stack, include))?;
1146 }
1147 None => {
1148 stack.push((file_id, smallvec![(token, span.ctx)]));
1149 }
1150 }
1151
1152 let mut m_cache = self.macro_call_cache.borrow_mut();
1153
1154 let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
1157 tokens.retain(|(t, _): &mut (SyntaxToken, _)| !range.contains_range(t.text_range()))
1158 };
1159
1160 while let Some((expansion, ref mut tokens)) = stack.pop() {
1161 tokens.reverse();
1165 while let Some((token, ctx)) = tokens.pop() {
1166 let was_not_remapped = (|| {
1167 let res = self.with_ctx(|ctx| {
1171 token
1172 .parent_ancestors()
1173 .filter_map(ast::Item::cast)
1174 .find_map(|item| {
1184 item.attrs().next()?;
1186 ctx.item_to_macro_call(InFile::new(expansion, &item))
1187 .zip(Some(item))
1188 })
1189 .map(|(call_id, item)| {
1190 let attr_id = match db.lookup_intern_macro_call(call_id).kind {
1191 hir_expand::MacroCallKind::Attr {
1192 invoc_attr_index, ..
1193 } => invoc_attr_index.ast_index(),
1194 _ => 0,
1195 };
1196 let text_range = item.syntax().text_range();
1209 let start = collect_attrs(&item)
1210 .nth(attr_id)
1211 .map(|attr| match attr.1 {
1212 Either::Left(it) => it.syntax().text_range().start(),
1213 Either::Right(it) => it.syntax().text_range().start(),
1214 })
1215 .unwrap_or_else(|| text_range.start());
1216 let text_range = TextRange::new(start, text_range.end());
1217 filter_duplicates(tokens, text_range);
1218 process_expansion_for_token(ctx, &mut stack, call_id)
1219 })
1220 });
1221
1222 if let Some(res) = res {
1223 return res;
1224 }
1225
1226 if always_descend_into_derives {
1227 let res = self.with_ctx(|ctx| {
1228 let (derives, adt) = token
1229 .parent_ancestors()
1230 .filter_map(ast::Adt::cast)
1231 .find_map(|adt| {
1232 Some((
1233 ctx.derive_macro_calls(InFile::new(expansion, &adt))?
1234 .map(|(a, b, c)| (a, b, c.to_owned()))
1235 .collect::<SmallVec<[_; 2]>>(),
1236 adt,
1237 ))
1238 })?;
1239 for (_, derive_attr, derives) in derives {
1240 process_expansion_for_token(ctx, &mut stack, derive_attr);
1244 for derive in derives.into_iter().flatten() {
1245 process_expansion_for_token(ctx, &mut stack, derive);
1246 }
1247 }
1248 filter_duplicates(tokens, adt.syntax().text_range());
1250 Some(())
1251 });
1252 if let Some(()) = res {
1255 return None;
1260 }
1261 }
1262 let tt = token
1265 .parent_ancestors()
1266 .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
1267 .last()?;
1268
1269 match tt {
1270 Either::Left(tt) => {
1272 let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
1273 if tt.left_delimiter_token().map_or(false, |it| it == token) {
1274 return None;
1275 }
1276 if tt.right_delimiter_token().map_or(false, |it| it == token) {
1277 return None;
1278 }
1279 let mcall = InFile::new(expansion, macro_call);
1280 let file_id = match m_cache.get(&mcall) {
1281 Some(&it) => it,
1282 None => {
1283 let it = ast::MacroCall::to_def(self, mcall.as_ref())?;
1284 m_cache.insert(mcall, it);
1285 it
1286 }
1287 };
1288 let text_range = tt.syntax().text_range();
1289 filter_duplicates(tokens, text_range);
1290
1291 self.with_ctx(|ctx| {
1292 process_expansion_for_token(ctx, &mut stack, file_id).or(file_id
1293 .eager_arg(db)
1294 .and_then(|arg| {
1295 process_expansion_for_token(ctx, &mut stack, arg)
1297 }))
1298 })
1299 }
1300 Either::Right(_) if always_descend_into_derives => None,
1301 Either::Right(meta) => {
1303 let attr = meta.parent_attr()?;
1306 let adt = match attr.syntax().parent().and_then(ast::Adt::cast) {
1307 Some(adt) => {
1308 let res = self.with_ctx(|ctx| {
1310 let derive_call = ctx
1313 .attr_to_derive_macro_call(
1314 InFile::new(expansion, &adt),
1315 InFile::new(expansion, attr.clone()),
1316 )?
1317 .1;
1318
1319 let text_range = attr.syntax().text_range();
1321 tokens.retain(|(t, _)| {
1324 !text_range.contains_range(t.text_range())
1325 });
1326 Some(process_expansion_for_token(
1327 ctx,
1328 &mut stack,
1329 derive_call,
1330 ))
1331 });
1332 if let Some(res) = res {
1333 return res;
1334 }
1335 Some(adt)
1336 }
1337 None => {
1338 attr.syntax().ancestors().find_map(ast::Item::cast).and_then(
1340 |it| match it {
1341 ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
1342 ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
1343 ast::Item::Union(it) => Some(ast::Adt::Union(it)),
1344 _ => None,
1345 },
1346 )
1347 }
1348 }?;
1349 let attr_name =
1350 attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
1351 let resolver = &token
1354 .parent()
1355 .and_then(|parent| {
1356 self.analyze_impl(InFile::new(expansion, &parent), None, false)
1357 })?
1358 .resolver;
1359 let id = db.ast_id_map(expansion).ast_id(&adt);
1360 let helpers = resolver
1361 .def_map()
1362 .derive_helpers_in_scope(InFile::new(expansion, id))?;
1363
1364 if !helpers.is_empty() {
1365 let text_range = attr.syntax().text_range();
1366 filter_duplicates(tokens, text_range);
1367 }
1368
1369 let mut res = None;
1370 self.with_ctx(|ctx| {
1371 for (.., derive) in
1372 helpers.iter().filter(|(helper, ..)| *helper == attr_name)
1373 {
1374 res = res
1378 .or(process_expansion_for_token(ctx, &mut stack, *derive));
1379 }
1380 res
1381 })
1382 }
1383 }
1384 })()
1385 .is_none();
1386 if was_not_remapped
1387 && let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx)
1388 {
1389 return Some(b);
1390 }
1391 }
1392 }
1393 None
1394 }
1395
1396 fn descend_node_at_offset(
1401 &self,
1402 node: &SyntaxNode,
1403 offset: TextSize,
1404 ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
1405 node.token_at_offset(offset)
1406 .map(move |token| self.descend_into_macros_exact(token))
1407 .map(|descendants| {
1408 descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
1409 })
1410 .kmerge_by(|left, right| {
1413 left.clone()
1414 .map(|node| node.text_range().len())
1415 .lt(right.clone().map(|node| node.text_range().len()))
1416 })
1417 }
1418
1419 pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
1423 let node = self.find_file(node);
1424 node.original_file_range_rooted(self.db)
1425 }
1426
1427 pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
1429 let node = self.find_file(node);
1430 node.original_file_range_opt(self.db).filter(|(_, ctx)| ctx.is_root()).map(TupleExt::head)
1431 }
1432
1433 pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
1436 self.wrap_node_infile(node).original_ast_node_rooted(self.db).map(
1437 |InRealFile { file_id, value }| {
1438 self.cache(find_root(value.syntax()), file_id.into());
1439 value
1440 },
1441 )
1442 }
1443
1444 pub fn original_syntax_node_rooted(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
1447 let InFile { file_id, .. } = self.find_file(node);
1448 InFile::new(file_id, node).original_syntax_node_rooted(self.db).map(
1449 |InRealFile { file_id, value }| {
1450 self.cache(find_root(&value), file_id.into());
1451 value
1452 },
1453 )
1454 }
1455
1456 pub fn diagnostics_display_range(
1457 &self,
1458 src: InFile<SyntaxNodePtr>,
1459 ) -> FileRangeWrapper<FileId> {
1460 let root = self.parse_or_expand(src.file_id);
1461 let node = src.map(|it| it.to_node(&root));
1462 let FileRange { file_id, range } = node.as_ref().original_file_range_rooted(self.db);
1463 FileRangeWrapper { file_id: file_id.file_id(self.db), range }
1464 }
1465
1466 fn token_ancestors_with_macros(
1467 &self,
1468 token: SyntaxToken,
1469 ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
1470 token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
1471 }
1472
1473 pub fn ancestors_with_macros(
1476 &self,
1477 node: SyntaxNode,
1478 ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
1479 let node = self.find_file(&node);
1480 self.ancestors_with_macros_file(node.cloned()).map(|it| it.value)
1481 }
1482
1483 pub fn ancestors_with_macros_file(
1485 &self,
1486 node: InFile<SyntaxNode>,
1487 ) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
1488 iter::successors(Some(node), move |&InFile { file_id, ref value }| match value.parent() {
1489 Some(parent) => Some(InFile::new(file_id, parent)),
1490 None => {
1491 let macro_file = file_id.macro_file()?;
1492
1493 self.with_ctx(|ctx| {
1494 let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
1495 expansion_info.arg().map(|node| node?.parent()).transpose()
1496 })
1497 }
1498 })
1499 }
1500
1501 pub fn ancestors_at_offset_with_macros(
1502 &self,
1503 node: &SyntaxNode,
1504 offset: TextSize,
1505 ) -> impl Iterator<Item = SyntaxNode> + '_ {
1506 node.token_at_offset(offset)
1507 .map(|token| self.token_ancestors_with_macros(token))
1508 .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
1509 }
1510
1511 pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
1512 let text = lifetime.text();
1513 let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
1514 let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
1515 gpl.lifetime_params()
1516 .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
1517 })?;
1518 let src = self.wrap_node_infile(lifetime_param);
1519 ToDef::to_def(self, src.as_ref())
1520 }
1521
1522 pub fn resolve_label(&self, label: &ast::Lifetime) -> Option<Label> {
1523 let src = self.wrap_node_infile(label.clone());
1524 let (parent, label_id) = self.with_ctx(|ctx| ctx.label_ref_to_def(src.as_ref()))?;
1525 Some(Label { parent, label_id })
1526 }
1527
1528 pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type<'db>> {
1529 let analyze = self.analyze(ty.syntax())?;
1530 analyze.type_of_type(self.db, ty)
1531 }
1532
1533 pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
1534 let parent_ty = path.syntax().parent().and_then(ast::Type::cast)?;
1535 let analyze = self.analyze(path.syntax())?;
1536 let ty = analyze.store_sm()?.node_type(InFile::new(analyze.file_id, &parent_ty))?;
1537 let path = match &analyze.store()?.types[ty] {
1538 hir_def::type_ref::TypeRef::Path(path) => path,
1539 _ => return None,
1540 };
1541 match analyze.resolver.resolve_path_in_type_ns_fully(self.db, path)? {
1542 TypeNs::TraitId(trait_id) => Some(trait_id.into()),
1543 _ => None,
1544 }
1545 }
1546
1547 pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment<'db>>> {
1548 let mutability = |m| match m {
1549 hir_ty::Mutability::Not => Mutability::Shared,
1550 hir_ty::Mutability::Mut => Mutability::Mut,
1551 };
1552
1553 let analyzer = self.analyze(expr.syntax())?;
1554
1555 let (mut source_ty, _) = analyzer.type_of_expr(self.db, expr)?;
1556
1557 analyzer.expr_adjustments(expr).map(|it| {
1558 it.iter()
1559 .map(|adjust| {
1560 let target =
1561 Type::new_with_resolver(self.db, &analyzer.resolver, adjust.target.clone());
1562 let kind = match adjust.kind {
1563 hir_ty::Adjust::NeverToAny => Adjust::NeverToAny,
1564 hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => {
1565 Adjust::Deref(Some(OverloadedDeref(
1567 m.map(mutability).unwrap_or(Mutability::Shared),
1568 )))
1569 }
1570 hir_ty::Adjust::Deref(None) => Adjust::Deref(None),
1571 hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
1572 Adjust::Borrow(AutoBorrow::RawPtr(mutability(m)))
1573 }
1574 hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(_, m)) => {
1575 Adjust::Borrow(AutoBorrow::Ref(mutability(m)))
1577 }
1578 hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc),
1579 };
1580
1581 let source = mem::replace(&mut source_ty, target.clone());
1583
1584 Adjustment { source, target, kind }
1585 })
1586 .collect()
1587 })
1588 }
1589
1590 pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo<'db>> {
1591 self.analyze(expr.syntax())?
1592 .type_of_expr(self.db, expr)
1593 .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
1594 }
1595
1596 pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo<'db>> {
1597 self.analyze(pat.syntax())?
1598 .type_of_pat(self.db, pat)
1599 .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
1600 }
1601
1602 pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type<'db>> {
1606 self.analyze(pat.syntax())?.type_of_binding_in_pat(self.db, pat)
1607 }
1608
1609 pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type<'db>> {
1610 self.analyze(param.syntax())?.type_of_self(self.db, param)
1611 }
1612
1613 pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type<'db>; 1]> {
1614 self.analyze(pat.syntax())
1615 .and_then(|it| it.pattern_adjustments(self.db, pat))
1616 .unwrap_or_default()
1617 }
1618
1619 pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
1620 self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
1621 }
1622
1623 pub fn resolve_expr_as_callable(&self, call: &ast::Expr) -> Option<Callable<'db>> {
1624 self.analyze(call.syntax())?.resolve_expr_as_callable(self.db, call)
1625 }
1626
1627 pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
1628 self.analyze(call.syntax())?.resolve_method_call(self.db, call)
1629 }
1630
1631 pub fn resolve_method_call_fallback(
1633 &self,
1634 call: &ast::MethodCallExpr,
1635 ) -> Option<(Either<Function, Field>, Option<GenericSubstitution<'db>>)> {
1636 self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
1637 }
1638
1639 pub fn resolve_trait_impl_method(
1642 &self,
1643 env: Type<'db>,
1644 trait_: Trait,
1645 func: Function,
1646 subst: impl IntoIterator<Item = Type<'db>>,
1647 ) -> Option<Function> {
1648 let mut substs = hir_ty::TyBuilder::subst_for_def(self.db, TraitId::from(trait_), None);
1649 for s in subst {
1650 substs = substs.push(s.ty);
1651 }
1652 Some(self.db.lookup_impl_method(env.env, func.into(), substs.build()).0.into())
1653 }
1654
1655 fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<StructId> {
1656 self.analyze(range_pat.syntax())?.resolve_range_pat(self.db, range_pat)
1657 }
1658
1659 fn resolve_range_expr(&self, range_expr: &ast::RangeExpr) -> Option<StructId> {
1660 self.analyze(range_expr.syntax())?.resolve_range_expr(self.db, range_expr)
1661 }
1662
1663 fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<FunctionId> {
1664 self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr)
1665 }
1666
1667 fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<FunctionId> {
1668 self.analyze(prefix_expr.syntax())?.resolve_prefix_expr(self.db, prefix_expr)
1669 }
1670
1671 fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<FunctionId> {
1672 self.analyze(index_expr.syntax())?.resolve_index_expr(self.db, index_expr)
1673 }
1674
1675 fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<FunctionId> {
1676 self.analyze(bin_expr.syntax())?.resolve_bin_expr(self.db, bin_expr)
1677 }
1678
1679 fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<FunctionId> {
1680 self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr)
1681 }
1682
1683 pub fn resolve_method_call_as_callable(
1686 &self,
1687 call: &ast::MethodCallExpr,
1688 ) -> Option<Callable<'db>> {
1689 self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
1690 }
1691
1692 pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Either<Field, TupleField>> {
1693 self.analyze(field.syntax())?.resolve_field(field)
1694 }
1695
1696 pub fn resolve_field_fallback(
1697 &self,
1698 field: &ast::FieldExpr,
1699 ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution<'db>>)>
1700 {
1701 self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
1702 }
1703
1704 pub fn resolve_record_field(
1705 &self,
1706 field: &ast::RecordExprField,
1707 ) -> Option<(Field, Option<Local>, Type<'db>)> {
1708 self.resolve_record_field_with_substitution(field)
1709 .map(|(field, local, ty, _)| (field, local, ty))
1710 }
1711
1712 pub fn resolve_record_field_with_substitution(
1713 &self,
1714 field: &ast::RecordExprField,
1715 ) -> Option<(Field, Option<Local>, Type<'db>, GenericSubstitution<'db>)> {
1716 self.analyze(field.syntax())?.resolve_record_field(self.db, field)
1717 }
1718
1719 pub fn resolve_record_pat_field(
1720 &self,
1721 field: &ast::RecordPatField,
1722 ) -> Option<(Field, Type<'db>)> {
1723 self.resolve_record_pat_field_with_subst(field).map(|(field, ty, _)| (field, ty))
1724 }
1725
1726 pub fn resolve_record_pat_field_with_subst(
1727 &self,
1728 field: &ast::RecordPatField,
1729 ) -> Option<(Field, Type<'db>, GenericSubstitution<'db>)> {
1730 self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
1731 }
1732
1733 pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
1735 let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
1736 self.resolve_macro_call2(macro_call)
1737 }
1738
1739 pub fn resolve_macro_call2(&self, macro_call: InFile<&ast::MacroCall>) -> Option<Macro> {
1740 self.to_def2(macro_call)
1741 .and_then(|call| self.with_ctx(|ctx| macro_call_to_macro_id(ctx, call)))
1742 .map(Into::into)
1743 }
1744
1745 pub fn is_proc_macro_call(&self, macro_call: InFile<&ast::MacroCall>) -> bool {
1746 self.resolve_macro_call2(macro_call)
1747 .is_some_and(|m| matches!(m.id, MacroId::ProcMacroId(..)))
1748 }
1749
1750 pub fn resolve_macro_call_arm(&self, macro_call: &ast::MacroCall) -> Option<u32> {
1751 let file_id = self.to_def(macro_call)?;
1752 self.db.parse_macro_expansion(file_id).value.1.matched_arm
1753 }
1754
1755 pub fn get_unsafe_ops(&self, def: DefWithBody) -> FxHashSet<ExprOrPatSource> {
1756 let def = DefWithBodyId::from(def);
1757 let (body, source_map) = self.db.body_with_source_map(def);
1758 let infer = self.db.infer(def);
1759 let mut res = FxHashSet::default();
1760 unsafe_operations_for_body(self.db, &infer, def, &body, &mut |node| {
1761 if let Ok(node) = source_map.expr_or_pat_syntax(node) {
1762 res.insert(node);
1763 }
1764 });
1765 res
1766 }
1767
1768 pub fn get_unsafe_ops_for_unsafe_block(&self, block: ast::BlockExpr) -> Vec<ExprOrPatSource> {
1769 always!(block.unsafe_token().is_some());
1770 let block = self.wrap_node_infile(ast::Expr::from(block));
1771 let Some(def) = self.body_for(block.syntax()) else { return Vec::new() };
1772 let def = def.into();
1773 let (body, source_map) = self.db.body_with_source_map(def);
1774 let infer = self.db.infer(def);
1775 let Some(ExprOrPatId::ExprId(block)) = source_map.node_expr(block.as_ref()) else {
1776 return Vec::new();
1777 };
1778 let mut res = Vec::default();
1779 unsafe_operations(self.db, &infer, def, &body, block, &mut |node, _| {
1780 if let Ok(node) = source_map.expr_or_pat_syntax(node) {
1781 res.push(node);
1782 }
1783 });
1784 res
1785 }
1786
1787 pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
1788 let Some(mac) = self.resolve_macro_call(macro_call) else { return false };
1789 if mac.is_asm_like(self.db) {
1790 return true;
1791 }
1792
1793 let Some(sa) = self.analyze(macro_call.syntax()) else { return false };
1794 let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
1795 match macro_call.map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast)).transpose() {
1796 Some(it) => sa.is_unsafe_macro_call_expr(self.db, it.as_ref()),
1797 None => false,
1798 }
1799 }
1800
1801 pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
1802 let item_in_file = self.wrap_node_infile(item.clone());
1803 let id = self.with_ctx(|ctx| {
1804 let macro_call_id = ctx.item_to_macro_call(item_in_file.as_ref())?;
1805 macro_call_to_macro_id(ctx, macro_call_id)
1806 })?;
1807 Some(Macro { id })
1808 }
1809
1810 pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
1811 self.resolve_path_with_subst(path).map(|(it, _)| it)
1812 }
1813
1814 pub fn resolve_path_per_ns(&self, path: &ast::Path) -> Option<PathResolutionPerNs> {
1815 self.analyze(path.syntax())?.resolve_hir_path_per_ns(self.db, path)
1816 }
1817
1818 pub fn resolve_path_with_subst(
1819 &self,
1820 path: &ast::Path,
1821 ) -> Option<(PathResolution, Option<GenericSubstitution<'db>>)> {
1822 self.analyze(path.syntax())?.resolve_path(self.db, path)
1823 }
1824
1825 pub fn resolve_use_type_arg(&self, name: &ast::NameRef) -> Option<TypeParam> {
1826 self.analyze(name.syntax())?.resolve_use_type_arg(name)
1827 }
1828
1829 pub fn resolve_offset_of_field(
1830 &self,
1831 name_ref: &ast::NameRef,
1832 ) -> Option<(Either<Variant, Field>, GenericSubstitution<'db>)> {
1833 self.analyze_no_infer(name_ref.syntax())?.resolve_offset_of_field(self.db, name_ref)
1834 }
1835
1836 pub fn resolve_mod_path(
1837 &self,
1838 scope: &SyntaxNode,
1839 path: &ModPath,
1840 ) -> Option<impl Iterator<Item = ItemInNs>> {
1841 let analyze = self.analyze(scope)?;
1842 let items = analyze.resolver.resolve_module_path_in_items(self.db, path);
1843 Some(items.iter_items().map(|(item, _)| item.into()))
1844 }
1845
1846 fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
1847 self.analyze(record_lit.syntax())?.resolve_variant(record_lit)
1848 }
1849
1850 pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
1851 self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
1852 }
1853
1854 pub fn record_literal_missing_fields(
1855 &self,
1856 literal: &ast::RecordExpr,
1857 ) -> Vec<(Field, Type<'db>)> {
1858 self.analyze(literal.syntax())
1859 .and_then(|it| it.record_literal_missing_fields(self.db, literal))
1860 .unwrap_or_default()
1861 }
1862
1863 pub fn record_pattern_missing_fields(
1864 &self,
1865 pattern: &ast::RecordPat,
1866 ) -> Vec<(Field, Type<'db>)> {
1867 self.analyze(pattern.syntax())
1868 .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
1869 .unwrap_or_default()
1870 }
1871
1872 fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
1873 let mut ctx = SourceToDefCtx { db: self.db, cache: &mut self.s2d_cache.borrow_mut() };
1874 f(&mut ctx)
1875 }
1876
    pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
        let src = self.find_file(src.syntax()).with_value(src);
        T::to_def(self, src)
    }

    pub fn to_def2<T: ToDef>(&self, src: InFile<&T>) -> Option<T::Def> {
        T::to_def(self, src)
    }

    fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
        self.with_ctx(|ctx| ctx.file_to_def(file).to_owned()).into_iter().map(Module::from)
    }

    fn hir_file_to_module_defs(&self, file: HirFileId) -> impl Iterator<Item = Module> {
        self.file_to_module_defs(file.original_file_respecting_includes(self.db).file_id(self.db))
    }

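    /// Returns the `SemanticsScope` (the set of visible names) at `node`, if a
    /// containing definition can be found for it.
    ///
    /// Illustrative sketch, not taken from the upstream docs:
    ///
    /// ```ignore
    /// if let Some(scope) = sema.scope(expr.syntax()) {
    ///     scope.process_all_names(&mut |name, def| {
    ///         // called for every name visible at `expr`
    ///     });
    /// }
    /// ```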
    pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
        self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
            db: self.db,
            file_id,
            resolver,
        })
    }

    pub fn scope_at_offset(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<SemanticsScope<'db>> {
        self.analyze_with_offset_no_infer(node, offset).map(
            |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
                db: self.db,
                file_id,
                resolver,
            },
        )
    }

    pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
    where
        Def::Ast: AstNode,
    {
        let res = def.source(self.db)?;
        self.cache(find_root(res.value.syntax()), res.file_id);
        Some(res)
    }

    pub fn body_for(&self, node: InFile<&SyntaxNode>) -> Option<DefWithBody> {
        let container = self.with_ctx(|ctx| ctx.find_container(node))?;

        match container {
            ChildContainer::DefWithBodyId(def) => Some(def.into()),
            _ => None,
        }
    }

    fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer<'db>> {
        let node = self.find_file(node);
        self.analyze_impl(node, None, true)
    }

    fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer<'db>> {
        let node = self.find_file(node);
        self.analyze_impl(node, None, false)
    }

    fn analyze_with_offset_no_infer(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<SourceAnalyzer<'db>> {
        let node = self.find_file(node);
        self.analyze_impl(node, Some(offset), false)
    }

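    // Single entry point behind the `analyze*` helpers above: look up the
    // container of `node` and build a `SourceAnalyzer` for it. Bodies
    // optionally run type inference (`infer_body`); variants, traits, impls,
    // enums and other generic definitions get a generic-def analyzer; plain
    // modules fall through to a resolver-only analyzer.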
    fn analyze_impl(
        &self,
        node: InFile<&SyntaxNode>,
        offset: Option<TextSize>,
        infer_body: bool,
    ) -> Option<SourceAnalyzer<'db>> {
        let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered();

        let container = self.with_ctx(|ctx| ctx.find_container(node))?;

        let resolver = match container {
            ChildContainer::DefWithBodyId(def) => {
                return Some(if infer_body {
                    SourceAnalyzer::new_for_body(self.db, def, node, offset)
                } else {
                    SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
                });
            }
            ChildContainer::VariantId(def) => {
                return Some(SourceAnalyzer::new_variant_body(self.db, def, node, offset));
            }
            ChildContainer::TraitId(it) => {
                return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
            }
            ChildContainer::ImplId(it) => {
                return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
            }
            ChildContainer::EnumId(it) => {
                return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
            }
            ChildContainer::GenericDefId(it) => {
                return Some(SourceAnalyzer::new_generic_def(self.db, it, node, offset));
            }
            ChildContainer::ModuleId(it) => it.resolver(self.db),
        };
        Some(SourceAnalyzer::new_for_resolver(resolver, node))
    }

    fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
        SourceToDefCache::cache(
            &mut self.s2d_cache.borrow_mut().root_to_file_cache,
            root_node,
            file_id,
        );
    }

    pub fn assert_contains_node(&self, node: &SyntaxNode) {
        self.find_file(node);
    }

    fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
        let cache = self.s2d_cache.borrow();
        cache.root_to_file_cache.get(root_node).copied()
    }

    fn wrap_node_infile<N: AstNode>(&self, node: N) -> InFile<N> {
        let InFile { file_id, .. } = self.find_file(node.syntax());
        InFile::new(file_id, node)
    }

    fn wrap_token_infile(&self, token: SyntaxToken) -> InFile<SyntaxToken> {
        let InFile { file_id, .. } = self.find_file(&token.parent().unwrap());
        InFile::new(file_id, token)
    }

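    /// Wraps `node` with the `HirFileId` of the syntax tree it belongs to.
    /// Panics if the tree's root was never cached by this `Semantics`, i.e.
    /// the node was not obtained through this instance.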
    fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
        let root_node = find_root(node);
        let file_id = self.lookup(&root_node).unwrap_or_else(|| {
            panic!(
                "\n\nFailed to lookup {:?} in this Semantics.\n\
                 Make sure to only query nodes derived from this instance of Semantics.\n\
                 root node: {:?}\n\
                 known nodes: {}\n\n",
                node,
                root_node,
                self.s2d_cache
                    .borrow()
                    .root_to_file_cache
                    .keys()
                    .map(|it| format!("{it:?}"))
                    .collect::<Vec<_>>()
                    .join(", ")
            )
        });
        InFile::new(file_id, node)
    }

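    /// Returns `true` if `expr` sits inside an `unsafe` block or an `unsafe fn`
    /// body, walking from `expr` up to its enclosing item and checking each
    /// intermediate expression against the lowered body.
    ///
    /// Illustrative sketch, not taken from the upstream docs:
    ///
    /// ```ignore
    /// // for `unsafe { *ptr }`, the dereference expression reports `true`
    /// assert!(sema.is_inside_unsafe(&deref_expr));
    /// ```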
    pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
        let Some(enclosing_item) =
            expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast)
        else {
            return false;
        };

        let def = match &enclosing_item {
            Either::Left(ast::Item::Fn(it)) if it.unsafe_token().is_some() => return true,
            Either::Left(ast::Item::Fn(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::FunctionId)
            }
            Either::Left(ast::Item::Const(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::ConstId)
            }
            Either::Left(ast::Item::Static(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::StaticId)
            }
            Either::Left(_) => None,
            Either::Right(it) => self.to_def(it).map(<_>::into).map(DefWithBodyId::VariantId),
        };
        let Some(def) = def else { return false };
        let enclosing_node = enclosing_item.as_ref().either(|i| i.syntax(), |v| v.syntax());

        let (body, source_map) = self.db.body_with_source_map(def);

        let file_id = self.find_file(expr.syntax()).file_id;

        let Some(mut parent) = expr.syntax().parent() else { return false };
        loop {
            if &parent == enclosing_node {
                break false;
            }

            if let Some(parent) = ast::Expr::cast(parent.clone())
                && let Some(ExprOrPatId::ExprId(expr_id)) =
                    source_map.node_expr(InFile { file_id, value: &parent })
                && let Expr::Unsafe { .. } = body[expr_id]
            {
                break true;
            }

            let Some(parent_) = parent.parent() else { break false };
            parent = parent_;
        }
    }
}

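// Maps a `MacroCallId` back to the `MacroId` of the macro definition it
// invokes: the definition's AST node is re-obtained (from a real file or a
// macro expansion) and run through the source-to-def context, dispatching on
// whether the definition is a declarative/built-in macro or a proc macro.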
fn macro_call_to_macro_id(
    ctx: &mut SourceToDefCtx<'_, '_>,
    macro_call_id: MacroCallId,
) -> Option<MacroId> {
    let db: &dyn ExpandDatabase = ctx.db;
    let loc = db.lookup_intern_macro_call(macro_call_id);

    match loc.def.ast_id() {
        Either::Left(it) => {
            let node = match it.file_id {
                HirFileId::FileId(file_id) => {
                    it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
                }
                HirFileId::MacroFile(macro_file) => {
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    it.to_ptr(db).to_node(&expansion_info.expanded().value)
                }
            };
            ctx.macro_to_def(InFile::new(it.file_id, &node))
        }
        Either::Right(it) => {
            let node = match it.file_id {
                HirFileId::FileId(file_id) => {
                    it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
                }
                HirFileId::MacroFile(macro_file) => {
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    it.to_ptr(db).to_node(&expansion_info.expanded().value)
                }
            };
            ctx.proc_macro_to_def(InFile::new(it.file_id, &node))
        }
    }
}

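/// Converts an AST node into its HIR counterpart, in the context of a
/// `SemanticsImpl`. Implementations are generated by `to_def_impls!` below.
///
/// Illustrative sketch, not taken from the upstream docs:
///
/// ```ignore
/// let strukt: Option<Struct> = sema.to_def(&ast_struct);
/// ```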
pub trait ToDef: AstNode + Clone {
    type Def;
    fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def>;
}

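// `to_def_impls!` expands each `(Def, Ast, method)` triple into a `ToDef`
// impl that delegates to the corresponding `SourceToDefCtx` method. For
// example (expansion sketch, not literal compiler output):
//
//     impl ToDef for ast::Struct {
//         type Def = crate::Struct;
//         fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def> {
//             sema.with_ctx(|ctx| ctx.struct_to_def(src)).map(<crate::Struct>::from)
//         }
//     }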
macro_rules! to_def_impls {
    ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
        impl ToDef for $ast {
            type Def = $def;
            fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def> {
                sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
            }
        }
    )*}
}

to_def_impls![
    (crate::Module, ast::Module, module_to_def),
    (crate::Module, ast::SourceFile, source_file_to_def),
    (crate::Struct, ast::Struct, struct_to_def),
    (crate::Enum, ast::Enum, enum_to_def),
    (crate::Union, ast::Union, union_to_def),
    (crate::Trait, ast::Trait, trait_to_def),
    (crate::Impl, ast::Impl, impl_to_def),
    (crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
    (crate::Const, ast::Const, const_to_def),
    (crate::Static, ast::Static, static_to_def),
    (crate::Function, ast::Fn, fn_to_def),
    (crate::Field, ast::RecordField, record_field_to_def),
    (crate::Field, ast::TupleField, tuple_field_to_def),
    (crate::Variant, ast::Variant, enum_variant_to_def),
    (crate::TypeParam, ast::TypeParam, type_param_to_def),
    (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
    (crate::ConstParam, ast::ConstParam, const_param_to_def),
    (crate::GenericParam, ast::GenericParam, generic_param_to_def),
    (crate::Macro, ast::Macro, macro_to_def),
    (crate::Local, ast::IdentPat, bind_pat_to_def),
    (crate::Local, ast::SelfParam, self_param_to_def),
    (crate::Label, ast::Label, label_to_def),
    (crate::Adt, ast::Adt, adt_to_def),
    (crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
    (crate::InlineAsmOperand, ast::AsmOperandNamed, asm_operand_to_def),
    (crate::ExternBlock, ast::ExternBlock, extern_block_to_def),
    (MacroCallId, ast::MacroCall, macro_call_to_macro_call),
];

fn find_root(node: &SyntaxNode) -> SyntaxNode {
    node.ancestors().last().unwrap()
}

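/// The set of names visible at a particular point in a file, bundling the
/// database handle, the `HirFileId` of that point, and the `Resolver` built
/// for it. Obtained via `SemanticsImpl::scope` / `scope_at_offset`.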
#[derive(Debug)]
pub struct SemanticsScope<'db> {
    pub db: &'db dyn HirDatabase,
    file_id: HirFileId,
    resolver: Resolver<'db>,
}

impl<'db> SemanticsScope<'db> {
    pub fn file_id(&self) -> HirFileId {
        self.file_id
    }

    pub fn module(&self) -> Module {
        Module { id: self.resolver.module() }
    }

    pub fn krate(&self) -> Crate {
        Crate { id: self.resolver.krate() }
    }

    pub fn containing_function(&self) -> Option<Function> {
        self.resolver.body_owner().and_then(|owner| match owner {
            DefWithBodyId::FunctionId(id) => Some(id.into()),
            _ => None,
        })
    }

    pub(crate) fn resolver(&self) -> &Resolver<'db> {
        &self.resolver
    }

    pub fn visible_traits(&self) -> VisibleTraits {
        let resolver = &self.resolver;
        VisibleTraits(resolver.traits_in_scope(self.db))
    }

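    /// Calls `f` once for every name visible in this scope, together with the
    /// `ScopeDef` it resolves to (module items, locals, labels, generic
    /// params, `Self` types, or `Unknown`).
    ///
    /// Illustrative sketch, not taken from the upstream docs:
    ///
    /// ```ignore
    /// let mut locals = Vec::new();
    /// scope.process_all_names(&mut |name, def| {
    ///     if let ScopeDef::Local(local) = def {
    ///         locals.push((name, local));
    ///     }
    /// });
    /// ```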
    pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
        let scope = self.resolver.names_in_scope(self.db);
        for (name, entries) in scope {
            for entry in entries {
                let def = match entry {
                    resolver::ScopeDef::ModuleDef(it) => ScopeDef::ModuleDef(it.into()),
                    resolver::ScopeDef::Unknown => ScopeDef::Unknown,
                    resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
                    resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
                    resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
                    resolver::ScopeDef::Local(binding_id) => match self.resolver.body_owner() {
                        Some(parent) => ScopeDef::Local(Local { parent, binding_id }),
                        None => continue,
                    },
                    resolver::ScopeDef::Label(label_id) => match self.resolver.body_owner() {
                        Some(parent) => ScopeDef::Label(Label { parent, label_id }),
                        None => continue,
                    },
                };
                f(name.clone(), def)
            }
        }
    }

    pub fn can_use_trait_methods(&self, t: Trait) -> bool {
        self.resolver.traits_in_scope(self.db).contains(&t.id)
    }

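    /// Resolves `ast_path` as if it were written at this scope's position: the
    /// segments are re-lowered into a `ModPath` (tracking leading `::`, `self`,
    /// `super`, `crate`, and `Self` prefixes) and resolved via
    /// `resolve_hir_path`, so the path need not come from an analyzed file.
    ///
    /// Illustrative sketch, not taken from the upstream docs; assumes `path`
    /// was parsed separately, e.g. from user input in an IDE action:
    ///
    /// ```ignore
    /// let resolution: Option<PathResolution> = scope.speculative_resolve(&path);
    /// ```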
    pub fn speculative_resolve(&self, ast_path: &ast::Path) -> Option<PathResolution> {
        let mut kind = PathKind::Plain;
        let mut segments = vec![];
        let mut first = true;
        for segment in ast_path.segments() {
            if first {
                first = false;
                if segment.coloncolon_token().is_some() {
                    kind = PathKind::Abs;
                }
            }

            let Some(k) = segment.kind() else { continue };
            match k {
                ast::PathSegmentKind::Name(name_ref) => segments.push(name_ref.as_name()),
                ast::PathSegmentKind::Type { .. } => continue,
                ast::PathSegmentKind::SelfTypeKw => {
                    segments.push(Name::new_symbol_root(sym::Self_))
                }
                ast::PathSegmentKind::SelfKw => kind = PathKind::Super(0),
                ast::PathSegmentKind::SuperKw => match kind {
                    PathKind::Super(s) => kind = PathKind::Super(s + 1),
                    PathKind::Plain => kind = PathKind::Super(1),
                    PathKind::Crate | PathKind::Abs | PathKind::DollarCrate(_) => continue,
                },
                ast::PathSegmentKind::CrateKw => kind = PathKind::Crate,
            }
        }

        resolve_hir_path(
            self.db,
            &self.resolver,
            &Path::BarePath(Interned::new(ModPath::from_segments(kind, segments))),
            name_hygiene(self.db, InFile::new(self.file_id, ast_path.syntax())),
            None,
        )
    }

    pub fn resolve_mod_path(&self, path: &ModPath) -> impl Iterator<Item = ItemInNs> + use<> {
        let items = self.resolver.resolve_module_path_in_items(self.db, path);
        items.iter_items().map(|(item, _)| item.into())
    }

    pub fn assoc_type_shorthand_candidates(
        &self,
        resolution: &PathResolution,
        mut cb: impl FnMut(TypeAlias),
    ) {
        let (Some(def), Some(resolution)) = (self.resolver.generic_def(), resolution.in_type_ns())
        else {
            return;
        };
        hir_ty::associated_type_shorthand_candidates(self.db, def, resolution, |_, id| {
            cb(id.into());
            false
        });
    }

    pub fn generic_def(&self) -> Option<crate::GenericDef> {
        self.resolver.generic_def().map(|id| id.into())
    }

    pub fn extern_crates(&self) -> impl Iterator<Item = (Name, Module)> + '_ {
        self.resolver.extern_crates_in_scope().map(|(name, id)| (name, Module { id }))
    }

    pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
        self.resolver.extern_crate_decls_in_scope(self.db)
    }

    pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool {
        self.resolver.impl_def() == other.resolver.impl_def()
    }
}

#[derive(Debug)]
pub struct VisibleTraits(pub FxHashSet<TraitId>);

impl ops::Deref for VisibleTraits {
    type Target = FxHashSet<TraitId>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

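// Walks a lowered body looking for name references that would start resolving
// to a different binding (or collide with another variable) if `to_be_renamed`
// were renamed from `old_name` to `new_name`; every such binding is recorded
// in `conflicts`.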
struct RenameConflictsVisitor<'a> {
    db: &'a dyn HirDatabase,
    owner: DefWithBodyId,
    resolver: Resolver<'a>,
    body: &'a Body,
    to_be_renamed: BindingId,
    new_name: Symbol,
    old_name: Symbol,
    conflicts: FxHashSet<BindingId>,
}

impl RenameConflictsVisitor<'_> {
    fn resolve_path(&mut self, node: ExprOrPatId, path: &Path) {
        if let Path::BarePath(path) = path
            && let Some(name) = path.as_ident()
        {
            if *name.symbol() == self.new_name {
                if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed(
                    self.db,
                    name,
                    path,
                    self.body.expr_or_pat_path_hygiene(node),
                    self.to_be_renamed,
                ) {
                    self.conflicts.insert(conflicting);
                }
            } else if *name.symbol() == self.old_name
                && let Some(conflicting) = self.resolver.rename_will_conflict_with_another_variable(
                    self.db,
                    name,
                    path,
                    self.body.expr_or_pat_path_hygiene(node),
                    &self.new_name,
                    self.to_be_renamed,
                )
            {
                self.conflicts.insert(conflicting);
            }
        }
    }

    fn rename_conflicts(&mut self, expr: ExprId) {
        match &self.body[expr] {
            Expr::Path(path) => {
                let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
                self.resolve_path(expr.into(), path);
                self.resolver.reset_to_guard(guard);
            }
            &Expr::Assignment { target, .. } => {
                let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
                self.body.walk_pats(target, &mut |pat| {
                    if let Pat::Path(path) = &self.body[pat] {
                        self.resolve_path(pat.into(), path);
                    }
                });
                self.resolver.reset_to_guard(guard);
            }
            _ => {}
        }

        self.body.walk_child_exprs(expr, |expr| self.rename_conflicts(expr));
    }
}