// hir/semantics.rs
//! See `Semantics`.
2
3mod child_by_source;
4mod source_to_def;
5
6use std::{
7    cell::RefCell,
8    convert::Infallible,
9    fmt, iter, mem,
10    ops::{self, ControlFlow, Not},
11};
12
13use base_db::{FxIndexSet, all_crates, toolchain_channel};
14use either::Either;
15use hir_def::{
16    BuiltinDeriveImplId, DefWithBodyId, ExpressionStoreOwnerId, GenericDefId, HasModule, MacroId,
17    StructId, TraitId, VariantId,
18    attrs::parse_extra_crate_attrs,
19    expr_store::{Body, ExprOrPatSource, ExpressionStore, HygieneId, path::Path},
20    hir::{BindingId, Expr, ExprId, ExprOrPatId},
21    nameres::{ModuleOrigin, crate_def_map},
22    resolver::{self, HasResolver, Resolver, TypeNs, ValueNs},
23    type_ref::Mutability,
24};
25use hir_expand::{
26    EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
27    attrs::AstPathExt,
28    builtin::{BuiltinFnLikeExpander, EagerExpander},
29    db::ExpandDatabase,
30    files::{FileRangeWrapper, HirFileRange, InRealFile},
31    mod_path::{ModPath, PathKind},
32    name::AsName,
33};
34use hir_ty::{
35    InferBodyId, InferenceResult,
36    db::AnonConstId,
37    diagnostics::unsafe_operations,
38    infer_query_with_inspect,
39    next_solver::{
40        AnyImplId, DbInterner,
41        format_proof_tree::{ProofTreeData, dump_proof_tree_structured},
42    },
43};
44use intern::{Interned, Symbol, sym};
45use itertools::Itertools;
46use rustc_hash::{FxHashMap, FxHashSet};
47use smallvec::{SmallVec, smallvec};
48use span::{FileId, SyntaxContext};
49use stdx::{TupleExt, always};
50use syntax::{
51    AstNode, AstPtr, AstToken, Direction, SmolStr, SmolStrBuilder, SyntaxElement, SyntaxKind,
52    SyntaxNode, SyntaxNodePtr, SyntaxToken, T, TextRange, TextSize,
53    algo::skip_trivia_token,
54    ast::{self, HasAttrs as _, HasGenericParams},
55};
56
57use crate::{
58    Adjust, Adjustment, Adt, AnyFunctionId, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const,
59    ConstParam, Crate, DeriveHelper, Enum, EnumVariant, ExpressionStoreOwner, Field, Function,
60    GenericSubstitution, HasSource, Impl, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam,
61    Local, Macro, Module, ModuleDef, Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule,
62    Trait, TupleField, Type, TypeAlias, TypeParam, Union, Variant,
63    db::HirDatabase,
64    semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
65    source_analyzer::{SourceAnalyzer, resolve_hir_path},
66};
67
/// Shorthand for visitor callbacks that always continue traversal and can never break
/// (the `Infallible` break type makes "break" unrepresentable).
const CONTINUE_NO_BREAKS: ControlFlow<Infallible, ()> = ControlFlow::Continue(());
69
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PathResolution {
    /// An item
    Def(ModuleDef),
    /// A local binding (only value namespace)
    Local(Local),
    /// A type parameter
    TypeParam(TypeParam),
    /// A const parameter
    ConstParam(ConstParam),
    /// `Self` inside an `impl` block
    SelfType(Impl),
    /// A built-in attribute (e.g. `#[inline]`)
    BuiltinAttr(BuiltinAttr),
    /// A tool module such as `rustfmt` or `clippy` in `#[rustfmt::skip]`
    ToolModule(ToolModule),
    /// A derive helper attribute introduced by a derive macro
    DeriveHelper(DeriveHelper),
}
85
86impl PathResolution {
87    pub(crate) fn in_type_ns(&self) -> Option<TypeNs> {
88        match self {
89            PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())),
90            PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
91                Some(TypeNs::BuiltinType((*builtin).into()))
92            }
93            PathResolution::Def(
94                ModuleDef::Const(_)
95                | ModuleDef::EnumVariant(_)
96                | ModuleDef::Macro(_)
97                | ModuleDef::Function(_)
98                | ModuleDef::Module(_)
99                | ModuleDef::Static(_)
100                | ModuleDef::Trait(_),
101            ) => None,
102            PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
103                Some(TypeNs::TypeAliasId((*alias).into()))
104            }
105            PathResolution::BuiltinAttr(_)
106            | PathResolution::ToolModule(_)
107            | PathResolution::Local(_)
108            | PathResolution::DeriveHelper(_)
109            | PathResolution::ConstParam(_) => None,
110            PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
111            PathResolution::SelfType(impl_def) => match impl_def.id {
112                AnyImplId::ImplId(id) => Some(TypeNs::SelfType(id)),
113                AnyImplId::BuiltinDeriveImplId(_) => None,
114            },
115        }
116    }
117}
118
/// The result of resolving a path in all three namespaces at once:
/// each namespace may independently resolve (or fail to resolve).
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct PathResolutionPerNs {
    /// Resolution in the type namespace, if any.
    pub type_ns: Option<PathResolution>,
    /// Resolution in the value namespace, if any.
    pub value_ns: Option<PathResolution>,
    /// Resolution in the macro namespace, if any.
    pub macro_ns: Option<PathResolution>,
}
125
126impl PathResolutionPerNs {
127    pub fn new(
128        type_ns: Option<PathResolution>,
129        value_ns: Option<PathResolution>,
130        macro_ns: Option<PathResolution>,
131    ) -> Self {
132        PathResolutionPerNs { type_ns, value_ns, macro_ns }
133    }
134    pub fn any(&self) -> Option<PathResolution> {
135        self.type_ns.or(self.value_ns).or(self.macro_ns)
136    }
137}
138
/// A type together with the adjusted type produced by coercions/autoderef,
/// if any adjustment was applied.
#[derive(Debug)]
pub struct TypeInfo<'db> {
    /// The original type of the expression or pattern.
    pub original: Type<'db>,
    /// The adjusted type, if an adjustment happened.
    pub adjusted: Option<Type<'db>>,
}
146
147impl<'db> TypeInfo<'db> {
148    pub fn original(self) -> Type<'db> {
149        self.original
150    }
151
152    pub fn has_adjustment(&self) -> bool {
153        self.adjusted.is_some()
154    }
155
156    /// The adjusted type, or the original in case no adjustments occurred.
157    pub fn adjusted(self) -> Type<'db> {
158        self.adjusted.unwrap_or(self.original)
159    }
160}
161
/// Primary API to get semantic information, like types, from syntax trees.
pub struct Semantics<'db, DB: ?Sized> {
    /// The concretely-typed database reference; the type parameter is what
    /// distinguishes `Semantics` from the type-erased `SemanticsImpl`.
    pub db: &'db DB,
    // All functionality lives on `SemanticsImpl`; `Deref` forwards to it.
    imp: SemanticsImpl<'db>,
}
167
// A definition that has no body of its own but whose *signature* can still
// contain anonymous consts: either a generic definition or an enum variant.
type DefWithoutBodyWithAnonConsts = Either<GenericDefId, VariantId>;
// Maps each anonymous-const expression to its interned id.
type ExprToAnonConst = FxHashMap<ExprId, AnonConstId>;
// Per-definition cache of the anon consts found in its signature.
type DefAnonConstsMap = FxHashMap<DefWithoutBodyWithAnonConsts, ExprToAnonConst>;
171
/// Type-erased implementation backing [`Semantics`]; holds the database handle
/// plus interior-mutability caches that speed up repeated lookups.
pub struct SemanticsImpl<'db> {
    pub db: &'db dyn HirDatabase,
    /// Source-to-definition lookup cache (see `source_to_def`).
    s2d_cache: RefCell<SourceToDefCache<'db>>,
    /// MacroCall to its expansion's MacroCallId cache
    macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroCallId>>,
    /// All anon consts defined by a *signature* (not a body).
    signature_anon_consts_cache: RefCell<DefAnonConstsMap>,
}
180
181impl<DB: ?Sized> fmt::Debug for Semantics<'_, DB> {
182    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
183        write!(f, "Semantics {{ ... }}")
184    }
185}
186
// `Semantics` is a thin wrapper; deref-forwarding exposes every
// `SemanticsImpl` method directly on `Semantics`.
impl<'db, DB: ?Sized> ops::Deref for Semantics<'db, DB> {
    type Target = SemanticsImpl<'db>;

    fn deref(&self) -> &Self::Target {
        &self.imp
    }
}
194
/// The lint-level attribute kinds recognized by [`Semantics::lint_attrs`]:
/// `#[allow]`, `#[expect]`, `#[warn]`, `#[deny]`, `#[forbid]`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum LintAttr {
    Allow,
    Expect,
    Warn,
    Deny,
    Forbid,
}
203
204// Note: while this variant of `Semantics<'_, _>` might seem unused, as it does not
205// find actual use within the rust-analyzer project itself, it exists to enable the use
206// within e.g. tracked salsa functions in third-party crates that build upon `ra_ap_hir`.
207impl Semantics<'_, dyn HirDatabase> {
208    /// Creates an instance that's weakly coupled to its underlying database type.
209    pub fn new_dyn(db: &'_ dyn HirDatabase) -> Semantics<'_, dyn HirDatabase> {
210        let impl_ = SemanticsImpl::new(db);
211        Semantics { db, imp: impl_ }
212    }
213}
214
215impl<DB: HirDatabase> Semantics<'_, DB> {
216    /// Creates an instance that's strongly coupled to its underlying database type.
217    pub fn new(db: &DB) -> Semantics<'_, DB> {
218        let impl_ = SemanticsImpl::new(db);
219        Semantics { db, imp: impl_ }
220    }
221}
222
// Note: We take `DB` as `?Sized` here in order to support type-erased
// use of `Semantics` via `Semantics<'_, dyn HirDatabase>`:
impl<DB: HirDatabase + ?Sized> Semantics<'_, DB> {
    /// Returns the `HirFileId` (real file or macro expansion) that
    /// `syntax_node` was parsed from.
    pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
        self.imp.find_file(syntax_node).file_id
    }

    /// Ancestors of `token`'s parent node, traversing through macro expansions.
    pub fn token_ancestors_with_macros(
        &self,
        token: SyntaxToken,
    ) -> impl Iterator<Item = SyntaxNode> + '_ {
        token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
    }

    /// Find an AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
    /// search up until it is of the target AstNode type
    pub fn find_node_at_offset_with_macros<N: AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
    }

    /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
    /// descend it and find again
    // FIXME: Rethink this API
    pub fn find_node_at_offset_with_descend<N: AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
    }

    /// Find an AstNode by offset inside SyntaxNode, if it is inside an attribute macro call,
    /// descend it and find again
    // FIXME: Rethink this API
    pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
        &'slf self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = N> + 'slf {
        self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
    }

    /// Returns the name-like nodes (names, name refs, lifetimes) around `offset`,
    /// descending into macro expansions.
    // FIXME: Rethink this API
    pub fn find_namelike_at_offset_with_descend<'slf>(
        &'slf self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = ast::NameLike> + 'slf {
        node.token_at_offset(offset)
            .map(move |token| self.descend_into_macros_no_opaque(token, true))
            .map(|descendants| descendants.into_iter().filter_map(move |it| it.value.parent()))
            // re-order the tokens from token_at_offset by returning the ancestors with the smaller first nodes first
            // See algo::ancestors_at_offset, which uses the same approach
            .kmerge_by(|left, right| left.text_range().len().lt(&right.text_range().len()))
            .filter_map(ast::NameLike::cast)
    }

    /// Collects the lint-level attributes (`allow`/`expect`/`warn`/`deny`/`forbid`)
    /// declared on `item` as `(level, lint path)` pairs, in source order. When
    /// `item` is the crate root source file, extra crate attributes (see
    /// `parse_extra_crate_attrs`) are considered as well.
    pub fn lint_attrs(
        &self,
        file_id: FileId,
        krate: Crate,
        item: ast::AnyHasAttrs,
    ) -> impl DoubleEndedIterator<Item = (LintAttr, SmolStr)> {
        // Lazily fetch the crate's cfg options — only needed if a `cfg_attr`
        // actually has to be evaluated.
        let mut cfg_options = None;
        let cfg_options = || *cfg_options.get_or_insert_with(|| krate.id.cfg_options(self.db));

        let is_crate_root = file_id == krate.root_file(self.imp.db);
        let is_source_file = ast::SourceFile::can_cast(item.syntax().kind());
        let extra_crate_attrs = (is_crate_root && is_source_file)
            .then(|| {
                parse_extra_crate_attrs(self.imp.db, krate.id)
                    .into_iter()
                    .flat_map(|src| src.attrs())
            })
            .into_iter()
            .flatten();

        let mut result = Vec::new();
        hir_expand::attrs::expand_cfg_attr::<Infallible>(
            extra_crate_attrs.chain(ast::attrs_including_inner(&item)),
            cfg_options,
            |attr, _| {
                // Only token-tree attributes like `#[allow(lint, ...)]` are relevant.
                let ast::Meta::TokenTreeMeta(attr) = attr else {
                    return ControlFlow::Continue(());
                };
                let (Some(segment), Some(tt)) = (attr.path().as_one_segment(), attr.token_tree())
                else {
                    return ControlFlow::Continue(());
                };
                let lint_attr = match &*segment {
                    "allow" => LintAttr::Allow,
                    "expect" => LintAttr::Expect,
                    "warn" => LintAttr::Warn,
                    "deny" => LintAttr::Deny,
                    "forbid" => LintAttr::Forbid,
                    _ => return ControlFlow::Continue(()),
                };
                // Rebuild each comma-separated, possibly `::`-qualified lint path
                // from the attribute's tokens.
                let mut lint = SmolStrBuilder::new();
                for token in
                    tt.syntax().children_with_tokens().filter_map(SyntaxElement::into_token)
                {
                    match token.kind() {
                        T![:] | T![::] => lint.push_str(token.text()),
                        kind if kind.is_any_identifier() => lint.push_str(token.text()),
                        T![,] => {
                            // A comma terminates the current lint path; start a new one.
                            let lint = mem::replace(&mut lint, SmolStrBuilder::new()).finish();
                            if !lint.is_empty() {
                                result.push((lint_attr, lint));
                            }
                        }
                        _ => {}
                    }
                }
                // Flush the trailing path (no comma after the last entry).
                let lint = lint.finish();
                if !lint.is_empty() {
                    result.push((lint_attr, lint));
                }

                ControlFlow::Continue(())
            },
        );
        result.into_iter()
    }

    // The `resolve_*` helpers below delegate to `SemanticsImpl` to find the
    // definition a given expression or pattern refers to.

    pub fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<Struct> {
        self.imp.resolve_range_pat(range_pat).map(Struct::from)
    }

    pub fn resolve_range_expr(&self, range_expr: &ast::RangeExpr) -> Option<Struct> {
        self.imp.resolve_range_expr(range_expr).map(Struct::from)
    }

    pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
        self.imp.resolve_await_to_poll(await_expr)
    }

    pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
        self.imp.resolve_prefix_expr(prefix_expr)
    }

    pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
        self.imp.resolve_index_expr(index_expr)
    }

    pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
        self.imp.resolve_bin_expr(bin_expr)
    }

    pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
        self.imp.resolve_try_expr(try_expr)
    }

    pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<Variant> {
        self.imp.resolve_variant(record_lit).map(Variant::from)
    }

    /// Returns the first module defined in `file`, if any.
    pub fn file_to_module_def(&self, file: impl Into<FileId>) -> Option<Module> {
        self.imp.file_to_module_defs(file.into()).next()
    }

    /// Returns all modules whose definition is in `file`.
    pub fn file_to_module_defs(&self, file: impl Into<FileId>) -> impl Iterator<Item = Module> {
        self.imp.file_to_module_defs(file.into())
    }

    /// Like [`Self::file_to_module_def`], but also accepts macro files.
    pub fn hir_file_to_module_def(&self, file: impl Into<HirFileId>) -> Option<Module> {
        self.imp.hir_file_to_module_defs(file.into()).next()
    }

    /// Like [`Self::file_to_module_defs`], but also accepts macro files.
    pub fn hir_file_to_module_defs(
        &self,
        file: impl Into<HirFileId>,
    ) -> impl Iterator<Item = Module> {
        self.imp.hir_file_to_module_defs(file.into())
    }

    /// Whether `krate` is built with a nightly toolchain (or one we know
    /// nothing about).
    pub fn is_nightly(&self, krate: Crate) -> bool {
        let toolchain = toolchain_channel(self.db.as_dyn_database(), krate.into());
        // `toolchain == None` means we're in some detached files. Since we have no information on
        // the toolchain being used, let's just allow unstable items to be listed.
        matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None)
    }

    // AST item → HIR definition conversions; all delegate to `SemanticsImpl::to_def`.

    pub fn to_adt_def(&self, a: &ast::Adt) -> Option<Adt> {
        self.imp.to_def(a)
    }

    pub fn to_const_def(&self, c: &ast::Const) -> Option<Const> {
        self.imp.to_def(c)
    }

    pub fn to_enum_def(&self, e: &ast::Enum) -> Option<Enum> {
        self.imp.to_def(e)
    }

    pub fn to_enum_variant_def(&self, v: &ast::Variant) -> Option<EnumVariant> {
        self.imp.to_def(v)
    }

    pub fn to_fn_def(&self, f: &ast::Fn) -> Option<Function> {
        self.imp.to_def(f)
    }

    pub fn to_impl_def(&self, i: &ast::Impl) -> Option<Impl> {
        self.imp.to_def(i)
    }

    pub fn to_macro_def(&self, m: &ast::Macro) -> Option<Macro> {
        self.imp.to_def(m)
    }

    pub fn to_module_def(&self, m: &ast::Module) -> Option<Module> {
        self.imp.to_def(m)
    }

    pub fn to_static_def(&self, s: &ast::Static) -> Option<Static> {
        self.imp.to_def(s)
    }

    pub fn to_struct_def(&self, s: &ast::Struct) -> Option<Struct> {
        self.imp.to_def(s)
    }

    pub fn to_trait_def(&self, t: &ast::Trait) -> Option<Trait> {
        self.imp.to_def(t)
    }

    pub fn to_type_alias_def(&self, t: &ast::TypeAlias) -> Option<TypeAlias> {
        self.imp.to_def(t)
    }

    pub fn to_union_def(&self, u: &ast::Union) -> Option<Union> {
        self.imp.to_def(u)
    }
}
461
462impl<'db> SemanticsImpl<'db> {
463    fn new(db: &'db dyn HirDatabase) -> Self {
464        SemanticsImpl {
465            db,
466            s2d_cache: Default::default(),
467            macro_call_cache: Default::default(),
468            signature_anon_consts_cache: Default::default(),
469        }
470    }
471
472    pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
473        let hir_file_id = file_id.into();
474        let tree = file_id.parse(self.db).tree();
475        self.cache(tree.syntax().clone(), hir_file_id);
476        tree
477    }
478
    /// Returns the crate of the first module defined in `file`. If no crate is
    /// found for the file, tries to return the last crate in topological order.
    pub fn first_crate(&self, file: FileId) -> Option<Crate> {
        match self.file_to_module_defs(file).next() {
            Some(module) => Some(module.krate(self.db)),
            None => all_crates(self.db).last().copied().map(Into::into),
        }
    }
486
487    pub fn attach_first_edition_opt(&self, file: FileId) -> Option<EditionedFileId> {
488        let krate = self.file_to_module_defs(file).next()?.krate(self.db);
489        Some(EditionedFileId::new(self.db, file, krate.edition(self.db)))
490    }
491
492    pub fn attach_first_edition(&self, file: FileId) -> EditionedFileId {
493        self.attach_first_edition_opt(file)
494            .unwrap_or_else(|| EditionedFileId::current_edition(self.db, file))
495    }
496
497    pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
498        let file_id = self.attach_first_edition(file_id);
499
500        let tree = file_id.parse(self.db).tree();
501        self.cache(tree.syntax().clone(), file_id.into());
502        tree
503    }
504
505    pub fn adjust_edition(&self, file_id: HirFileId) -> HirFileId {
506        if let Some(editioned_file_id) = file_id.file_id() {
507            self.attach_first_edition_opt(editioned_file_id.file_id(self.db))
508                .map_or(file_id, Into::into)
509        } else {
510            file_id
511        }
512    }
513
    /// Returns the node that "contains" `file_id`:
    /// - for a file module, the `mod foo;` declaration in the parent file
    ///   (`None` for crate roots);
    /// - for a macro expansion, the macro call it was expanded from.
    ///
    /// The returned node's syntax root is cached.
    pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
        match file_id {
            HirFileId::FileId(file_id) => {
                let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?;
                let def_map = crate_def_map(self.db, module.krate(self.db).id);
                match def_map[module.id].origin {
                    // Crate roots have no parent file.
                    ModuleOrigin::CrateRoot { .. } => None,
                    ModuleOrigin::File { declaration, declaration_tree_id, .. } => {
                        let file_id = declaration_tree_id.file_id();
                        let in_file = InFile::new(file_id, declaration);
                        let node = in_file.to_node(self.db);
                        let root = find_root(node.syntax());
                        self.cache(root, file_id);
                        Some(in_file.with_value(node.syntax().clone()))
                    }
                    // Inline/block modules never own a whole file.
                    _ => unreachable!("FileId can only belong to a file module"),
                }
            }
            HirFileId::MacroFile(macro_file) => {
                let node = self.db.lookup_intern_macro_call(macro_file).to_node(self.db);
                let root = find_root(&node.value);
                self.cache(root, node.file_id);
                Some(node)
            }
        }
    }
540
541    /// Returns the `SyntaxNode` of the module. If this is a file module, returns
542    /// the `SyntaxNode` of the *definition* file, not of the *declaration*.
543    pub fn module_definition_node(&self, module: Module) -> InFile<SyntaxNode> {
544        let def_map = module.id.def_map(self.db);
545        let definition = def_map[module.id].origin.definition_source(self.db);
546        let definition = definition.map(|it| it.node());
547        let root_node = find_root(&definition.value);
548        self.cache(root_node, definition.file_id);
549        definition
550    }
551
    /// Parses the file (or expands the macro) behind `file_id`, caching the
    /// resulting syntax root.
    pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
        let node = self.db.parse_or_expand(file_id);
        self.cache(node.clone(), file_id);
        node
    }
557
    /// Resolves an in-file syntax pointer to its node, parsing/expanding (and
    /// caching) the owning file as needed.
    pub fn to_node_syntax(&self, ptr: InFile<SyntaxNodePtr>) -> SyntaxNode {
        ptr.value.to_node(&self.parse_or_expand(ptr.file_id))
    }
561
    /// Typed variant of [`Self::to_node_syntax`].
    pub fn to_node<N: AstNode>(&self, ptr: InFile<AstPtr<N>>) -> N {
        ptr.value.to_node(&self.parse_or_expand(ptr.file_id))
    }
565
    /// Expands the macro call `file_id`, caching the expansion's syntax root;
    /// expansion errors are carried along in the `ExpandResult`.
    pub fn expand(&self, file_id: MacroCallId) -> ExpandResult<SyntaxNode> {
        let res = self.db.parse_macro_expansion(file_id).as_ref().map(|it| it.0.syntax_node());
        self.cache(res.value.clone(), file_id.into());
        res
    }
571
572    pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<InFile<SyntaxNode>> {
573        let file_id = self.to_def(macro_call)?;
574        let node = self.parse_or_expand(file_id.into());
575        Some(InFile::new(file_id.into(), node))
576    }
577
    /// Expands the macro if it isn't one of the built-in ones that expand to custom syntax or dummy
    /// expansions.
    pub fn expand_allowed_builtins(
        &self,
        macro_call: &ast::MacroCall,
    ) -> Option<ExpandResult<SyntaxNode>> {
        let file_id = self.to_def(macro_call)?;
        let macro_call = self.db.lookup_intern_macro_call(file_id);

        // Deny-list of builtins whose expansions are not real user-facing syntax.
        let skip = matches!(
            macro_call.def.kind,
            hir_expand::MacroDefKind::BuiltIn(
                _,
                BuiltinFnLikeExpander::Column
                    | BuiltinFnLikeExpander::File
                    | BuiltinFnLikeExpander::ModulePath
                    | BuiltinFnLikeExpander::Asm
                    | BuiltinFnLikeExpander::GlobalAsm
                    | BuiltinFnLikeExpander::NakedAsm
                    | BuiltinFnLikeExpander::LogSyntax
                    | BuiltinFnLikeExpander::TraceMacros
                    | BuiltinFnLikeExpander::FormatArgs
                    | BuiltinFnLikeExpander::FormatArgsNl
                    | BuiltinFnLikeExpander::ConstFormatArgs,
            ) | hir_expand::MacroDefKind::BuiltInEager(_, EagerExpander::CompileError)
        );
        if skip {
            // these macros expand to custom builtin syntax and/or dummy things, no point in
            // showing these to the user
            return None;
        }

        let node = self.expand(file_id);
        Some(node)
    }
613
    /// If `item` has an attribute macro attached to it, expands it.
    pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<InFile<SyntaxNode>>> {
        let src = self.wrap_node_infile(item.clone());
        // `None` when the item has no (resolvable) attribute macro on it.
        let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
        Some(self.expand(macro_call_id).map(|it| InFile::new(macro_call_id.into(), it)))
    }
620
    /// Expands a single derive (given via its `ast::Meta`) as if it were an
    /// attribute macro on the enclosing ADT.
    pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Meta) -> Option<SyntaxNode> {
        // The derive attribute must sit directly on an ADT.
        let adt = attr.parent_attr()?.syntax().parent().and_then(ast::Adt::cast)?;
        let src = self.wrap_node_infile(attr.clone());
        let call_id = self.with_ctx(|ctx| {
            ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
        })?;
        Some(self.parse_or_expand(call_id.into()))
    }
629
    /// Resolves the macros invoked by a `#[derive(...)]` attribute, one entry
    /// per derive; an entry is `None` when that derive could not be resolved.
    pub fn resolve_derive_macro(&self, attr: &ast::Meta) -> Option<Vec<Option<Macro>>> {
        let calls = self.derive_macro_calls(attr)?;
        self.with_ctx(|ctx| {
            Some(
                calls
                    .into_iter()
                    .map(|call| {
                        let call = call?;
                        match call {
                            // A regular (proc-macro / macro_rules) derive expansion.
                            Either::Left(call) => {
                                macro_call_to_macro_id(ctx, call).map(|id| Macro { id })
                            }
                            // A builtin derive impl; map its trait back to the
                            // derive macro through the lang items.
                            Either::Right(call) => {
                                let call = call.loc(self.db);
                                let krate = call.krate(self.db);
                                let lang_items = hir_def::lang_item::lang_items(self.db, krate);
                                call.trait_.derive_macro(lang_items).map(|id| Macro { id })
                            }
                        }
                    })
                    .collect(),
            )
        })
    }
654
    /// Expands every derive in a `#[derive(...)]` attribute, one entry per derive.
    /// Entries are `None` for unresolved derives and for builtin-derive impls
    /// (which have no regular macro expansion).
    pub fn expand_derive_macro(
        &self,
        attr: &ast::Meta,
    ) -> Option<Vec<Option<ExpandResult<SyntaxNode>>>> {
        let res: Vec<_> = self
            .derive_macro_calls(attr)?
            .into_iter()
            .map(|call| {
                // `left()?` drops builtin-derive impls: only real macro calls expand.
                let file_id = call?.left()?;
                let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
                let root_node = value.0.syntax_node();
                self.cache(root_node.clone(), file_id.into());
                Some(ExpandResult { value: root_node, err: err.clone() })
            })
            .collect();
        Some(res)
    }
672
    /// Looks up the recorded derive-expansion calls for a `#[derive(...)]`
    /// attribute: one entry per derive, each either a macro call or a
    /// builtin-derive impl.
    fn derive_macro_calls(
        &self,
        attr: &ast::Meta,
    ) -> Option<Vec<Option<Either<MacroCallId, BuiltinDeriveImplId>>>> {
        // The attribute must sit directly on an ADT.
        let adt = attr.parent_attr()?.syntax().parent().and_then(ast::Adt::cast)?;
        let file_id = self.find_file(adt.syntax()).file_id;
        let adt = InFile::new(file_id, &adt);
        let src = InFile::new(file_id, attr.clone());
        self.with_ctx(|ctx| {
            let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?;
            Some(res.to_vec())
        })
    }
686
    /// Whether derive expansions were recorded for `adt`'s file
    /// (see `file_of_adt_has_derives`).
    pub fn is_derive_annotated(&self, adt: InFile<&ast::Adt>) -> bool {
        self.with_ctx(|ctx| ctx.file_of_adt_has_derives(adt))
    }
690
    /// Returns `(helper name, derive macro name)` pairs for all derive helper
    /// attributes in scope for `adt`.
    pub fn derive_helpers_in_scope(&self, adt: &ast::Adt) -> Option<Vec<(Symbol, Symbol)>> {
        let sa = self.analyze_no_infer(adt.syntax())?;
        let id = self.db.ast_id_map(sa.file_id).ast_id(adt);
        let result = sa
            .resolver
            .def_map()
            .derive_helpers_in_scope(InFile::new(sa.file_id, id))?
            .iter()
            .map(|(name, macro_, _)| {
                let macro_name = Macro::from(*macro_).name(self.db).symbol().clone();
                (name.symbol().clone(), macro_name)
            })
            .collect();
        Some(result)
    }
706
    /// Resolves `attr` as a derive helper attribute on the enclosing ADT,
    /// returning the derive macros declaring it together with their expansion calls.
    pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroCallId)>> {
        // Find the ADT item the attribute is attached to.
        let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
            ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
            ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
            ast::Item::Union(it) => Some(ast::Adt::Union(it)),
            _ => None,
        })?;
        let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
        let sa = self.analyze_no_infer(adt.syntax())?;
        let id = self.db.ast_id_map(sa.file_id).ast_id(&adt);
        let res: Vec<_> = sa
            .resolver
            .def_map()
            .derive_helpers_in_scope(InFile::new(sa.file_id, id))?
            .iter()
            .filter(|&(name, _, _)| *name == attr_name)
            .filter_map(|&(_, macro_, call)| Some((macro_.into(), call.left()?)))
            .collect();
        // FIXME: We filter our builtin derive "fake" expansions, is this correct? Should we still expose them somehow?
        res.is_empty().not().then_some(res)
    }
728
    /// Whether `item` has a resolvable attribute macro call attached to it.
    pub fn is_attr_macro_call(&self, item: InFile<&ast::Item>) -> bool {
        self.with_ctx(|ctx| ctx.item_to_macro_call(item).is_some())
    }
732
    /// Expand the macro call with a different token tree, mapping the `token_to_map` down into the
    /// expansion. `token_to_map` should be a token from the `speculative args` node.
    pub fn speculative_expand_macro_call(
        &self,
        actual_macro_call: &ast::MacroCall,
        speculative_args: &ast::TokenTree,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        // Resolve the real macro call; its definition drives the speculative expansion.
        let macro_file = self.to_def(actual_macro_call)?;
        hir_expand::db::expand_speculative(
            self.db,
            macro_file,
            speculative_args.syntax(),
            token_to_map,
        )
    }
749
    /// Like [`Self::speculative_expand_macro_call`], but takes an already-resolved
    /// `MacroCallId` and a raw syntax node as the replacement input.
    pub fn speculative_expand_raw(
        &self,
        macro_file: MacroCallId,
        speculative_args: &SyntaxNode,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        hir_expand::db::expand_speculative(self.db, macro_file, speculative_args, token_to_map)
    }
758
    /// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
    /// expansion. `token_to_map` should be a token from the `speculative args` node.
    pub fn speculative_expand_attr_macro(
        &self,
        actual_macro_call: &ast::Item,
        speculative_args: &ast::Item,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        let macro_call = self.wrap_node_infile(actual_macro_call.clone());
        // Resolve the attribute macro attached to the real item.
        let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?;
        hir_expand::db::expand_speculative(
            self.db,
            macro_call_id,
            speculative_args.syntax(),
            token_to_map,
        )
    }
776
    /// Speculatively expands a derive attribute as if it were an attribute macro,
    /// with `speculative_args` substituted for the real attribute; `token_to_map`
    /// should be a token from the `speculative_args` node.
    pub fn speculative_expand_derive_as_pseudo_attr_macro(
        &self,
        actual_macro_call: &ast::Attr,
        speculative_args: &ast::Attr,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        let attr = self.wrap_node_infile(actual_macro_call.clone());
        // The derive attribute must sit directly on an ADT.
        let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
        let macro_call_id = self.with_ctx(|ctx| {
            ctx.attr_to_derive_macro_call(
                attr.with_value(&adt),
                attr.with_value(attr.value.meta()?),
            )
            .map(|(_, it, _)| it)
        })?;
        hir_expand::db::expand_speculative(
            self.db,
            macro_call_id,
            speculative_args.syntax(),
            token_to_map,
        )
    }
799
800    /// Checks if renaming `renamed` to `new_name` may introduce conflicts with other locals,
801    /// and returns the conflicting locals.
802    pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &Name) -> Vec<Local> {
803        let (store, root_expr) = to_be_renamed.parent_infer.store_and_root_expr(self.db);
804        let resolver = to_be_renamed.parent.resolver(self.db);
805        let starting_expr = store.binding_owner(to_be_renamed.binding_id).unwrap_or(root_expr);
806        let mut visitor = RenameConflictsVisitor {
807            body: store,
808            conflicts: FxHashSet::default(),
809            db: self.db,
810            new_name: new_name.symbol().clone(),
811            old_name: to_be_renamed.name(self.db).symbol().clone(),
812            owner: to_be_renamed.parent,
813            to_be_renamed: to_be_renamed.binding_id,
814            resolver,
815        };
816        visitor.rename_conflicts(starting_expr);
817        visitor
818            .conflicts
819            .into_iter()
820            .map(|binding_id| Local {
821                parent: to_be_renamed.parent,
822                parent_infer: to_be_renamed.parent_infer,
823                binding_id,
824            })
825            .collect()
826    }
827
    /// Retrieves all the formatting parts of the format_args! (or `asm!`) template string.
    pub fn as_format_args_parts(
        &self,
        string: &ast::String,
    ) -> Option<Vec<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)>> {
        // Returned ranges get rebased onto the start of the original (pre-expansion) token.
        let string_start = string.syntax().text_range().start();
        let token = self.wrap_token_infile(string.syntax().clone());
        // Descend the token into macro expansions; the first expansion in which it still forms
        // a format_args!/asm! template string wins (hence the `Break`).
        self.descend_into_macros_breakable(token, |token, _| {
            (|| {
                let token = token.value;
                // The descended token must still be a string literal inside a LITERAL node.
                let string = ast::String::cast(token)?;
                let literal =
                    string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
                let parent = literal.parent()?;
                if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
                    let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
                    let format_args = self.wrap_node_infile(format_args);
                    let res = source_analyzer
                        .as_format_args_parts(self.db, format_args.as_ref())?
                        .map(|(range, res)| (range + string_start, res.map(Either::Left)))
                        .collect();
                    Some(res)
                } else {
                    // Not format_args!; the only other template-string owner is inline asm.
                    let asm = ast::AsmExpr::cast(parent)?;
                    let source_analyzer = self.analyze_no_infer(asm.syntax())?;
                    // Index of the template line this literal occupies within the asm! call.
                    let line = asm.template().position(|it| *it.syntax() == literal)?;
                    let asm = self.wrap_node_infile(asm);
                    let (owner, (expr, asm_parts)) = source_analyzer.as_asm_parts(asm.as_ref())?;
                    let res = asm_parts
                        .get(line)?
                        .iter()
                        .map(|&(range, index)| {
                            (
                                range + string_start,
                                Some(Either::Right(InlineAsmOperand { owner, expr, index })),
                            )
                        })
                        .collect();
                    Some(res)
                }
            })()
            .map_or(ControlFlow::Continue(()), ControlFlow::Break)
        })
    }
872
873    /// Retrieves the formatting part of the format_args! template string at the given offset.
874    ///
875    // FIXME: Type the return type
876    /// Returns the range (pre-expansion) in the string literal corresponding to the resolution,
877    /// absolute file range (post-expansion)
878    /// of the part in the format string (post-expansion), the corresponding string token and the resolution if it
879    /// exists.
880    // FIXME: Remove this in favor of `check_for_format_args_template_with_file`
881    pub fn check_for_format_args_template(
882        &self,
883        original_token: SyntaxToken,
884        offset: TextSize,
885    ) -> Option<(
886        TextRange,
887        HirFileRange,
888        ast::String,
889        Option<Either<PathResolution, InlineAsmOperand>>,
890    )> {
891        let original_token =
892            self.wrap_token_infile(original_token).map(ast::String::cast).transpose()?;
893        self.check_for_format_args_template_with_file(original_token, offset)
894    }
895
    /// Retrieves the formatting part of the format_args! template string at the given offset.
    ///
    // FIXME: Type the return type
    /// Returns the range (pre-expansion) in the string literal corresponding to the resolution,
    /// absolute file range (post-expansion)
    /// of the part in the format string, the corresponding string token and the resolution if it
    /// exists.
    pub fn check_for_format_args_template_with_file(
        &self,
        original_token: InFile<ast::String>,
        offset: TextSize,
    ) -> Option<(
        TextRange,
        HirFileRange,
        ast::String,
        Option<Either<PathResolution, InlineAsmOperand>>,
    )> {
        // Offset of the cursor relative to the start of the (pre-expansion) string token;
        // bails out when the offset lies before the token.
        let relative_offset =
            offset.checked_sub(original_token.value.syntax().text_range().start())?;
        self.descend_into_macros_breakable(
            original_token.as_ref().map(|it| it.syntax().clone()),
            |token, _| {
                (|| {
                    // The descended token must still be a string literal.
                    let token = token.map(ast::String::cast).transpose()?;
                    self.resolve_offset_in_format_args(token.as_ref(), relative_offset).map(
                        |(range, res)| {
                            (
                                // The part's range rebased onto the original file...
                                range + original_token.value.syntax().text_range().start(),
                                // ...and the same range rebased onto the expansion file.
                                HirFileRange {
                                    file_id: token.file_id,
                                    range: range + token.value.syntax().text_range().start(),
                                },
                                token.value,
                                res,
                            )
                        },
                    )
                })()
                .map_or(ControlFlow::Continue(()), ControlFlow::Break)
            },
        )
    }
938
939    fn resolve_offset_in_format_args(
940        &self,
941        InFile { value: string, file_id }: InFile<&ast::String>,
942        offset: TextSize,
943    ) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> {
944        debug_assert!(offset <= string.syntax().text_range().len());
945        let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
946        let parent = literal.parent()?;
947        if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
948            let source_analyzer =
949                &self.analyze_impl(InFile::new(file_id, format_args.syntax()), None, false)?;
950            source_analyzer
951                .resolve_offset_in_format_args(self.db, InFile::new(file_id, &format_args), offset)
952                .map(|(range, res)| (range, res.map(Either::Left)))
953        } else {
954            let asm = ast::AsmExpr::cast(parent)?;
955            let source_analyzer =
956                self.analyze_impl(InFile::new(file_id, asm.syntax()), None, false)?;
957            let line = asm.template().position(|it| *it.syntax() == literal)?;
958            source_analyzer
959                .resolve_offset_in_asm_template(InFile::new(file_id, &asm), line, offset)
960                .map(|(owner, (expr, range, index))| {
961                    (range, Some(Either::Right(InlineAsmOperand { owner, expr, index })))
962                })
963        }
964    }
965
966    pub fn debug_hir_at(&self, token: SyntaxToken) -> Option<String> {
967        self.analyze_no_infer(&token.parent()?).and_then(|it| {
968            Some(match it.body_or_sig.as_ref()? {
969                crate::source_analyzer::BodyOrSig::Body { def, body, .. } => {
970                    hir_def::expr_store::pretty::print_body_hir(
971                        self.db,
972                        body,
973                        *def,
974                        it.file_id.edition(self.db),
975                    )
976                }
977                &crate::source_analyzer::BodyOrSig::VariantFields { def, .. } => {
978                    hir_def::expr_store::pretty::print_variant_body_hir(
979                        self.db,
980                        def,
981                        it.file_id.edition(self.db),
982                    )
983                }
984                &crate::source_analyzer::BodyOrSig::Sig { def, .. } => {
985                    hir_def::expr_store::pretty::print_signature(
986                        self.db,
987                        def,
988                        it.file_id.edition(self.db),
989                    )
990                }
991            })
992        })
993    }
994
    /// Descends the token into the include expansion, if its file is an included file.
    pub fn descend_token_into_include_expansion(
        &self,
        tok: InRealFile<SyntaxToken>,
    ) -> InFile<SyntaxToken> {
        let Some(include) =
            self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, tok.file_id)
        else {
            // The file is not pulled in via `include!`; return the token unchanged.
            return tok.into();
        };
        // Map the token's range to a span and look it up inside the include expansion.
        let span = self.db.real_span_map(tok.file_id).span_for_range(tok.value.text_range());
        let Some(InMacroFile { file_id, value: mut mapped_tokens }) = self.with_ctx(|ctx| {
            Some(
                ctx.cache
                    .get_or_insert_expansion(ctx.db, include)
                    .map_range_down(span)?
                    .map(SmallVec::<[_; 2]>::from_iter),
            )
        }) else {
            // The span did not map down into the expansion; fall back to the original token.
            return tok.into();
        };
        // We should only get one result at most
        mapped_tokens.pop().map_or_else(|| tok.into(), |(tok, _)| InFile::new(file_id.into(), tok))
    }
1019
    /// Maps a node down by mapping its first and last token down.
    pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
        // This might not be the correct way to do this, but it works for now
        let mut res = smallvec![];
        let tokens = (|| {
            // FIXME: the trivia skipping should not be necessary
            let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
            let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
            Some((first, last))
        })();
        let (first, last) = match tokens {
            Some(it) => it,
            None => return res,
        };
        let file = self.find_file(node.syntax());

        if first == last {
            // node is just the token, so descend the token
            self.descend_into_macros_all(
                InFile::new(file.file_id, first),
                false,
                &mut |InFile { value, .. }, _ctx| {
                    // Find an ancestor of the descended token covering exactly the token's
                    // range that casts to `N`.
                    if let Some(node) = value
                        .parent_ancestors()
                        .take_while(|it| it.text_range() == value.text_range())
                        .find_map(N::cast)
                    {
                        res.push(node)
                    }
                },
            );
        } else {
            // Descend first and last token, then zip them to look for the node they belong to
            let mut scratch: SmallVec<[_; 1]> = smallvec![];
            self.descend_into_macros_all(
                InFile::new(file.file_id, first),
                false,
                &mut |token, _ctx| scratch.push(token),
            );

            let mut scratch = scratch.into_iter();
            self.descend_into_macros_all(
                InFile::new(file.file_id, last),
                false,
                &mut |InFile { value: last, file_id: last_fid }, _ctx| {
                    // Pair the i-th descent of the first token with the i-th descent of the
                    // last; both must land in the same file for the pair to be usable.
                    if let Some(InFile { value: first, file_id: first_fid }) = scratch.next()
                        && first_fid == last_fid
                        && let Some(p) = first.parent()
                    {
                        // Find the node spanning exactly first..last that casts to `N`.
                        let range = first.text_range().cover(last.text_range());
                        let node = find_root(&p)
                            .covering_element(range)
                            .ancestors()
                            .take_while(|it| it.text_range() == range)
                            .find_map(N::cast);
                        if let Some(node) = node {
                            res.push(node);
                        }
                    }
                },
            );
        }
        res
    }
1084
1085    /// Returns true if the given input is within a macro call.
1086    ///
1087    /// Note that if this token itself is within the context of a macro expansion does not matter.
1088    /// That is, we strictly check if it lies inside the input of a macro call.
1089    pub fn is_inside_macro_call(&self, token @ InFile { value, .. }: InFile<&SyntaxToken>) -> bool {
1090        value.parent_ancestors().any(|ancestor| {
1091            if ast::MacroCall::can_cast(ancestor.kind()) {
1092                return true;
1093            }
1094
1095            let Some(item) = ast::Item::cast(ancestor) else {
1096                return false;
1097            };
1098            self.with_ctx(|ctx| {
1099                if ctx.item_to_macro_call(token.with_value(&item)).is_some() {
1100                    return true;
1101                }
1102                let adt = match item {
1103                    ast::Item::Struct(it) => it.into(),
1104                    ast::Item::Enum(it) => it.into(),
1105                    ast::Item::Union(it) => it.into(),
1106                    _ => return false,
1107                };
1108                ctx.file_of_adt_has_derives(token.with_value(&adt))
1109            })
1110        })
1111    }
1112
    /// Descends `token` into all of its macro expansions, invoking `cb` with every token it
    /// maps to along with that token's syntax context.
    pub fn descend_into_macros_cb(
        &self,
        token: SyntaxToken,
        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext),
    ) {
        self.descend_into_macros_all(self.wrap_token_infile(token), false, &mut |t, ctx| {
            cb(t, ctx)
        });
    }
1122
1123    pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
1124        let mut res = smallvec![];
1125        self.descend_into_macros_all(
1126            self.wrap_token_infile(token.clone()),
1127            false,
1128            &mut |t, _ctx| res.push(t.value),
1129        );
1130        if res.is_empty() {
1131            res.push(token);
1132        }
1133        res
1134    }
1135
1136    pub fn descend_into_macros_no_opaque(
1137        &self,
1138        token: SyntaxToken,
1139        always_descend_into_derives: bool,
1140    ) -> SmallVec<[InFile<SyntaxToken>; 1]> {
1141        let mut res = smallvec![];
1142        let token = self.wrap_token_infile(token);
1143        self.descend_into_macros_all(token.clone(), always_descend_into_derives, &mut |t, ctx| {
1144            if !ctx.is_opaque(self.db) {
1145                // Don't descend into opaque contexts
1146                res.push(t);
1147            }
1148        });
1149        if res.is_empty() {
1150            res.push(token);
1151        }
1152        res
1153    }
1154
    /// Descends `token` into its macro expansions, handing each mapped token to `cb` and
    /// stopping early — returning the broken-out value — once `cb` returns
    /// [`ControlFlow::Break`].
    pub fn descend_into_macros_breakable<T>(
        &self,
        token: InFile<SyntaxToken>,
        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
    ) -> Option<T> {
        self.descend_into_macros_impl(token, false, &mut cb)
    }
1162
1163    /// Descends the token into expansions, returning the tokens that matches the input
1164    /// token's [`SyntaxKind`] and text.
1165    pub fn descend_into_macros_exact(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
1166        let mut r = smallvec![];
1167        let text = token.text();
1168        let kind = token.kind();
1169
1170        self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }, ctx| {
1171            let mapped_kind = value.kind();
1172            let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
1173            let matches = (kind == mapped_kind || any_ident_match())
1174                && text == value.text()
1175                && !ctx.is_opaque(self.db);
1176            if matches {
1177                r.push(value);
1178            }
1179        });
1180        if r.is_empty() {
1181            r.push(token);
1182        }
1183        r
1184    }
1185
1186    /// Descends the token into expansions, returning the tokens that matches the input
1187    /// token's [`SyntaxKind`] and text.
1188    pub fn descend_into_macros_exact_with_file(
1189        &self,
1190        token: SyntaxToken,
1191    ) -> SmallVec<[InFile<SyntaxToken>; 1]> {
1192        let mut r = smallvec![];
1193        let text = token.text();
1194        let kind = token.kind();
1195
1196        self.descend_into_macros_cb(token.clone(), |InFile { value, file_id }, ctx| {
1197            let mapped_kind = value.kind();
1198            let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
1199            let matches = (kind == mapped_kind || any_ident_match())
1200                && text == value.text()
1201                && !ctx.is_opaque(self.db);
1202            if matches {
1203                r.push(InFile { value, file_id });
1204            }
1205        });
1206        if r.is_empty() {
1207            r.push(self.wrap_token_infile(token));
1208        }
1209        r
1210    }
1211
1212    /// Descends the token into expansions, returning the first token that matches the input
1213    /// token's [`SyntaxKind`] and text.
1214    pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken {
1215        let text = token.text();
1216        let kind = token.kind();
1217        self.descend_into_macros_breakable(
1218            self.wrap_token_infile(token.clone()),
1219            |InFile { value, file_id: _ }, _ctx| {
1220                let mapped_kind = value.kind();
1221                let any_ident_match =
1222                    || kind.is_any_identifier() && value.kind().is_any_identifier();
1223                let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
1224                if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
1225            },
1226        )
1227        .unwrap_or(token)
1228    }
1229
    /// Non-breaking driver shared by the `descend_into_macros_*` helpers: forwards every
    /// mapped token to `f`, always walking all expansions to completion.
    fn descend_into_macros_all(
        &self,
        token: InFile<SyntaxToken>,
        always_descend_into_derives: bool,
        f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext),
    ) {
        self.descend_into_macros_impl(token, always_descend_into_derives, &mut |tok, ctx| {
            f(tok, ctx);
            CONTINUE_NO_BREAKS
        });
    }
1241
1242    fn descend_into_macros_impl<T>(
1243        &self,
1244        InFile { value: token, file_id }: InFile<SyntaxToken>,
1245        always_descend_into_derives: bool,
1246        f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
1247    ) -> Option<T> {
1248        let _p = tracing::info_span!("descend_into_macros_impl").entered();
1249
1250        let db = self.db;
1251        let span = db.span_map(file_id).span_for_range(token.text_range());
1252
1253        // Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
1254        let process_expansion_for_token =
1255            |ctx: &mut SourceToDefCtx<'_, '_>, stack: &mut Vec<_>, macro_file| {
1256                let InMacroFile { file_id, value: mapped_tokens } = ctx
1257                    .cache
1258                    .get_or_insert_expansion(ctx.db, macro_file)
1259                    .map_range_down(span)?
1260                    .map(SmallVec::<[_; 2]>::from_iter);
1261                // we have found a mapping for the token if the vec is non-empty
1262                let res = mapped_tokens.is_empty().not().then_some(());
1263                // requeue the tokens we got from mapping our current token down
1264                stack.push((HirFileId::from(file_id), mapped_tokens));
1265                res
1266            };
1267
1268        // A stack of tokens to process, along with the file they came from
1269        // These are tracked to know which macro calls we still have to look into
1270        // the tokens themselves aren't that interesting as the span that is being used to map
1271        // things down never changes.
1272        let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![];
1273        let include = file_id
1274            .file_id()
1275            .and_then(|file_id| self.s2d_cache.borrow_mut().get_or_insert_include_for(db, file_id));
1276        match include {
1277            Some(include) => {
1278                // include! inputs are always from real files, so they only need to be handled once upfront
1279                self.with_ctx(|ctx| process_expansion_for_token(ctx, &mut stack, include))?;
1280            }
1281            None => {
1282                stack.push((file_id, smallvec![(token, span.ctx)]));
1283            }
1284        }
1285
1286        let mut m_cache = self.macro_call_cache.borrow_mut();
1287
1288        // Filters out all tokens that contain the given range (usually the macro call), any such
1289        // token is redundant as the corresponding macro call has already been processed
1290        let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
1291            tokens.retain(|(t, _): &mut (SyntaxToken, _)| !range.contains_range(t.text_range()))
1292        };
1293
1294        while let Some((expansion, ref mut tokens)) = stack.pop() {
1295            // Reverse the tokens so we prefer first tokens (to accommodate for popping from the
1296            // back)
1297            // alternatively we could pop from the front but that would shift the content on every pop
1298            tokens.reverse();
1299            while let Some((token, ctx)) = tokens.pop() {
1300                let was_not_remapped = (|| {
1301                    // First expand into attribute invocations, this is required to be handled
1302                    // upfront as any other macro call within will not semantically resolve unless
1303                    // also descended.
1304                    let res = self.with_ctx(|ctx| {
1305                        token
1306                            .parent_ancestors()
1307                            .filter_map(ast::Item::cast)
1308                            // FIXME: This might work incorrectly when we have a derive, followed by
1309                            // an attribute on an item, like:
1310                            // ```
1311                            // #[derive(Debug$0)]
1312                            // #[my_attr]
1313                            // struct MyStruct;
1314                            // ```
1315                            // here we should not consider the attribute at all, as our cursor
1316                            // technically lies outside of its expansion
1317                            .find_map(|item| {
1318                                // Don't force populate the dyn cache for items that don't have an attribute anyways
1319                                item.attrs().next()?;
1320                                ctx.item_to_macro_call(InFile::new(expansion, &item))
1321                                    .zip(Some(item))
1322                            })
1323                            .map(|(call_id, item)| {
1324                                let item_range = item.syntax().text_range();
1325                                let loc = db.lookup_intern_macro_call(call_id);
1326                                let text_range = match loc.kind {
1327                                    hir_expand::MacroCallKind::Attr {
1328                                        censored_attr_ids: attr_ids,
1329                                        ..
1330                                    } => {
1331                                        // FIXME: here, the attribute's text range is used to strip away all
1332                                        // entries from the start of the attribute "list" up the invoking
1333                                        // attribute. But in
1334                                        // ```
1335                                        // mod foo {
1336                                        //     #![inner]
1337                                        // }
1338                                        // ```
1339                                        // we don't wanna strip away stuff in the `mod foo {` range, that is
1340                                        // here if the id corresponds to an inner attribute we got strip all
1341                                        // text ranges of the outer ones, and then all of the inner ones up
1342                                        // to the invoking attribute so that the inbetween is ignored.
1343                                        // FIXME: Should cfg_attr be handled differently?
1344                                        let (attr, _) = attr_ids
1345                                            .invoc_attr()
1346                                            .find_attr_range_with_source(db, loc.krate, &item);
1347                                        let start = attr.syntax().text_range().start();
1348                                        TextRange::new(start, item_range.end())
1349                                    }
1350                                    _ => item_range,
1351                                };
1352                                filter_duplicates(tokens, text_range);
1353                                process_expansion_for_token(ctx, &mut stack, call_id)
1354                            })
1355                    });
1356
1357                    if let Some(res) = res {
1358                        return res;
1359                    }
1360
1361                    if always_descend_into_derives {
1362                        let res = self.with_ctx(|ctx| {
1363                            let (derives, adt) = token
1364                                .parent_ancestors()
1365                                .filter_map(ast::Adt::cast)
1366                                .find_map(|adt| {
1367                                    Some((
1368                                        ctx.derive_macro_calls(InFile::new(expansion, &adt))?
1369                                            .map(|(a, b, c)| (a, b, c.to_owned()))
1370                                            .collect::<SmallVec<[_; 2]>>(),
1371                                        adt,
1372                                    ))
1373                                })?;
1374                            for (_, derive_attr, derives) in derives {
1375                                // as there may be multiple derives registering the same helper
1376                                // name, we gotta make sure to call this for all of them!
1377                                // FIXME: We need to call `f` for all of them as well though!
1378                                process_expansion_for_token(ctx, &mut stack, derive_attr);
1379                                for derive in derives.into_iter().flatten() {
1380                                    let Either::Left(derive) = derive else { continue };
1381                                    process_expansion_for_token(ctx, &mut stack, derive);
1382                                }
1383                            }
1384                            // remove all tokens that are within the derives expansion
1385                            filter_duplicates(tokens, adt.syntax().text_range());
1386                            Some(())
1387                        });
1388                        // if we found derives, we can early exit. There is no way we can be in any
1389                        // macro call at this point given we are not in a token tree
1390                        if let Some(()) = res {
1391                            // Note: derives do not remap the original token. Furthermore, we want
1392                            // the original token to be before the derives in the list, because if they
1393                            // upmap to the same token and we deduplicate them (e.g. in rename), we
1394                            // want the original token to remain, not the derive.
1395                            return None;
1396                        }
1397                    }
1398                    // Then check for token trees, that means we are either in a function-like macro or
1399                    // secondary attribute inputs
1400                    let tt = token
1401                        .parent_ancestors()
1402                        .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
1403                        .last()?;
1404
1405                    match tt {
1406                        // function-like macro call
1407                        Either::Left(tt) => {
1408                            let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
1409                            if tt.left_delimiter_token().map_or(false, |it| it == token) {
1410                                return None;
1411                            }
1412                            if tt.right_delimiter_token().map_or(false, |it| it == token) {
1413                                return None;
1414                            }
1415                            let mcall = InFile::new(expansion, macro_call);
1416                            let file_id = match m_cache.get(&mcall) {
1417                                Some(&it) => it,
1418                                None => {
1419                                    let it = ast::MacroCall::to_def(self, mcall.as_ref())?;
1420                                    m_cache.insert(mcall, it);
1421                                    it
1422                                }
1423                            };
1424                            let text_range = tt.syntax().text_range();
1425                            filter_duplicates(tokens, text_range);
1426
1427                            self.with_ctx(|ctx| {
1428                                process_expansion_for_token(ctx, &mut stack, file_id).or(file_id
1429                                    .eager_arg(db)
1430                                    .and_then(|arg| {
1431                                        // also descend into eager expansions
1432                                        process_expansion_for_token(ctx, &mut stack, arg)
1433                                    }))
1434                            })
1435                        }
1436                        Either::Right(_) if always_descend_into_derives => None,
1437                        // derive or derive helper
1438                        Either::Right(meta) => {
1439                            // attribute we failed expansion for earlier, this might be a derive invocation
1440                            // or derive helper attribute
1441                            let attr = meta.parent_attr()?;
1442                            let adt = match attr.syntax().parent().and_then(ast::Adt::cast) {
1443                                Some(adt) => {
1444                                    // this might be a derive on an ADT
1445                                    let res = self.with_ctx(|ctx| {
1446                                        // so try downmapping the token into the pseudo derive expansion
1447                                        // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
1448                                        let derive_call = ctx
1449                                            .attr_to_derive_macro_call(
1450                                                InFile::new(expansion, &adt),
1451                                                InFile::new(expansion, meta.clone()),
1452                                            )?
1453                                            .1;
1454
1455                                        // resolved to a derive
1456                                        let text_range = attr.syntax().text_range();
1457                                        // remove any other token in this macro input, all their mappings are the
1458                                        // same as this
1459                                        tokens.retain(|(t, _)| {
1460                                            !text_range.contains_range(t.text_range())
1461                                        });
1462                                        Some(process_expansion_for_token(
1463                                            ctx,
1464                                            &mut stack,
1465                                            derive_call,
1466                                        ))
1467                                    });
1468                                    if let Some(res) = res {
1469                                        return res;
1470                                    }
1471                                    Some(adt)
1472                                }
1473                                None => {
1474                                    // Otherwise this could be a derive helper on a variant or field
1475                                    attr.syntax().ancestors().find_map(ast::Item::cast).and_then(
1476                                        |it| match it {
1477                                            ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
1478                                            ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
1479                                            ast::Item::Union(it) => Some(ast::Adt::Union(it)),
1480                                            _ => None,
1481                                        },
1482                                    )
1483                                }
1484                            }?;
1485                            let attr_name =
1486                                attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
1487                            // Not an attribute, nor a derive, so it's either an inert attribute or a derive helper
1488                            // Try to resolve to a derive helper and downmap
1489                            let resolver = &token
1490                                .parent()
1491                                .and_then(|parent| {
1492                                    self.analyze_impl(InFile::new(expansion, &parent), None, false)
1493                                })?
1494                                .resolver;
1495                            let id = db.ast_id_map(expansion).ast_id(&adt);
1496                            let helpers = resolver
1497                                .def_map()
1498                                .derive_helpers_in_scope(InFile::new(expansion, id))?;
1499
1500                            if !helpers.is_empty() {
1501                                let text_range = attr.syntax().text_range();
1502                                filter_duplicates(tokens, text_range);
1503                            }
1504
1505                            let mut res = None;
1506                            self.with_ctx(|ctx| {
1507                                for (.., derive) in
1508                                    helpers.iter().filter(|(helper, ..)| *helper == attr_name)
1509                                {
1510                                    let Either::Left(derive) = *derive else { continue };
1511                                    // as there may be multiple derives registering the same helper
1512                                    // name, we gotta make sure to call this for all of them!
1513                                    // FIXME: We need to call `f` for all of them as well though!
1514                                    res = res
1515                                        .or(process_expansion_for_token(ctx, &mut stack, derive));
1516                                }
1517                                res
1518                            })
1519                        }
1520                    }
1521                })()
1522                .is_none();
1523                if was_not_remapped
1524                    && let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx)
1525                {
1526                    return Some(b);
1527                }
1528            }
1529        }
1530        None
1531    }
1532
    // Note this return type is deliberate, as [`find_nodes_at_offset_with_descend`] wants to stop
    // traversing the inner iterator when it finds a node.
    // The outer iterator is over the token's descendants.
    // The inner iterator is the ancestors of a descendant.
    /// For every token at `offset`, descends it into its exact macro-expansion copies and
    /// yields, per copy, the (macro-aware) ancestor chain of that copy.
    fn descend_node_at_offset(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
        node.token_at_offset(offset)
            // Map each token at the offset to all of its exact descendants in macro expansions.
            .map(move |token| self.descend_into_macros_exact(token))
            // For each descendant, produce its ancestor chain (climbing out of macro files).
            .map(|descendants| {
                descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
            })
            // re-order the tokens from token_at_offset by returning the ancestors with the smaller first nodes first
            // See algo::ancestors_at_offset, which uses the same approach
            .kmerge_by(|left, right| {
                left.clone()
                    .map(|node| node.text_range().len())
                    .lt(right.clone().map(|node| node.text_range().len()))
            })
    }
1555
1556    /// Attempts to map the node out of macro expanded files returning the original file range.
1557    /// If upmapping is not possible, this will fall back to the range of the macro call of the
1558    /// macro file the node resides in.
1559    pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
1560        let node = self.find_file(node);
1561        node.original_file_range_rooted(self.db)
1562    }
1563
1564    /// Attempts to map the node out of macro expanded files returning the original file range.
1565    pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
1566        let node = self.find_file(node);
1567        node.original_file_range_opt(self.db).filter(|(_, ctx)| ctx.is_root()).map(TupleExt::head)
1568    }
1569
1570    /// Attempts to map the node out of macro expanded files.
1571    /// This only work for attribute expansions, as other ones do not have nodes as input.
1572    pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
1573        self.wrap_node_infile(node).original_ast_node_rooted(self.db).map(
1574            |InRealFile { file_id, value }| {
1575                self.cache(find_root(value.syntax()), file_id.into());
1576                value
1577            },
1578        )
1579    }
1580
1581    /// Attempts to map the node out of macro expanded files.
1582    /// This only work for attribute expansions, as other ones do not have nodes as input.
1583    pub fn original_syntax_node_rooted(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
1584        let InFile { file_id, .. } = self.find_file(node);
1585        InFile::new(file_id, node).original_syntax_node_rooted(self.db).map(
1586            |InRealFile { file_id, value }| {
1587                self.cache(find_root(&value), file_id.into());
1588                value
1589            },
1590        )
1591    }
1592
1593    pub fn diagnostics_display_range(
1594        &self,
1595        src: InFile<SyntaxNodePtr>,
1596    ) -> FileRangeWrapper<FileId> {
1597        let root = self.parse_or_expand(src.file_id);
1598        let node = src.map(|it| it.to_node(&root));
1599        let FileRange { file_id, range } = node.as_ref().original_file_range_rooted(self.db);
1600        FileRangeWrapper { file_id: file_id.file_id(self.db), range }
1601    }
1602
1603    pub fn diagnostics_display_range_for_range(
1604        &self,
1605        src: InFile<TextRange>,
1606    ) -> FileRangeWrapper<FileId> {
1607        let FileRange { file_id, range } = src.original_node_file_range_rooted(self.db);
1608        FileRangeWrapper { file_id: file_id.file_id(self.db), range }
1609    }
1610
    /// Ancestors of `token`'s parent node, climbing out of macro expansions.
    /// A token without a parent yields an empty iterator.
    fn token_ancestors_with_macros(
        &self,
        token: SyntaxToken,
    ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
        token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
    }

    /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
    // FIXME: Replace with `ancestors_with_macros_file` when all usages are updated.
    pub fn ancestors_with_macros(
        &self,
        node: SyntaxNode,
    ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
        let node = self.find_file(&node);
        self.ancestors_with_macros_file(node.cloned()).map(|it| it.value)
    }

    /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
    pub fn ancestors_with_macros_file(
        &self,
        node: InFile<SyntaxNode>,
    ) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
        iter::successors(Some(node), move |&InFile { file_id, ref value }| match value.parent() {
            Some(parent) => Some(InFile::new(file_id, parent)),
            None => {
                // Reached the root of this file: if it is a macro file, continue from the
                // parent of the macro call's input node; otherwise stop.
                let macro_file = file_id.macro_file()?;

                self.with_ctx(|ctx| {
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    expansion_info.arg().map(|node| node?.parent()).transpose()
                })
            }
        })
    }

    /// Ancestor chains of all tokens at `offset`, merged so that smaller nodes come first.
    pub fn ancestors_at_offset_with_macros(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = SyntaxNode> + '_ {
        node.token_at_offset(offset)
            .map(|token| self.token_ancestors_with_macros(token))
            // Interleave the per-token chains, preferring the smaller (more specific) node.
            .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
    }
1655
1656    /// Returns the `return` expressions in this function's body,
1657    /// excluding those inside closures or async blocks.
1658    pub fn fn_return_points(&self, func: Function) -> Vec<InFile<ast::ReturnExpr>> {
1659        let func_id = match func.id {
1660            AnyFunctionId::FunctionId(id) => id,
1661            _ => return vec![],
1662        };
1663        let (body, source_map) = Body::with_source_map(self.db, func_id.into());
1664
1665        fn collect_returns(
1666            sema: &SemanticsImpl<'_>,
1667            body: &Body,
1668            source_map: &hir_def::expr_store::ExpressionStoreSourceMap,
1669            expr_id: ExprId,
1670            acc: &mut Vec<InFile<ast::ReturnExpr>>,
1671        ) {
1672            match &body[expr_id] {
1673                Expr::Closure { .. } | Expr::Const(_) => return,
1674                Expr::Return { .. } => {
1675                    if let Ok(source) = source_map.expr_syntax(expr_id)
1676                        && let Some(ret_expr) = source.value.cast::<ast::ReturnExpr>()
1677                    {
1678                        let root = sema.parse_or_expand(source.file_id);
1679                        acc.push(InFile::new(source.file_id, ret_expr.to_node(&root)));
1680                    }
1681                }
1682                _ => {}
1683            }
1684            body.walk_child_exprs(expr_id, |child| {
1685                collect_returns(sema, body, source_map, child, acc);
1686            });
1687        }
1688
1689        let mut returns = vec![];
1690        collect_returns(self, body, source_map, body.root_expr(), &mut returns);
1691        returns
1692    }
1693
    /// Resolves a lifetime reference to the generic lifetime parameter it names, by
    /// searching the enclosing generic parameter lists for a lifetime of the same text.
    pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
        let text = lifetime.text();
        let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
            let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
            gpl.lifetime_params()
                .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
        })?;
        let src = self.wrap_node_infile(lifetime_param);
        ToDef::to_def(self, src.as_ref())
    }

    /// Resolves a label reference (written with lifetime syntax) to its label definition.
    pub fn resolve_label(&self, label: &ast::Lifetime) -> Option<Label> {
        let src = self.wrap_node_infile(label.clone());
        let (parent, label_id) = self.with_ctx(|ctx| ctx.label_ref_to_def(src.as_ref()))?;
        Some(Label { parent, label_id })
    }

    /// Resolves a syntactic type to its semantic [`Type`].
    pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type<'db>> {
        let analyze = self.analyze(ty.syntax())?;
        analyze.type_of_type(self.db, ty)
    }

    /// Resolves a path in type position to the trait it names, if any.
    pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
        // The path must be the direct child of a type node.
        let parent_ty = path.syntax().parent().and_then(ast::Type::cast)?;
        let analyze = self.analyze(path.syntax())?;
        let ty = analyze.store_sm()?.node_type(InFile::new(analyze.file_id, &parent_ty))?;
        // Only plain path types can name a trait.
        let path = match &analyze.store()?.types[ty] {
            hir_def::type_ref::TypeRef::Path(path) => path,
            _ => return None,
        };
        match analyze.resolver.resolve_path_in_type_ns_fully(self.db, path)? {
            TypeNs::TraitId(trait_id) => Some(trait_id.into()),
            _ => None,
        }
    }
1729
    /// Returns the chain of type adjustments (autoderef, autoref, coercions, …) that
    /// inference applied to this expression, with source/target types filled in per step.
    pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment<'db>>> {
        // Convert the solver's mutability into the HIR-level one.
        let mutability = |m| match m {
            hir_ty::next_solver::Mutability::Not => Mutability::Shared,
            hir_ty::next_solver::Mutability::Mut => Mutability::Mut,
        };

        let analyzer = self.analyze(expr.syntax())?;

        // The first adjustment starts from the expression's own (unadjusted) type.
        let (mut source_ty, _) = analyzer.type_of_expr(self.db, expr)?;

        analyzer.expr_adjustments(expr).map(|it| {
            it.iter()
                .map(|adjust| {
                    let target = Type::new_with_resolver(
                        self.db,
                        &analyzer.resolver,
                        adjust.target.as_ref(),
                    );
                    let kind = match adjust.kind {
                        hir_ty::Adjust::NeverToAny => Adjust::NeverToAny,
                        hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => {
                            // FIXME: Should we handle unknown mutability better?
                            Adjust::Deref(Some(OverloadedDeref(mutability(m))))
                        }
                        hir_ty::Adjust::Deref(None) => Adjust::Deref(None),
                        hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
                            Adjust::Borrow(AutoBorrow::RawPtr(mutability(m)))
                        }
                        hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => {
                            // FIXME: Handle lifetimes here
                            Adjust::Borrow(AutoBorrow::Ref(mutability(m.into())))
                        }
                        hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc),
                    };

                    // Update `source_ty` for the next adjustment: each step's target is the
                    // following step's source.
                    let source = mem::replace(&mut source_ty, target.clone());

                    Adjustment { source, target, kind }
                })
                .collect()
        })
    }
1773
    /// The type of an expression: its original inferred type plus the type after adjustments.
    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo<'db>> {
        self.analyze(expr.syntax())?
            .type_of_expr(self.db, expr)
            .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
    }

    /// The type of a pattern: its original inferred type plus the type after adjustments.
    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo<'db>> {
        self.analyze(pat.syntax())?
            .type_of_pat(self.db, pat)
            .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
    }

    /// It also includes the changes that binding mode makes in the type. For example in
    /// `let ref x @ Some(_) = None` the result of `type_of_pat` is `Option<T>` but the result
    /// of this function is `&mut Option<T>`
    pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type<'db>> {
        self.analyze(pat.syntax())?.type_of_binding_in_pat(self.db, pat)
    }

    /// The type of a `self` parameter.
    pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type<'db>> {
        self.analyze(param.syntax())?.type_of_self(self.db, param)
    }

    /// The adjustment types applied to this pattern; empty when analysis fails.
    pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type<'db>; 1]> {
        self.analyze(pat.syntax())
            .and_then(|it| it.pattern_adjustments(self.db, pat))
            .unwrap_or_default()
    }

    /// The binding mode inference chose for this identifier pattern.
    pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
        self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
    }

    /// Resolves an expression in call position to a [`Callable`].
    pub fn resolve_expr_as_callable(&self, call: &ast::Expr) -> Option<Callable<'db>> {
        self.analyze(call.syntax())?.resolve_expr_as_callable(self.db, call)
    }

    /// Resolves a method call to the function it invokes.
    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
        self.analyze(call.syntax())?.resolve_method_call(self.db, call)
    }

    /// Attempts to resolve this call expression as a method call falling back to resolving it as a field.
    pub fn resolve_method_call_fallback(
        &self,
        call: &ast::MethodCallExpr,
    ) -> Option<(Either<Function, Field>, Option<GenericSubstitution<'db>>)> {
        self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
    }
1822
    /// Env is used to derive the trait environment
    // FIXME: better api for the trait environment
    /// Resolves a trait method to the concrete impl method selected for the given
    /// substitution.
    ///
    /// # Panics
    /// Panics if `subst` yields fewer or more items than the trait's generic parameters,
    /// or if the trait has non-type generic parameters.
    pub fn resolve_trait_impl_method(
        &self,
        env: Type<'db>,
        trait_: Trait,
        func: Function,
        subst: impl IntoIterator<Item = Type<'db>>,
    ) -> Option<Function> {
        // Non-plain functions (e.g. builtin-derive methods) are returned unchanged.
        let AnyFunctionId::FunctionId(func) = func.id else { return Some(func) };
        let interner = DbInterner::new_no_crate(self.db);
        let mut subst = subst.into_iter();
        // Build the generic args for the trait from the caller-supplied types, one per
        // type parameter.
        let substs =
            hir_ty::next_solver::GenericArgs::for_item(interner, trait_.id.into(), |_, id, _| {
                assert!(matches!(id, hir_def::GenericParamId::TypeParamId(_)), "expected a type");
                subst.next().expect("too few subst").ty.into()
            });
        assert!(subst.next().is_none(), "too many subst");
        Some(match self.db.lookup_impl_method(env.env, func, substs).0 {
            Either::Left(it) => it.into(),
            Either::Right((impl_, method)) => {
                Function { id: AnyFunctionId::BuiltinDeriveImplMethod { method, impl_ } }
            }
        })
    }
1848
    /// Resolves the struct a range pattern corresponds to.
    fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<StructId> {
        self.analyze(range_pat.syntax())?.resolve_range_pat(self.db, range_pat)
    }

    /// Resolves the struct a range expression corresponds to.
    fn resolve_range_expr(&self, range_expr: &ast::RangeExpr) -> Option<StructId> {
        self.analyze(range_expr.syntax())?.resolve_range_expr(self.db, range_expr)
    }

    /// Resolves the `poll` function backing an `.await` expression.
    fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
        self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr)
    }

    /// Resolves the operator function invoked by a prefix (unary) expression.
    fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
        self.analyze(prefix_expr.syntax())?.resolve_prefix_expr(self.db, prefix_expr)
    }

    /// Resolves the operator function invoked by an index expression.
    fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
        self.analyze(index_expr.syntax())?.resolve_index_expr(self.db, index_expr)
    }

    /// Resolves the operator function invoked by a binary expression.
    fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
        self.analyze(bin_expr.syntax())?.resolve_bin_expr(self.db, bin_expr)
    }

    /// Resolves the function invoked by a `?` (try) expression.
    fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
        self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr)
    }
1876
    /// The type that the associated `try` block, closure or function expects.
    pub fn try_expr_returned_type(&self, try_expr: &ast::TryExpr) -> Option<Type<'db>> {
        // Walk up (through macro expansions) to the nearest enclosing `try` block,
        // closure, or function — whichever is found first wins.
        self.ancestors_with_macros(try_expr.syntax().clone()).find_map(|parent| {
            if let Some(try_block) = ast::BlockExpr::cast(parent.clone())
                && try_block.try_block_modifier().is_some()
            {
                // `try { .. }`: the block's own type.
                Some(self.type_of_expr(&try_block.into())?.original)
            } else if let Some(closure) = ast::ClosureExpr::cast(parent.clone()) {
                // Closure: its return type, derived from the closure's callable type.
                Some(
                    self.type_of_expr(&closure.into())?
                        .original
                        .as_callable(self.db)?
                        .return_type(),
                )
            } else if let Some(function) = ast::Fn::cast(parent) {
                // Function: its declared return type.
                Some(self.to_def(&function)?.ret_type(self.db))
            } else {
                None
            }
        })
    }
1898
    // This does not resolve the method call to the correct trait impl!
    // We should probably fix that.
    /// Resolves a method call to a [`Callable`] describing its signature.
    pub fn resolve_method_call_as_callable(
        &self,
        call: &ast::MethodCallExpr,
    ) -> Option<Callable<'db>> {
        self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
    }

    /// Resolves a field access to a named field or a tuple field.
    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Either<Field, TupleField>> {
        self.analyze(field.syntax())?.resolve_field(field)
    }

    /// Resolves a field access, falling back to a method of the same name if no field matches.
    pub fn resolve_field_fallback(
        &self,
        field: &ast::FieldExpr,
    ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution<'db>>)>
    {
        self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
    }

    /// Resolves a record-literal field to its field definition, shorthand local (if any),
    /// and type.
    pub fn resolve_record_field(
        &self,
        field: &ast::RecordExprField,
    ) -> Option<(Field, Option<Local>, Type<'db>)> {
        self.resolve_record_field_with_substitution(field)
            .map(|(field, local, ty, _)| (field, local, ty))
    }

    /// Like [`Self::resolve_record_field`], additionally returning the generic substitution.
    pub fn resolve_record_field_with_substitution(
        &self,
        field: &ast::RecordExprField,
    ) -> Option<(Field, Option<Local>, Type<'db>, GenericSubstitution<'db>)> {
        self.analyze(field.syntax())?.resolve_record_field(self.db, field)
    }

    /// Resolves a record-pattern field to its field definition and type.
    pub fn resolve_record_pat_field(
        &self,
        field: &ast::RecordPatField,
    ) -> Option<(Field, Type<'db>)> {
        self.resolve_record_pat_field_with_subst(field).map(|(field, ty, _)| (field, ty))
    }

    /// Like [`Self::resolve_record_pat_field`], additionally returning the generic substitution.
    pub fn resolve_record_pat_field_with_subst(
        &self,
        field: &ast::RecordPatField,
    ) -> Option<(Field, Type<'db>, GenericSubstitution<'db>)> {
        self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
    }
1948
    // FIXME: Replace this with `resolve_macro_call2`
    /// Resolves a macro call to the macro it invokes.
    pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
        let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
        self.resolve_macro_call2(macro_call)
    }

    /// Resolves a macro call (already wrapped in its file) to the macro it invokes.
    pub fn resolve_macro_call2(&self, macro_call: InFile<&ast::MacroCall>) -> Option<Macro> {
        self.to_def2(macro_call)
            .and_then(|call| self.with_ctx(|ctx| macro_call_to_macro_id(ctx, call)))
            .map(Into::into)
    }

    /// Whether this macro call resolves to a procedural macro.
    pub fn is_proc_macro_call(&self, macro_call: InFile<&ast::MacroCall>) -> bool {
        self.resolve_macro_call2(macro_call)
            .is_some_and(|m| matches!(m.id, MacroId::ProcMacroId(..)))
    }

    /// The index of the macro-rules arm that matched for this call, if any.
    pub fn resolve_macro_call_arm(&self, macro_call: &ast::MacroCall) -> Option<u32> {
        let file_id = self.to_def(macro_call)?;
        self.db.parse_macro_expansion(file_id).value.1.matched_arm
    }
1970
    /// Collects the source nodes of all unsafe operations in the given definition's
    /// expression store.
    pub fn get_unsafe_ops(&self, def: ExpressionStoreOwner) -> FxHashSet<ExprOrPatSource> {
        // Owners without an expression store cannot contain unsafe operations.
        let Ok(def) = ExpressionStoreOwnerId::try_from(def) else { return Default::default() };
        let (body, source_map) = ExpressionStore::with_source_map(self.db, def);
        let mut res = FxHashSet::default();
        // Visit every inference result associated with this store, scanning each
        // expression root for unsafe operations.
        self.with_all_infers_for_store(def, &mut |infer| {
            for root in body.expr_roots() {
                unsafe_operations(self.db, infer, def, body, root, &mut |node, _| {
                    // Only operations that map back to syntax are reported.
                    if let Ok(node) = source_map.expr_or_pat_syntax(node) {
                        res.insert(node);
                    }
                });
            }
        });
        res
    }
1986
1987    pub fn get_unsafe_ops_for_unsafe_block(&self, block: ast::BlockExpr) -> Vec<ExprOrPatSource> {
1988        always!(block.unsafe_token().is_some());
1989        let Some(sa) = self.analyze(block.syntax()) else { return vec![] };
1990        let Some((def, store, sm, Some(infer))) = sa.def() else { return vec![] };
1991        let block = self.wrap_node_infile(ast::Expr::from(block));
1992        let Some(ExprOrPatId::ExprId(block)) = sm.node_expr(block.as_ref()) else {
1993            return Vec::new();
1994        };
1995        let mut res = Vec::default();
1996        unsafe_operations(self.db, infer, def, store, block, &mut |node, _| {
1997            if let Ok(node) = sm.expr_or_pat_syntax(node) {
1998                res.push(node);
1999            }
2000        });
2001        res
2002    }
2003
    /// Whether invoking this macro call is an unsafe operation.
    pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
        let Some(mac) = self.resolve_macro_call(macro_call) else { return false };
        // `asm!`-like macros are reported as unsafe unconditionally.
        if mac.is_asm_like(self.db) {
            return true;
        }

        let Some(sa) = self.analyze(macro_call.syntax()) else { return false };
        let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
        // Otherwise only macro calls in expression position are checked.
        match macro_call.map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast)).transpose() {
            Some(it) => sa.is_unsafe_macro_call_expr(self.db, it.as_ref()),
            None => false,
        }
    }
2017
2018    pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
2019        let item_in_file = self.wrap_node_infile(item.clone());
2020        let id = self.with_ctx(|ctx| {
2021            let macro_call_id = ctx.item_to_macro_call(item_in_file.as_ref())?;
2022            macro_call_to_macro_id(ctx, macro_call_id)
2023        })?;
2024        Some(Macro { id })
2025    }
2026
    /// Resolves a path to whatever definition it refers to.
    pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
        self.resolve_path_with_subst(path).map(|(it, _)| it)
    }

    /// Resolves a path in every namespace it is valid in.
    pub fn resolve_path_per_ns(&self, path: &ast::Path) -> Option<PathResolutionPerNs> {
        self.analyze(path.syntax())?.resolve_hir_path_per_ns(self.db, path)
    }

    /// Resolves a path, additionally returning the generic substitution if one applies.
    pub fn resolve_path_with_subst(
        &self,
        path: &ast::Path,
    ) -> Option<(PathResolution, Option<GenericSubstitution<'db>>)> {
        self.analyze(path.syntax())?.resolve_path(self.db, path)
    }

    /// Resolves a name reference inside a `use` to a type parameter.
    pub fn resolve_use_type_arg(&self, name: &ast::NameRef) -> Option<TypeParam> {
        self.analyze(name.syntax())?.resolve_use_type_arg(name)
    }

    /// Resolves a field/variant name inside an `offset_of!` invocation.
    pub fn resolve_offset_of_field(
        &self,
        name_ref: &ast::NameRef,
    ) -> Option<(Either<EnumVariant, Field>, GenericSubstitution<'db>)> {
        self.analyze_no_infer(name_ref.syntax())?.resolve_offset_of_field(self.db, name_ref)
    }

    /// Resolves a [`ModPath`] in the item namespace, in the scope of `scope`.
    pub fn resolve_mod_path(
        &self,
        scope: &SyntaxNode,
        path: &ModPath,
    ) -> Option<impl Iterator<Item = ItemInNs>> {
        let analyze = self.analyze(scope)?;
        let items = analyze.resolver.resolve_module_path_in_items(self.db, path);
        Some(items.iter_items().map(|(item, _)| item.into()))
    }

    /// Resolves the variant constructed by a record literal.
    fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
        self.analyze(record_lit.syntax())?.resolve_variant(record_lit)
    }

    /// Resolves an identifier pattern that actually names a constant.
    pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
        self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
    }
2070
    /// Fields of the record's variant that the literal does not mention; empty on
    /// analysis failure.
    pub fn record_literal_missing_fields(
        &self,
        literal: &ast::RecordExpr,
    ) -> Vec<(Field, Type<'db>)> {
        self.analyze(literal.syntax())
            .and_then(|it| it.record_literal_missing_fields(self.db, literal))
            .unwrap_or_default()
    }

    /// Fields the record literal does mention, with their types; empty on analysis failure.
    pub fn record_literal_matched_fields(
        &self,
        literal: &ast::RecordExpr,
    ) -> Vec<(Field, Type<'db>)> {
        self.analyze(literal.syntax())
            .and_then(|it| it.record_literal_matched_fields(self.db, literal))
            .unwrap_or_default()
    }

    /// Fields of the record's variant that the pattern does not mention; empty on
    /// analysis failure.
    pub fn record_pattern_missing_fields(
        &self,
        pattern: &ast::RecordPat,
    ) -> Vec<(Field, Type<'db>)> {
        self.analyze(pattern.syntax())
            .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
            .unwrap_or_default()
    }

    /// Fields the record pattern does mention, with their types; empty on analysis failure.
    pub fn record_pattern_matched_fields(
        &self,
        pattern: &ast::RecordPat,
    ) -> Vec<(Field, Type<'db>)> {
        self.analyze(pattern.syntax())
            .and_then(|it| it.record_pattern_matched_fields(self.db, pattern))
            .unwrap_or_default()
    }
2106
2107    fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
2108        let mut ctx = SourceToDefCtx { db: self.db, cache: &mut self.s2d_cache.borrow_mut() };
2109        f(&mut ctx)
2110    }
2111
2112    pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
2113        let src = self.find_file(src.syntax()).with_value(src);
2114        T::to_def(self, src)
2115    }
2116
    /// Like [`Self::to_def`], but for callers that already have the node
    /// wrapped in an [`InFile`] and so can skip the file lookup.
    pub fn to_def2<T: ToDef>(&self, src: InFile<&T>) -> Option<T::Def> {
        T::to_def(self, src)
    }
2120
2121    fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
2122        self.with_ctx(|ctx| ctx.file_to_def(file).to_owned()).into_iter().map(Module::from)
2123    }
2124
    /// Like `file_to_module_defs`, but first maps a (possibly macro) file back
    /// to the real file it originated from.
    fn hir_file_to_module_defs(&self, file: HirFileId) -> impl Iterator<Item = Module> {
        // FIXME: Do we need to care about inline modules for macro expansions?
        self.file_to_module_defs(file.original_file_respecting_includes(self.db).file_id(self.db))
    }
2129
2130    pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
2131        self.analyze_no_infer(node).map(
2132            |SourceAnalyzer { file_id, resolver, infer_body, .. }| SemanticsScope {
2133                db: self.db,
2134                file_id,
2135                resolver,
2136                infer_body,
2137            },
2138        )
2139    }
2140
2141    pub fn scope_at_offset(
2142        &self,
2143        node: &SyntaxNode,
2144        offset: TextSize,
2145    ) -> Option<SemanticsScope<'db>> {
2146        self.analyze_with_offset_no_infer(node, offset).map(
2147            |SourceAnalyzer { file_id, resolver, infer_body, .. }| SemanticsScope {
2148                db: self.db,
2149                file_id,
2150                resolver,
2151                infer_body,
2152            },
2153        )
2154    }
2155
    /// Search for a definition's source and cache its syntax tree
    pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>> {
        // FIXME: source call should go through the parse cache
        let res = def.source(self.db)?;
        // Register the tree's root so later `find_file` calls on nodes of this
        // tree can be mapped back to their file.
        self.cache(find_root(res.value.syntax()), res.file_id);
        Some(res)
    }
2163
    /// Like [`Self::source`], but also yields the definition's text range; the
    /// AST node itself may be absent.
    pub fn source_with_range<Def: HasSource>(
        &self,
        def: Def,
    ) -> Option<InFile<(TextRange, Option<Def::Ast>)>> {
        let res = def.source_with_range(self.db)?;
        // Parsing/expanding the file also caches its syntax-tree root for
        // later `find_file` lookups.
        self.parse_or_expand(res.file_id);
        Some(res)
    }
2172
2173    pub fn store_owner_for(&self, node: InFile<&SyntaxNode>) -> Option<ExpressionStoreOwner> {
2174        let container = self.with_ctx(|ctx| ctx.find_container(node))?;
2175        container.as_expression_store_owner().map(|id| id.into())
2176    }
2177
    /// Lazily builds (and caches in `cache`) the root-expression ->
    /// anonymous-const map for a definition that has no body of its own:
    /// either a signature owner (`Either::Left`) or a variant's field types
    /// (`Either::Right`).
    fn populate_anon_const_cache_for<'a>(
        &self,
        cache: &'a mut DefAnonConstsMap,
        def: DefWithoutBodyWithAnonConsts,
    ) -> &'a ExprToAnonConst {
        cache.entry(def).or_insert_with(|| match def {
            Either::Left(def) => {
                // Anonymous consts declared in the item's signature.
                let all_anon_consts =
                    AnonConstId::all_from_signature(self.db, def).into_iter().flatten().copied();
                all_anon_consts
                    .map(|anon_const| (anon_const.loc(self.db).expr, anon_const))
                    .collect()
            }
            Either::Right(def) => {
                // Anonymous consts appearing in the variant's field types.
                let all_anon_consts =
                    self.db.field_types_with_diagnostics(def).defined_anon_consts().iter().copied();
                all_anon_consts
                    .map(|anon_const| (anon_const.loc(self.db).expr, anon_const))
                    .collect()
            }
        })
    }
2200
2201    fn find_anon_const_for_root_expr_in_signature(
2202        &self,
2203        def: DefWithoutBodyWithAnonConsts,
2204        root_expr: ExprId,
2205    ) -> Option<AnonConstId> {
2206        let mut cache = self.signature_anon_consts_cache.borrow_mut();
2207        let anon_consts_map = self.populate_anon_const_cache_for(&mut cache, def);
2208        anon_consts_map.get(&root_expr).copied()
2209    }
2210
    /// Determines which inference "body" covers `node`: a real body is its own
    /// inference unit, while for signatures/variant field types it is the
    /// anonymous const whose root expression contains the node.
    pub(crate) fn infer_body_for_expr_or_pat(
        &self,
        def: ExpressionStoreOwnerId,
        store: &ExpressionStore,
        node: ExprOrPatId,
    ) -> Option<InferBodyId> {
        let handle_def_without_body = |def| {
            // Walk up to the root expression of the subtree `node` lives in,
            // then find the anonymous const owning that root.
            let root_expr = match node {
                ExprOrPatId::ExprId(expr) => store.find_root_for_expr(expr),
                ExprOrPatId::PatId(pat) => store.find_root_for_pat(pat),
            };
            let anon_const = self.find_anon_const_for_root_expr_in_signature(def, root_expr)?;
            Some(anon_const.into())
        };
        match def {
            ExpressionStoreOwnerId::Signature(def) => handle_def_without_body(Either::Left(def)),
            ExpressionStoreOwnerId::Body(def) => Some(def.into()),
            ExpressionStoreOwnerId::VariantFields(def) => {
                handle_def_without_body(Either::Right(def))
            }
        }
    }
2233
    /// Invokes `callback` with every inference result associated with `owner`:
    /// exactly one for a real body, or one per anonymous const for
    /// signatures/variant field types.
    fn with_all_infers_for_store(
        &self,
        owner: ExpressionStoreOwnerId,
        callback: &mut dyn FnMut(&'db InferenceResult),
    ) {
        let mut handle_def_without_body = |def| {
            let mut cache = self.signature_anon_consts_cache.borrow_mut();
            let map = self.populate_anon_const_cache_for(&mut cache, def);
            for &anon_const in map.values() {
                callback(InferenceResult::of(self.db, anon_const));
            }
        };
        match owner {
            ExpressionStoreOwnerId::Signature(def) => handle_def_without_body(Either::Left(def)),
            ExpressionStoreOwnerId::Body(def) => {
                callback(InferenceResult::of(self.db, def));
            }
            ExpressionStoreOwnerId::VariantFields(def) => {
                handle_def_without_body(Either::Right(def))
            }
        }
    }
2256
2257    /// Returns none if the file of the node is not part of a crate.
2258    fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer<'db>> {
2259        let node = self.find_file(node);
2260        self.analyze_impl(node, None, true)
2261    }
2262
2263    /// Returns none if the file of the node is not part of a crate.
2264    fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer<'db>> {
2265        let node = self.find_file(node);
2266        self.analyze_impl(node, None, false)
2267    }
2268
2269    fn analyze_with_offset_no_infer(
2270        &self,
2271        node: &SyntaxNode,
2272        offset: TextSize,
2273    ) -> Option<SourceAnalyzer<'db>> {
2274        let node = self.find_file(node);
2275        self.analyze_impl(node, Some(offset), false)
2276    }
2277
2278    fn analyze_impl(
2279        &self,
2280        node: InFile<&SyntaxNode>,
2281        offset: Option<TextSize>,
2282        // replace this, just make the inference result a `LazyCell`
2283        infer: bool,
2284    ) -> Option<SourceAnalyzer<'db>> {
2285        let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered();
2286
2287        let container = self.with_ctx(|ctx| ctx.find_container(node))?;
2288
2289        let resolver = match container {
2290            ChildContainer::DefWithBodyId(def) => {
2291                return Some(if infer {
2292                    SourceAnalyzer::new_for_body(self.db, def, node, offset)
2293                } else {
2294                    SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
2295                });
2296            }
2297            ChildContainer::VariantId(def) => {
2298                return Some(SourceAnalyzer::new_variant_body(
2299                    self.db, self, def, node, offset, infer,
2300                ));
2301            }
2302            ChildContainer::TraitId(it) => {
2303                return Some(if infer {
2304                    SourceAnalyzer::new_generic_def(self.db, self, it.into(), node, offset)
2305                } else {
2306                    SourceAnalyzer::new_generic_def_no_infer(self.db, self, it.into(), node, offset)
2307                });
2308            }
2309            ChildContainer::ImplId(it) => {
2310                return Some(if infer {
2311                    SourceAnalyzer::new_generic_def(self.db, self, it.into(), node, offset)
2312                } else {
2313                    SourceAnalyzer::new_generic_def_no_infer(self.db, self, it.into(), node, offset)
2314                });
2315            }
2316            ChildContainer::EnumId(it) => {
2317                return Some(if infer {
2318                    SourceAnalyzer::new_generic_def(self.db, self, it.into(), node, offset)
2319                } else {
2320                    SourceAnalyzer::new_generic_def_no_infer(self.db, self, it.into(), node, offset)
2321                });
2322            }
2323            ChildContainer::GenericDefId(it) => {
2324                return Some(if infer {
2325                    SourceAnalyzer::new_generic_def(self.db, self, it, node, offset)
2326                } else {
2327                    SourceAnalyzer::new_generic_def_no_infer(self.db, self, it, node, offset)
2328                });
2329            }
2330            ChildContainer::ModuleId(it) => it.resolver(self.db),
2331        };
2332        Some(SourceAnalyzer::new_for_resolver(resolver, node))
2333    }
2334
2335    fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
2336        SourceToDefCache::cache(
2337            &mut self.s2d_cache.borrow_mut().root_to_file_cache,
2338            root_node,
2339            file_id,
2340        );
2341    }
2342
    /// Asserts that `node` was produced by (or cached into) this `Semantics`
    /// instance; panics otherwise (the check lives in `find_file`).
    pub fn assert_contains_node(&self, node: &SyntaxNode) {
        self.find_file(node);
    }
2346
2347    fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
2348        let cache = self.s2d_cache.borrow();
2349        cache.root_to_file_cache.get(root_node).copied()
2350    }
2351
2352    fn wrap_node_infile<N: AstNode>(&self, node: N) -> InFile<N> {
2353        let InFile { file_id, .. } = self.find_file(node.syntax());
2354        InFile::new(file_id, node)
2355    }
2356
    /// Wraps `token` in an [`InFile`] carrying the file id it belongs to.
    // NOTE(review): `token.parent().unwrap()` assumes the token is attached to
    // a tree — confirm detached tokens can never reach this point.
    fn wrap_token_infile(&self, token: SyntaxToken) -> InFile<SyntaxToken> {
        let InFile { file_id, .. } = self.find_file(&token.parent().unwrap());
        InFile::new(file_id, token)
    }
2361
    /// Wraps the node in a [`InFile`] with the file id it belongs to.
    ///
    /// Panics when the node's tree root was never cached by this `Semantics`
    /// instance, i.e. the node stems from a parse this instance does not know.
    fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
        let root_node = find_root(node);
        let file_id = self.lookup(&root_node).unwrap_or_else(|| {
            panic!(
                "\n\nFailed to lookup {:?} in this Semantics.\n\
                 Make sure to only query nodes derived from this instance of Semantics.\n\
                 root node:   {:?}\n\
                 known nodes: {}\n\n",
                node,
                root_node,
                self.s2d_cache
                    .borrow()
                    .root_to_file_cache
                    .keys()
                    .map(|it| format!("{it:?}"))
                    .collect::<Vec<_>>()
                    .join(", ")
            )
        });
        InFile::new(file_id, node)
    }
2384
    /// Returns `true` if `expr` is inside an `unsafe` context.
    pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
        // Find the item (or enum variant) whose body contains the expression.
        let Some(enclosing_item) =
            expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast)
        else {
            return false;
        };

        // Map the enclosing item to a `DefWithBodyId` we can look unsafe
        // blocks up in.
        let def = match &enclosing_item {
            // Inside an `unsafe fn`, everything counts as an unsafe context.
            Either::Left(ast::Item::Fn(it)) if it.unsafe_token().is_some() => return true,
            Either::Left(ast::Item::Fn(it)) => (|| match self.to_def(it)?.id {
                AnyFunctionId::FunctionId(id) => Some(DefWithBodyId::FunctionId(id)),
                // Builtin-derive methods have no `DefWithBodyId` to inspect.
                AnyFunctionId::BuiltinDeriveImplMethod { .. } => None,
            })(),
            Either::Left(ast::Item::Const(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::ConstId)
            }
            Either::Left(ast::Item::Static(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::StaticId)
            }
            Either::Left(_) => None,
            Either::Right(it) => self.to_def(it).map(<_>::into).map(DefWithBodyId::VariantId),
        };
        let Some(def) = def else { return false };
        let enclosing_node = enclosing_item.as_ref().either(|i| i.syntax(), |v| v.syntax());

        let (body, source_map) = Body::with_source_map(self.db, def);

        let file_id = self.find_file(expr.syntax()).file_id;

        // Walk parents from the expression towards the enclosing item; the
        // context is unsafe iff we cross an `unsafe { .. }` block on the way.
        let Some(mut parent) = expr.syntax().parent() else { return false };
        loop {
            if &parent == enclosing_node {
                break false;
            }

            if let Some(parent) = ast::Expr::cast(parent.clone())
                && let Some(ExprOrPatId::ExprId(expr_id)) =
                    source_map.node_expr(InFile { file_id, value: &parent })
                && let Expr::Unsafe { .. } = body[expr_id]
            {
                break true;
            }

            let Some(parent_) = parent.parent() else { break false };
            parent = parent_;
        }
    }
2433
    /// If `impl_` was generated by a derive macro, returns the ADT the derive
    /// was applied to; `None` for handwritten impls.
    pub fn impl_generated_from_derive(&self, impl_: Impl) -> Option<Adt> {
        let id = match impl_.id {
            AnyImplId::ImplId(id) => id,
            // Builtin derive impls record their ADT directly in their location.
            AnyImplId::BuiltinDeriveImplId(id) => return Some(id.loc(self.db).adt.into()),
        };
        let source = hir_def::src::HasSource::ast_ptr(&id.loc(self.db), self.db);
        let mut file_id = source.file_id;
        // Walk outwards through nested macro expansions until we hit the
        // derive expansion itself; reaching a real (non-macro) file means the
        // impl was not derive-generated.
        let adt_ast_id = loop {
            let macro_call = file_id.macro_file()?;
            match macro_call.loc(self.db).kind {
                hir_expand::MacroCallKind::Derive { ast_id, .. } => break ast_id,
                hir_expand::MacroCallKind::FnLike { ast_id, .. } => file_id = ast_id.file_id,
                hir_expand::MacroCallKind::Attr { ast_id, .. } => file_id = ast_id.file_id,
            }
        };
        let adt_source = adt_ast_id.to_in_file_node(self.db);
        // Cache the ADT's tree root so `to_def` below can map it to its file.
        self.cache(adt_source.value.syntax().ancestors().last().unwrap(), adt_source.file_id);
        ToDef::to_def(self, adt_source.as_ref())
    }
2453
    /// Collects the `Local`s referenced inside `element`; for a statement
    /// list, only statements fully contained in `text_range` are considered.
    pub fn locals_used(
        &self,
        element: Either<&ast::Expr, &ast::StmtList>,
        text_range: TextRange,
    ) -> Option<FxIndexSet<Local>> {
        let sa = self.analyze(element.either(|e| e.syntax(), |s| s.syntax()))?;
        let infer_body = sa.infer_body?;
        let store = sa.store()?;
        let mut resolver = sa.resolver.clone();
        let def = resolver.expression_store_owner()?;

        // Skip paths whose identifier was synthesized (macro-generated names).
        let is_not_generated = |path: &Path| {
            !path.mod_path().and_then(|path| path.as_ident()).is_some_and(Name::is_generated)
        };

        // Gather the root AST expressions to walk: either the single
        // expression, or every in-range statement's expression(s) — for `let`
        // statements both the initializer and the `let else` block count.
        let exprs = element.either(
            |e| vec![e.clone()],
            |stmts| {
                let mut exprs: Vec<_> = stmts
                    .statements()
                    .filter(|stmt| text_range.contains_range(stmt.syntax().text_range()))
                    .filter_map(|stmt| match stmt {
                        ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr().map(|e| vec![e]),
                        ast::Stmt::Item(_) => None,
                        ast::Stmt::LetStmt(let_stmt) => {
                            let init = let_stmt.initializer();
                            let let_else = let_stmt
                                .let_else()
                                .and_then(|le| le.block_expr())
                                .map(ast::Expr::BlockExpr);

                            match (init, let_else) {
                                (Some(i), Some(le)) => Some(vec![i, le]),
                                (Some(i), _) => Some(vec![i]),
                                (_, Some(le)) => Some(vec![le]),
                                _ => None,
                            }
                        }
                    })
                    .flatten()
                    .collect();

                if let Some(tail_expr) = stmts.tail_expr()
                    && text_range.contains_range(tail_expr.syntax().text_range())
                {
                    exprs.push(tail_expr);
                }
                exprs
            },
        );
        // Translate the AST expressions to HIR expression ids.
        let mut exprs: Vec<_> =
            exprs.into_iter().filter_map(|e| sa.expr_id(e).and_then(|e| e.as_expr())).collect();

        let mut locals: FxIndexSet<Local> = FxIndexSet::default();
        // If `id` is a path, resolve it in the scope of `parent_expr` and
        // record it when it resolves to a local binding.
        let mut add_to_locals_used = |id, parent_expr| {
            let path = match id {
                ExprOrPatId::ExprId(expr_id) => {
                    if let Expr::Path(path) = &store[expr_id] {
                        Some(path)
                    } else {
                        None
                    }
                }
                ExprOrPatId::PatId(_) => None,
            };

            if let Some(path) = path
                && is_not_generated(path)
            {
                // Narrow the resolver to the scope of the expression so the
                // path resolves against the right set of bindings.
                let _ = resolver.update_to_inner_scope(self.db, def, parent_expr);
                let hygiene = store.expr_or_pat_path_hygiene(id);
                resolver.resolve_path_in_value_ns_fully(self.db, path, hygiene).inspect(|value| {
                    if let ValueNs::LocalBinding(id) = value {
                        locals.insert(Local {
                            parent: def,
                            parent_infer: infer_body,
                            binding_id: *id,
                        });
                    }
                });
            }
        };

        // Depth-first walk over all sub-expressions; assignment targets can
        // reference locals through their patterns, so those are walked too.
        while let Some(expr_id) = exprs.pop() {
            if let Expr::Assignment { target, .. } = store[expr_id] {
                store.walk_pats(target, &mut |id| {
                    add_to_locals_used(ExprOrPatId::PatId(id), expr_id)
                });
            };
            store.walk_child_exprs(expr_id, |id| {
                exprs.push(id);
            });

            add_to_locals_used(ExprOrPatId::ExprId(expr_id), expr_id)
        }

        Some(locals)
    }
2552
    /// Re-runs inference for the body containing `token` with a proof-tree
    /// inspector attached, and returns the failed trait obligations as a
    /// pretty-printed JSON array. Returns `None` for tokens outside a body.
    pub fn get_failed_obligations(&self, token: SyntaxToken) -> Option<String> {
        let node = token.parent()?;
        let node = self.find_file(&node);

        let container = self.with_ctx(|ctx| ctx.find_container(node))?;

        match container {
            ChildContainer::DefWithBodyId(def) => {
                // The collected proof trees are passed out through a
                // thread-local, presumably because the inspector callback
                // cannot borrow local state — TODO confirm.
                thread_local! {
                    static RESULT: RefCell<Vec<ProofTreeData>> = const { RefCell::new(Vec::new()) };
                }
                infer_query_with_inspect(
                    self.db,
                    def,
                    Some(|infer_ctxt, _obligation, result, proof_tree| {
                        // Only record obligations that failed to hold.
                        if result.is_err()
                            && let Some(tree) = proof_tree
                        {
                            let data =
                                dump_proof_tree_structured(tree, hir_ty::Span::Dummy, infer_ctxt);
                            RESULT.with(|ctx| ctx.borrow_mut().push(data));
                        }
                    }),
                );
                // Drain the buffer so a subsequent call starts empty.
                let data: Vec<ProofTreeData> =
                    RESULT.with(|data| data.borrow_mut().drain(..).collect());
                let data = serde_json::to_string_pretty(&data).unwrap_or_else(|_| "[]".to_owned());
                Some(data)
            }
            _ => None,
        }
    }
2585}
2586
// FIXME This can't be the best way to do this
/// Maps a concrete macro call back to the `MacroId` of the macro definition it
/// invokes, by loading the definition's AST node and running source-to-def on it.
fn macro_call_to_macro_id(
    ctx: &mut SourceToDefCtx<'_, '_>,
    macro_call_id: MacroCallId,
) -> Option<MacroId> {
    let db: &dyn ExpandDatabase = ctx.db;
    let loc = db.lookup_intern_macro_call(macro_call_id);

    match loc.def.ast_id() {
        // Definition written as `macro_rules!`/`macro` (an `ast::Macro`).
        Either::Left(it) => {
            let node = match it.file_id {
                HirFileId::FileId(file_id) => {
                    it.to_ptr(db).to_node(&file_id.parse(db).syntax_node())
                }
                HirFileId::MacroFile(macro_file) => {
                    // The definition itself lives inside a macro expansion;
                    // expand (cached) to obtain its syntax tree.
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    it.to_ptr(db).to_node(&expansion_info.expanded().value)
                }
            };
            ctx.macro_to_def(InFile::new(it.file_id, &node))
        }
        // Proc-macro definition (an `ast::Fn`).
        Either::Right(it) => {
            let node = match it.file_id {
                HirFileId::FileId(file_id) => {
                    it.to_ptr(db).to_node(&file_id.parse(db).syntax_node())
                }
                HirFileId::MacroFile(macro_file) => {
                    // Same as above: the proc-macro definition may itself stem
                    // from an expansion.
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    it.to_ptr(db).to_node(&expansion_info.expanded().value)
                }
            };
            ctx.proc_macro_to_def(InFile::new(it.file_id, &node))
        }
    }
}
2622
/// Conversion from an AST node to its HIR definition, in the context of a
/// specific `Semantics` instance (`src` carries the node's file id).
pub trait ToDef: AstNode + Clone {
    type Def;
    fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def>;
}
2627
/// Generates a `ToDef` impl for each `(HIR def, AST node, ctx method)` triple:
/// the impl delegates to the named `SourceToDefCtx` method and converts the
/// result into the HIR wrapper type.
macro_rules! to_def_impls {
    ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
        impl ToDef for $ast {
            type Def = $def;
            fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def> {
                sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
            }
        }
    )*}
}
2638
// One `ToDef` impl per (HIR def, AST node, `SourceToDefCtx` method) triple.
to_def_impls![
    (crate::Module, ast::Module, module_to_def),
    (crate::Module, ast::SourceFile, source_file_to_def),
    (crate::Struct, ast::Struct, struct_to_def),
    (crate::Enum, ast::Enum, enum_to_def),
    (crate::Union, ast::Union, union_to_def),
    (crate::Trait, ast::Trait, trait_to_def),
    (crate::Impl, ast::Impl, impl_to_def),
    (crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
    (crate::Const, ast::Const, const_to_def),
    (crate::Static, ast::Static, static_to_def),
    (crate::Function, ast::Fn, fn_to_def),
    (crate::Field, ast::RecordField, record_field_to_def),
    (crate::Field, ast::TupleField, tuple_field_to_def),
    (crate::EnumVariant, ast::Variant, enum_variant_to_def),
    (crate::TypeParam, ast::TypeParam, type_param_to_def),
    (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
    (crate::ConstParam, ast::ConstParam, const_param_to_def),
    (crate::GenericParam, ast::GenericParam, generic_param_to_def),
    (crate::Macro, ast::Macro, macro_to_def),
    (crate::Local, ast::SelfParam, self_param_to_def),
    (crate::Label, ast::Label, label_to_def),
    (crate::Adt, ast::Adt, adt_to_def),
    (crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
    (crate::InlineAsmOperand, ast::AsmOperandNamed, asm_operand_to_def),
    (crate::ExternBlock, ast::ExternBlock, extern_block_to_def),
    (MacroCallId, ast::MacroCall, macro_call_to_macro_call),
];
2667
impl ToDef for ast::IdentPat {
    type Def = crate::Local;

    // Implemented by hand (not via `to_def_impls!`) because resolving a
    // binding pattern needs access to `sema` itself, not just the ctx.
    fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def> {
        sema.with_ctx(|ctx| ctx.bind_pat_to_def(src, sema))
    }
}
2675
2676fn find_root(node: &SyntaxNode) -> SyntaxNode {
2677    node.ancestors().last().unwrap()
2678}
2679
/// `SemanticsScope` encapsulates the notion of a scope (the set of visible
/// names) at a particular program point.
///
/// It is a bit tricky, as scopes do not really exist inside the compiler.
/// Rather, the compiler directly computes for each reference the definition it
/// refers to. It might transiently compute the explicit scope map while doing
/// so, but, generally, this is not something left after the analysis.
///
/// However, we do very much need explicit scopes for IDE purposes --
/// completion, at its core, lists the contents of the current scope. The notion
/// of scope is also useful to answer questions like "what would be the meaning
/// of this piece of code if we inserted it into this position?".
///
/// So `SemanticsScope` is constructed from a specific program point (a syntax
/// node or just a raw offset) and provides access to the set of visible names
/// on a somewhat best-effort basis.
///
/// Note that if you are wondering "what does this specific existing name mean?",
/// you'd better use the `resolve_` family of methods.
#[derive(Debug)]
pub struct SemanticsScope<'db> {
    pub db: &'db dyn HirDatabase,
    /// The inference "body" the scope point belongs to, if any; required to
    /// materialize `Local`s for bindings in scope.
    infer_body: Option<InferBodyId>,
    /// File the scope position lives in.
    file_id: HirFileId,
    /// Name-resolution state at the scope position.
    resolver: Resolver<'db>,
}
2706
impl<'db> SemanticsScope<'db> {
    /// The file this scope was constructed in.
    pub fn file_id(&self) -> HirFileId {
        self.file_id
    }

    /// The module containing the scope position.
    pub fn module(&self) -> Module {
        Module { id: self.resolver.module() }
    }

    /// The crate containing the scope position.
    pub fn krate(&self) -> Crate {
        Crate { id: self.resolver.krate() }
    }

    // FIXME: This is a weird function, we shouldn't have this?
    /// The function whose body contains the scope position, if any.
    pub fn containing_function(&self) -> Option<Function> {
        self.resolver.expression_store_owner().and_then(|owner| match owner {
            ExpressionStoreOwnerId::Body(DefWithBodyId::FunctionId(id)) => Some(id.into()),
            _ => None,
        })
    }

    /// The expression-store owner (body/signature/variant fields) the scope
    /// position lies in, if any.
    pub fn expression_store_owner(&self) -> Option<ExpressionStoreOwner> {
        self.resolver.expression_store_owner().map(Into::into)
    }

    pub(crate) fn resolver(&self) -> &Resolver<'db> {
        &self.resolver
    }

    /// Note: `VisibleTraits` should be treated as an opaque type, only passed
    /// back into `Type` queries.
    pub fn visible_traits(&self) -> VisibleTraits {
        let resolver = &self.resolver;
        VisibleTraits(resolver.traits_in_scope(self.db))
    }

    /// Calls the passed closure `f` on all names in scope.
    pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
        let scope = self.resolver.names_in_scope(self.db);
        for (name, entries) in scope {
            for entry in entries {
                let def = match entry {
                    resolver::ScopeDef::ModuleDef(it) => ScopeDef::ModuleDef(it.into()),
                    resolver::ScopeDef::Unknown => ScopeDef::Unknown,
                    resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
                    resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
                    resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
                    // Locals and labels need an owning body; skip them when the
                    // scope has none (e.g. at module level).
                    resolver::ScopeDef::Local(binding_id) => {
                        match (self.resolver.expression_store_owner(), self.infer_body) {
                            (Some(parent), Some(parent_infer)) => {
                                ScopeDef::Local(Local { parent, parent_infer, binding_id })
                            }
                            _ => continue,
                        }
                    }
                    resolver::ScopeDef::Label(label_id) => {
                        match self.resolver.expression_store_owner() {
                            Some(parent) => ScopeDef::Label(Label { parent, label_id }),
                            None => continue,
                        }
                    }
                };
                f(name.clone(), def)
            }
        }
    }

    /// Checks if a trait is in scope, either because of an import or because we're in an impl of it.
    pub fn can_use_trait_methods(&self, t: Trait) -> bool {
        self.resolver.traits_in_scope(self.db).contains(&t.id)
    }

    /// Resolve a path as-if it was written at the given scope. This is
    /// necessarily a heuristic, as it doesn't take hygiene into account.
    pub fn speculative_resolve(&self, ast_path: &ast::Path) -> Option<PathResolution> {
        // Lower the AST path into a `ModPath` by hand, tracking the path kind
        // (plain/abs/self/super/crate) from the leading segments.
        let mut kind = PathKind::Plain;
        let mut segments = vec![];
        let mut first = true;
        for segment in ast_path.segments() {
            if first {
                first = false;
                if segment.coloncolon_token().is_some() {
                    kind = PathKind::Abs;
                }
            }

            let Some(k) = segment.kind() else { continue };
            match k {
                ast::PathSegmentKind::Name(name_ref) => segments.push(name_ref.as_name()),
                ast::PathSegmentKind::Type { .. } => continue,
                ast::PathSegmentKind::SelfTypeKw => {
                    segments.push(Name::new_symbol_root(sym::Self_))
                }
                ast::PathSegmentKind::SelfKw => kind = PathKind::Super(0),
                ast::PathSegmentKind::SuperKw => match kind {
                    PathKind::Super(s) => kind = PathKind::Super(s + 1),
                    PathKind::Plain => kind = PathKind::Super(1),
                    PathKind::Crate | PathKind::Abs | PathKind::DollarCrate(_) => continue,
                },
                ast::PathSegmentKind::CrateKw => kind = PathKind::Crate,
            }
        }

        resolve_hir_path(
            self.db,
            &self.resolver,
            self.infer_body,
            &Path::BarePath(Interned::new(ModPath::from_segments(kind, segments))),
            HygieneId::ROOT,
            None,
        )
    }

    /// Resolves `path` in the item namespace at this scope, yielding every
    /// matching item.
    pub fn resolve_mod_path(&self, path: &ModPath) -> impl Iterator<Item = ItemInNs> + use<> {
        let items = self.resolver.resolve_module_path_in_items(self.db, path);
        items.iter_items().map(|(item, _)| item.into())
    }

    /// Iterates over associated types that may be specified after the given path (using
    /// `Ty::Assoc` syntax).
    pub fn assoc_type_shorthand_candidates(
        &self,
        resolution: &PathResolution,
        mut cb: impl FnMut(TypeAlias),
    ) {
        let (Some(def), Some(resolution)) = (self.resolver.generic_def(), resolution.in_type_ns())
        else {
            return;
        };
        // The `false` return keeps the candidate iteration going to the end.
        hir_ty::associated_type_shorthand_candidates(self.db, def, resolution, |_, id| {
            cb(id.into());
            false
        });
    }

    /// The innermost generic definition enclosing the scope position, if any.
    pub fn generic_def(&self) -> Option<crate::GenericDef> {
        self.resolver.generic_def().map(|id| id.into())
    }

    /// Extern crates visible at this scope, with the name they are visible under.
    pub fn extern_crates(&self) -> impl Iterator<Item = (Name, Module)> + '_ {
        self.resolver.extern_crates_in_scope().map(|(name, id)| (name, Module { id }))
    }

    /// Names introduced by `extern crate` declarations visible at this scope.
    pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
        self.resolver.extern_crate_decls_in_scope(self.db)
    }

    /// Whether both scopes sit inside the same `impl` (and thus share `Self`).
    pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool {
        self.resolver.impl_def() == other.resolver.impl_def()
    }
}
2857
/// Newtype over a set of `TraitId`s; presumably the traits visible at some
/// location (name-based — confirm against producers). Derefs to the inner
/// `FxHashSet` via the `ops::Deref` impl below.
#[derive(Debug)]
pub struct VisibleTraits(pub FxHashSet<TraitId>);
2860
2861impl ops::Deref for VisibleTraits {
2862    type Target = FxHashSet<TraitId>;
2863
2864    fn deref(&self) -> &Self::Target {
2865        &self.0
2866    }
2867}
2868
/// Walks a body's expressions and collects bindings whose resolution would
/// clash with renaming `to_be_renamed` from `old_name` to `new_name`.
struct RenameConflictsVisitor<'a> {
    db: &'a dyn HirDatabase,
    // Owner of the expression store being walked; used to enter inner scopes.
    owner: ExpressionStoreOwnerId,
    // Mutated during the walk (`update_to_inner_scope` / `reset_to_guard`).
    resolver: Resolver<'a>,
    // The body whose expressions are visited.
    body: &'a ExpressionStore,
    // The binding the rename targets.
    to_be_renamed: BindingId,
    // Name the binding will have after the rename.
    new_name: Symbol,
    // The binding's current name.
    old_name: Symbol,
    // Bindings found to conflict with the rename.
    conflicts: FxHashSet<BindingId>,
}
2879
2880impl RenameConflictsVisitor<'_> {
2881    fn resolve_path(&mut self, node: ExprOrPatId, path: &Path) {
2882        if let Path::BarePath(path) = path
2883            && let Some(name) = path.as_ident()
2884        {
2885            if *name.symbol() == self.new_name {
2886                if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed(
2887                    self.db,
2888                    name,
2889                    path,
2890                    self.body.expr_or_pat_path_hygiene(node),
2891                    self.to_be_renamed,
2892                ) {
2893                    self.conflicts.insert(conflicting);
2894                }
2895            } else if *name.symbol() == self.old_name
2896                && let Some(conflicting) = self.resolver.rename_will_conflict_with_another_variable(
2897                    self.db,
2898                    name,
2899                    path,
2900                    self.body.expr_or_pat_path_hygiene(node),
2901                    &self.new_name,
2902                    self.to_be_renamed,
2903                )
2904            {
2905                self.conflicts.insert(conflicting);
2906            }
2907        }
2908    }
2909
2910    fn rename_conflicts(&mut self, expr: ExprId) {
2911        match &self.body[expr] {
2912            Expr::Path(path) => {
2913                let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
2914                self.resolve_path(expr.into(), path);
2915                self.resolver.reset_to_guard(guard);
2916            }
2917            _ => {}
2918        }
2919
2920        self.body.walk_child_exprs(expr, |expr| self.rename_conflicts(expr));
2921    }
2922}