1mod child_by_source;
4mod source_to_def;
5
6use std::{
7 cell::RefCell,
8 convert::Infallible,
9 fmt, iter, mem,
10 ops::{self, ControlFlow, Not},
11};
12
13use base_db::FxIndexSet;
14use either::Either;
15use hir_def::{
16 DefWithBodyId, FunctionId, MacroId, StructId, TraitId, VariantId,
17 expr_store::{Body, ExprOrPatSource, HygieneId, path::Path},
18 hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
19 nameres::{ModuleOrigin, crate_def_map},
20 resolver::{self, HasResolver, Resolver, TypeNs, ValueNs},
21 type_ref::Mutability,
22};
23use hir_expand::{
24 EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
25 builtin::{BuiltinFnLikeExpander, EagerExpander},
26 db::ExpandDatabase,
27 files::{FileRangeWrapper, HirFileRange, InRealFile},
28 mod_path::{ModPath, PathKind},
29 name::AsName,
30};
31use hir_ty::{
32 InferenceResult,
33 diagnostics::{unsafe_operations, unsafe_operations_for_body},
34 next_solver::DbInterner,
35};
36use intern::{Interned, Symbol, sym};
37use itertools::Itertools;
38use rustc_hash::{FxHashMap, FxHashSet};
39use smallvec::{SmallVec, smallvec};
40use span::{FileId, SyntaxContext};
41use stdx::{TupleExt, always};
42use syntax::{
43 AstNode, AstToken, Direction, SmolStr, SmolStrBuilder, SyntaxElement, SyntaxKind, SyntaxNode,
44 SyntaxNodePtr, SyntaxToken, T, TextRange, TextSize,
45 algo::skip_trivia_token,
46 ast::{self, HasAttrs as _, HasGenericParams},
47};
48
49use crate::{
50 Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam,
51 Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource, Impl,
52 InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
53 Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait, TupleField, Type,
54 TypeAlias, TypeParam, Union, Variant, VariantDef,
55 db::HirDatabase,
56 semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
57 source_analyzer::{SourceAnalyzer, resolve_hir_path},
58};
59
/// A `ControlFlow` value for visitor callbacks that never break (`Infallible` break type).
const CONTINUE_NO_BREAKS: ControlFlow<Infallible, ()> = ControlFlow::Continue(());
61
/// The outcome of resolving a path in source code.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PathResolution {
    /// An item (module, function, ADT, ...).
    Def(ModuleDef),
    /// A local binding (value namespace only).
    Local(Local),
    /// A generic type parameter.
    TypeParam(TypeParam),
    /// A generic const parameter.
    ConstParam(ConstParam),
    /// `Self` inside an `impl` block.
    SelfType(Impl),
    /// A built-in attribute such as `#[inline]`.
    BuiltinAttr(BuiltinAttr),
    /// A tool module such as `rustfmt` or `clippy` in attribute paths.
    ToolModule(ToolModule),
    /// A derive-macro helper attribute.
    DeriveHelper(DeriveHelper),
}
77
impl PathResolution {
    /// Returns the type-namespace resolution this corresponds to, or `None`
    /// for resolutions that only exist in the value/macro namespaces
    /// (locals, consts, functions, macros, modules, statics, traits, ...).
    pub(crate) fn in_type_ns(&self) -> Option<TypeNs> {
        match self {
            PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())),
            PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
                Some(TypeNs::BuiltinType((*builtin).into()))
            }
            // These name things outside the type namespace.
            PathResolution::Def(
                ModuleDef::Const(_)
                | ModuleDef::Variant(_)
                | ModuleDef::Macro(_)
                | ModuleDef::Function(_)
                | ModuleDef::Module(_)
                | ModuleDef::Static(_)
                | ModuleDef::Trait(_),
            ) => None,
            PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
                Some(TypeNs::TypeAliasId((*alias).into()))
            }
            PathResolution::BuiltinAttr(_)
            | PathResolution::ToolModule(_)
            | PathResolution::Local(_)
            | PathResolution::DeriveHelper(_)
            | PathResolution::ConstParam(_) => None,
            PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
            PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
        }
    }
}
107
/// A path resolution split by namespace: a single path may resolve to
/// different items in the type, value, and macro namespaces simultaneously.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct PathResolutionPerNs {
    /// Resolution in the type namespace, if any.
    pub type_ns: Option<PathResolution>,
    /// Resolution in the value namespace, if any.
    pub value_ns: Option<PathResolution>,
    /// Resolution in the macro namespace, if any.
    pub macro_ns: Option<PathResolution>,
}
114
115impl PathResolutionPerNs {
116 pub fn new(
117 type_ns: Option<PathResolution>,
118 value_ns: Option<PathResolution>,
119 macro_ns: Option<PathResolution>,
120 ) -> Self {
121 PathResolutionPerNs { type_ns, value_ns, macro_ns }
122 }
123 pub fn any(&self) -> Option<PathResolution> {
124 self.type_ns.or(self.value_ns).or(self.macro_ns)
125 }
126}
127
/// The type of an expression or pattern, before and after type adjustments.
#[derive(Debug)]
pub struct TypeInfo<'db> {
    /// The type as inferred, prior to any adjustments.
    pub original: Type<'db>,
    /// The type after adjustments (coercions, auto-ref/deref), if any were applied.
    pub adjusted: Option<Type<'db>>,
}
135
136impl<'db> TypeInfo<'db> {
137 pub fn original(self) -> Type<'db> {
138 self.original
139 }
140
141 pub fn has_adjustment(&self) -> bool {
142 self.adjusted.is_some()
143 }
144
145 pub fn adjusted(self) -> Type<'db> {
147 self.adjusted.unwrap_or(self.original)
148 }
149}
150
/// Primary API for obtaining semantic information (types, resolutions, macro
/// expansions) from syntax trees. Derefs to [`SemanticsImpl`].
pub struct Semantics<'db, DB: ?Sized> {
    /// The underlying database handle.
    pub db: &'db DB,
    // Type-erased implementation; reached through the `Deref` impl below.
    imp: SemanticsImpl<'db>,
}
156
/// The dyn-database backed implementation behind [`Semantics`].
pub struct SemanticsImpl<'db> {
    pub db: &'db dyn HirDatabase,
    // Cache for source-to-definition lookups.
    s2d_cache: RefCell<SourceToDefCache>,
    // Caches macro calls resolved from `ast::MacroCall` nodes.
    macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroCallId>>,
}
163
164impl<DB: ?Sized> fmt::Debug for Semantics<'_, DB> {
165 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
166 write!(f, "Semantics {{ ... }}")
167 }
168}
169
// Forward all `SemanticsImpl` methods through `Semantics` transparently.
impl<'db, DB: ?Sized> ops::Deref for Semantics<'db, DB> {
    type Target = SemanticsImpl<'db>;

    fn deref(&self) -> &Self::Target {
        &self.imp
    }
}
177
/// The lint-level attribute a lint path appeared in (`#[allow(..)]` etc.).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum LintAttr {
    Allow,
    Expect,
    Warn,
    Deny,
    Forbid,
}
186
impl Semantics<'_, dyn HirDatabase> {
    /// Creates a `Semantics` over a type-erased (`dyn`) database handle.
    pub fn new_dyn(db: &'_ dyn HirDatabase) -> Semantics<'_, dyn HirDatabase> {
        let impl_ = SemanticsImpl::new(db);
        Semantics { db, imp: impl_ }
    }
}
197
impl<DB: HirDatabase> Semantics<'_, DB> {
    /// Creates a `Semantics` over a concrete database type.
    pub fn new(db: &DB) -> Semantics<'_, DB> {
        let impl_ = SemanticsImpl::new(db);
        Semantics { db, imp: impl_ }
    }
}
205
impl<DB: HirDatabase + ?Sized> Semantics<'_, DB> {
    /// Returns the `HirFileId` (real file or macro expansion) this node was parsed from.
    pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
        self.imp.find_file(syntax_node).file_id
    }

    /// Ancestors of the token's parent node, continuing upwards through macro
    /// expansions into the calling files.
    pub fn token_ancestors_with_macros(
        &self,
        token: SyntaxToken,
    ) -> impl Iterator<Item = SyntaxNode> + '_ {
        token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
    }

    /// Finds the first ancestor node of type `N` at `offset`, walking up
    /// through macro expansions.
    pub fn find_node_at_offset_with_macros<N: AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
    }

    /// Finds the first node of type `N` at `offset`, descending into macro
    /// calls at that offset before searching.
    pub fn find_node_at_offset_with_descend<N: AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
    }

    /// Like [`Self::find_node_at_offset_with_descend`], but yields one match
    /// per macro descent instead of only the first overall.
    pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
        &'slf self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = N> + 'slf {
        self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
    }

    /// Yields all `ast::NameLike` parents of the tokens at `offset`, descending
    /// into non-opaque macro expansions; the per-token streams are merged with
    /// nodes of smaller text range first.
    pub fn find_namelike_at_offset_with_descend<'slf>(
        &'slf self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = ast::NameLike> + 'slf {
        node.token_at_offset(offset)
            .map(move |token| self.descend_into_macros_no_opaque(token, true))
            .map(|descendants| descendants.into_iter().filter_map(move |it| it.value.parent()))
            // Merge sorted-by-length streams so the smallest covering node comes first.
            .kmerge_by(|left, right| left.text_range().len().lt(&right.text_range().len()))
            .filter_map(ast::NameLike::cast)
    }

    /// Collects the lint-level attributes (`allow`/`expect`/`warn`/`deny`/`forbid`)
    /// on `item`, including inner attributes, expanding `cfg_attr` against the
    /// cfg options of `krate`. Each result pairs the level with the lint path
    /// text (e.g. `clippy::all`).
    pub fn lint_attrs(
        &self,
        krate: Crate,
        item: ast::AnyHasAttrs,
    ) -> impl DoubleEndedIterator<Item = (LintAttr, SmolStr)> {
        // Lazily fetch the crate's cfg options only if a `cfg_attr` is encountered.
        let mut cfg_options = None;
        let cfg_options = || *cfg_options.get_or_insert_with(|| krate.id.cfg_options(self.db));
        let mut result = Vec::new();
        hir_expand::attrs::expand_cfg_attr::<Infallible>(
            ast::attrs_including_inner(&item),
            cfg_options,
            |attr, _, _, _| {
                // Only token-tree attributes with a single-segment path can be lint levels.
                let hir_expand::attrs::Meta::TokenTree { path, tt } = attr else {
                    return ControlFlow::Continue(());
                };
                if path.segments.len() != 1 {
                    return ControlFlow::Continue(());
                }
                let lint_attr = match path.segments[0].text() {
                    "allow" => LintAttr::Allow,
                    "expect" => LintAttr::Expect,
                    "warn" => LintAttr::Warn,
                    "deny" => LintAttr::Deny,
                    "forbid" => LintAttr::Forbid,
                    _ => return ControlFlow::Continue(()),
                };
                // Reassemble each comma-separated lint path from its tokens.
                let mut lint = SmolStrBuilder::new();
                for token in
                    tt.syntax().children_with_tokens().filter_map(SyntaxElement::into_token)
                {
                    match token.kind() {
                        T![:] | T![::] => lint.push_str(token.text()),
                        kind if kind.is_any_identifier() => lint.push_str(token.text()),
                        T![,] => {
                            // End of one lint path; flush it and start the next.
                            let lint = mem::replace(&mut lint, SmolStrBuilder::new()).finish();
                            if !lint.is_empty() {
                                result.push((lint_attr, lint));
                            }
                        }
                        _ => {}
                    }
                }
                // Flush the trailing lint path (no trailing comma case).
                let lint = lint.finish();
                if !lint.is_empty() {
                    result.push((lint_attr, lint));
                }

                ControlFlow::Continue(())
            },
        );
        result.into_iter()
    }

    /// Resolves the `Range*` struct a range pattern desugars to.
    pub fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<Struct> {
        self.imp.resolve_range_pat(range_pat).map(Struct::from)
    }

    /// Resolves the `Range*` struct a range expression desugars to.
    pub fn resolve_range_expr(&self, range_expr: &ast::RangeExpr) -> Option<Struct> {
        self.imp.resolve_range_expr(range_expr).map(Struct::from)
    }

    /// Resolves the `poll` function an `.await` expression desugars to.
    pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
        self.imp.resolve_await_to_poll(await_expr).map(Function::from)
    }

    /// Resolves the operator-trait method invoked by a prefix expression.
    pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
        self.imp.resolve_prefix_expr(prefix_expr).map(Function::from)
    }

    /// Resolves the `Index`/`IndexMut` method invoked by an index expression.
    pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
        self.imp.resolve_index_expr(index_expr).map(Function::from)
    }

    /// Resolves the operator-trait method invoked by a binary expression.
    pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
        self.imp.resolve_bin_expr(bin_expr).map(Function::from)
    }

    /// Resolves the `Try`-trait method invoked by a `?` expression.
    pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
        self.imp.resolve_try_expr(try_expr).map(Function::from)
    }

    /// Resolves the struct/enum-variant a record literal constructs.
    pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
        self.imp.resolve_variant(record_lit).map(VariantDef::from)
    }

    /// Returns the first module defined in `file`, if any.
    pub fn file_to_module_def(&self, file: impl Into<FileId>) -> Option<Module> {
        self.imp.file_to_module_defs(file.into()).next()
    }

    /// Returns all modules defined in `file` (a file may belong to several crates).
    pub fn file_to_module_defs(&self, file: impl Into<FileId>) -> impl Iterator<Item = Module> {
        self.imp.file_to_module_defs(file.into())
    }

    /// Like [`Self::file_to_module_def`], but for a `HirFileId` (possibly a macro file).
    pub fn hir_file_to_module_def(&self, file: impl Into<HirFileId>) -> Option<Module> {
        self.imp.hir_file_to_module_defs(file.into()).next()
    }

    /// Like [`Self::file_to_module_defs`], but for a `HirFileId` (possibly a macro file).
    pub fn hir_file_to_module_defs(
        &self,
        file: impl Into<HirFileId>,
    ) -> impl Iterator<Item = Module> {
        self.imp.hir_file_to_module_defs(file.into())
    }

    /// Whether `krate` is built with a nightly toolchain; an unknown toolchain
    /// channel is treated as nightly.
    pub fn is_nightly(&self, krate: Crate) -> bool {
        let toolchain = self.db.toolchain_channel(krate.into());
        matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None)
    }

    /// Maps an `ast::Adt` node to its HIR definition.
    pub fn to_adt_def(&self, a: &ast::Adt) -> Option<Adt> {
        self.imp.to_def(a)
    }

    /// Maps an `ast::Const` node to its HIR definition.
    pub fn to_const_def(&self, c: &ast::Const) -> Option<Const> {
        self.imp.to_def(c)
    }

    /// Maps an `ast::Enum` node to its HIR definition.
    pub fn to_enum_def(&self, e: &ast::Enum) -> Option<Enum> {
        self.imp.to_def(e)
    }

    /// Maps an `ast::Variant` node to its HIR definition.
    pub fn to_enum_variant_def(&self, v: &ast::Variant) -> Option<Variant> {
        self.imp.to_def(v)
    }

    /// Maps an `ast::Fn` node to its HIR definition.
    pub fn to_fn_def(&self, f: &ast::Fn) -> Option<Function> {
        self.imp.to_def(f)
    }

    /// Maps an `ast::Impl` node to its HIR definition.
    pub fn to_impl_def(&self, i: &ast::Impl) -> Option<Impl> {
        self.imp.to_def(i)
    }

    /// Maps an `ast::Macro` node to its HIR definition.
    pub fn to_macro_def(&self, m: &ast::Macro) -> Option<Macro> {
        self.imp.to_def(m)
    }

    /// Maps an `ast::Module` node to its HIR definition.
    pub fn to_module_def(&self, m: &ast::Module) -> Option<Module> {
        self.imp.to_def(m)
    }

    /// Maps an `ast::Static` node to its HIR definition.
    pub fn to_static_def(&self, s: &ast::Static) -> Option<Static> {
        self.imp.to_def(s)
    }

    /// Maps an `ast::Struct` node to its HIR definition.
    pub fn to_struct_def(&self, s: &ast::Struct) -> Option<Struct> {
        self.imp.to_def(s)
    }

    /// Maps an `ast::Trait` node to its HIR definition.
    pub fn to_trait_def(&self, t: &ast::Trait) -> Option<Trait> {
        self.imp.to_def(t)
    }

    /// Maps an `ast::TypeAlias` node to its HIR definition.
    pub fn to_type_alias_def(&self, t: &ast::TypeAlias) -> Option<TypeAlias> {
        self.imp.to_def(t)
    }

    /// Maps an `ast::Union` node to its HIR definition.
    pub fn to_union_def(&self, u: &ast::Union) -> Option<Union> {
        self.imp.to_def(u)
    }
}
430
431impl<'db> SemanticsImpl<'db> {
    /// Creates a fresh implementation with empty caches.
    fn new(db: &'db dyn HirDatabase) -> Self {
        SemanticsImpl { db, s2d_cache: Default::default(), macro_call_cache: Default::default() }
    }
435
    /// Parses `file_id` and caches the root node so later lookups can map
    /// nodes back to this file.
    pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
        let hir_file_id = file_id.into();
        let tree = self.db.parse(file_id).tree();
        self.cache(tree.syntax().clone(), hir_file_id);
        tree
    }
442
443 pub fn first_crate(&self, file: FileId) -> Option<Crate> {
445 match self.file_to_module_defs(file).next() {
446 Some(module) => Some(module.krate(self.db)),
447 None => self.db.all_crates().last().copied().map(Into::into),
448 }
449 }
450
    /// Attaches the edition of the file's first owning crate to `file`, or
    /// `None` if the file belongs to no crate.
    pub fn attach_first_edition_opt(&self, file: FileId) -> Option<EditionedFileId> {
        let krate = self.file_to_module_defs(file).next()?.krate(self.db);
        Some(EditionedFileId::new(self.db, file, krate.edition(self.db), krate.id))
    }
455
    /// Like [`Self::attach_first_edition_opt`], but guesses an edition when
    /// the file belongs to no crate.
    pub fn attach_first_edition(&self, file: FileId) -> EditionedFileId {
        self.attach_first_edition_opt(file)
            .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(self.db, file))
    }
460
    /// Parses a plain `FileId` by first attaching (or guessing) its edition,
    /// caching the resulting root node.
    pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
        let file_id = self.attach_first_edition(file_id);

        let tree = self.db.parse(file_id).tree();
        self.cache(tree.syntax().clone(), file_id.into());
        tree
    }
468
469 pub fn adjust_edition(&self, file_id: HirFileId) -> HirFileId {
470 if let Some(editioned_file_id) = file_id.file_id() {
471 self.attach_first_edition_opt(editioned_file_id.file_id(self.db))
472 .map_or(file_id, Into::into)
473 } else {
474 file_id
475 }
476 }
477
    /// Returns the node "containing" this file: for a non-root file module,
    /// its `mod` declaration in the parent file; for a macro file, the macro
    /// call it expanded from. Returns `None` for crate roots.
    pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
        match file_id {
            HirFileId::FileId(file_id) => {
                let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?;
                let def_map = crate_def_map(self.db, module.krate(self.db).id);
                match def_map[module.id].origin {
                    ModuleOrigin::CrateRoot { .. } => None,
                    ModuleOrigin::File { declaration, declaration_tree_id, .. } => {
                        let file_id = declaration_tree_id.file_id();
                        let in_file = InFile::new(file_id, declaration);
                        let node = in_file.to_node(self.db);
                        let root = find_root(node.syntax());
                        // Cache the parent file's root so its nodes can be mapped back.
                        self.cache(root, file_id);
                        Some(in_file.with_value(node.syntax().clone()))
                    }
                    _ => unreachable!("FileId can only belong to a file module"),
                }
            }
            HirFileId::MacroFile(macro_file) => {
                let node = self.db.lookup_intern_macro_call(macro_file).to_node(self.db);
                let root = find_root(&node.value);
                self.cache(root, node.file_id);
                Some(node)
            }
        }
    }
504
    /// Returns the node that defines `module` (the source file for file
    /// modules, the `mod { .. }` item for inline modules), caching its root.
    pub fn module_definition_node(&self, module: Module) -> InFile<SyntaxNode> {
        let def_map = module.id.def_map(self.db);
        let definition = def_map[module.id].origin.definition_source(self.db);
        let definition = definition.map(|it| it.node());
        let root_node = find_root(&definition.value);
        self.cache(root_node, definition.file_id);
        definition
    }
515
    /// Parses a real file or expands a macro file, caching the root node.
    pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
        let node = self.db.parse_or_expand(file_id);
        self.cache(node.clone(), file_id);
        node
    }
521
    /// Expands the given macro call, caching the expansion's root node and
    /// propagating any expansion errors in the `ExpandResult`.
    pub fn expand(&self, file_id: MacroCallId) -> ExpandResult<SyntaxNode> {
        let res = self.db.parse_macro_expansion(file_id).map(|it| it.0.syntax_node());
        self.cache(res.value.clone(), file_id.into());
        res
    }
527
    /// Expands a macro call node into its expansion tree, if it resolves.
    pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<InFile<SyntaxNode>> {
        let file_id = self.to_def(macro_call)?;
        let node = self.parse_or_expand(file_id.into());
        Some(InFile::new(file_id.into(), node))
    }
533
    /// Evaluates a `cfg` attribute token tree against the cfg options of the
    /// crate owning the attribute's file; `None` if no crate can be determined.
    pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
        let file_id = self.find_file(attr.syntax()).file_id;
        let krate = match file_id {
            HirFileId::FileId(file_id) => {
                self.file_to_module_defs(file_id.file_id(self.db)).next()?.krate(self.db).id
            }
            HirFileId::MacroFile(macro_file) => self.db.lookup_intern_macro_call(macro_file).krate,
        };
        hir_expand::check_cfg_attr_value(self.db, attr, krate)
    }
544
545 pub fn expand_allowed_builtins(
548 &self,
549 macro_call: &ast::MacroCall,
550 ) -> Option<ExpandResult<SyntaxNode>> {
551 let file_id = self.to_def(macro_call)?;
552 let macro_call = self.db.lookup_intern_macro_call(file_id);
553
554 let skip = matches!(
555 macro_call.def.kind,
556 hir_expand::MacroDefKind::BuiltIn(
557 _,
558 BuiltinFnLikeExpander::Column
559 | BuiltinFnLikeExpander::File
560 | BuiltinFnLikeExpander::ModulePath
561 | BuiltinFnLikeExpander::Asm
562 | BuiltinFnLikeExpander::GlobalAsm
563 | BuiltinFnLikeExpander::NakedAsm
564 | BuiltinFnLikeExpander::LogSyntax
565 | BuiltinFnLikeExpander::TraceMacros
566 | BuiltinFnLikeExpander::FormatArgs
567 | BuiltinFnLikeExpander::FormatArgsNl
568 | BuiltinFnLikeExpander::ConstFormatArgs,
569 ) | hir_expand::MacroDefKind::BuiltInEager(_, EagerExpander::CompileError)
570 );
571 if skip {
572 return None;
575 }
576
577 let node = self.expand(file_id);
578 Some(node)
579 }
580
581 pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<InFile<SyntaxNode>>> {
583 let src = self.wrap_node_infile(item.clone());
584 let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
585 Some(self.expand(macro_call_id).map(|it| InFile::new(macro_call_id.into(), it)))
586 }
587
    /// Expands a `#[derive(..)]` attribute as if it were an attribute macro on
    /// the annotated ADT, returning the expansion tree.
    pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
        let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
        let src = self.wrap_node_infile(attr.clone());
        let call_id = self.with_ctx(|ctx| {
            ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
        })?;
        Some(self.parse_or_expand(call_id.into()))
    }
596
    /// Resolves each derive in a `#[derive(..)]` attribute to its macro
    /// definition; unresolved derives yield `None` in their slot.
    pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
        let calls = self.derive_macro_calls(attr)?;
        self.with_ctx(|ctx| {
            Some(
                calls
                    .into_iter()
                    .map(|call| macro_call_to_macro_id(ctx, call?).map(|id| Macro { id }))
                    .collect(),
            )
        })
    }
608
    /// Expands every resolved derive in a `#[derive(..)]` attribute, caching
    /// each expansion's root node; unresolved derives are skipped.
    pub fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<ExpandResult<SyntaxNode>>> {
        let res: Vec<_> = self
            .derive_macro_calls(attr)?
            .into_iter()
            .flat_map(|call| {
                let file_id = call?;
                let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
                let root_node = value.0.syntax_node();
                self.cache(root_node.clone(), file_id.into());
                Some(ExpandResult { value: root_node, err })
            })
            .collect();
        Some(res)
    }
623
    /// Returns the macro call ids of the derives in a `#[derive(..)]` attribute
    /// (one slot per derive, `None` for unresolved ones).
    fn derive_macro_calls(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroCallId>>> {
        let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
        let file_id = self.find_file(adt.syntax()).file_id;
        let adt = InFile::new(file_id, &adt);
        let src = InFile::new(file_id, attr.clone());
        self.with_ctx(|ctx| {
            let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?;
            Some(res.to_vec())
        })
    }
634
    /// Whether the file containing `adt` records any derive invocations for it.
    pub fn is_derive_annotated(&self, adt: InFile<&ast::Adt>) -> bool {
        self.with_ctx(|ctx| ctx.file_of_adt_has_derives(adt))
    }
638
    /// Returns `(helper name, derive macro name)` pairs for the derive helper
    /// attributes in scope on `adt`, or `None` when there are none.
    pub fn derive_helpers_in_scope(&self, adt: &ast::Adt) -> Option<Vec<(Symbol, Symbol)>> {
        let sa = self.analyze_no_infer(adt.syntax())?;
        let id = self.db.ast_id_map(sa.file_id).ast_id(adt);
        let result = sa
            .resolver
            .def_map()
            .derive_helpers_in_scope(InFile::new(sa.file_id, id))?
            .iter()
            .map(|(name, macro_, _)| {
                let macro_name = Macro::from(*macro_).name(self.db).symbol().clone();
                (name.symbol().clone(), macro_name)
            })
            .collect();
        Some(result)
    }
654
    /// Resolves `attr` as a derive helper attribute on its enclosing ADT,
    /// returning the `(macro, call)` pairs that declare a helper of that name.
    pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroCallId)>> {
        // Find the ADT item the attribute ultimately belongs to.
        let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
            ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
            ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
            ast::Item::Union(it) => Some(ast::Adt::Union(it)),
            _ => None,
        })?;
        let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
        let sa = self.analyze_no_infer(adt.syntax())?;
        let id = self.db.ast_id_map(sa.file_id).ast_id(&adt);
        let res: Vec<_> = sa
            .resolver
            .def_map()
            .derive_helpers_in_scope(InFile::new(sa.file_id, id))?
            .iter()
            .filter(|&(name, _, _)| *name == attr_name)
            .map(|&(_, macro_, call)| (macro_.into(), call))
            .collect();
        res.is_empty().not().then_some(res)
    }
675
    /// Whether `item` is expanded by an attribute macro.
    pub fn is_attr_macro_call(&self, item: InFile<&ast::Item>) -> bool {
        self.with_ctx(|ctx| ctx.item_to_macro_call(item).is_some())
    }
679
680 pub fn speculative_expand_macro_call(
683 &self,
684 actual_macro_call: &ast::MacroCall,
685 speculative_args: &ast::TokenTree,
686 token_to_map: SyntaxToken,
687 ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
688 let macro_file = self.to_def(actual_macro_call)?;
689 hir_expand::db::expand_speculative(
690 self.db,
691 macro_file,
692 speculative_args.syntax(),
693 token_to_map,
694 )
695 }
696
    /// Raw speculative expansion for an already-resolved macro call id.
    pub fn speculative_expand_raw(
        &self,
        macro_file: MacroCallId,
        speculative_args: &SyntaxNode,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        hir_expand::db::expand_speculative(self.db, macro_file, speculative_args, token_to_map)
    }
705
706 pub fn speculative_expand_attr_macro(
709 &self,
710 actual_macro_call: &ast::Item,
711 speculative_args: &ast::Item,
712 token_to_map: SyntaxToken,
713 ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
714 let macro_call = self.wrap_node_infile(actual_macro_call.clone());
715 let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?;
716 hir_expand::db::expand_speculative(
717 self.db,
718 macro_call_id,
719 speculative_args.syntax(),
720 token_to_map,
721 )
722 }
723
    /// Speculatively expands a `#[derive(..)]` attribute as a pseudo attribute
    /// macro, using `speculative_args` as the hypothetical attribute.
    pub fn speculative_expand_derive_as_pseudo_attr_macro(
        &self,
        actual_macro_call: &ast::Attr,
        speculative_args: &ast::Attr,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        let attr = self.wrap_node_infile(actual_macro_call.clone());
        let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
        let macro_call_id = self.with_ctx(|ctx| {
            ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
        })?;
        hir_expand::db::expand_speculative(
            self.db,
            macro_call_id,
            speculative_args.syntax(),
            token_to_map,
        )
    }
742
    /// Returns the locals in the same body that would conflict (shadow or be
    /// shadowed) if `to_be_renamed` were renamed to `new_name`.
    pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &Name) -> Vec<Local> {
        let body = self.db.body(to_be_renamed.parent);
        let resolver = to_be_renamed.parent.resolver(self.db);
        // Start at the expression owning the binding, or the whole body if none.
        let starting_expr = body.binding_owner(to_be_renamed.binding_id).unwrap_or(body.body_expr);
        let mut visitor = RenameConflictsVisitor {
            body: &body,
            conflicts: FxHashSet::default(),
            db: self.db,
            new_name: new_name.symbol().clone(),
            old_name: to_be_renamed.name(self.db).symbol().clone(),
            owner: to_be_renamed.parent,
            to_be_renamed: to_be_renamed.binding_id,
            resolver,
        };
        visitor.rename_conflicts(starting_expr);
        visitor
            .conflicts
            .into_iter()
            .map(|binding_id| Local { parent: to_be_renamed.parent, binding_id })
            .collect()
    }
766
767 pub fn as_format_args_parts(
769 &self,
770 string: &ast::String,
771 ) -> Option<Vec<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)>> {
772 let string_start = string.syntax().text_range().start();
773 let token = self.wrap_token_infile(string.syntax().clone());
774 self.descend_into_macros_breakable(token, |token, _| {
775 (|| {
776 let token = token.value;
777 let string = ast::String::cast(token)?;
778 let literal =
779 string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
780 let parent = literal.parent()?;
781 if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
782 let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
783 let format_args = self.wrap_node_infile(format_args);
784 let res = source_analyzer
785 .as_format_args_parts(self.db, format_args.as_ref())?
786 .map(|(range, res)| (range + string_start, res.map(Either::Left)))
787 .collect();
788 Some(res)
789 } else {
790 let asm = ast::AsmExpr::cast(parent)?;
791 let source_analyzer = self.analyze_no_infer(asm.syntax())?;
792 let line = asm.template().position(|it| *it.syntax() == literal)?;
793 let asm = self.wrap_node_infile(asm);
794 let (owner, (expr, asm_parts)) = source_analyzer.as_asm_parts(asm.as_ref())?;
795 let res = asm_parts
796 .get(line)?
797 .iter()
798 .map(|&(range, index)| {
799 (
800 range + string_start,
801 Some(Either::Right(InlineAsmOperand { owner, expr, index })),
802 )
803 })
804 .collect();
805 Some(res)
806 }
807 })()
808 .map_or(ControlFlow::Continue(()), ControlFlow::Break)
809 })
810 }
811
812 pub fn check_for_format_args_template(
821 &self,
822 original_token: SyntaxToken,
823 offset: TextSize,
824 ) -> Option<(
825 TextRange,
826 HirFileRange,
827 ast::String,
828 Option<Either<PathResolution, InlineAsmOperand>>,
829 )> {
830 let original_token =
831 self.wrap_token_infile(original_token).map(ast::String::cast).transpose()?;
832 self.check_for_format_args_template_with_file(original_token, offset)
833 }
834
835 pub fn check_for_format_args_template_with_file(
843 &self,
844 original_token: InFile<ast::String>,
845 offset: TextSize,
846 ) -> Option<(
847 TextRange,
848 HirFileRange,
849 ast::String,
850 Option<Either<PathResolution, InlineAsmOperand>>,
851 )> {
852 let relative_offset =
853 offset.checked_sub(original_token.value.syntax().text_range().start())?;
854 self.descend_into_macros_breakable(
855 original_token.as_ref().map(|it| it.syntax().clone()),
856 |token, _| {
857 (|| {
858 let token = token.map(ast::String::cast).transpose()?;
859 self.resolve_offset_in_format_args(token.as_ref(), relative_offset).map(
860 |(range, res)| {
861 (
862 range + original_token.value.syntax().text_range().start(),
863 HirFileRange {
864 file_id: token.file_id,
865 range: range + token.value.syntax().text_range().start(),
866 },
867 token.value,
868 res,
869 )
870 },
871 )
872 })()
873 .map_or(ControlFlow::Continue(()), ControlFlow::Break)
874 },
875 )
876 }
877
878 fn resolve_offset_in_format_args(
879 &self,
880 InFile { value: string, file_id }: InFile<&ast::String>,
881 offset: TextSize,
882 ) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> {
883 debug_assert!(offset <= string.syntax().text_range().len());
884 let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
885 let parent = literal.parent()?;
886 if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
887 let source_analyzer =
888 &self.analyze_impl(InFile::new(file_id, format_args.syntax()), None, false)?;
889 source_analyzer
890 .resolve_offset_in_format_args(self.db, InFile::new(file_id, &format_args), offset)
891 .map(|(range, res)| (range, res.map(Either::Left)))
892 } else {
893 let asm = ast::AsmExpr::cast(parent)?;
894 let source_analyzer =
895 self.analyze_impl(InFile::new(file_id, asm.syntax()), None, false)?;
896 let line = asm.template().position(|it| *it.syntax() == literal)?;
897 source_analyzer
898 .resolve_offset_in_asm_template(InFile::new(file_id, &asm), line, offset)
899 .map(|(owner, (expr, range, index))| {
900 (range, Some(Either::Right(InlineAsmOperand { owner, expr, index })))
901 })
902 }
903 }
904
    /// Pretty-prints the HIR of whatever body, variant fields, or signature
    /// encloses `token` — a debugging aid.
    pub fn debug_hir_at(&self, token: SyntaxToken) -> Option<String> {
        self.analyze_no_infer(&token.parent()?).and_then(|it| {
            Some(match it.body_or_sig.as_ref()? {
                crate::source_analyzer::BodyOrSig::Body { def, body, .. } => {
                    hir_def::expr_store::pretty::print_body_hir(
                        self.db,
                        body,
                        *def,
                        it.file_id.edition(self.db),
                    )
                }
                &crate::source_analyzer::BodyOrSig::VariantFields { def, .. } => {
                    hir_def::expr_store::pretty::print_variant_body_hir(
                        self.db,
                        def,
                        it.file_id.edition(self.db),
                    )
                }
                &crate::source_analyzer::BodyOrSig::Sig { def, .. } => {
                    hir_def::expr_store::pretty::print_signature(
                        self.db,
                        def,
                        it.file_id.edition(self.db),
                    )
                }
            })
        })
    }
933
934 pub fn descend_token_into_include_expansion(
936 &self,
937 tok: InRealFile<SyntaxToken>,
938 ) -> InFile<SyntaxToken> {
939 let Some(include) =
940 self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, tok.file_id)
941 else {
942 return tok.into();
943 };
944 let span = self.db.real_span_map(tok.file_id).span_for_range(tok.value.text_range());
945 let Some(InMacroFile { file_id, value: mut mapped_tokens }) = self.with_ctx(|ctx| {
946 Some(
947 ctx.cache
948 .get_or_insert_expansion(ctx.db, include)
949 .map_range_down(span)?
950 .map(SmallVec::<[_; 2]>::from_iter),
951 )
952 }) else {
953 return tok.into();
954 };
955 mapped_tokens.pop().map_or_else(|| tok.into(), |(tok, _)| InFile::new(file_id.into(), tok))
957 }
958
959 pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
961 let mut res = smallvec![];
963 let tokens = (|| {
964 let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
966 let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
967 Some((first, last))
968 })();
969 let (first, last) = match tokens {
970 Some(it) => it,
971 None => return res,
972 };
973 let file = self.find_file(node.syntax());
974
975 if first == last {
976 self.descend_into_macros_all(
978 InFile::new(file.file_id, first),
979 false,
980 &mut |InFile { value, .. }, _ctx| {
981 if let Some(node) = value
982 .parent_ancestors()
983 .take_while(|it| it.text_range() == value.text_range())
984 .find_map(N::cast)
985 {
986 res.push(node)
987 }
988 },
989 );
990 } else {
991 let mut scratch: SmallVec<[_; 1]> = smallvec![];
993 self.descend_into_macros_all(
994 InFile::new(file.file_id, first),
995 false,
996 &mut |token, _ctx| scratch.push(token),
997 );
998
999 let mut scratch = scratch.into_iter();
1000 self.descend_into_macros_all(
1001 InFile::new(file.file_id, last),
1002 false,
1003 &mut |InFile { value: last, file_id: last_fid }, _ctx| {
1004 if let Some(InFile { value: first, file_id: first_fid }) = scratch.next()
1005 && first_fid == last_fid
1006 && let Some(p) = first.parent()
1007 {
1008 let range = first.text_range().cover(last.text_range());
1009 let node = find_root(&p)
1010 .covering_element(range)
1011 .ancestors()
1012 .take_while(|it| it.text_range() == range)
1013 .find_map(N::cast);
1014 if let Some(node) = node {
1015 res.push(node);
1016 }
1017 }
1018 },
1019 );
1020 }
1021 res
1022 }
1023
1024 pub fn is_inside_macro_call(&self, token @ InFile { value, .. }: InFile<&SyntaxToken>) -> bool {
1029 value.parent_ancestors().any(|ancestor| {
1030 if ast::MacroCall::can_cast(ancestor.kind()) {
1031 return true;
1032 }
1033
1034 let Some(item) = ast::Item::cast(ancestor) else {
1035 return false;
1036 };
1037 self.with_ctx(|ctx| {
1038 if ctx.item_to_macro_call(token.with_value(&item)).is_some() {
1039 return true;
1040 }
1041 let adt = match item {
1042 ast::Item::Struct(it) => it.into(),
1043 ast::Item::Enum(it) => it.into(),
1044 ast::Item::Union(it) => it.into(),
1045 _ => return false,
1046 };
1047 ctx.file_of_adt_has_derives(token.with_value(&adt))
1048 })
1049 })
1050 }
1051
1052 pub fn descend_into_macros_cb(
1053 &self,
1054 token: SyntaxToken,
1055 mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext),
1056 ) {
1057 self.descend_into_macros_all(self.wrap_token_infile(token), false, &mut |t, ctx| {
1058 cb(t, ctx)
1059 });
1060 }
1061
1062 pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
1063 let mut res = smallvec![];
1064 self.descend_into_macros_all(
1065 self.wrap_token_infile(token.clone()),
1066 false,
1067 &mut |t, _ctx| res.push(t.value),
1068 );
1069 if res.is_empty() {
1070 res.push(token);
1071 }
1072 res
1073 }
1074
1075 pub fn descend_into_macros_no_opaque(
1076 &self,
1077 token: SyntaxToken,
1078 always_descend_into_derives: bool,
1079 ) -> SmallVec<[InFile<SyntaxToken>; 1]> {
1080 let mut res = smallvec![];
1081 let token = self.wrap_token_infile(token);
1082 self.descend_into_macros_all(token.clone(), always_descend_into_derives, &mut |t, ctx| {
1083 if !ctx.is_opaque(self.db) {
1084 res.push(t);
1086 }
1087 });
1088 if res.is_empty() {
1089 res.push(token);
1090 }
1091 res
1092 }
1093
1094 pub fn descend_into_macros_breakable<T>(
1095 &self,
1096 token: InFile<SyntaxToken>,
1097 mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
1098 ) -> Option<T> {
1099 self.descend_into_macros_impl(token, false, &mut cb)
1100 }
1101
1102 pub fn descend_into_macros_exact(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
1105 let mut r = smallvec![];
1106 let text = token.text();
1107 let kind = token.kind();
1108
1109 self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }, ctx| {
1110 let mapped_kind = value.kind();
1111 let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
1112 let matches = (kind == mapped_kind || any_ident_match())
1113 && text == value.text()
1114 && !ctx.is_opaque(self.db);
1115 if matches {
1116 r.push(value);
1117 }
1118 });
1119 if r.is_empty() {
1120 r.push(token);
1121 }
1122 r
1123 }
1124
1125 pub fn descend_into_macros_exact_with_file(
1128 &self,
1129 token: SyntaxToken,
1130 ) -> SmallVec<[InFile<SyntaxToken>; 1]> {
1131 let mut r = smallvec![];
1132 let text = token.text();
1133 let kind = token.kind();
1134
1135 self.descend_into_macros_cb(token.clone(), |InFile { value, file_id }, ctx| {
1136 let mapped_kind = value.kind();
1137 let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
1138 let matches = (kind == mapped_kind || any_ident_match())
1139 && text == value.text()
1140 && !ctx.is_opaque(self.db);
1141 if matches {
1142 r.push(InFile { value, file_id });
1143 }
1144 });
1145 if r.is_empty() {
1146 r.push(self.wrap_token_infile(token));
1147 }
1148 r
1149 }
1150
1151 pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken {
1154 let text = token.text();
1155 let kind = token.kind();
1156 self.descend_into_macros_breakable(
1157 self.wrap_token_infile(token.clone()),
1158 |InFile { value, file_id: _ }, _ctx| {
1159 let mapped_kind = value.kind();
1160 let any_ident_match =
1161 || kind.is_any_identifier() && value.kind().is_any_identifier();
1162 let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
1163 if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
1164 },
1165 )
1166 .unwrap_or(token)
1167 }
1168
    /// Exhaustive (non-breaking) driver over [`Self::descend_into_macros_impl`]:
    /// forwards every mapped token to `f` and never short-circuits.
    fn descend_into_macros_all(
        &self,
        token: InFile<SyntaxToken>,
        always_descend_into_derives: bool,
        f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext),
    ) {
        self.descend_into_macros_impl(token, always_descend_into_derives, &mut |tok, ctx| {
            f(tok, ctx);
            // Never break, so the descent visits every expansion.
            CONTINUE_NO_BREAKS
        });
    }
1180
    /// Core of macro descent: maps `token` through every macro expansion it
    /// participates in — fn-like calls, attribute macros, derives and derive
    /// helpers, and `include!` — maintaining a worklist of (file, tokens)
    /// pairs. `f` is invoked for each token that is NOT remapped any further
    /// and may short-circuit via [`ControlFlow::Break`].
    fn descend_into_macros_impl<T>(
        &self,
        InFile { value: token, file_id }: InFile<SyntaxToken>,
        always_descend_into_derives: bool,
        f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
    ) -> Option<T> {
        let _p = tracing::info_span!("descend_into_macros_impl").entered();

        let db = self.db;
        let span = db.span_map(file_id).span_for_range(token.text_range());

        // Maps the original span down into `macro_file`'s expansion, pushing
        // all resulting tokens onto the worklist. Returns `Some(())` iff the
        // span mapped to at least one token in the expansion.
        let process_expansion_for_token =
            |ctx: &mut SourceToDefCtx<'_, '_>, stack: &mut Vec<_>, macro_file| {
                let InMacroFile { file_id, value: mapped_tokens } = ctx
                    .cache
                    .get_or_insert_expansion(ctx.db, macro_file)
                    .map_range_down(span)?
                    .map(SmallVec::<[_; 2]>::from_iter);
                let res = mapped_tokens.is_empty().not().then_some(());
                stack.push((HirFileId::from(file_id), mapped_tokens));
                res
            };

        let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![];
        // A file pulled in via `include!` starts the descent inside the
        // including macro call rather than in the file itself.
        let include = file_id
            .file_id()
            .and_then(|file_id| self.s2d_cache.borrow_mut().get_or_insert_include_for(db, file_id));
        match include {
            Some(include) => {
                self.with_ctx(|ctx| process_expansion_for_token(ctx, &mut stack, include))?;
            }
            None => {
                stack.push((file_id, smallvec![(token, span.ctx)]));
            }
        }

        let mut m_cache = self.macro_call_cache.borrow_mut();

        // Drops queued sibling tokens that live inside `range`; they would be
        // remapped by the same expansion we just processed.
        let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
            tokens.retain(|(t, _): &mut (SyntaxToken, _)| !range.contains_range(t.text_range()))
        };

        while let Some((expansion, ref mut tokens)) = stack.pop() {
            // Pop from the back but preserve original order of processing.
            tokens.reverse();
            while let Some((token, ctx)) = tokens.pop() {
                let was_not_remapped = (|| {
                    // First: is the token inside an attribute-macro input?
                    let res = self.with_ctx(|ctx| {
                        token
                            .parent_ancestors()
                            .filter_map(ast::Item::cast)
                            .find_map(|item| {
                                // Items without attributes can't be attr inputs.
                                item.attrs().next()?;
                                ctx.item_to_macro_call(InFile::new(expansion, &item))
                                    .zip(Some(item))
                            })
                            .map(|(call_id, item)| {
                                let item_range = item.syntax().text_range();
                                let loc = db.lookup_intern_macro_call(call_id);
                                let text_range = match loc.kind {
                                    hir_expand::MacroCallKind::Attr {
                                        censored_attr_ids: attr_ids,
                                        ..
                                    } => {
                                        // The invoking attribute itself is not
                                        // part of the attr macro's input.
                                        let (attr, _, _, _) = attr_ids
                                            .invoc_attr()
                                            .find_attr_range_with_source(db, loc.krate, &item);
                                        let start = attr.syntax().text_range().start();
                                        TextRange::new(start, item_range.end())
                                    }
                                    _ => item_range,
                                };
                                filter_duplicates(tokens, text_range);
                                process_expansion_for_token(ctx, &mut stack, call_id)
                            })
                    });

                    if let Some(res) = res {
                        return res;
                    }

                    // Optionally descend into all derive expansions of an
                    // enclosing ADT, not just the helper-driven ones below.
                    if always_descend_into_derives {
                        let res = self.with_ctx(|ctx| {
                            let (derives, adt) = token
                                .parent_ancestors()
                                .filter_map(ast::Adt::cast)
                                .find_map(|adt| {
                                    Some((
                                        ctx.derive_macro_calls(InFile::new(expansion, &adt))?
                                            .map(|(a, b, c)| (a, b, c.to_owned()))
                                            .collect::<SmallVec<[_; 2]>>(),
                                        adt,
                                    ))
                                })?;
                            for (_, derive_attr, derives) in derives {
                                process_expansion_for_token(ctx, &mut stack, derive_attr);
                                for derive in derives.into_iter().flatten() {
                                    process_expansion_for_token(ctx, &mut stack, derive);
                                }
                            }
                            filter_duplicates(tokens, adt.syntax().text_range());
                            Some(())
                        });
                        if let Some(()) = res {
                            return None;
                        }
                    }
                    // Outermost token tree / meta the token is nested in; a
                    // token outside of both cannot be a macro argument.
                    let tt = token
                        .parent_ancestors()
                        .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
                        .last()?;

                    match tt {
                        // Token is inside a fn-like macro call's argument.
                        Either::Left(tt) => {
                            let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
                            // The delimiters themselves never map anywhere.
                            if tt.left_delimiter_token().map_or(false, |it| it == token) {
                                return None;
                            }
                            if tt.right_delimiter_token().map_or(false, |it| it == token) {
                                return None;
                            }
                            let mcall = InFile::new(expansion, macro_call);
                            let file_id = match m_cache.get(&mcall) {
                                Some(&it) => it,
                                None => {
                                    let it = ast::MacroCall::to_def(self, mcall.as_ref())?;
                                    m_cache.insert(mcall, it);
                                    it
                                }
                            };
                            let text_range = tt.syntax().text_range();
                            filter_duplicates(tokens, text_range);

                            self.with_ctx(|ctx| {
                                process_expansion_for_token(ctx, &mut stack, file_id).or(file_id
                                    .eager_arg(db)
                                    .and_then(|arg| {
                                        // Eager macros also expand their argument.
                                        process_expansion_for_token(ctx, &mut stack, arg)
                                    }))
                            })
                        }
                        // Derives were already handled exhaustively above.
                        Either::Right(_) if always_descend_into_derives => None,
                        // Token is inside an attribute: derive or derive helper.
                        Either::Right(meta) => {
                            let attr = meta.parent_attr()?;
                            let adt = match attr.syntax().parent().and_then(ast::Adt::cast) {
                                Some(adt) => {
                                    // `#[derive(..)]` directly on the ADT.
                                    let res = self.with_ctx(|ctx| {
                                        let derive_call = ctx
                                            .attr_to_derive_macro_call(
                                                InFile::new(expansion, &adt),
                                                InFile::new(expansion, attr.clone()),
                                            )?
                                            .1;

                                        let text_range = attr.syntax().text_range();
                                        tokens.retain(|(t, _)| {
                                            !text_range.contains_range(t.text_range())
                                        });
                                        Some(process_expansion_for_token(
                                            ctx,
                                            &mut stack,
                                            derive_call,
                                        ))
                                    });
                                    if let Some(res) = res {
                                        return res;
                                    }
                                    Some(adt)
                                }
                                None => {
                                    // Possibly a derive-helper attribute on a
                                    // field/variant: find the owning ADT.
                                    attr.syntax().ancestors().find_map(ast::Item::cast).and_then(
                                        |it| match it {
                                            ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
                                            ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
                                            ast::Item::Union(it) => Some(ast::Adt::Union(it)),
                                            _ => None,
                                        },
                                    )
                                }
                            }?;
                            let attr_name =
                                attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
                            let resolver = &token
                                .parent()
                                .and_then(|parent| {
                                    self.analyze_impl(InFile::new(expansion, &parent), None, false)
                                })?
                                .resolver;
                            let id = db.ast_id_map(expansion).ast_id(&adt);
                            let helpers = resolver
                                .def_map()
                                .derive_helpers_in_scope(InFile::new(expansion, id))?;

                            if !helpers.is_empty() {
                                let text_range = attr.syntax().text_range();
                                filter_duplicates(tokens, text_range);
                            }

                            // Descend into each derive that declares a helper
                            // matching the attribute's name.
                            let mut res = None;
                            self.with_ctx(|ctx| {
                                for (.., derive) in
                                    helpers.iter().filter(|(helper, ..)| *helper == attr_name)
                                {
                                    res = res
                                        .or(process_expansion_for_token(ctx, &mut stack, *derive));
                                }
                                res
                            })
                        }
                    }
                })()
                .is_none();
                // Only surface tokens that reached a fixpoint (no further
                // expansion remapped them).
                if was_not_remapped
                    && let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx)
                {
                    return Some(b);
                }
            }
        }
        None
    }
1469
    /// For each token at `offset`, yields the ancestor chains (through macro
    /// expansions) of its exact macro descendants, k-way-merged so that
    /// chains starting with smaller (more specific) nodes come first.
    fn descend_node_at_offset(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
        node.token_at_offset(offset)
            .map(move |token| self.descend_into_macros_exact(token))
            .map(|descendants| {
                descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
            })
            // Compare two ancestor chains lexicographically by the text-range
            // length of their nodes; the chains are cloned because comparison
            // must not consume them.
            .kmerge_by(|left, right| {
                left.clone()
                    .map(|node| node.text_range().len())
                    .lt(right.clone().map(|node| node.text_range().len()))
            })
    }
1492
1493 pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
1497 let node = self.find_file(node);
1498 node.original_file_range_rooted(self.db)
1499 }
1500
1501 pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
1503 let node = self.find_file(node);
1504 node.original_file_range_opt(self.db).filter(|(_, ctx)| ctx.is_root()).map(TupleExt::head)
1505 }
1506
1507 pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
1510 self.wrap_node_infile(node).original_ast_node_rooted(self.db).map(
1511 |InRealFile { file_id, value }| {
1512 self.cache(find_root(value.syntax()), file_id.into());
1513 value
1514 },
1515 )
1516 }
1517
1518 pub fn original_syntax_node_rooted(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
1521 let InFile { file_id, .. } = self.find_file(node);
1522 InFile::new(file_id, node).original_syntax_node_rooted(self.db).map(
1523 |InRealFile { file_id, value }| {
1524 self.cache(find_root(&value), file_id.into());
1525 value
1526 },
1527 )
1528 }
1529
1530 pub fn diagnostics_display_range(
1531 &self,
1532 src: InFile<SyntaxNodePtr>,
1533 ) -> FileRangeWrapper<FileId> {
1534 let root = self.parse_or_expand(src.file_id);
1535 let node = src.map(|it| it.to_node(&root));
1536 let FileRange { file_id, range } = node.as_ref().original_file_range_rooted(self.db);
1537 FileRangeWrapper { file_id: file_id.file_id(self.db), range }
1538 }
1539
1540 pub fn diagnostics_display_range_for_range(
1541 &self,
1542 src: InFile<TextRange>,
1543 ) -> FileRangeWrapper<FileId> {
1544 let FileRange { file_id, range } = src.original_node_file_range_rooted(self.db);
1545 FileRangeWrapper { file_id: file_id.file_id(self.db), range }
1546 }
1547
1548 fn token_ancestors_with_macros(
1549 &self,
1550 token: SyntaxToken,
1551 ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
1552 token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
1553 }
1554
1555 pub fn ancestors_with_macros(
1558 &self,
1559 node: SyntaxNode,
1560 ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
1561 let node = self.find_file(&node);
1562 self.ancestors_with_macros_file(node.cloned()).map(|it| it.value)
1563 }
1564
    /// Like [`Self::ancestors_with_macros`], but keeps track of the file each
    /// ancestor belongs to.
    pub fn ancestors_with_macros_file(
        &self,
        node: InFile<SyntaxNode>,
    ) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
        iter::successors(Some(node), move |&InFile { file_id, ref value }| match value.parent() {
            Some(parent) => Some(InFile::new(file_id, parent)),
            None => {
                // Hit the root of a macro expansion: continue at the parent of
                // the macro call's argument in the calling file (if any).
                let macro_file = file_id.macro_file()?;

                self.with_ctx(|ctx| {
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    expansion_info.arg().map(|node| node?.parent()).transpose()
                })
            }
        })
    }
1582
1583 pub fn ancestors_at_offset_with_macros(
1584 &self,
1585 node: &SyntaxNode,
1586 offset: TextSize,
1587 ) -> impl Iterator<Item = SyntaxNode> + '_ {
1588 node.token_at_offset(offset)
1589 .map(|token| self.token_ancestors_with_macros(token))
1590 .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
1591 }
1592
1593 pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
1594 let text = lifetime.text();
1595 let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
1596 let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
1597 gpl.lifetime_params()
1598 .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
1599 })?;
1600 let src = self.wrap_node_infile(lifetime_param);
1601 ToDef::to_def(self, src.as_ref())
1602 }
1603
1604 pub fn resolve_label(&self, label: &ast::Lifetime) -> Option<Label> {
1605 let src = self.wrap_node_infile(label.clone());
1606 let (parent, label_id) = self.with_ctx(|ctx| ctx.label_ref_to_def(src.as_ref()))?;
1607 Some(Label { parent, label_id })
1608 }
1609
1610 pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type<'db>> {
1611 let analyze = self.analyze(ty.syntax())?;
1612 analyze.type_of_type(self.db, ty)
1613 }
1614
    /// Resolves a path occurring in type position to the trait it names, if
    /// any.
    pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
        let parent_ty = path.syntax().parent().and_then(ast::Type::cast)?;
        let analyze = self.analyze(path.syntax())?;
        // Look up the lowered type-ref for the parent type node and require it
        // to be a plain path type.
        let ty = analyze.store_sm()?.node_type(InFile::new(analyze.file_id, &parent_ty))?;
        let path = match &analyze.store()?.types[ty] {
            hir_def::type_ref::TypeRef::Path(path) => path,
            _ => return None,
        };
        match analyze.resolver.resolve_path_in_type_ns_fully(self.db, path)? {
            TypeNs::TraitId(trait_id) => Some(trait_id.into()),
            _ => None,
        }
    }
1628
    /// Returns the adjustment (coercion) steps applied to `expr`, each with
    /// its source and target type, in application order.
    pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment<'db>>> {
        // Translate the solver's mutability into the HIR-level one.
        let mutability = |m| match m {
            hir_ty::next_solver::Mutability::Not => Mutability::Shared,
            hir_ty::next_solver::Mutability::Mut => Mutability::Mut,
        };

        let analyzer = self.analyze(expr.syntax())?;

        let (mut source_ty, _) = analyzer.type_of_expr(self.db, expr)?;

        analyzer.expr_adjustments(expr).map(|it| {
            it.iter()
                .map(|adjust| {
                    let target =
                        Type::new_with_resolver(self.db, &analyzer.resolver, adjust.target);
                    let kind = match adjust.kind {
                        hir_ty::Adjust::NeverToAny => Adjust::NeverToAny,
                        hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => {
                            Adjust::Deref(Some(OverloadedDeref(
                                m.map(mutability).unwrap_or(Mutability::Shared),
                            )))
                        }
                        hir_ty::Adjust::Deref(None) => Adjust::Deref(None),
                        hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
                            Adjust::Borrow(AutoBorrow::RawPtr(mutability(m)))
                        }
                        hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => {
                            Adjust::Borrow(AutoBorrow::Ref(mutability(m.into())))
                        }
                        hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc),
                    };

                    // Each step's source type is the previous step's target,
                    // starting from the expression's original type.
                    let source = mem::replace(&mut source_ty, target.clone());

                    Adjustment { source, target, kind }
                })
                .collect()
        })
    }
1671
1672 pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo<'db>> {
1673 self.analyze(expr.syntax())?
1674 .type_of_expr(self.db, expr)
1675 .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
1676 }
1677
1678 pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo<'db>> {
1679 self.analyze(pat.syntax())?
1680 .type_of_pat(self.db, pat)
1681 .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
1682 }
1683
1684 pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type<'db>> {
1688 self.analyze(pat.syntax())?.type_of_binding_in_pat(self.db, pat)
1689 }
1690
1691 pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type<'db>> {
1692 self.analyze(param.syntax())?.type_of_self(self.db, param)
1693 }
1694
1695 pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type<'db>; 1]> {
1696 self.analyze(pat.syntax())
1697 .and_then(|it| it.pattern_adjustments(self.db, pat))
1698 .unwrap_or_default()
1699 }
1700
1701 pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
1702 self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
1703 }
1704
1705 pub fn resolve_expr_as_callable(&self, call: &ast::Expr) -> Option<Callable<'db>> {
1706 self.analyze(call.syntax())?.resolve_expr_as_callable(self.db, call)
1707 }
1708
1709 pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
1710 self.analyze(call.syntax())?.resolve_method_call(self.db, call)
1711 }
1712
1713 pub fn resolve_method_call_fallback(
1715 &self,
1716 call: &ast::MethodCallExpr,
1717 ) -> Option<(Either<Function, Field>, Option<GenericSubstitution<'db>>)> {
1718 self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
1719 }
1720
    /// Resolves `func` (a method of `trait_`) to the concrete impl method
    /// selected for the given substitution in `env`'s trait environment.
    ///
    /// # Panics
    /// Panics when `subst` does not supply exactly one type per generic
    /// parameter of the trait, or when the trait has non-type parameters.
    pub fn resolve_trait_impl_method(
        &self,
        env: Type<'db>,
        trait_: Trait,
        func: Function,
        subst: impl IntoIterator<Item = Type<'db>>,
    ) -> Option<Function> {
        let interner = DbInterner::new_no_crate(self.db);
        let mut subst = subst.into_iter();
        let substs =
            hir_ty::next_solver::GenericArgs::for_item(interner, trait_.id.into(), |_, id, _| {
                assert!(matches!(id, hir_def::GenericParamId::TypeParamId(_)), "expected a type");
                subst.next().expect("too few subst").ty.into()
            });
        assert!(subst.next().is_none(), "too many subst");
        Some(self.db.lookup_impl_method(env.env, func.into(), substs).0.into())
    }
1740
1741 fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<StructId> {
1742 self.analyze(range_pat.syntax())?.resolve_range_pat(self.db, range_pat)
1743 }
1744
1745 fn resolve_range_expr(&self, range_expr: &ast::RangeExpr) -> Option<StructId> {
1746 self.analyze(range_expr.syntax())?.resolve_range_expr(self.db, range_expr)
1747 }
1748
1749 fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<FunctionId> {
1750 self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr)
1751 }
1752
1753 fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<FunctionId> {
1754 self.analyze(prefix_expr.syntax())?.resolve_prefix_expr(self.db, prefix_expr)
1755 }
1756
1757 fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<FunctionId> {
1758 self.analyze(index_expr.syntax())?.resolve_index_expr(self.db, index_expr)
1759 }
1760
1761 fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<FunctionId> {
1762 self.analyze(bin_expr.syntax())?.resolve_bin_expr(self.db, bin_expr)
1763 }
1764
1765 fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<FunctionId> {
1766 self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr)
1767 }
1768
1769 pub fn resolve_method_call_as_callable(
1772 &self,
1773 call: &ast::MethodCallExpr,
1774 ) -> Option<Callable<'db>> {
1775 self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
1776 }
1777
1778 pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Either<Field, TupleField>> {
1779 self.analyze(field.syntax())?.resolve_field(field)
1780 }
1781
1782 pub fn resolve_field_fallback(
1783 &self,
1784 field: &ast::FieldExpr,
1785 ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution<'db>>)>
1786 {
1787 self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
1788 }
1789
1790 pub fn resolve_record_field(
1791 &self,
1792 field: &ast::RecordExprField,
1793 ) -> Option<(Field, Option<Local>, Type<'db>)> {
1794 self.resolve_record_field_with_substitution(field)
1795 .map(|(field, local, ty, _)| (field, local, ty))
1796 }
1797
1798 pub fn resolve_record_field_with_substitution(
1799 &self,
1800 field: &ast::RecordExprField,
1801 ) -> Option<(Field, Option<Local>, Type<'db>, GenericSubstitution<'db>)> {
1802 self.analyze(field.syntax())?.resolve_record_field(self.db, field)
1803 }
1804
1805 pub fn resolve_record_pat_field(
1806 &self,
1807 field: &ast::RecordPatField,
1808 ) -> Option<(Field, Type<'db>)> {
1809 self.resolve_record_pat_field_with_subst(field).map(|(field, ty, _)| (field, ty))
1810 }
1811
1812 pub fn resolve_record_pat_field_with_subst(
1813 &self,
1814 field: &ast::RecordPatField,
1815 ) -> Option<(Field, Type<'db>, GenericSubstitution<'db>)> {
1816 self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
1817 }
1818
1819 pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
1821 let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
1822 self.resolve_macro_call2(macro_call)
1823 }
1824
1825 pub fn resolve_macro_call2(&self, macro_call: InFile<&ast::MacroCall>) -> Option<Macro> {
1826 self.to_def2(macro_call)
1827 .and_then(|call| self.with_ctx(|ctx| macro_call_to_macro_id(ctx, call)))
1828 .map(Into::into)
1829 }
1830
1831 pub fn is_proc_macro_call(&self, macro_call: InFile<&ast::MacroCall>) -> bool {
1832 self.resolve_macro_call2(macro_call)
1833 .is_some_and(|m| matches!(m.id, MacroId::ProcMacroId(..)))
1834 }
1835
1836 pub fn resolve_macro_call_arm(&self, macro_call: &ast::MacroCall) -> Option<u32> {
1837 let file_id = self.to_def(macro_call)?;
1838 self.db.parse_macro_expansion(file_id).value.1.matched_arm
1839 }
1840
1841 pub fn get_unsafe_ops(&self, def: DefWithBody) -> FxHashSet<ExprOrPatSource> {
1842 let def = DefWithBodyId::from(def);
1843 let (body, source_map) = self.db.body_with_source_map(def);
1844 let infer = InferenceResult::for_body(self.db, def);
1845 let mut res = FxHashSet::default();
1846 unsafe_operations_for_body(self.db, infer, def, &body, &mut |node| {
1847 if let Ok(node) = source_map.expr_or_pat_syntax(node) {
1848 res.insert(node);
1849 }
1850 });
1851 res
1852 }
1853
    /// Collects the unsafe operations performed inside the given block, which
    /// is expected to be an `unsafe { .. }` block.
    pub fn get_unsafe_ops_for_unsafe_block(&self, block: ast::BlockExpr) -> Vec<ExprOrPatSource> {
        always!(block.unsafe_token().is_some());
        let block = self.wrap_node_infile(ast::Expr::from(block));
        let Some(def) = self.body_for(block.syntax()) else { return Vec::new() };
        let def = def.into();
        let (body, source_map) = self.db.body_with_source_map(def);
        let infer = InferenceResult::for_body(self.db, def);
        // Map the AST block back to its body expression id; bail when it is
        // not part of the lowered body.
        let Some(ExprOrPatId::ExprId(block)) = source_map.node_expr(block.as_ref()) else {
            return Vec::new();
        };
        let mut res = Vec::default();
        unsafe_operations(self.db, infer, def, &body, block, &mut |node, _| {
            if let Ok(node) = source_map.expr_or_pat_syntax(node) {
                res.push(node);
            }
        });
        res
    }
1872
    /// Returns whether this macro call is considered unsafe (asm-like macros
    /// always are; otherwise the enclosing macro expression is checked).
    pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
        let Some(mac) = self.resolve_macro_call(macro_call) else { return false };
        if mac.is_asm_like(self.db) {
            return true;
        }

        let Some(sa) = self.analyze(macro_call.syntax()) else { return false };
        let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
        match macro_call.map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast)).transpose() {
            Some(it) => sa.is_unsafe_macro_call_expr(self.db, it.as_ref()),
            None => false,
        }
    }
1886
1887 pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
1888 let item_in_file = self.wrap_node_infile(item.clone());
1889 let id = self.with_ctx(|ctx| {
1890 let macro_call_id = ctx.item_to_macro_call(item_in_file.as_ref())?;
1891 macro_call_to_macro_id(ctx, macro_call_id)
1892 })?;
1893 Some(Macro { id })
1894 }
1895
1896 pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
1897 self.resolve_path_with_subst(path).map(|(it, _)| it)
1898 }
1899
1900 pub fn resolve_path_per_ns(&self, path: &ast::Path) -> Option<PathResolutionPerNs> {
1901 self.analyze(path.syntax())?.resolve_hir_path_per_ns(self.db, path)
1902 }
1903
1904 pub fn resolve_path_with_subst(
1905 &self,
1906 path: &ast::Path,
1907 ) -> Option<(PathResolution, Option<GenericSubstitution<'db>>)> {
1908 self.analyze(path.syntax())?.resolve_path(self.db, path)
1909 }
1910
1911 pub fn resolve_use_type_arg(&self, name: &ast::NameRef) -> Option<TypeParam> {
1912 self.analyze(name.syntax())?.resolve_use_type_arg(name)
1913 }
1914
1915 pub fn resolve_offset_of_field(
1916 &self,
1917 name_ref: &ast::NameRef,
1918 ) -> Option<(Either<Variant, Field>, GenericSubstitution<'db>)> {
1919 self.analyze_no_infer(name_ref.syntax())?.resolve_offset_of_field(self.db, name_ref)
1920 }
1921
1922 pub fn resolve_mod_path(
1923 &self,
1924 scope: &SyntaxNode,
1925 path: &ModPath,
1926 ) -> Option<impl Iterator<Item = ItemInNs>> {
1927 let analyze = self.analyze(scope)?;
1928 let items = analyze.resolver.resolve_module_path_in_items(self.db, path);
1929 Some(items.iter_items().map(|(item, _)| item.into()))
1930 }
1931
1932 fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
1933 self.analyze(record_lit.syntax())?.resolve_variant(record_lit)
1934 }
1935
1936 pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
1937 self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
1938 }
1939
1940 pub fn record_literal_missing_fields(
1941 &self,
1942 literal: &ast::RecordExpr,
1943 ) -> Vec<(Field, Type<'db>)> {
1944 self.analyze(literal.syntax())
1945 .and_then(|it| it.record_literal_missing_fields(self.db, literal))
1946 .unwrap_or_default()
1947 }
1948
1949 pub fn record_pattern_missing_fields(
1950 &self,
1951 pattern: &ast::RecordPat,
1952 ) -> Vec<(Field, Type<'db>)> {
1953 self.analyze(pattern.syntax())
1954 .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
1955 .unwrap_or_default()
1956 }
1957
1958 fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
1959 let mut ctx = SourceToDefCtx { db: self.db, cache: &mut self.s2d_cache.borrow_mut() };
1960 f(&mut ctx)
1961 }
1962
1963 pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
1964 let src = self.find_file(src.syntax()).with_value(src);
1965 T::to_def(self, src)
1966 }
1967
    /// File-aware variant of [`Self::to_def`].
    pub fn to_def2<T: ToDef>(&self, src: InFile<&T>) -> Option<T::Def> {
        T::to_def(self, src)
    }
1971
1972 fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
1973 self.with_ctx(|ctx| ctx.file_to_def(file).to_owned()).into_iter().map(Module::from)
1974 }
1975
1976 fn hir_file_to_module_defs(&self, file: HirFileId) -> impl Iterator<Item = Module> {
1977 self.file_to_module_defs(file.original_file_respecting_includes(self.db).file_id(self.db))
1979 }
1980
1981 pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
1982 self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
1983 db: self.db,
1984 file_id,
1985 resolver,
1986 })
1987 }
1988
1989 pub fn scope_at_offset(
1990 &self,
1991 node: &SyntaxNode,
1992 offset: TextSize,
1993 ) -> Option<SemanticsScope<'db>> {
1994 self.analyze_with_offset_no_infer(node, offset).map(
1995 |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
1996 db: self.db,
1997 file_id,
1998 resolver,
1999 },
2000 )
2001 }
2002
2003 pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
2005 where
2006 Def::Ast: AstNode,
2007 {
2008 let res = def.source(self.db)?;
2010 self.cache(find_root(res.value.syntax()), res.file_id);
2011 Some(res)
2012 }
2013
2014 pub fn body_for(&self, node: InFile<&SyntaxNode>) -> Option<DefWithBody> {
2015 let container = self.with_ctx(|ctx| ctx.find_container(node))?;
2016
2017 match container {
2018 ChildContainer::DefWithBodyId(def) => Some(def.into()),
2019 _ => None,
2020 }
2021 }
2022
2023 fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer<'db>> {
2025 let node = self.find_file(node);
2026 self.analyze_impl(node, None, true)
2027 }
2028
2029 fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer<'db>> {
2031 let node = self.find_file(node);
2032 self.analyze_impl(node, None, false)
2033 }
2034
2035 fn analyze_with_offset_no_infer(
2036 &self,
2037 node: &SyntaxNode,
2038 offset: TextSize,
2039 ) -> Option<SourceAnalyzer<'db>> {
2040 let node = self.find_file(node);
2041 self.analyze_impl(node, Some(offset), false)
2042 }
2043
    /// Builds a [`SourceAnalyzer`] appropriate for the innermost container of
    /// `node`: bodies get a body analyzer (with inference iff `infer_body`),
    /// variants and generic items get specialized analyzers, and modules fall
    /// back to a plain resolver-based analyzer.
    fn analyze_impl(
        &self,
        node: InFile<&SyntaxNode>,
        offset: Option<TextSize>,
        infer_body: bool,
    ) -> Option<SourceAnalyzer<'db>> {
        let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered();

        let container = self.with_ctx(|ctx| ctx.find_container(node))?;

        let resolver = match container {
            ChildContainer::DefWithBodyId(def) => {
                return Some(if infer_body {
                    SourceAnalyzer::new_for_body(self.db, def, node, offset)
                } else {
                    SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
                });
            }
            ChildContainer::VariantId(def) => {
                return Some(SourceAnalyzer::new_variant_body(self.db, def, node, offset));
            }
            ChildContainer::TraitId(it) => {
                return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
            }
            ChildContainer::ImplId(it) => {
                return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
            }
            ChildContainer::EnumId(it) => {
                return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
            }
            ChildContainer::GenericDefId(it) => {
                return Some(SourceAnalyzer::new_generic_def(self.db, it, node, offset));
            }
            ChildContainer::ModuleId(it) => it.resolver(self.db),
        };
        Some(SourceAnalyzer::new_for_resolver(resolver, node))
    }
2082
2083 fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
2084 SourceToDefCache::cache(
2085 &mut self.s2d_cache.borrow_mut().root_to_file_cache,
2086 root_node,
2087 file_id,
2088 );
2089 }
2090
    /// Panics if `node` does not belong to a file known to this semantics
    /// instance.
    pub fn assert_contains_node(&self, node: &SyntaxNode) {
        // `find_file` panics on unknown roots, which is the assertion.
        self.find_file(node);
    }
2094
2095 fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
2096 let cache = self.s2d_cache.borrow();
2097 cache.root_to_file_cache.get(root_node).copied()
2098 }
2099
2100 fn wrap_node_infile<N: AstNode>(&self, node: N) -> InFile<N> {
2101 let InFile { file_id, .. } = self.find_file(node.syntax());
2102 InFile::new(file_id, node)
2103 }
2104
2105 fn wrap_token_infile(&self, token: SyntaxToken) -> InFile<SyntaxToken> {
2106 let InFile { file_id, .. } = self.find_file(&token.parent().unwrap());
2107 InFile::new(file_id, token)
2108 }
2109
    /// Attaches the file id to `node`, panicking when the node's syntax root
    /// was never cached by this `Semantics` instance (i.e. the node was not
    /// obtained through it).
    fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
        let root_node = find_root(node);
        let file_id = self.lookup(&root_node).unwrap_or_else(|| {
            panic!(
                "\n\nFailed to lookup {:?} in this Semantics.\n\
                 Make sure to only query nodes derived from this instance of Semantics.\n\
                 root node: {:?}\n\
                 known nodes: {}\n\n",
                node,
                root_node,
                self.s2d_cache
                    .borrow()
                    .root_to_file_cache
                    .keys()
                    .map(|it| format!("{it:?}"))
                    .collect::<Vec<_>>()
                    .join(", ")
            )
        });
        InFile::new(file_id, node)
    }
2132
2133 pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
2135 let Some(enclosing_item) =
2136 expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast)
2137 else {
2138 return false;
2139 };
2140
2141 let def = match &enclosing_item {
2142 Either::Left(ast::Item::Fn(it)) if it.unsafe_token().is_some() => return true,
2143 Either::Left(ast::Item::Fn(it)) => {
2144 self.to_def(it).map(<_>::into).map(DefWithBodyId::FunctionId)
2145 }
2146 Either::Left(ast::Item::Const(it)) => {
2147 self.to_def(it).map(<_>::into).map(DefWithBodyId::ConstId)
2148 }
2149 Either::Left(ast::Item::Static(it)) => {
2150 self.to_def(it).map(<_>::into).map(DefWithBodyId::StaticId)
2151 }
2152 Either::Left(_) => None,
2153 Either::Right(it) => self.to_def(it).map(<_>::into).map(DefWithBodyId::VariantId),
2154 };
2155 let Some(def) = def else { return false };
2156 let enclosing_node = enclosing_item.as_ref().either(|i| i.syntax(), |v| v.syntax());
2157
2158 let (body, source_map) = self.db.body_with_source_map(def);
2159
2160 let file_id = self.find_file(expr.syntax()).file_id;
2161
2162 let Some(mut parent) = expr.syntax().parent() else { return false };
2163 loop {
2164 if &parent == enclosing_node {
2165 break false;
2166 }
2167
2168 if let Some(parent) = ast::Expr::cast(parent.clone())
2169 && let Some(ExprOrPatId::ExprId(expr_id)) =
2170 source_map.node_expr(InFile { file_id, value: &parent })
2171 && let Expr::Unsafe { .. } = body[expr_id]
2172 {
2173 break true;
2174 }
2175
2176 let Some(parent_) = parent.parent() else { break false };
2177 parent = parent_;
2178 }
2179 }
2180
2181 pub fn impl_generated_from_derive(&self, impl_: Impl) -> Option<Adt> {
2182 let source = hir_def::src::HasSource::ast_ptr(&impl_.id.loc(self.db), self.db);
2183 let mut file_id = source.file_id;
2184 let adt_ast_id = loop {
2185 let macro_call = file_id.macro_file()?;
2186 match macro_call.loc(self.db).kind {
2187 hir_expand::MacroCallKind::Derive { ast_id, .. } => break ast_id,
2188 hir_expand::MacroCallKind::FnLike { ast_id, .. } => file_id = ast_id.file_id,
2189 hir_expand::MacroCallKind::Attr { ast_id, .. } => file_id = ast_id.file_id,
2190 }
2191 };
2192 let adt_source = adt_ast_id.to_in_file_node(self.db);
2193 self.cache(adt_source.value.syntax().ancestors().last().unwrap(), adt_source.file_id);
2194 ToDef::to_def(self, adt_source.as_ref())
2195 }
2196
    /// Collects all `Local` bindings referenced by `element`, restricted to
    /// the statements/expressions whose text range lies within `text_range`.
    ///
    /// Returns `None` when the element cannot be analyzed (no source
    /// analyzer, no expression store, or no enclosing body owner). Paths
    /// whose single-segment name is compiler-generated are skipped.
    pub fn locals_used(
        &self,
        element: Either<&ast::Expr, &ast::StmtList>,
        text_range: TextRange,
    ) -> Option<FxIndexSet<Local>> {
        let sa = self.analyze(element.either(|e| e.syntax(), |s| s.syntax()))?;
        let store = sa.store()?;
        let mut resolver = sa.resolver.clone();
        let def = resolver.body_owner()?;

        // Filter out names synthesized by the compiler/macro machinery.
        let is_not_generated = |path: &Path| {
            !path.mod_path().and_then(|path| path.as_ident()).is_some_and(Name::is_generated)
        };

        // Seed expressions: either the single expression, or every statement
        // (expression statements, let initializers, let-else blocks) plus the
        // tail expression of the statement list that falls inside the range.
        let exprs = element.either(
            |e| vec![e.clone()],
            |stmts| {
                let mut exprs: Vec<_> = stmts
                    .statements()
                    .filter(|stmt| text_range.contains_range(stmt.syntax().text_range()))
                    .filter_map(|stmt| match stmt {
                        ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr().map(|e| vec![e]),
                        ast::Stmt::Item(_) => None,
                        ast::Stmt::LetStmt(let_stmt) => {
                            let init = let_stmt.initializer();
                            let let_else = let_stmt
                                .let_else()
                                .and_then(|le| le.block_expr())
                                .map(ast::Expr::BlockExpr);

                            // A `let` contributes its initializer and/or its
                            // `else { .. }` block, whichever exist.
                            match (init, let_else) {
                                (Some(i), Some(le)) => Some(vec![i, le]),
                                (Some(i), _) => Some(vec![i]),
                                (_, Some(le)) => Some(vec![le]),
                                _ => None,
                            }
                        }
                    })
                    .flatten()
                    .collect();

                if let Some(tail_expr) = stmts.tail_expr()
                    && text_range.contains_range(tail_expr.syntax().text_range())
                {
                    exprs.push(tail_expr);
                }
                exprs
            },
        );
        // Map the AST expressions to HIR expression ids; ones without a HIR
        // counterpart are dropped.
        let mut exprs: Vec<_> =
            exprs.into_iter().filter_map(|e| sa.expr_id(e).and_then(|e| e.as_expr())).collect();

        let mut locals: FxIndexSet<Local> = FxIndexSet::default();
        // Resolves `id` (a path expression or path pattern) in the scope of
        // `parent_expr` and records it when it names a local binding.
        let mut add_to_locals_used = |id, parent_expr| {
            let path = match id {
                ExprOrPatId::ExprId(expr_id) => {
                    if let Expr::Path(path) = &store[expr_id] {
                        Some(path)
                    } else {
                        None
                    }
                }
                ExprOrPatId::PatId(pat_id) => {
                    if let Pat::Path(path) = &store[pat_id] {
                        Some(path)
                    } else {
                        None
                    }
                }
            };

            if let Some(path) = path
                && is_not_generated(path)
            {
                // The returned scope guard is deliberately dropped: the next
                // call re-positions the resolver from wherever it is now.
                let _ = resolver.update_to_inner_scope(self.db, def, parent_expr);
                let hygiene = store.expr_or_pat_path_hygiene(id);
                resolver.resolve_path_in_value_ns_fully(self.db, path, hygiene).inspect(|value| {
                    if let ValueNs::LocalBinding(id) = value {
                        locals.insert((def, *id).into());
                    }
                });
            }
        };

        // Depth-first worklist over the expression trees: check assignment
        // targets (path patterns), enqueue children, then check the
        // expression itself.
        while let Some(expr_id) = exprs.pop() {
            if let Expr::Assignment { target, .. } = store[expr_id] {
                store.walk_pats(target, &mut |id| {
                    add_to_locals_used(ExprOrPatId::PatId(id), expr_id)
                });
            };
            store.walk_child_exprs(expr_id, |id| {
                exprs.push(id);
            });

            add_to_locals_used(ExprOrPatId::ExprId(expr_id), expr_id)
        }

        Some(locals)
    }
2296}
2297
2298fn macro_call_to_macro_id(
2300 ctx: &mut SourceToDefCtx<'_, '_>,
2301 macro_call_id: MacroCallId,
2302) -> Option<MacroId> {
2303 let db: &dyn ExpandDatabase = ctx.db;
2304 let loc = db.lookup_intern_macro_call(macro_call_id);
2305
2306 match loc.def.ast_id() {
2307 Either::Left(it) => {
2308 let node = match it.file_id {
2309 HirFileId::FileId(file_id) => {
2310 it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
2311 }
2312 HirFileId::MacroFile(macro_file) => {
2313 let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
2314 it.to_ptr(db).to_node(&expansion_info.expanded().value)
2315 }
2316 };
2317 ctx.macro_to_def(InFile::new(it.file_id, &node))
2318 }
2319 Either::Right(it) => {
2320 let node = match it.file_id {
2321 HirFileId::FileId(file_id) => {
2322 it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
2323 }
2324 HirFileId::MacroFile(macro_file) => {
2325 let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
2326 it.to_ptr(db).to_node(&expansion_info.expanded().value)
2327 }
2328 };
2329 ctx.proc_macro_to_def(InFile::new(it.file_id, &node))
2330 }
2331 }
2332}
2333
/// Conversion from an AST node (wrapped in its originating file) to the
/// corresponding HIR definition, performed through a `Semantics` instance.
pub trait ToDef: AstNode + Clone {
    /// The HIR definition type this AST node maps to.
    type Def;
    /// Resolves `src` to its HIR definition, or `None` if it has none.
    fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def>;
}
2338
/// Generates `ToDef` implementations from `(HIR def type, AST node type,
/// SourceToDefCtx method)` triples; each generated impl simply forwards to
/// the named method on `SourceToDefCtx` and converts the result.
macro_rules! to_def_impls {
    ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
        impl ToDef for $ast {
            type Def = $def;
            fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def> {
                sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
            }
        }
    )*}
}
2349
// The AST-to-HIR conversion table: one `ToDef` impl is generated per row.
to_def_impls![
    (crate::Module, ast::Module, module_to_def),
    (crate::Module, ast::SourceFile, source_file_to_def),
    (crate::Struct, ast::Struct, struct_to_def),
    (crate::Enum, ast::Enum, enum_to_def),
    (crate::Union, ast::Union, union_to_def),
    (crate::Trait, ast::Trait, trait_to_def),
    (crate::Impl, ast::Impl, impl_to_def),
    (crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
    (crate::Const, ast::Const, const_to_def),
    (crate::Static, ast::Static, static_to_def),
    (crate::Function, ast::Fn, fn_to_def),
    (crate::Field, ast::RecordField, record_field_to_def),
    (crate::Field, ast::TupleField, tuple_field_to_def),
    (crate::Variant, ast::Variant, enum_variant_to_def),
    (crate::TypeParam, ast::TypeParam, type_param_to_def),
    (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
    (crate::ConstParam, ast::ConstParam, const_param_to_def),
    (crate::GenericParam, ast::GenericParam, generic_param_to_def),
    (crate::Macro, ast::Macro, macro_to_def),
    (crate::Local, ast::IdentPat, bind_pat_to_def),
    (crate::Local, ast::SelfParam, self_param_to_def),
    (crate::Label, ast::Label, label_to_def),
    (crate::Adt, ast::Adt, adt_to_def),
    (crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
    (crate::InlineAsmOperand, ast::AsmOperandNamed, asm_operand_to_def),
    (crate::ExternBlock, ast::ExternBlock, extern_block_to_def),
    (MacroCallId, ast::MacroCall, macro_call_to_macro_call),
];
2379
2380fn find_root(node: &SyntaxNode) -> SyntaxNode {
2381 node.ancestors().last().unwrap()
2382}
2383
/// A location in source code together with the name-resolution state active
/// there: which file it is in and what names are visible.
#[derive(Debug)]
pub struct SemanticsScope<'db> {
    pub db: &'db dyn HirDatabase,
    // The file (real file or macro expansion) the scope was created in.
    file_id: HirFileId,
    // Name-resolution state at this position.
    resolver: Resolver<'db>,
}
2409
2410impl<'db> SemanticsScope<'db> {
2411 pub fn file_id(&self) -> HirFileId {
2412 self.file_id
2413 }
2414
2415 pub fn module(&self) -> Module {
2416 Module { id: self.resolver.module() }
2417 }
2418
2419 pub fn krate(&self) -> Crate {
2420 Crate { id: self.resolver.krate() }
2421 }
2422
2423 pub fn containing_function(&self) -> Option<Function> {
2424 self.resolver.body_owner().and_then(|owner| match owner {
2425 DefWithBodyId::FunctionId(id) => Some(id.into()),
2426 _ => None,
2427 })
2428 }
2429
2430 pub(crate) fn resolver(&self) -> &Resolver<'db> {
2431 &self.resolver
2432 }
2433
2434 pub fn visible_traits(&self) -> VisibleTraits {
2436 let resolver = &self.resolver;
2437 VisibleTraits(resolver.traits_in_scope(self.db))
2438 }
2439
2440 pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
2442 let scope = self.resolver.names_in_scope(self.db);
2443 for (name, entries) in scope {
2444 for entry in entries {
2445 let def = match entry {
2446 resolver::ScopeDef::ModuleDef(it) => ScopeDef::ModuleDef(it.into()),
2447 resolver::ScopeDef::Unknown => ScopeDef::Unknown,
2448 resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
2449 resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
2450 resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
2451 resolver::ScopeDef::Local(binding_id) => match self.resolver.body_owner() {
2452 Some(parent) => ScopeDef::Local(Local { parent, binding_id }),
2453 None => continue,
2454 },
2455 resolver::ScopeDef::Label(label_id) => match self.resolver.body_owner() {
2456 Some(parent) => ScopeDef::Label(Label { parent, label_id }),
2457 None => continue,
2458 },
2459 };
2460 f(name.clone(), def)
2461 }
2462 }
2463 }
2464
2465 pub fn can_use_trait_methods(&self, t: Trait) -> bool {
2467 self.resolver.traits_in_scope(self.db).contains(&t.id)
2468 }
2469
2470 pub fn speculative_resolve(&self, ast_path: &ast::Path) -> Option<PathResolution> {
2473 let mut kind = PathKind::Plain;
2474 let mut segments = vec![];
2475 let mut first = true;
2476 for segment in ast_path.segments() {
2477 if first {
2478 first = false;
2479 if segment.coloncolon_token().is_some() {
2480 kind = PathKind::Abs;
2481 }
2482 }
2483
2484 let Some(k) = segment.kind() else { continue };
2485 match k {
2486 ast::PathSegmentKind::Name(name_ref) => segments.push(name_ref.as_name()),
2487 ast::PathSegmentKind::Type { .. } => continue,
2488 ast::PathSegmentKind::SelfTypeKw => {
2489 segments.push(Name::new_symbol_root(sym::Self_))
2490 }
2491 ast::PathSegmentKind::SelfKw => kind = PathKind::Super(0),
2492 ast::PathSegmentKind::SuperKw => match kind {
2493 PathKind::Super(s) => kind = PathKind::Super(s + 1),
2494 PathKind::Plain => kind = PathKind::Super(1),
2495 PathKind::Crate | PathKind::Abs | PathKind::DollarCrate(_) => continue,
2496 },
2497 ast::PathSegmentKind::CrateKw => kind = PathKind::Crate,
2498 }
2499 }
2500
2501 resolve_hir_path(
2502 self.db,
2503 &self.resolver,
2504 &Path::BarePath(Interned::new(ModPath::from_segments(kind, segments))),
2505 HygieneId::ROOT,
2506 None,
2507 )
2508 }
2509
2510 pub fn resolve_mod_path(&self, path: &ModPath) -> impl Iterator<Item = ItemInNs> + use<> {
2511 let items = self.resolver.resolve_module_path_in_items(self.db, path);
2512 items.iter_items().map(|(item, _)| item.into())
2513 }
2514
2515 pub fn assoc_type_shorthand_candidates(
2518 &self,
2519 resolution: &PathResolution,
2520 mut cb: impl FnMut(TypeAlias),
2521 ) {
2522 let (Some(def), Some(resolution)) = (self.resolver.generic_def(), resolution.in_type_ns())
2523 else {
2524 return;
2525 };
2526 hir_ty::associated_type_shorthand_candidates(self.db, def, resolution, |_, id| {
2527 cb(id.into());
2528 false
2529 });
2530 }
2531
2532 pub fn generic_def(&self) -> Option<crate::GenericDef> {
2533 self.resolver.generic_def().map(|id| id.into())
2534 }
2535
2536 pub fn extern_crates(&self) -> impl Iterator<Item = (Name, Module)> + '_ {
2537 self.resolver.extern_crates_in_scope().map(|(name, id)| (name, Module { id }))
2538 }
2539
2540 pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
2541 self.resolver.extern_crate_decls_in_scope(self.db)
2542 }
2543
2544 pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool {
2545 self.resolver.impl_def() == other.resolver.impl_def()
2546 }
2547}
2548
/// The set of traits whose methods are callable at some position; produced by
/// `SemanticsScope::visible_traits`.
#[derive(Debug)]
pub struct VisibleTraits(pub FxHashSet<TraitId>);
2551
// Deref to the underlying set so callers can use `HashSet` methods directly.
impl ops::Deref for VisibleTraits {
    type Target = FxHashSet<TraitId>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
2559
/// Walks a body looking for bindings whose resolution would change if
/// `to_be_renamed` were renamed from `old_name` to `new_name`.
struct RenameConflictsVisitor<'a> {
    db: &'a dyn HirDatabase,
    // Owner of the body being visited.
    owner: DefWithBodyId,
    // Mutated in place while walking: positioned at each visited expression.
    resolver: Resolver<'a>,
    body: &'a Body,
    // The binding the user wants to rename.
    to_be_renamed: BindingId,
    new_name: Symbol,
    old_name: Symbol,
    // Accumulated bindings that would conflict with the rename.
    conflicts: FxHashSet<BindingId>,
}
2570
impl RenameConflictsVisitor<'_> {
    /// Checks whether the single-ident path `path` (resolved at the
    /// resolver's current position) would clash with the rename, and records
    /// the conflicting binding if so.
    fn resolve_path(&mut self, node: ExprOrPatId, path: &Path) {
        if let Path::BarePath(path) = path
            && let Some(name) = path.as_ident()
        {
            if *name.symbol() == self.new_name {
                // A use of the *new* name: after the rename it might start
                // resolving to the renamed binding instead.
                if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed(
                    self.db,
                    name,
                    path,
                    self.body.expr_or_pat_path_hygiene(node),
                    self.to_be_renamed,
                ) {
                    self.conflicts.insert(conflicting);
                }
            } else if *name.symbol() == self.old_name
                && let Some(conflicting) = self.resolver.rename_will_conflict_with_another_variable(
                    self.db,
                    name,
                    path,
                    self.body.expr_or_pat_path_hygiene(node),
                    &self.new_name,
                    self.to_be_renamed,
                )
            {
                // A use of the *old* name: after the rename it might resolve
                // to a different binding than before.
                self.conflicts.insert(conflicting);
            }
        }
    }

    /// Recursively visits `expr` and its children, resolving path
    /// expressions (and path patterns in assignment targets) in the scope
    /// active at each expression.
    fn rename_conflicts(&mut self, expr: ExprId) {
        match &self.body[expr] {
            Expr::Path(path) => {
                // Position the resolver at this expression, resolve, then
                // restore it before recursing further.
                let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
                self.resolve_path(expr.into(), path);
                self.resolver.reset_to_guard(guard);
            }
            &Expr::Assignment { target, .. } => {
                let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
                // Assignment targets may contain path patterns that also need
                // conflict checking.
                self.body.walk_pats(target, &mut |pat| {
                    if let Pat::Path(path) = &self.body[pat] {
                        self.resolve_path(pat.into(), path);
                    }
                });
                self.resolver.reset_to_guard(guard);
            }
            _ => {}
        }

        self.body.walk_child_exprs(expr, |expr| self.rename_conflicts(expr));
    }
}