1mod child_by_source;
4mod source_to_def;
5
6use std::{
7 cell::RefCell,
8 convert::Infallible,
9 fmt, iter, mem,
10 ops::{self, ControlFlow, Not},
11};
12
13use base_db::FxIndexSet;
14use either::Either;
15use hir_def::{
16 BuiltinDeriveImplId, DefWithBodyId, ExpressionStoreOwnerId, HasModule, MacroId, StructId,
17 TraitId, VariantId,
18 attrs::parse_extra_crate_attrs,
19 expr_store::{Body, ExprOrPatSource, ExpressionStore, HygieneId, path::Path},
20 hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
21 nameres::{ModuleOrigin, crate_def_map},
22 resolver::{self, HasResolver, Resolver, TypeNs, ValueNs},
23 type_ref::Mutability,
24};
25use hir_expand::{
26 EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
27 builtin::{BuiltinFnLikeExpander, EagerExpander},
28 db::ExpandDatabase,
29 files::{FileRangeWrapper, HirFileRange, InRealFile},
30 mod_path::{ModPath, PathKind},
31 name::AsName,
32};
33use hir_ty::{
34 InferenceResult,
35 diagnostics::unsafe_operations,
36 infer_query_with_inspect,
37 next_solver::{
38 AnyImplId, DbInterner, Span,
39 format_proof_tree::{ProofTreeData, dump_proof_tree_structured},
40 },
41};
42use intern::{Interned, Symbol, sym};
43use itertools::Itertools;
44use rustc_hash::{FxHashMap, FxHashSet};
45use rustc_type_ir::inherent::Span as _;
46use smallvec::{SmallVec, smallvec};
47use span::{FileId, SyntaxContext};
48use stdx::{TupleExt, always};
49use syntax::{
50 AstNode, AstToken, Direction, SmolStr, SmolStrBuilder, SyntaxElement, SyntaxKind, SyntaxNode,
51 SyntaxNodePtr, SyntaxToken, T, TextRange, TextSize,
52 algo::skip_trivia_token,
53 ast::{self, HasAttrs as _, HasGenericParams},
54};
55
56use crate::{
57 Adjust, Adjustment, Adt, AnyFunctionId, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const,
58 ConstParam, Crate, DeriveHelper, Enum, EnumVariant, ExpressionStoreOwner, Field, Function,
59 GenericSubstitution, HasSource, Impl, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam,
60 Local, Macro, Module, ModuleDef, Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule,
61 Trait, TupleField, Type, TypeAlias, TypeParam, Union, Variant,
62 db::HirDatabase,
63 semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
64 source_analyzer::{SourceAnalyzer, resolve_hir_path},
65};
66
/// `ControlFlow` value for non-breaking traversals: the `Infallible` break type
/// guarantees at compile time that such a traversal can never break early.
const CONTINUE_NO_BREAKS: ControlFlow<Infallible, ()> = ControlFlow::Continue(());
68
/// The result of resolving a path to a definition-like entity.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PathResolution {
    /// An item (module, function, ADT, …).
    Def(ModuleDef),
    /// A local binding (variable, parameter, `self`).
    Local(Local),
    /// A generic type parameter.
    TypeParam(TypeParam),
    /// A generic const parameter.
    ConstParam(ConstParam),
    /// The `Self` type of an impl block.
    SelfType(Impl),
    /// A built-in attribute such as `#[inline]`.
    BuiltinAttr(BuiltinAttr),
    /// A tool module such as `rustfmt` in `#[rustfmt::skip]`.
    ToolModule(ToolModule),
    /// A derive-macro helper attribute.
    DeriveHelper(DeriveHelper),
}
84
85impl PathResolution {
86 pub(crate) fn in_type_ns(&self) -> Option<TypeNs> {
87 match self {
88 PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())),
89 PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
90 Some(TypeNs::BuiltinType((*builtin).into()))
91 }
92 PathResolution::Def(
93 ModuleDef::Const(_)
94 | ModuleDef::EnumVariant(_)
95 | ModuleDef::Macro(_)
96 | ModuleDef::Function(_)
97 | ModuleDef::Module(_)
98 | ModuleDef::Static(_)
99 | ModuleDef::Trait(_),
100 ) => None,
101 PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
102 Some(TypeNs::TypeAliasId((*alias).into()))
103 }
104 PathResolution::BuiltinAttr(_)
105 | PathResolution::ToolModule(_)
106 | PathResolution::Local(_)
107 | PathResolution::DeriveHelper(_)
108 | PathResolution::ConstParam(_) => None,
109 PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
110 PathResolution::SelfType(impl_def) => match impl_def.id {
111 AnyImplId::ImplId(id) => Some(TypeNs::SelfType(id)),
112 AnyImplId::BuiltinDeriveImplId(_) => None,
113 },
114 }
115 }
116}
117
/// A path resolution split by namespace: a single path may resolve to
/// different entities in the type, value, and macro namespaces at once.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct PathResolutionPerNs {
    /// Resolution in the type namespace, if any.
    pub type_ns: Option<PathResolution>,
    /// Resolution in the value namespace, if any.
    pub value_ns: Option<PathResolution>,
    /// Resolution in the macro namespace, if any.
    pub macro_ns: Option<PathResolution>,
}
124
125impl PathResolutionPerNs {
126 pub fn new(
127 type_ns: Option<PathResolution>,
128 value_ns: Option<PathResolution>,
129 macro_ns: Option<PathResolution>,
130 ) -> Self {
131 PathResolutionPerNs { type_ns, value_ns, macro_ns }
132 }
133 pub fn any(&self) -> Option<PathResolution> {
134 self.type_ns.or(self.value_ns).or(self.macro_ns)
135 }
136}
137
/// The type of an expression or pattern, before and after adjustments
/// (coercions, auto-deref, auto-borrow).
#[derive(Debug)]
pub struct TypeInfo<'db> {
    /// The type as inferred, before any adjustment.
    pub original: Type<'db>,
    /// The type after adjustments, or `None` when no adjustment applies.
    pub adjusted: Option<Type<'db>>,
}
145
146impl<'db> TypeInfo<'db> {
147 pub fn original(self) -> Type<'db> {
148 self.original
149 }
150
151 pub fn has_adjustment(&self) -> bool {
152 self.adjusted.is_some()
153 }
154
155 pub fn adjusted(self) -> Type<'db> {
157 self.adjusted.unwrap_or(self.original)
158 }
159}
160
/// Primary API for reasoning about source code semantics; generic over the
/// database so callers can use either a concrete or a `dyn` database type.
pub struct Semantics<'db, DB: ?Sized> {
    pub db: &'db DB,
    // The type-erased implementation all methods delegate to (via `Deref`).
    imp: SemanticsImpl<'db>,
}
166
/// The non-generic core of [`Semantics`], holding per-session caches.
pub struct SemanticsImpl<'db> {
    pub db: &'db dyn HirDatabase,
    // Source-to-def lookup cache; interior mutability because `Semantics`
    // methods take `&self`.
    s2d_cache: RefCell<SourceToDefCache>,
    // Cache of macro-call syntax nodes to their interned call ids.
    macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroCallId>>,
}
173
174impl<DB: ?Sized> fmt::Debug for Semantics<'_, DB> {
175 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
176 write!(f, "Semantics {{ ... }}")
177 }
178}
179
// Forward all `SemanticsImpl` methods through `Semantics` transparently.
impl<'db, DB: ?Sized> ops::Deref for Semantics<'db, DB> {
    type Target = SemanticsImpl<'db>;

    fn deref(&self) -> &Self::Target {
        &self.imp
    }
}
187
/// The kind of a lint-level attribute (`#[allow(..)]`, `#[deny(..)]`, …).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum LintAttr {
    Allow,
    Expect,
    Warn,
    Deny,
    Forbid,
}
196
197impl Semantics<'_, dyn HirDatabase> {
201 pub fn new_dyn(db: &'_ dyn HirDatabase) -> Semantics<'_, dyn HirDatabase> {
203 let impl_ = SemanticsImpl::new(db);
204 Semantics { db, imp: impl_ }
205 }
206}
207
208impl<DB: HirDatabase> Semantics<'_, DB> {
209 pub fn new(db: &DB) -> Semantics<'_, DB> {
211 let impl_ = SemanticsImpl::new(db);
212 Semantics { db, imp: impl_ }
213 }
214}
215
impl<DB: HirDatabase + ?Sized> Semantics<'_, DB> {
    /// Returns the `HirFileId` of the (possibly macro-generated) file containing `syntax_node`.
    pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
        self.imp.find_file(syntax_node).file_id
    }

    /// Iterates the ancestors of `token`'s parent node, crossing macro boundaries.
    pub fn token_ancestors_with_macros(
        &self,
        token: SyntaxToken,
    ) -> impl Iterator<Item = SyntaxNode> + '_ {
        token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
    }

    /// Finds the innermost node of type `N` at `offset`, walking *up* through
    /// macro call sites.
    pub fn find_node_at_offset_with_macros<N: AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
    }

    /// Finds a node of type `N` at `offset`, descending *into* macro expansions
    /// and returning the first match.
    pub fn find_node_at_offset_with_descend<N: AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
    }

    /// Like [`Self::find_node_at_offset_with_descend`], but yields one match per
    /// macro descent instead of only the first.
    pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
        &'slf self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = N> + 'slf {
        self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
    }

    /// Yields all name-like nodes at `offset`, descending into non-opaque macro
    /// expansions, smallest text range first.
    pub fn find_namelike_at_offset_with_descend<'slf>(
        &'slf self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = ast::NameLike> + 'slf {
        node.token_at_offset(offset)
            .map(move |token| self.descend_into_macros_no_opaque(token, true))
            .map(|descendants| descendants.into_iter().filter_map(move |it| it.value.parent()))
            // Merge the per-token streams so shorter (more specific) ranges come first.
            .kmerge_by(|left, right| left.text_range().len().lt(&right.text_range().len()))
            .filter_map(ast::NameLike::cast)
    }

    /// Collects the lint-level attributes (`allow`/`expect`/`warn`/`deny`/`forbid`)
    /// on `item` after `cfg_attr` expansion, including any extra crate-level
    /// attributes when `item` is the crate-root source file. Each result pairs
    /// the level with the lint path (e.g. `clippy::all`).
    pub fn lint_attrs(
        &self,
        file_id: FileId,
        krate: Crate,
        item: ast::AnyHasAttrs,
    ) -> impl DoubleEndedIterator<Item = (LintAttr, SmolStr)> {
        // Lazily fetch cfg options only if a `cfg_attr` actually needs them.
        let mut cfg_options = None;
        let cfg_options = || *cfg_options.get_or_insert_with(|| krate.id.cfg_options(self.db));

        let is_crate_root = file_id == krate.root_file(self.imp.db);
        let is_source_file = ast::SourceFile::can_cast(item.syntax().kind());
        let extra_crate_attrs = (is_crate_root && is_source_file)
            .then(|| {
                parse_extra_crate_attrs(self.imp.db, krate.id)
                    .into_iter()
                    .flat_map(|src| src.attrs())
            })
            .into_iter()
            .flatten();

        let mut result = Vec::new();
        hir_expand::attrs::expand_cfg_attr::<Infallible>(
            extra_crate_attrs.chain(ast::attrs_including_inner(&item)),
            cfg_options,
            |attr, _, _, _| {
                // Only token-tree attributes with a single-segment path can be lint levels.
                let hir_expand::attrs::Meta::TokenTree { path, tt } = attr else {
                    return ControlFlow::Continue(());
                };
                if path.segments.len() != 1 {
                    return ControlFlow::Continue(());
                }
                let lint_attr = match path.segments[0].text() {
                    "allow" => LintAttr::Allow,
                    "expect" => LintAttr::Expect,
                    "warn" => LintAttr::Warn,
                    "deny" => LintAttr::Deny,
                    "forbid" => LintAttr::Forbid,
                    _ => return ControlFlow::Continue(()),
                };
                // Rebuild each comma-separated lint path from its tokens
                // (identifiers and `::` separators).
                let mut lint = SmolStrBuilder::new();
                for token in
                    tt.syntax().children_with_tokens().filter_map(SyntaxElement::into_token)
                {
                    match token.kind() {
                        T![:] | T![::] => lint.push_str(token.text()),
                        kind if kind.is_any_identifier() => lint.push_str(token.text()),
                        T![,] => {
                            // A comma ends the current lint path; flush it.
                            let lint = mem::replace(&mut lint, SmolStrBuilder::new()).finish();
                            if !lint.is_empty() {
                                result.push((lint_attr, lint));
                            }
                        }
                        _ => {}
                    }
                }
                // Flush the trailing lint path (no comma after the last one).
                let lint = lint.finish();
                if !lint.is_empty() {
                    result.push((lint_attr, lint));
                }

                ControlFlow::Continue(())
            },
        );
        result.into_iter()
    }

    /// Resolves a range pattern (`a..=b`) to its desugared range struct.
    pub fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<Struct> {
        self.imp.resolve_range_pat(range_pat).map(Struct::from)
    }

    /// Resolves a range expression (`a..b`) to its desugared range struct.
    pub fn resolve_range_expr(&self, range_expr: &ast::RangeExpr) -> Option<Struct> {
        self.imp.resolve_range_expr(range_expr).map(Struct::from)
    }

    /// Resolves `.await` to the `poll` function it desugars to.
    pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
        self.imp.resolve_await_to_poll(await_expr)
    }

    /// Resolves a prefix operator (`-`, `!`, `*`) to its overloaded trait function.
    pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
        self.imp.resolve_prefix_expr(prefix_expr)
    }

    /// Resolves `a[b]` to its overloaded `index`/`index_mut` function.
    pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
        self.imp.resolve_index_expr(index_expr)
    }

    /// Resolves a binary operator to its overloaded trait function.
    pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
        self.imp.resolve_bin_expr(bin_expr)
    }

    /// Resolves the `?` operator to the function it desugars to.
    pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
        self.imp.resolve_try_expr(try_expr)
    }

    /// Resolves a record literal to the enum variant or struct it constructs.
    pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<Variant> {
        self.imp.resolve_variant(record_lit).map(Variant::from)
    }

    /// Returns the first module defined in `file`, if any.
    pub fn file_to_module_def(&self, file: impl Into<FileId>) -> Option<Module> {
        self.imp.file_to_module_defs(file.into()).next()
    }

    /// Returns all modules defined in `file` (a file can be included in
    /// multiple crates/module trees).
    pub fn file_to_module_defs(&self, file: impl Into<FileId>) -> impl Iterator<Item = Module> {
        self.imp.file_to_module_defs(file.into())
    }

    /// Like [`Self::file_to_module_def`], but also works for macro files.
    pub fn hir_file_to_module_def(&self, file: impl Into<HirFileId>) -> Option<Module> {
        self.imp.hir_file_to_module_defs(file.into()).next()
    }

    /// Like [`Self::file_to_module_defs`], but also works for macro files.
    pub fn hir_file_to_module_defs(
        &self,
        file: impl Into<HirFileId>,
    ) -> impl Iterator<Item = Module> {
        self.imp.hir_file_to_module_defs(file.into())
    }

    /// Whether `krate` is built with a nightly toolchain; an unknown toolchain
    /// (`None`) is treated as nightly.
    pub fn is_nightly(&self, krate: Crate) -> bool {
        let toolchain = self.db.toolchain_channel(krate.into());
        matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None)
    }

    /// Maps an ADT syntax node to its HIR definition.
    pub fn to_adt_def(&self, a: &ast::Adt) -> Option<Adt> {
        self.imp.to_def(a)
    }

    /// Maps a `const` syntax node to its HIR definition.
    pub fn to_const_def(&self, c: &ast::Const) -> Option<Const> {
        self.imp.to_def(c)
    }

    /// Maps an `enum` syntax node to its HIR definition.
    pub fn to_enum_def(&self, e: &ast::Enum) -> Option<Enum> {
        self.imp.to_def(e)
    }

    /// Maps an enum-variant syntax node to its HIR definition.
    pub fn to_enum_variant_def(&self, v: &ast::Variant) -> Option<EnumVariant> {
        self.imp.to_def(v)
    }

    /// Maps an `fn` syntax node to its HIR definition.
    pub fn to_fn_def(&self, f: &ast::Fn) -> Option<Function> {
        self.imp.to_def(f)
    }

    /// Maps an `impl` syntax node to its HIR definition.
    pub fn to_impl_def(&self, i: &ast::Impl) -> Option<Impl> {
        self.imp.to_def(i)
    }

    /// Maps a macro definition syntax node to its HIR definition.
    pub fn to_macro_def(&self, m: &ast::Macro) -> Option<Macro> {
        self.imp.to_def(m)
    }

    /// Maps a `mod` syntax node to its HIR definition.
    pub fn to_module_def(&self, m: &ast::Module) -> Option<Module> {
        self.imp.to_def(m)
    }

    /// Maps a `static` syntax node to its HIR definition.
    pub fn to_static_def(&self, s: &ast::Static) -> Option<Static> {
        self.imp.to_def(s)
    }

    /// Maps a `struct` syntax node to its HIR definition.
    pub fn to_struct_def(&self, s: &ast::Struct) -> Option<Struct> {
        self.imp.to_def(s)
    }

    /// Maps a `trait` syntax node to its HIR definition.
    pub fn to_trait_def(&self, t: &ast::Trait) -> Option<Trait> {
        self.imp.to_def(t)
    }

    /// Maps a type-alias syntax node to its HIR definition.
    pub fn to_type_alias_def(&self, t: &ast::TypeAlias) -> Option<TypeAlias> {
        self.imp.to_def(t)
    }

    /// Maps a `union` syntax node to its HIR definition.
    pub fn to_union_def(&self, u: &ast::Union) -> Option<Union> {
        self.imp.to_def(u)
    }
}
453
454impl<'db> SemanticsImpl<'db> {
455 fn new(db: &'db dyn HirDatabase) -> Self {
456 SemanticsImpl { db, s2d_cache: Default::default(), macro_call_cache: Default::default() }
457 }
458
459 pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
460 let hir_file_id = file_id.into();
461 let tree = self.db.parse(file_id).tree();
462 self.cache(tree.syntax().clone(), hir_file_id);
463 tree
464 }
465
466 pub fn first_crate(&self, file: FileId) -> Option<Crate> {
468 match self.file_to_module_defs(file).next() {
469 Some(module) => Some(module.krate(self.db)),
470 None => self.db.all_crates().last().copied().map(Into::into),
471 }
472 }
473
474 pub fn attach_first_edition_opt(&self, file: FileId) -> Option<EditionedFileId> {
475 let krate = self.file_to_module_defs(file).next()?.krate(self.db);
476 Some(EditionedFileId::new(self.db, file, krate.edition(self.db)))
477 }
478
479 pub fn attach_first_edition(&self, file: FileId) -> EditionedFileId {
480 self.attach_first_edition_opt(file)
481 .unwrap_or_else(|| EditionedFileId::current_edition(self.db, file))
482 }
483
484 pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
485 let file_id = self.attach_first_edition(file_id);
486
487 let tree = self.db.parse(file_id).tree();
488 self.cache(tree.syntax().clone(), file_id.into());
489 tree
490 }
491
492 pub fn adjust_edition(&self, file_id: HirFileId) -> HirFileId {
493 if let Some(editioned_file_id) = file_id.file_id() {
494 self.attach_first_edition_opt(editioned_file_id.file_id(self.db))
495 .map_or(file_id, Into::into)
496 } else {
497 file_id
498 }
499 }
500
    /// Finds the syntax this file "comes from": for a file module, the `mod`
    /// declaration in its parent file; for a macro file, the macro call it was
    /// expanded from. Returns `None` for a crate root.
    pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
        match file_id {
            HirFileId::FileId(file_id) => {
                let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?;
                let def_map = crate_def_map(self.db, module.krate(self.db).id);
                match def_map[module.id].origin {
                    ModuleOrigin::CrateRoot { .. } => None,
                    ModuleOrigin::File { declaration, declaration_tree_id, .. } => {
                        let file_id = declaration_tree_id.file_id();
                        let in_file = InFile::new(file_id, declaration);
                        let node = in_file.to_node(self.db);
                        let root = find_root(node.syntax());
                        // Cache the parent's root so later `find_file` lookups succeed.
                        self.cache(root, file_id);
                        Some(in_file.with_value(node.syntax().clone()))
                    }
                    _ => unreachable!("FileId can only belong to a file module"),
                }
            }
            HirFileId::MacroFile(macro_file) => {
                let node = self.db.lookup_intern_macro_call(macro_file).to_node(self.db);
                let root = find_root(&node.value);
                self.cache(root, node.file_id);
                Some(node)
            }
        }
    }
527
528 pub fn module_definition_node(&self, module: Module) -> InFile<SyntaxNode> {
531 let def_map = module.id.def_map(self.db);
532 let definition = def_map[module.id].origin.definition_source(self.db);
533 let definition = definition.map(|it| it.node());
534 let root_node = find_root(&definition.value);
535 self.cache(root_node, definition.file_id);
536 definition
537 }
538
539 pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
540 let node = self.db.parse_or_expand(file_id);
541 self.cache(node.clone(), file_id);
542 node
543 }
544
545 pub fn expand(&self, file_id: MacroCallId) -> ExpandResult<SyntaxNode> {
546 let res = self.db.parse_macro_expansion(file_id).map(|it| it.0.syntax_node());
547 self.cache(res.value.clone(), file_id.into());
548 res
549 }
550
551 pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<InFile<SyntaxNode>> {
552 let file_id = self.to_def(macro_call)?;
553 let node = self.parse_or_expand(file_id.into());
554 Some(InFile::new(file_id.into(), node))
555 }
556
    /// Evaluates a `cfg` token tree against the options of the crate containing
    /// `attr`. Returns `None` when the containing crate cannot be determined.
    pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
        let file_id = self.find_file(attr.syntax()).file_id;
        let krate = match file_id {
            HirFileId::FileId(file_id) => {
                self.file_to_module_defs(file_id.file_id(self.db)).next()?.krate(self.db).id
            }
            // For macro files the crate is recorded on the macro call itself.
            HirFileId::MacroFile(macro_file) => self.db.lookup_intern_macro_call(macro_file).krate,
        };
        hir_expand::check_cfg_attr_value(self.db, attr, krate)
    }
567
    /// Expands `macro_call`, except for builtin macros whose expansions carry
    /// no user-visible content (e.g. `file!`, `asm!`, `format_args!`,
    /// `compile_error!`), for which `None` is returned.
    pub fn expand_allowed_builtins(
        &self,
        macro_call: &ast::MacroCall,
    ) -> Option<ExpandResult<SyntaxNode>> {
        let file_id = self.to_def(macro_call)?;
        let macro_call = self.db.lookup_intern_macro_call(file_id);

        // Builtins whose expansions are not useful to show to the user.
        let skip = matches!(
            macro_call.def.kind,
            hir_expand::MacroDefKind::BuiltIn(
                _,
                BuiltinFnLikeExpander::Column
                    | BuiltinFnLikeExpander::File
                    | BuiltinFnLikeExpander::ModulePath
                    | BuiltinFnLikeExpander::Asm
                    | BuiltinFnLikeExpander::GlobalAsm
                    | BuiltinFnLikeExpander::NakedAsm
                    | BuiltinFnLikeExpander::LogSyntax
                    | BuiltinFnLikeExpander::TraceMacros
                    | BuiltinFnLikeExpander::FormatArgs
                    | BuiltinFnLikeExpander::FormatArgsNl
                    | BuiltinFnLikeExpander::ConstFormatArgs,
            ) | hir_expand::MacroDefKind::BuiltInEager(_, EagerExpander::CompileError)
        );
        if skip {
            return None;
        }

        let node = self.expand(file_id);
        Some(node)
    }
603
604 pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<InFile<SyntaxNode>>> {
606 let src = self.wrap_node_infile(item.clone());
607 let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
608 Some(self.expand(macro_call_id).map(|it| InFile::new(macro_call_id.into(), it)))
609 }
610
    /// Expands a `#[derive(..)]` attribute as if it were an attribute macro on
    /// its ADT, returning the combined expansion node.
    pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
        // Derives can only sit directly on an ADT.
        let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
        let src = self.wrap_node_infile(attr.clone());
        let call_id = self.with_ctx(|ctx| {
            ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
        })?;
        Some(self.parse_or_expand(call_id.into()))
    }
619
    /// Resolves each derive listed in `attr` to its macro, keeping positions:
    /// unresolved derives are `None`. Handles both ordinary derive macro calls
    /// and builtin derive impls (resolved through lang items).
    pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
        let calls = self.derive_macro_calls(attr)?;
        self.with_ctx(|ctx| {
            Some(
                calls
                    .into_iter()
                    .map(|call| {
                        let call = call?;
                        match call {
                            // A regular derive: map the macro call back to its macro id.
                            Either::Left(call) => {
                                macro_call_to_macro_id(ctx, call).map(|id| Macro { id })
                            }
                            // A builtin derive impl: look the macro up via lang items.
                            Either::Right(call) => {
                                let call = call.loc(self.db);
                                let krate = call.krate(self.db);
                                let lang_items = hir_def::lang_item::lang_items(self.db, krate);
                                call.trait_.derive_macro(lang_items).map(|id| Macro { id })
                            }
                        }
                    })
                    .collect(),
            )
        })
    }
644
    /// Expands each derive listed in `attr`, keeping positions: entries that
    /// are unresolved or builtin (no textual expansion) are `None`.
    pub fn expand_derive_macro(
        &self,
        attr: &ast::Attr,
    ) -> Option<Vec<Option<ExpandResult<SyntaxNode>>>> {
        let res: Vec<_> = self
            .derive_macro_calls(attr)?
            .into_iter()
            .map(|call| {
                // Only `Left` (real macro calls) have an expansion to parse.
                let file_id = call?.left()?;
                let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
                let root_node = value.0.syntax_node();
                self.cache(root_node.clone(), file_id.into());
                Some(ExpandResult { value: root_node, err })
            })
            .collect();
        Some(res)
    }
662
663 fn derive_macro_calls(
664 &self,
665 attr: &ast::Attr,
666 ) -> Option<Vec<Option<Either<MacroCallId, BuiltinDeriveImplId>>>> {
667 let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
668 let file_id = self.find_file(adt.syntax()).file_id;
669 let adt = InFile::new(file_id, &adt);
670 let src = InFile::new(file_id, attr.clone());
671 self.with_ctx(|ctx| {
672 let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?;
673 Some(res.to_vec())
674 })
675 }
676
    /// Whether the file containing `adt` has any `#[derive(..)]`-annotated ADTs.
    pub fn is_derive_annotated(&self, adt: InFile<&ast::Adt>) -> bool {
        self.with_ctx(|ctx| ctx.file_of_adt_has_derives(adt))
    }
680
    /// Returns `(helper_name, derive_macro_name)` pairs for the derive helper
    /// attributes in scope on `adt`, or `None` when there are none.
    pub fn derive_helpers_in_scope(&self, adt: &ast::Adt) -> Option<Vec<(Symbol, Symbol)>> {
        let sa = self.analyze_no_infer(adt.syntax())?;
        let id = self.db.ast_id_map(sa.file_id).ast_id(adt);
        let result = sa
            .resolver
            .def_map()
            .derive_helpers_in_scope(InFile::new(sa.file_id, id))?
            .iter()
            .map(|(name, macro_, _)| {
                let macro_name = Macro::from(*macro_).name(self.db).symbol().clone();
                (name.symbol().clone(), macro_name)
            })
            .collect();
        Some(result)
    }
696
    /// Resolves `attr` as a derive helper on its enclosing ADT, returning the
    /// `(macro, macro call)` pairs that declare a helper with that name.
    /// Returns `None` when `attr` is not a known helper.
    pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroCallId)>> {
        let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
            ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
            ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
            ast::Item::Union(it) => Some(ast::Adt::Union(it)),
            _ => None,
        })?;
        // Only single-segment attribute paths can be helpers.
        let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
        let sa = self.analyze_no_infer(adt.syntax())?;
        let id = self.db.ast_id_map(sa.file_id).ast_id(&adt);
        let res: Vec<_> = sa
            .resolver
            .def_map()
            .derive_helpers_in_scope(InFile::new(sa.file_id, id))?
            .iter()
            .filter(|&(name, _, _)| *name == attr_name)
            // Keep only helpers backed by a real macro call (`Left`).
            .filter_map(|&(_, macro_, call)| Some((macro_.into(), call.left()?)))
            .collect();
        res.is_empty().not().then_some(res)
    }
718
    /// Whether `item` is annotated with a resolved attribute macro.
    pub fn is_attr_macro_call(&self, item: InFile<&ast::Item>) -> bool {
        self.with_ctx(|ctx| ctx.item_to_macro_call(item).is_some())
    }
722
    /// Expands `actual_macro_call` with `speculative_args` substituted for its
    /// real arguments, mapping `token_to_map` into the expansion. Used for
    /// completion/assists without committing edits to the database.
    pub fn speculative_expand_macro_call(
        &self,
        actual_macro_call: &ast::MacroCall,
        speculative_args: &ast::TokenTree,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        let macro_file = self.to_def(actual_macro_call)?;
        hir_expand::db::expand_speculative(
            self.db,
            macro_file,
            speculative_args.syntax(),
            token_to_map,
        )
    }
739
    /// Raw speculative expansion for an already-known macro call id.
    pub fn speculative_expand_raw(
        &self,
        macro_file: MacroCallId,
        speculative_args: &SyntaxNode,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        hir_expand::db::expand_speculative(self.db, macro_file, speculative_args, token_to_map)
    }
748
    /// Speculatively expands the attribute macro on `actual_macro_call` with
    /// `speculative_args` as the annotated item.
    pub fn speculative_expand_attr_macro(
        &self,
        actual_macro_call: &ast::Item,
        speculative_args: &ast::Item,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        let macro_call = self.wrap_node_infile(actual_macro_call.clone());
        let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?;
        hir_expand::db::expand_speculative(
            self.db,
            macro_call_id,
            speculative_args.syntax(),
            token_to_map,
        )
    }
766
    /// Speculatively expands a `#[derive(..)]` attribute as a pseudo attribute
    /// macro, substituting `speculative_args` for the real attribute.
    pub fn speculative_expand_derive_as_pseudo_attr_macro(
        &self,
        actual_macro_call: &ast::Attr,
        speculative_args: &ast::Attr,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        let attr = self.wrap_node_infile(actual_macro_call.clone());
        // Derives can only sit directly on an ADT.
        let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
        let macro_call_id = self.with_ctx(|ctx| {
            ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
        })?;
        hir_expand::db::expand_speculative(
            self.db,
            macro_call_id,
            speculative_args.syntax(),
            token_to_map,
        )
    }
785
    /// Computes the locals whose resolution would change if `to_be_renamed`
    /// were renamed to `new_name` (shadowing conflicts), by walking the body
    /// from the binding's owner expression.
    pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &Name) -> Vec<Local> {
        let Some(def) = to_be_renamed.parent.as_def_with_body() else {
            // Only bodies (fn/const/static/...) can have local rename conflicts.
            return Vec::new();
        };
        let body = Body::of(self.db, def);
        let resolver = to_be_renamed.parent.resolver(self.db);
        let starting_expr =
            body.binding_owner(to_be_renamed.binding_id).unwrap_or(body.root_expr());
        let mut visitor = RenameConflictsVisitor {
            body,
            conflicts: FxHashSet::default(),
            db: self.db,
            new_name: new_name.symbol().clone(),
            old_name: to_be_renamed.name(self.db).symbol().clone(),
            owner: def,
            to_be_renamed: to_be_renamed.binding_id,
            resolver,
        };
        visitor.rename_conflicts(starting_expr);
        visitor
            .conflicts
            .into_iter()
            .map(|binding_id| Local { parent: to_be_renamed.parent, binding_id })
            .collect()
    }
814
    /// If `string` is a `format_args!`-style template or an `asm!` template
    /// line, returns the captured ranges (in the original file) paired with
    /// their resolutions. Returns `None` otherwise.
    pub fn as_format_args_parts(
        &self,
        string: &ast::String,
    ) -> Option<Vec<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)>> {
        let string_start = string.syntax().text_range().start();
        let token = self.wrap_token_infile(string.syntax().clone());
        // Descend into macros until one descendant parses as a format/asm template.
        self.descend_into_macros_breakable(token, |token, _| {
            (|| {
                let token = token.value;
                let string = ast::String::cast(token)?;
                let literal =
                    string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
                let parent = literal.parent()?;
                if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
                    let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
                    let format_args = self.wrap_node_infile(format_args);
                    let res = source_analyzer
                        .as_format_args_parts(self.db, format_args.as_ref())?
                        // Re-base ranges onto the original string's position.
                        .map(|(range, res)| (range + string_start, res.map(Either::Left)))
                        .collect();
                    Some(res)
                } else {
                    let asm = ast::AsmExpr::cast(parent)?;
                    let source_analyzer = self.analyze_no_infer(asm.syntax())?;
                    // Which template line of the asm block this literal is.
                    let line = asm.template().position(|it| *it.syntax() == literal)?;
                    let asm = self.wrap_node_infile(asm);
                    let (owner, (expr, asm_parts)) = source_analyzer.as_asm_parts(asm.as_ref())?;
                    let res = asm_parts
                        .get(line)?
                        .iter()
                        .map(|&(range, index)| {
                            (
                                range + string_start,
                                Some(Either::Right(InlineAsmOperand { owner, expr, index })),
                            )
                        })
                        .collect();
                    Some(res)
                }
            })()
            .map_or(ControlFlow::Continue(()), ControlFlow::Break)
        })
    }
859
    /// Checks whether `original_token` at `offset` sits inside a format-args
    /// (or asm) template capture, returning the capture's range (original file
    /// and expansion), the template string, and the capture's resolution.
    pub fn check_for_format_args_template(
        &self,
        original_token: SyntaxToken,
        offset: TextSize,
    ) -> Option<(
        TextRange,
        HirFileRange,
        ast::String,
        Option<Either<PathResolution, InlineAsmOperand>>,
    )> {
        let original_token =
            self.wrap_token_infile(original_token).map(ast::String::cast).transpose()?;
        self.check_for_format_args_template_with_file(original_token, offset)
    }
882
    /// File-aware variant of [`Self::check_for_format_args_template`]:
    /// descends into macros and resolves the capture at `offset` within the
    /// template string, returning ranges relative to both the original token
    /// and the expansion it was found in.
    pub fn check_for_format_args_template_with_file(
        &self,
        original_token: InFile<ast::String>,
        offset: TextSize,
    ) -> Option<(
        TextRange,
        HirFileRange,
        ast::String,
        Option<Either<PathResolution, InlineAsmOperand>>,
    )> {
        // Offset inside the string token; bails if `offset` precedes it.
        let relative_offset =
            offset.checked_sub(original_token.value.syntax().text_range().start())?;
        self.descend_into_macros_breakable(
            original_token.as_ref().map(|it| it.syntax().clone()),
            |token, _| {
                (|| {
                    let token = token.map(ast::String::cast).transpose()?;
                    self.resolve_offset_in_format_args(token.as_ref(), relative_offset).map(
                        |(range, res)| {
                            (
                                // Range re-based onto the original file...
                                range + original_token.value.syntax().text_range().start(),
                                // ...and onto the expansion file it resolved in.
                                HirFileRange {
                                    file_id: token.file_id,
                                    range: range + token.value.syntax().text_range().start(),
                                },
                                token.value,
                                res,
                            )
                        },
                    )
                })()
                .map_or(ControlFlow::Continue(()), ControlFlow::Break)
            },
        )
    }
925
    /// Resolves the capture at `offset` (relative to the string token) within
    /// a format-args or asm template, returning its range and resolution.
    fn resolve_offset_in_format_args(
        &self,
        InFile { value: string, file_id }: InFile<&ast::String>,
        offset: TextSize,
    ) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> {
        debug_assert!(offset <= string.syntax().text_range().len());
        let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
        let parent = literal.parent()?;
        if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
            let source_analyzer =
                &self.analyze_impl(InFile::new(file_id, format_args.syntax()), None, false)?;
            source_analyzer
                .resolve_offset_in_format_args(self.db, InFile::new(file_id, &format_args), offset)
                .map(|(range, res)| (range, res.map(Either::Left)))
        } else {
            let asm = ast::AsmExpr::cast(parent)?;
            let source_analyzer =
                self.analyze_impl(InFile::new(file_id, asm.syntax()), None, false)?;
            // Which template line of the asm block this literal is.
            let line = asm.template().position(|it| *it.syntax() == literal)?;
            source_analyzer
                .resolve_offset_in_asm_template(InFile::new(file_id, &asm), line, offset)
                .map(|(owner, (expr, range, index))| {
                    (range, Some(Either::Right(InlineAsmOperand { owner, expr, index })))
                })
        }
    }
952
    /// Debug helper: pretty-prints the HIR (body, variant fields, or signature)
    /// of whatever owns the code at `token`.
    pub fn debug_hir_at(&self, token: SyntaxToken) -> Option<String> {
        self.analyze_no_infer(&token.parent()?).and_then(|it| {
            Some(match it.body_or_sig.as_ref()? {
                crate::source_analyzer::BodyOrSig::Body { def, body, .. } => {
                    hir_def::expr_store::pretty::print_body_hir(
                        self.db,
                        body,
                        *def,
                        it.file_id.edition(self.db),
                    )
                }
                &crate::source_analyzer::BodyOrSig::VariantFields { def, .. } => {
                    hir_def::expr_store::pretty::print_variant_body_hir(
                        self.db,
                        def,
                        it.file_id.edition(self.db),
                    )
                }
                &crate::source_analyzer::BodyOrSig::Sig { def, .. } => {
                    hir_def::expr_store::pretty::print_signature(
                        self.db,
                        def,
                        it.file_id.edition(self.db),
                    )
                }
            })
        })
    }
981
    /// If `tok`'s file is pulled in via `include!`, maps the token into that
    /// expansion; otherwise returns the token unchanged.
    pub fn descend_token_into_include_expansion(
        &self,
        tok: InRealFile<SyntaxToken>,
    ) -> InFile<SyntaxToken> {
        let Some(include) =
            self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, tok.file_id)
        else {
            return tok.into();
        };
        let span = self.db.real_span_map(tok.file_id).span_for_range(tok.value.text_range());
        let Some(InMacroFile { file_id, value: mut mapped_tokens }) = self.with_ctx(|ctx| {
            Some(
                ctx.cache
                    .get_or_insert_expansion(ctx.db, include)
                    .map_range_down(span)?
                    .map(SmallVec::<[_; 2]>::from_iter),
            )
        }) else {
            return tok.into();
        };
        // A token may map to several expansion tokens; take the last, falling
        // back to the original when the mapping is empty.
        mapped_tokens.pop().map_or_else(|| tok.into(), |(tok, _)| InFile::new(file_id.into(), tok))
    }
1006
    /// Maps `node` into attribute-macro expansions: descends its first and last
    /// non-trivia tokens and, where both land in the same expansion, recovers
    /// the smallest covering node of type `N` there.
    pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
        let mut res = smallvec![];
        let tokens = (|| {
            let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
            let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
            Some((first, last))
        })();
        let (first, last) = match tokens {
            Some(it) => it,
            None => return res,
        };
        let file = self.find_file(node.syntax());

        if first == last {
            // Single-token node: descend that one token and look upward for
            // an `N` ancestor with the same text range.
            self.descend_into_macros_all(
                InFile::new(file.file_id, first),
                false,
                &mut |InFile { value, .. }, _ctx| {
                    if let Some(node) = value
                        .parent_ancestors()
                        .take_while(|it| it.text_range() == value.text_range())
                        .find_map(N::cast)
                    {
                        res.push(node)
                    }
                },
            );
        } else {
            // Multi-token node: descend first and last separately, then pair
            // them up positionally (descents are assumed to stay in lockstep).
            let mut scratch: SmallVec<[_; 1]> = smallvec![];
            self.descend_into_macros_all(
                InFile::new(file.file_id, first),
                false,
                &mut |token, _ctx| scratch.push(token),
            );

            let mut scratch = scratch.into_iter();
            self.descend_into_macros_all(
                InFile::new(file.file_id, last),
                false,
                &mut |InFile { value: last, file_id: last_fid }, _ctx| {
                    if let Some(InFile { value: first, file_id: first_fid }) = scratch.next()
                        && first_fid == last_fid
                        && let Some(p) = first.parent()
                    {
                        // Recover the node covering both descended tokens.
                        let range = first.text_range().cover(last.text_range());
                        let node = find_root(&p)
                            .covering_element(range)
                            .ancestors()
                            .take_while(|it| it.text_range() == range)
                            .find_map(N::cast);
                        if let Some(node) = node {
                            res.push(node);
                        }
                    }
                },
            );
        }
        res
    }
1071
1072 pub fn is_inside_macro_call(&self, token @ InFile { value, .. }: InFile<&SyntaxToken>) -> bool {
1077 value.parent_ancestors().any(|ancestor| {
1078 if ast::MacroCall::can_cast(ancestor.kind()) {
1079 return true;
1080 }
1081
1082 let Some(item) = ast::Item::cast(ancestor) else {
1083 return false;
1084 };
1085 self.with_ctx(|ctx| {
1086 if ctx.item_to_macro_call(token.with_value(&item)).is_some() {
1087 return true;
1088 }
1089 let adt = match item {
1090 ast::Item::Struct(it) => it.into(),
1091 ast::Item::Enum(it) => it.into(),
1092 ast::Item::Union(it) => it.into(),
1093 _ => return false,
1094 };
1095 ctx.file_of_adt_has_derives(token.with_value(&adt))
1096 })
1097 })
1098 }
1099
1100 pub fn descend_into_macros_cb(
1101 &self,
1102 token: SyntaxToken,
1103 mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext),
1104 ) {
1105 self.descend_into_macros_all(self.wrap_token_infile(token), false, &mut |t, ctx| {
1106 cb(t, ctx)
1107 });
1108 }
1109
1110 pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
1111 let mut res = smallvec![];
1112 self.descend_into_macros_all(
1113 self.wrap_token_infile(token.clone()),
1114 false,
1115 &mut |t, _ctx| res.push(t.value),
1116 );
1117 if res.is_empty() {
1118 res.push(token);
1119 }
1120 res
1121 }
1122
1123 pub fn descend_into_macros_no_opaque(
1124 &self,
1125 token: SyntaxToken,
1126 always_descend_into_derives: bool,
1127 ) -> SmallVec<[InFile<SyntaxToken>; 1]> {
1128 let mut res = smallvec![];
1129 let token = self.wrap_token_infile(token);
1130 self.descend_into_macros_all(token.clone(), always_descend_into_derives, &mut |t, ctx| {
1131 if !ctx.is_opaque(self.db) {
1132 res.push(t);
1134 }
1135 });
1136 if res.is_empty() {
1137 res.push(token);
1138 }
1139 res
1140 }
1141
    /// Descends `token` through macro expansions, invoking `cb` for each mapped
    /// token, stopping early (and returning `Some`) when `cb` breaks.
    pub fn descend_into_macros_breakable<T>(
        &self,
        token: InFile<SyntaxToken>,
        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
    ) -> Option<T> {
        self.descend_into_macros_impl(token, false, &mut cb)
    }
1149
1150 pub fn descend_into_macros_exact(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
1153 let mut r = smallvec![];
1154 let text = token.text();
1155 let kind = token.kind();
1156
1157 self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }, ctx| {
1158 let mapped_kind = value.kind();
1159 let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
1160 let matches = (kind == mapped_kind || any_ident_match())
1161 && text == value.text()
1162 && !ctx.is_opaque(self.db);
1163 if matches {
1164 r.push(value);
1165 }
1166 });
1167 if r.is_empty() {
1168 r.push(token);
1169 }
1170 r
1171 }
1172
1173 pub fn descend_into_macros_exact_with_file(
1176 &self,
1177 token: SyntaxToken,
1178 ) -> SmallVec<[InFile<SyntaxToken>; 1]> {
1179 let mut r = smallvec![];
1180 let text = token.text();
1181 let kind = token.kind();
1182
1183 self.descend_into_macros_cb(token.clone(), |InFile { value, file_id }, ctx| {
1184 let mapped_kind = value.kind();
1185 let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
1186 let matches = (kind == mapped_kind || any_ident_match())
1187 && text == value.text()
1188 && !ctx.is_opaque(self.db);
1189 if matches {
1190 r.push(InFile { value, file_id });
1191 }
1192 });
1193 if r.is_empty() {
1194 r.push(self.wrap_token_infile(token));
1195 }
1196 r
1197 }
1198
1199 pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken {
1202 let text = token.text();
1203 let kind = token.kind();
1204 self.descend_into_macros_breakable(
1205 self.wrap_token_infile(token.clone()),
1206 |InFile { value, file_id: _ }, _ctx| {
1207 let mapped_kind = value.kind();
1208 let any_ident_match =
1209 || kind.is_any_identifier() && value.kind().is_any_identifier();
1210 let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
1211 if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
1212 },
1213 )
1214 .unwrap_or(token)
1215 }
1216
    /// Non-breakable variant of [`Self::descend_into_macros_impl`]: visits every
    /// mapped token, never short-circuiting.
    fn descend_into_macros_all(
        &self,
        token: InFile<SyntaxToken>,
        always_descend_into_derives: bool,
        f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext),
    ) {
        self.descend_into_macros_impl(token, always_descend_into_derives, &mut |tok, ctx| {
            f(tok, ctx);
            // Adapt the infallible callback to the ControlFlow-based impl.
            CONTINUE_NO_BREAKS
        });
    }
1228
    /// Workhorse behind all `descend_into_macros_*` helpers.
    ///
    /// Maps `token` down through every macro expansion it participates in —
    /// attribute macros, derives and derive helpers, fn-like calls (including
    /// eager macros) and `include!` — maintaining a worklist of
    /// `(expansion file, tokens still to process)` entries. Each fully
    /// descended token (one that maps into no further expansion) is handed to
    /// `f`; if `f` breaks, the break value is returned immediately.
    fn descend_into_macros_impl<T>(
        &self,
        InFile { value: token, file_id }: InFile<SyntaxToken>,
        always_descend_into_derives: bool,
        f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
    ) -> Option<T> {
        let _p = tracing::info_span!("descend_into_macros_impl").entered();

        let db = self.db;
        // The span of the input token; all descents map this same span down.
        let span = db.span_map(file_id).span_for_range(token.text_range());

        // Maps `span` down into `macro_file`'s expansion and pushes the mapped
        // tokens onto the work stack. `Some(())` iff anything actually mapped.
        let process_expansion_for_token =
            |ctx: &mut SourceToDefCtx<'_, '_>, stack: &mut Vec<_>, macro_file| {
                let InMacroFile { file_id, value: mapped_tokens } = ctx
                    .cache
                    .get_or_insert_expansion(ctx.db, macro_file)
                    .map_range_down(span)?
                    .map(SmallVec::<[_; 2]>::from_iter);
                let res = mapped_tokens.is_empty().not().then_some(());
                stack.push((HirFileId::from(file_id), mapped_tokens));
                res
            };

        let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![];
        // A token in a file pulled in by `include!` belongs to that macro's
        // expansion, so seed the stack from the include expansion if there is one.
        let include = file_id
            .file_id()
            .and_then(|file_id| self.s2d_cache.borrow_mut().get_or_insert_include_for(db, file_id));
        match include {
            Some(include) => {
                self.with_ctx(|ctx| process_expansion_for_token(ctx, &mut stack, include))?;
            }
            None => {
                stack.push((file_id, smallvec![(token, span.ctx)]));
            }
        }

        let mut m_cache = self.macro_call_cache.borrow_mut();

        // Once an expansion consumed a source range, drop any still-pending
        // tokens inside that range — their descendants come from the expansion.
        let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
            tokens.retain(|(t, _): &mut (SyntaxToken, _)| !range.contains_range(t.text_range()))
        };

        while let Some((expansion, ref mut tokens)) = stack.pop() {
            // Pop from the back below, so reverse to process in source order.
            tokens.reverse();
            while let Some((token, ctx)) = tokens.pop() {
                // The closure returns Some(..) iff the token was remapped into
                // some further expansion; None means it is fully descended.
                let was_not_remapped = (|| {
                    // 1) Attribute macro on an enclosing item?
                    let res = self.with_ctx(|ctx| {
                        token
                            .parent_ancestors()
                            .filter_map(ast::Item::cast)
                            .find_map(|item| {
                                // Items without any attribute cannot be attr-expanded.
                                item.attrs().next()?;
                                ctx.item_to_macro_call(InFile::new(expansion, &item))
                                    .zip(Some(item))
                            })
                            .map(|(call_id, item)| {
                                let item_range = item.syntax().text_range();
                                let loc = db.lookup_intern_macro_call(call_id);
                                // For attribute macros, everything from the
                                // invoking attribute to the end of the item is
                                // consumed by the expansion.
                                let text_range = match loc.kind {
                                    hir_expand::MacroCallKind::Attr {
                                        censored_attr_ids: attr_ids,
                                        ..
                                    } => {
                                        let (attr, _, _, _) = attr_ids
                                            .invoc_attr()
                                            .find_attr_range_with_source(db, loc.krate, &item);
                                        let start = attr.syntax().text_range().start();
                                        TextRange::new(start, item_range.end())
                                    }
                                    _ => item_range,
                                };
                                filter_duplicates(tokens, text_range);
                                process_expansion_for_token(ctx, &mut stack, call_id)
                            })
                    });

                    if let Some(res) = res {
                        return res;
                    }

                    // 2) Optionally descend into all derives on an enclosing ADT.
                    if always_descend_into_derives {
                        let res = self.with_ctx(|ctx| {
                            let (derives, adt) = token
                                .parent_ancestors()
                                .filter_map(ast::Adt::cast)
                                .find_map(|adt| {
                                    Some((
                                        ctx.derive_macro_calls(InFile::new(expansion, &adt))?
                                            .map(|(a, b, c)| (a, b, c.to_owned()))
                                            .collect::<SmallVec<[_; 2]>>(),
                                        adt,
                                    ))
                                })?;
                            for (_, derive_attr, derives) in derives {
                                process_expansion_for_token(ctx, &mut stack, derive_attr);
                                for derive in derives.into_iter().flatten() {
                                    let Either::Left(derive) = derive else { continue };
                                    process_expansion_for_token(ctx, &mut stack, derive);
                                }
                            }
                            // The derive exposes the ADT to its expansions.
                            filter_duplicates(tokens, adt.syntax().text_range());
                            Some(())
                        });
                        if let Some(()) = res {
                            return None;
                        }
                    }
                    // 3) Otherwise the token must sit inside a token tree
                    // (fn-like call) or an attr `Meta` (derive-helper case).
                    let tt = token
                        .parent_ancestors()
                        .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
                        .last()?;

                    match tt {
                        Either::Left(tt) => {
                            // fn-like macro call: descend through the call,
                            // unless the token is one of the delimiters (those
                            // never appear in the expansion output).
                            let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
                            if tt.left_delimiter_token().map_or(false, |it| it == token) {
                                return None;
                            }
                            if tt.right_delimiter_token().map_or(false, |it| it == token) {
                                return None;
                            }
                            let mcall = InFile::new(expansion, macro_call);
                            // Memoize call -> expansion file lookups.
                            let file_id = match m_cache.get(&mcall) {
                                Some(&it) => it,
                                None => {
                                    let it = ast::MacroCall::to_def(self, mcall.as_ref())?;
                                    m_cache.insert(mcall, it);
                                    it
                                }
                            };
                            let text_range = tt.syntax().text_range();
                            filter_duplicates(tokens, text_range);

                            self.with_ctx(|ctx| {
                                // Eager macros keep their argument in a separate
                                // expansion; fall back to it if the main
                                // expansion did not contain the token.
                                process_expansion_for_token(ctx, &mut stack, file_id).or(file_id
                                    .eager_arg(db)
                                    .and_then(|arg| {
                                        process_expansion_for_token(ctx, &mut stack, arg)
                                    }))
                            })
                        }
                        // Derives were already handled exhaustively above.
                        Either::Right(_) if always_descend_into_derives => None,
                        Either::Right(meta) => {
                            // Token is inside an attribute: either a derive
                            // invocation or a derive helper.
                            let attr = meta.parent_attr()?;
                            let adt = match attr.syntax().parent().and_then(ast::Adt::cast) {
                                Some(adt) => {
                                    // Attr is directly on an ADT — try resolving
                                    // it as a `#[derive(..)]` invocation.
                                    let res = self.with_ctx(|ctx| {
                                        let derive_call = ctx
                                            .attr_to_derive_macro_call(
                                                InFile::new(expansion, &adt),
                                                InFile::new(expansion, attr.clone()),
                                            )?
                                            .1;

                                        // The derive acts on the attribute only.
                                        let text_range = attr.syntax().text_range();
                                        tokens.retain(|(t, _)| {
                                            !text_range.contains_range(t.text_range())
                                        });
                                        Some(process_expansion_for_token(
                                            ctx,
                                            &mut stack,
                                            derive_call,
                                        ))
                                    });
                                    if let Some(res) = res {
                                        return res;
                                    }
                                    Some(adt)
                                }
                                None => {
                                    // Not directly on an ADT — find the
                                    // enclosing ADT for the helper lookup.
                                    attr.syntax().ancestors().find_map(ast::Item::cast).and_then(
                                        |it| match it {
                                            ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
                                            ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
                                            ast::Item::Union(it) => Some(ast::Adt::Union(it)),
                                            _ => None,
                                        },
                                    )
                                }
                            }?;
                            // Derive helper attribute: map the token into every
                            // derive expansion whose helper name matches.
                            let attr_name =
                                attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
                            let resolver = &token
                                .parent()
                                .and_then(|parent| {
                                    // No inference needed; we only want the resolver.
                                    self.analyze_impl(InFile::new(expansion, &parent), None, false)
                                })?
                                .resolver;
                            let id = db.ast_id_map(expansion).ast_id(&adt);
                            let helpers = resolver
                                .def_map()
                                .derive_helpers_in_scope(InFile::new(expansion, id))?;

                            if !helpers.is_empty() {
                                let text_range = attr.syntax().text_range();
                                filter_duplicates(tokens, text_range);
                            }

                            let mut res = None;
                            self.with_ctx(|ctx| {
                                for (.., derive) in
                                    helpers.iter().filter(|(helper, ..)| *helper == attr_name)
                                {
                                    let Either::Left(derive) = *derive else { continue };
                                    // `Some` if any matching derive expansion
                                    // contained the token.
                                    res = res
                                        .or(process_expansion_for_token(ctx, &mut stack, derive));
                                }
                                res
                            })
                        }
                    }
                })()
                .is_none();
                // Fully descended token: report it and honor a break request.
                if was_not_remapped
                    && let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx)
                {
                    return Some(b);
                }
            }
        }
        None
    }
1519
    /// For each token at `offset`, yields the macro-descended ancestor chains,
    /// merged so that smaller (more specific) nodes come first.
    fn descend_node_at_offset(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
        node.token_at_offset(offset)
            .map(move |token| self.descend_into_macros_exact(token))
            .map(|descendants| {
                descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
            })
            // `token_at_offset` may yield up to two tokens (left/right of the
            // offset); k-merge their ancestor streams by ascending range length
            // so callers see the most specific nodes first. The comparator
            // clones the lazy iterators to compare them lexicographically.
            .kmerge_by(|left, right| {
                left.clone()
                    .map(|node| node.text_range().len())
                    .lt(right.clone().map(|node| node.text_range().len()))
            })
    }
1542
1543 pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
1547 let node = self.find_file(node);
1548 node.original_file_range_rooted(self.db)
1549 }
1550
1551 pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
1553 let node = self.find_file(node);
1554 node.original_file_range_opt(self.db).filter(|(_, ctx)| ctx.is_root()).map(TupleExt::head)
1555 }
1556
1557 pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
1560 self.wrap_node_infile(node).original_ast_node_rooted(self.db).map(
1561 |InRealFile { file_id, value }| {
1562 self.cache(find_root(value.syntax()), file_id.into());
1563 value
1564 },
1565 )
1566 }
1567
1568 pub fn original_syntax_node_rooted(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
1571 let InFile { file_id, .. } = self.find_file(node);
1572 InFile::new(file_id, node).original_syntax_node_rooted(self.db).map(
1573 |InRealFile { file_id, value }| {
1574 self.cache(find_root(&value), file_id.into());
1575 value
1576 },
1577 )
1578 }
1579
1580 pub fn diagnostics_display_range(
1581 &self,
1582 src: InFile<SyntaxNodePtr>,
1583 ) -> FileRangeWrapper<FileId> {
1584 let root = self.parse_or_expand(src.file_id);
1585 let node = src.map(|it| it.to_node(&root));
1586 let FileRange { file_id, range } = node.as_ref().original_file_range_rooted(self.db);
1587 FileRangeWrapper { file_id: file_id.file_id(self.db), range }
1588 }
1589
1590 pub fn diagnostics_display_range_for_range(
1591 &self,
1592 src: InFile<TextRange>,
1593 ) -> FileRangeWrapper<FileId> {
1594 let FileRange { file_id, range } = src.original_node_file_range_rooted(self.db);
1595 FileRangeWrapper { file_id: file_id.file_id(self.db), range }
1596 }
1597
    /// Ancestor nodes of `token`, continuing through macro expansions
    /// (empty if the token has no parent).
    fn token_ancestors_with_macros(
        &self,
        token: SyntaxToken,
    ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
        token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
    }
1604
1605 pub fn ancestors_with_macros(
1608 &self,
1609 node: SyntaxNode,
1610 ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
1611 let node = self.find_file(&node);
1612 self.ancestors_with_macros_file(node.cloned()).map(|it| it.value)
1613 }
1614
    /// Ancestors of `node` including files: when the top of a macro expansion
    /// is reached, climbs out to the macro call's argument node in the calling
    /// file and continues from there.
    pub fn ancestors_with_macros_file(
        &self,
        node: InFile<SyntaxNode>,
    ) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
        iter::successors(Some(node), move |&InFile { file_id, ref value }| match value.parent() {
            Some(parent) => Some(InFile::new(file_id, parent)),
            None => {
                // Reached the root of this tree; only macro files have a
                // "parent" file to climb into.
                let macro_file = file_id.macro_file()?;

                self.with_ctx(|ctx| {
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    // Continue from the parent of the macro call's input node.
                    expansion_info.arg().map(|node| node?.parent()).transpose()
                })
            }
        })
    }
1632
    /// Ancestor chains for all tokens at `offset`, merged by ascending node
    /// size so the most specific node comes first.
    pub fn ancestors_at_offset_with_macros(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = SyntaxNode> + '_ {
        node.token_at_offset(offset)
            .map(|token| self.token_ancestors_with_macros(token))
            // Up to two tokens touch the offset; interleave their ancestor
            // streams smallest-range-first.
            .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
    }
1642
1643 pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
1644 let text = lifetime.text();
1645 let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
1646 let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
1647 gpl.lifetime_params()
1648 .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
1649 })?;
1650 let src = self.wrap_node_infile(lifetime_param);
1651 ToDef::to_def(self, src.as_ref())
1652 }
1653
1654 pub fn resolve_label(&self, label: &ast::Lifetime) -> Option<Label> {
1655 let src = self.wrap_node_infile(label.clone());
1656 let (parent, label_id) = self.with_ctx(|ctx| ctx.label_ref_to_def(src.as_ref()))?;
1657 Some(Label { parent, label_id })
1658 }
1659
1660 pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type<'db>> {
1661 let analyze = self.analyze(ty.syntax())?;
1662 analyze.type_of_type(self.db, ty)
1663 }
1664
    /// Resolves `path` to a trait, provided the path is the path of a trait
    /// type reference (e.g. in `impl Trait for ...` or `dyn Trait`).
    pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
        // The path must be the entirety of a type node.
        let parent_ty = path.syntax().parent().and_then(ast::Type::cast)?;
        let analyze = self.analyze(path.syntax())?;
        // Map the syntax type to its lowered type-ref in the expression store.
        let ty = analyze.store_sm()?.node_type(InFile::new(analyze.file_id, &parent_ty))?;
        let path = match &analyze.store()?.types[ty] {
            hir_def::type_ref::TypeRef::Path(path) => path,
            _ => return None,
        };
        // Resolve in the type namespace; only trait resolutions qualify.
        match analyze.resolver.resolve_path_in_type_ns_fully(self.db, path)? {
            TypeNs::TraitId(trait_id) => Some(trait_id.into()),
            _ => None,
        }
    }
1678
    /// Returns the adjustment steps (autoderef, autoref, coercions) that
    /// inference applied to `expr`, each annotated with its source and target
    /// type.
    pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment<'db>>> {
        // Convert solver mutability into the HIR-level mutability.
        let mutability = |m| match m {
            hir_ty::next_solver::Mutability::Not => Mutability::Shared,
            hir_ty::next_solver::Mutability::Mut => Mutability::Mut,
        };

        let analyzer = self.analyze(expr.syntax())?;

        // The first adjustment starts from the expression's own type.
        let (mut source_ty, _) = analyzer.type_of_expr(self.db, expr)?;

        analyzer.expr_adjustments(expr).map(|it| {
            it.iter()
                .map(|adjust| {
                    let target = Type::new_with_resolver(
                        self.db,
                        &analyzer.resolver,
                        adjust.target.as_ref(),
                    );
                    let kind = match adjust.kind {
                        hir_ty::Adjust::NeverToAny => Adjust::NeverToAny,
                        hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => {
                            Adjust::Deref(Some(OverloadedDeref(
                                m.map(mutability).unwrap_or(Mutability::Shared),
                            )))
                        }
                        hir_ty::Adjust::Deref(None) => Adjust::Deref(None),
                        hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
                            Adjust::Borrow(AutoBorrow::RawPtr(mutability(m)))
                        }
                        hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => {
                            Adjust::Borrow(AutoBorrow::Ref(mutability(m.into())))
                        }
                        hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc),
                    };

                    // Thread the type through: each step's target becomes the
                    // next step's source.
                    let source = mem::replace(&mut source_ty, target.clone());

                    Adjustment { source, target, kind }
                })
                .collect()
        })
    }
1724
1725 pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo<'db>> {
1726 self.analyze(expr.syntax())?
1727 .type_of_expr(self.db, expr)
1728 .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
1729 }
1730
1731 pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo<'db>> {
1732 self.analyze(pat.syntax())?
1733 .type_of_pat(self.db, pat)
1734 .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
1735 }
1736
1737 pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type<'db>> {
1741 self.analyze(pat.syntax())?.type_of_binding_in_pat(self.db, pat)
1742 }
1743
1744 pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type<'db>> {
1745 self.analyze(param.syntax())?.type_of_self(self.db, param)
1746 }
1747
1748 pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type<'db>; 1]> {
1749 self.analyze(pat.syntax())
1750 .and_then(|it| it.pattern_adjustments(self.db, pat))
1751 .unwrap_or_default()
1752 }
1753
1754 pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
1755 self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
1756 }
1757
1758 pub fn resolve_expr_as_callable(&self, call: &ast::Expr) -> Option<Callable<'db>> {
1759 self.analyze(call.syntax())?.resolve_expr_as_callable(self.db, call)
1760 }
1761
1762 pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
1763 self.analyze(call.syntax())?.resolve_method_call(self.db, call)
1764 }
1765
1766 pub fn resolve_method_call_fallback(
1768 &self,
1769 call: &ast::MethodCallExpr,
1770 ) -> Option<(Either<Function, Field>, Option<GenericSubstitution<'db>>)> {
1771 self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
1772 }
1773
    /// Given a trait method `func` and the trait's type arguments `subst`,
    /// resolves the concrete impl method that would be dispatched in `env`.
    ///
    /// Panics if `subst` does not have exactly one type per type parameter of
    /// `trait_`, or if the trait has non-type generic parameters.
    pub fn resolve_trait_impl_method(
        &self,
        env: Type<'db>,
        trait_: Trait,
        func: Function,
        subst: impl IntoIterator<Item = Type<'db>>,
    ) -> Option<Function> {
        // Methods synthesized by builtin derives are already concrete.
        let AnyFunctionId::FunctionId(func) = func.id else { return Some(func) };
        let interner = DbInterner::new_no_crate(self.db);
        let mut subst = subst.into_iter();
        // Build the trait's generic args from the caller-provided types,
        // asserting the arity matches.
        let substs =
            hir_ty::next_solver::GenericArgs::for_item(interner, trait_.id.into(), |_, id, _| {
                assert!(matches!(id, hir_def::GenericParamId::TypeParamId(_)), "expected a type");
                subst.next().expect("too few subst").ty.into()
            });
        assert!(subst.next().is_none(), "too many subst");
        Some(match self.db.lookup_impl_method(env.env, func, substs).0 {
            Either::Left(it) => it.into(),
            // The impl may be a builtin-derive impl, whose methods need a
            // dedicated function id variant.
            Either::Right((impl_, method)) => {
                Function { id: AnyFunctionId::BuiltinDeriveImplMethod { method, impl_ } }
            }
        })
    }
1799
1800 fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<StructId> {
1801 self.analyze(range_pat.syntax())?.resolve_range_pat(self.db, range_pat)
1802 }
1803
1804 fn resolve_range_expr(&self, range_expr: &ast::RangeExpr) -> Option<StructId> {
1805 self.analyze(range_expr.syntax())?.resolve_range_expr(self.db, range_expr)
1806 }
1807
1808 fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
1809 self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr)
1810 }
1811
1812 fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
1813 self.analyze(prefix_expr.syntax())?.resolve_prefix_expr(self.db, prefix_expr)
1814 }
1815
1816 fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
1817 self.analyze(index_expr.syntax())?.resolve_index_expr(self.db, index_expr)
1818 }
1819
1820 fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
1821 self.analyze(bin_expr.syntax())?.resolve_bin_expr(self.db, bin_expr)
1822 }
1823
1824 fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
1825 self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr)
1826 }
1827
    /// Returns the type that the enclosing construct would produce when the
    /// `?` in `try_expr` propagates an error: the innermost enclosing `try`
    /// block's type, the enclosing closure's return type, or the enclosing
    /// function's return type — whichever is found first walking outwards.
    pub fn try_expr_returned_type(&self, try_expr: &ast::TryExpr) -> Option<Type<'db>> {
        self.ancestors_with_macros(try_expr.syntax().clone()).find_map(|parent| {
            if let Some(try_block) = ast::BlockExpr::cast(parent.clone())
                && try_block.try_block_modifier().is_some()
            {
                // `try { .. }` block: the block's own type absorbs the error.
                Some(self.type_of_expr(&try_block.into())?.original)
            } else if let Some(closure) = ast::ClosureExpr::cast(parent.clone()) {
                // `?` inside a closure returns from the closure.
                Some(
                    self.type_of_expr(&closure.into())?
                        .original
                        .as_callable(self.db)?
                        .return_type(),
                )
            } else if let Some(function) = ast::Fn::cast(parent) {
                // Otherwise it returns from the enclosing function.
                Some(self.to_def(&function)?.ret_type(self.db))
            } else {
                None
            }
        })
    }
1849
1850 pub fn resolve_method_call_as_callable(
1853 &self,
1854 call: &ast::MethodCallExpr,
1855 ) -> Option<Callable<'db>> {
1856 self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
1857 }
1858
1859 pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Either<Field, TupleField>> {
1860 self.analyze(field.syntax())?.resolve_field(field)
1861 }
1862
1863 pub fn resolve_field_fallback(
1864 &self,
1865 field: &ast::FieldExpr,
1866 ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution<'db>>)>
1867 {
1868 self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
1869 }
1870
1871 pub fn resolve_record_field(
1872 &self,
1873 field: &ast::RecordExprField,
1874 ) -> Option<(Field, Option<Local>, Type<'db>)> {
1875 self.resolve_record_field_with_substitution(field)
1876 .map(|(field, local, ty, _)| (field, local, ty))
1877 }
1878
1879 pub fn resolve_record_field_with_substitution(
1880 &self,
1881 field: &ast::RecordExprField,
1882 ) -> Option<(Field, Option<Local>, Type<'db>, GenericSubstitution<'db>)> {
1883 self.analyze(field.syntax())?.resolve_record_field(self.db, field)
1884 }
1885
1886 pub fn resolve_record_pat_field(
1887 &self,
1888 field: &ast::RecordPatField,
1889 ) -> Option<(Field, Type<'db>)> {
1890 self.resolve_record_pat_field_with_subst(field).map(|(field, ty, _)| (field, ty))
1891 }
1892
1893 pub fn resolve_record_pat_field_with_subst(
1894 &self,
1895 field: &ast::RecordPatField,
1896 ) -> Option<(Field, Type<'db>, GenericSubstitution<'db>)> {
1897 self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
1898 }
1899
1900 pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
1902 let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
1903 self.resolve_macro_call2(macro_call)
1904 }
1905
1906 pub fn resolve_macro_call2(&self, macro_call: InFile<&ast::MacroCall>) -> Option<Macro> {
1907 self.to_def2(macro_call)
1908 .and_then(|call| self.with_ctx(|ctx| macro_call_to_macro_id(ctx, call)))
1909 .map(Into::into)
1910 }
1911
1912 pub fn is_proc_macro_call(&self, macro_call: InFile<&ast::MacroCall>) -> bool {
1913 self.resolve_macro_call2(macro_call)
1914 .is_some_and(|m| matches!(m.id, MacroId::ProcMacroId(..)))
1915 }
1916
1917 pub fn resolve_macro_call_arm(&self, macro_call: &ast::MacroCall) -> Option<u32> {
1918 let file_id = self.to_def(macro_call)?;
1919 self.db.parse_macro_expansion(file_id).value.1.matched_arm
1920 }
1921
    /// Collects the source locations of all unsafe operations inside `def`'s
    /// body/expression store. Returns an empty set when `def` has no store.
    pub fn get_unsafe_ops(&self, def: ExpressionStoreOwner) -> FxHashSet<ExprOrPatSource> {
        let Ok(def) = ExpressionStoreOwnerId::try_from(def) else { return Default::default() };
        let (body, source_map) = ExpressionStore::with_source_map(self.db, def);
        let infer = InferenceResult::of(self.db, def);
        let mut res = FxHashSet::default();
        // Walk every expression root (e.g. body, default values) separately.
        for root in body.expr_roots() {
            unsafe_operations(self.db, infer, def, body, root, &mut |node, _| {
                // Nodes that cannot be mapped back to syntax are dropped.
                if let Ok(node) = source_map.expr_or_pat_syntax(node) {
                    res.insert(node);
                }
            });
        }
        res
    }
1936
    /// Collects the unsafe operations contained in a specific `unsafe { .. }`
    /// block. The block must actually carry an `unsafe` token.
    pub fn get_unsafe_ops_for_unsafe_block(&self, block: ast::BlockExpr) -> Vec<ExprOrPatSource> {
        always!(block.unsafe_token().is_some());
        let Some(sa) = self.analyze(block.syntax()) else { return vec![] };
        let Some((def, store, sm, Some(infer))) = sa.def() else { return vec![] };
        // Map the block's syntax back to its HIR expression id.
        let block = self.wrap_node_infile(ast::Expr::from(block));
        let Some(ExprOrPatId::ExprId(block)) = sm.node_expr(block.as_ref()) else {
            return Vec::new();
        };
        let mut res = Vec::default();
        // Only walk from the block expression, not the whole body.
        unsafe_operations(self.db, infer, def, store, block, &mut |node, _| {
            if let Ok(node) = sm.expr_or_pat_syntax(node) {
                res.push(node);
            }
        });
        res
    }
1953
    /// Returns `true` if invoking `macro_call` is an unsafe operation — either
    /// because the macro is asm-like, or because its expansion at this call
    /// site contains unsafe operations.
    pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
        let Some(mac) = self.resolve_macro_call(macro_call) else { return false };
        // asm!-style macros are inherently unsafe regardless of expansion.
        if mac.is_asm_like(self.db) {
            return true;
        }

        let Some(sa) = self.analyze(macro_call.syntax()) else { return false };
        let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
        // Only expression-position macro calls can contain unsafe expressions.
        match macro_call.map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast)).transpose() {
            Some(it) => sa.is_unsafe_macro_call_expr(self.db, it.as_ref()),
            None => false,
        }
    }
1967
1968 pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
1969 let item_in_file = self.wrap_node_infile(item.clone());
1970 let id = self.with_ctx(|ctx| {
1971 let macro_call_id = ctx.item_to_macro_call(item_in_file.as_ref())?;
1972 macro_call_to_macro_id(ctx, macro_call_id)
1973 })?;
1974 Some(Macro { id })
1975 }
1976
1977 pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
1978 self.resolve_path_with_subst(path).map(|(it, _)| it)
1979 }
1980
1981 pub fn resolve_path_per_ns(&self, path: &ast::Path) -> Option<PathResolutionPerNs> {
1982 self.analyze(path.syntax())?.resolve_hir_path_per_ns(self.db, path)
1983 }
1984
1985 pub fn resolve_path_with_subst(
1986 &self,
1987 path: &ast::Path,
1988 ) -> Option<(PathResolution, Option<GenericSubstitution<'db>>)> {
1989 self.analyze(path.syntax())?.resolve_path(self.db, path)
1990 }
1991
1992 pub fn resolve_use_type_arg(&self, name: &ast::NameRef) -> Option<TypeParam> {
1993 self.analyze(name.syntax())?.resolve_use_type_arg(name)
1994 }
1995
1996 pub fn resolve_offset_of_field(
1997 &self,
1998 name_ref: &ast::NameRef,
1999 ) -> Option<(Either<EnumVariant, Field>, GenericSubstitution<'db>)> {
2000 self.analyze_no_infer(name_ref.syntax())?.resolve_offset_of_field(self.db, name_ref)
2001 }
2002
2003 pub fn resolve_mod_path(
2004 &self,
2005 scope: &SyntaxNode,
2006 path: &ModPath,
2007 ) -> Option<impl Iterator<Item = ItemInNs>> {
2008 let analyze = self.analyze(scope)?;
2009 let items = analyze.resolver.resolve_module_path_in_items(self.db, path);
2010 Some(items.iter_items().map(|(item, _)| item.into()))
2011 }
2012
2013 fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
2014 self.analyze(record_lit.syntax())?.resolve_variant(record_lit)
2015 }
2016
2017 pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
2018 self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
2019 }
2020
2021 pub fn record_literal_missing_fields(
2022 &self,
2023 literal: &ast::RecordExpr,
2024 ) -> Vec<(Field, Type<'db>)> {
2025 self.analyze(literal.syntax())
2026 .and_then(|it| it.record_literal_missing_fields(self.db, literal))
2027 .unwrap_or_default()
2028 }
2029
2030 pub fn record_literal_matched_fields(
2031 &self,
2032 literal: &ast::RecordExpr,
2033 ) -> Vec<(Field, Type<'db>)> {
2034 self.analyze(literal.syntax())
2035 .and_then(|it| it.record_literal_matched_fields(self.db, literal))
2036 .unwrap_or_default()
2037 }
2038
2039 pub fn record_pattern_missing_fields(
2040 &self,
2041 pattern: &ast::RecordPat,
2042 ) -> Vec<(Field, Type<'db>)> {
2043 self.analyze(pattern.syntax())
2044 .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
2045 .unwrap_or_default()
2046 }
2047
2048 pub fn record_pattern_matched_fields(
2049 &self,
2050 pattern: &ast::RecordPat,
2051 ) -> Vec<(Field, Type<'db>)> {
2052 self.analyze(pattern.syntax())
2053 .and_then(|it| it.record_pattern_matched_fields(self.db, pattern))
2054 .unwrap_or_default()
2055 }
2056
    /// Runs `f` with a [`SourceToDefCtx`] over the shared source-to-def cache.
    /// The `RefCell` borrow lives only for the duration of `f`, so `f` must not
    /// re-enter any API that borrows `s2d_cache` again.
    fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
        let mut ctx = SourceToDefCtx { db: self.db, cache: &mut self.s2d_cache.borrow_mut() };
        f(&mut ctx)
    }
2061
2062 pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
2063 let src = self.find_file(src.syntax()).with_value(src);
2064 T::to_def(self, src)
2065 }
2066
    /// File-aware variant of [`Self::to_def`] for callers that already know
    /// the node's file.
    pub fn to_def2<T: ToDef>(&self, src: InFile<&T>) -> Option<T::Def> {
        T::to_def(self, src)
    }
2070
2071 fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
2072 self.with_ctx(|ctx| ctx.file_to_def(file).to_owned()).into_iter().map(Module::from)
2073 }
2074
2075 fn hir_file_to_module_defs(&self, file: HirFileId) -> impl Iterator<Item = Module> {
2076 self.file_to_module_defs(file.original_file_respecting_includes(self.db).file_id(self.db))
2078 }
2079
2080 pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
2081 self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
2082 db: self.db,
2083 file_id,
2084 resolver,
2085 })
2086 }
2087
2088 pub fn scope_at_offset(
2089 &self,
2090 node: &SyntaxNode,
2091 offset: TextSize,
2092 ) -> Option<SemanticsScope<'db>> {
2093 self.analyze_with_offset_no_infer(node, offset).map(
2094 |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
2095 db: self.db,
2096 file_id,
2097 resolver,
2098 },
2099 )
2100 }
2101
2102 pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>> {
2104 let res = def.source(self.db)?;
2106 self.cache(find_root(res.value.syntax()), res.file_id);
2107 Some(res)
2108 }
2109
2110 pub fn source_with_range<Def: HasSource>(
2111 &self,
2112 def: Def,
2113 ) -> Option<InFile<(TextRange, Option<Def::Ast>)>> {
2114 let res = def.source_with_range(self.db)?;
2115 self.parse_or_expand(res.file_id);
2116 Some(res)
2117 }
2118
2119 pub fn store_owner_for(&self, node: InFile<&SyntaxNode>) -> Option<ExpressionStoreOwner> {
2120 let container = self.with_ctx(|ctx| ctx.find_container(node))?;
2121 container.as_expression_store_owner().map(|id| id.into())
2122 }
2123
2124 fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer<'db>> {
2126 let node = self.find_file(node);
2127 self.analyze_impl(node, None, true)
2128 }
2129
2130 fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer<'db>> {
2132 let node = self.find_file(node);
2133 self.analyze_impl(node, None, false)
2134 }
2135
2136 fn analyze_with_offset_no_infer(
2137 &self,
2138 node: &SyntaxNode,
2139 offset: TextSize,
2140 ) -> Option<SourceAnalyzer<'db>> {
2141 let node = self.find_file(node);
2142 self.analyze_impl(node, Some(offset), false)
2143 }
2144
2145 fn analyze_impl(
2146 &self,
2147 node: InFile<&SyntaxNode>,
2148 offset: Option<TextSize>,
2149 infer: bool,
2151 ) -> Option<SourceAnalyzer<'db>> {
2152 let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered();
2153
2154 let container = self.with_ctx(|ctx| ctx.find_container(node))?;
2155
2156 let resolver = match container {
2157 ChildContainer::DefWithBodyId(def) => {
2158 return Some(if infer {
2159 SourceAnalyzer::new_for_body(self.db, def, node, offset)
2160 } else {
2161 SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
2162 });
2163 }
2164 ChildContainer::VariantId(def) => {
2165 return Some(SourceAnalyzer::new_variant_body(self.db, def, node, offset, infer));
2166 }
2167 ChildContainer::TraitId(it) => {
2168 return Some(if infer {
2169 SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset)
2170 } else {
2171 SourceAnalyzer::new_generic_def_no_infer(self.db, it.into(), node, offset)
2172 });
2173 }
2174 ChildContainer::ImplId(it) => {
2175 return Some(if infer {
2176 SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset)
2177 } else {
2178 SourceAnalyzer::new_generic_def_no_infer(self.db, it.into(), node, offset)
2179 });
2180 }
2181 ChildContainer::EnumId(it) => {
2182 return Some(if infer {
2183 SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset)
2184 } else {
2185 SourceAnalyzer::new_generic_def_no_infer(self.db, it.into(), node, offset)
2186 });
2187 }
2188 ChildContainer::GenericDefId(it) => {
2189 return Some(if infer {
2190 SourceAnalyzer::new_generic_def(self.db, it, node, offset)
2191 } else {
2192 SourceAnalyzer::new_generic_def_no_infer(self.db, it, node, offset)
2193 });
2194 }
2195 ChildContainer::ModuleId(it) => it.resolver(self.db),
2196 };
2197 Some(SourceAnalyzer::new_for_resolver(resolver, node))
2198 }
2199
2200 fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
2201 SourceToDefCache::cache(
2202 &mut self.s2d_cache.borrow_mut().root_to_file_cache,
2203 root_node,
2204 file_id,
2205 );
2206 }
2207
    /// Asserts that `node` belongs to a syntax tree this `Semantics` instance
    /// has cached; panics (inside `find_file`) otherwise.
    pub fn assert_contains_node(&self, node: &SyntaxNode) {
        self.find_file(node);
    }
2211
2212 fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
2213 let cache = self.s2d_cache.borrow();
2214 cache.root_to_file_cache.get(root_node).copied()
2215 }
2216
2217 fn wrap_node_infile<N: AstNode>(&self, node: N) -> InFile<N> {
2218 let InFile { file_id, .. } = self.find_file(node.syntax());
2219 InFile::new(file_id, node)
2220 }
2221
2222 fn wrap_token_infile(&self, token: SyntaxToken) -> InFile<SyntaxToken> {
2223 let InFile { file_id, .. } = self.find_file(&token.parent().unwrap());
2224 InFile::new(file_id, token)
2225 }
2226
    /// Maps `node` back to the `HirFileId` of the cached tree it belongs to.
    ///
    /// # Panics
    /// Panics if the node's root tree was never cached by this instance —
    /// i.e. the node was not obtained through this `Semantics`. The panic
    /// message lists all known roots to aid debugging.
    fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
        let root_node = find_root(node);
        let file_id = self.lookup(&root_node).unwrap_or_else(|| {
            panic!(
                "\n\nFailed to lookup {:?} in this Semantics.\n\
                 Make sure to only query nodes derived from this instance of Semantics.\n\
                 root node:   {:?}\n\
                 known nodes: {}\n\n",
                node,
                root_node,
                self.s2d_cache
                    .borrow()
                    .root_to_file_cache
                    .keys()
                    .map(|it| format!("{it:?}"))
                    .collect::<Vec<_>>()
                    .join(", ")
            )
        });
        InFile::new(file_id, node)
    }
2249
    /// Returns `true` if `expr` is in an unsafe context: either its enclosing
    /// function is an `unsafe fn`, or one of its ancestor expressions within
    /// the same body lowers to an `unsafe { .. }` block.
    pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
        let Some(enclosing_item) =
            expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast)
        else {
            return false;
        };

        // Map the enclosing item to the HIR body that owns `expr`.
        let def = match &enclosing_item {
            // An `unsafe fn` makes its entire body an unsafe context.
            Either::Left(ast::Item::Fn(it)) if it.unsafe_token().is_some() => return true,
            Either::Left(ast::Item::Fn(it)) => (|| match self.to_def(it)?.id {
                AnyFunctionId::FunctionId(id) => Some(DefWithBodyId::FunctionId(id)),
                AnyFunctionId::BuiltinDeriveImplMethod { .. } => None,
            })(),
            Either::Left(ast::Item::Const(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::ConstId)
            }
            Either::Left(ast::Item::Static(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::StaticId)
            }
            Either::Left(_) => None,
            Either::Right(it) => self.to_def(it).map(<_>::into).map(DefWithBodyId::VariantId),
        };
        let Some(def) = def else { return false };
        let enclosing_node = enclosing_item.as_ref().either(|i| i.syntax(), |v| v.syntax());

        let (body, source_map) = Body::with_source_map(self.db, def);

        let file_id = self.find_file(expr.syntax()).file_id;

        // Walk up from `expr` toward the enclosing item; succeed as soon as an
        // ancestor expression lowers to `Expr::Unsafe`.
        let Some(mut parent) = expr.syntax().parent() else { return false };
        loop {
            if &parent == enclosing_node {
                break false;
            }

            if let Some(parent) = ast::Expr::cast(parent.clone())
                && let Some(ExprOrPatId::ExprId(expr_id)) =
                    source_map.node_expr(InFile { file_id, value: &parent })
                && let Expr::Unsafe { .. } = body[expr_id]
            {
                break true;
            }

            let Some(parent_) = parent.parent() else { break false };
            parent = parent_;
        }
    }
2298
    /// If `impl_` was generated by expanding a `#[derive(..)]`, returns the
    /// ADT the derive was applied to; `None` for ordinary impls.
    pub fn impl_generated_from_derive(&self, impl_: Impl) -> Option<Adt> {
        let id = match impl_.id {
            AnyImplId::ImplId(id) => id,
            // Builtin derive impls record their target ADT directly.
            AnyImplId::BuiltinDeriveImplId(id) => return Some(id.loc(self.db).adt.into()),
        };
        let source = hir_def::src::HasSource::ast_ptr(&id.loc(self.db), self.db);
        let mut file_id = source.file_id;
        // Walk outward through nested macro expansions until the originating
        // derive invocation is found; `?` bails once we reach a real file
        // (meaning the impl did not come from a derive).
        let adt_ast_id = loop {
            let macro_call = file_id.macro_file()?;
            match macro_call.loc(self.db).kind {
                hir_expand::MacroCallKind::Derive { ast_id, .. } => break ast_id,
                hir_expand::MacroCallKind::FnLike { ast_id, .. } => file_id = ast_id.file_id,
                hir_expand::MacroCallKind::Attr { ast_id, .. } => file_id = ast_id.file_id,
            }
        };
        let adt_source = adt_ast_id.to_in_file_node(self.db);
        // Cache the ADT's root tree so `ToDef` can look the node up.
        self.cache(adt_source.value.syntax().ancestors().last().unwrap(), adt_source.file_id);
        ToDef::to_def(self, adt_source.as_ref())
    }
2318
    /// Collects the [`Local`]s referenced by `element`: either a single
    /// expression, or the statements/tail expression of a block that fall
    /// inside `text_range`.
    ///
    /// Returns `None` when the element cannot be analyzed (no body/store).
    pub fn locals_used(
        &self,
        element: Either<&ast::Expr, &ast::StmtList>,
        text_range: TextRange,
    ) -> Option<FxIndexSet<Local>> {
        let sa = self.analyze(element.either(|e| e.syntax(), |s| s.syntax()))?;
        let store = sa.store()?;
        let mut resolver = sa.resolver.clone();
        let def = resolver.expression_store_owner()?;

        // Skip compiler/macro-generated identifiers.
        let is_not_generated = |path: &Path| {
            !path.mod_path().and_then(|path| path.as_ident()).is_some_and(Name::is_generated)
        };

        // Seed the worklist: the single expression, or every statement's
        // expression(s) (let-initializer and let-else block count) plus the
        // tail expression, filtered to those inside `text_range`.
        let exprs = element.either(
            |e| vec![e.clone()],
            |stmts| {
                let mut exprs: Vec<_> = stmts
                    .statements()
                    .filter(|stmt| text_range.contains_range(stmt.syntax().text_range()))
                    .filter_map(|stmt| match stmt {
                        ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr().map(|e| vec![e]),
                        ast::Stmt::Item(_) => None,
                        ast::Stmt::LetStmt(let_stmt) => {
                            let init = let_stmt.initializer();
                            let let_else = let_stmt
                                .let_else()
                                .and_then(|le| le.block_expr())
                                .map(ast::Expr::BlockExpr);

                            match (init, let_else) {
                                (Some(i), Some(le)) => Some(vec![i, le]),
                                (Some(i), _) => Some(vec![i]),
                                (_, Some(le)) => Some(vec![le]),
                                _ => None,
                            }
                        }
                    })
                    .flatten()
                    .collect();

                if let Some(tail_expr) = stmts.tail_expr()
                    && text_range.contains_range(tail_expr.syntax().text_range())
                {
                    exprs.push(tail_expr);
                }
                exprs
            },
        );
        // Lower AST expressions to HIR expression ids.
        let mut exprs: Vec<_> =
            exprs.into_iter().filter_map(|e| sa.expr_id(e).and_then(|e| e.as_expr())).collect();

        let mut locals: FxIndexSet<Local> = FxIndexSet::default();
        // If `id` is a path expression/pattern, resolve it in the scope of
        // `parent_expr` and record it when it names a local binding.
        let mut add_to_locals_used = |id, parent_expr| {
            let path = match id {
                ExprOrPatId::ExprId(expr_id) => {
                    if let Expr::Path(path) = &store[expr_id] {
                        Some(path)
                    } else {
                        None
                    }
                }
                ExprOrPatId::PatId(pat_id) => {
                    if let Pat::Path(path) = &store[pat_id] {
                        Some(path)
                    } else {
                        None
                    }
                }
            };

            if let Some(path) = path
                && is_not_generated(path)
            {
                let _ = resolver.update_to_inner_scope(self.db, def, parent_expr);
                let hygiene = store.expr_or_pat_path_hygiene(id);
                resolver.resolve_path_in_value_ns_fully(self.db, path, hygiene).inspect(|value| {
                    if let ValueNs::LocalBinding(id) = value {
                        locals.insert((def, *id).into());
                    }
                });
            }
        };

        // Depth-first walk over the expression trees.
        while let Some(expr_id) = exprs.pop() {
            // Assignment targets are patterns; their path patterns can also
            // reference locals.
            if let Expr::Assignment { target, .. } = store[expr_id] {
                store.walk_pats(target, &mut |id| {
                    add_to_locals_used(ExprOrPatId::PatId(id), expr_id)
                });
            };
            store.walk_child_exprs(expr_id, |id| {
                exprs.push(id);
            });

            add_to_locals_used(ExprOrPatId::ExprId(expr_id), expr_id)
        }

        Some(locals)
    }
2418
    /// Re-runs type inference for the body containing `token`, collecting the
    /// proof trees of all failed trait obligations, and returns them
    /// serialized as pretty-printed JSON.
    ///
    /// Returns `None` when the token is not inside a body with inference.
    pub fn get_failed_obligations(&self, token: SyntaxToken) -> Option<String> {
        let node = token.parent()?;
        let node = self.find_file(&node);

        let container = self.with_ctx(|ctx| ctx.find_container(node))?;

        match container {
            ChildContainer::DefWithBodyId(def) => {
                // The inspection callback cannot capture mutable state, so
                // results are funneled through a thread-local buffer and
                // drained afterwards.
                thread_local! {
                    static RESULT: RefCell<Vec<ProofTreeData>> = const { RefCell::new(Vec::new()) };
                }
                infer_query_with_inspect(
                    self.db,
                    def,
                    Some(|infer_ctxt, _obligation, result, proof_tree| {
                        // Only record obligations that failed and produced a
                        // proof tree.
                        if result.is_err()
                            && let Some(tree) = proof_tree
                        {
                            let data = dump_proof_tree_structured(tree, Span::dummy(), infer_ctxt);
                            RESULT.with(|ctx| ctx.borrow_mut().push(data));
                        }
                    }),
                );
                // Drain (rather than clone) so the buffer is empty for the
                // next call on this thread.
                let data: Vec<ProofTreeData> =
                    RESULT.with(|data| data.borrow_mut().drain(..).collect());
                let data = serde_json::to_string_pretty(&data).unwrap_or_else(|_| "[]".to_owned());
                Some(data)
            }
            _ => None,
        }
    }
2450}
2451
/// Resolves a `MacroCallId` back to the `MacroId` of the macro it invokes, by
/// locating the macro's definition node and mapping it through the
/// source-to-def machinery.
fn macro_call_to_macro_id(
    ctx: &mut SourceToDefCtx<'_, '_>,
    macro_call_id: MacroCallId,
) -> Option<MacroId> {
    let db: &dyn ExpandDatabase = ctx.db;
    let loc = db.lookup_intern_macro_call(macro_call_id);

    match loc.def.ast_id() {
        // Left: the definition is an ordinary macro item.
        Either::Left(it) => {
            let node = match it.file_id {
                HirFileId::FileId(file_id) => {
                    it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
                }
                // Definition itself lives inside a macro expansion: expand
                // (cached) and resolve the pointer in the expanded tree.
                HirFileId::MacroFile(macro_file) => {
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    it.to_ptr(db).to_node(&expansion_info.expanded().value)
                }
            };
            ctx.macro_to_def(InFile::new(it.file_id, &node))
        }
        // Right: the definition is a proc-macro.
        Either::Right(it) => {
            let node = match it.file_id {
                HirFileId::FileId(file_id) => {
                    it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
                }
                HirFileId::MacroFile(macro_file) => {
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    it.to_ptr(db).to_node(&expansion_info.expanded().value)
                }
            };
            ctx.proc_macro_to_def(InFile::new(it.file_id, &node))
        }
    }
}
2487
/// Conversion from an AST node to its corresponding HIR definition.
pub trait ToDef: AstNode + Clone {
    /// The HIR definition type this AST node maps to.
    type Def;
    /// Maps `src` to its definition; `None` when the node has no definition
    /// (e.g. it is not part of any crate's item tree).
    fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def>;
}
2492
/// Generates a `ToDef` impl per `(hir type, ast type, SourceToDefCtx method)`
/// triple: each impl delegates to the named context method and wraps the
/// result in the hir type via `From`.
macro_rules! to_def_impls {
    ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
        impl ToDef for $ast {
            type Def = $def;
            fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def> {
                sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
            }
        }
    )*}
}
2503
// The full table of AST-node -> HIR-definition mappings.
to_def_impls![
    // Modules and item-like definitions.
    (crate::Module, ast::Module, module_to_def),
    (crate::Module, ast::SourceFile, source_file_to_def),
    (crate::Struct, ast::Struct, struct_to_def),
    (crate::Enum, ast::Enum, enum_to_def),
    (crate::Union, ast::Union, union_to_def),
    (crate::Trait, ast::Trait, trait_to_def),
    (crate::Impl, ast::Impl, impl_to_def),
    (crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
    (crate::Const, ast::Const, const_to_def),
    (crate::Static, ast::Static, static_to_def),
    (crate::Function, ast::Fn, fn_to_def),
    // Fields and variants.
    (crate::Field, ast::RecordField, record_field_to_def),
    (crate::Field, ast::TupleField, tuple_field_to_def),
    (crate::EnumVariant, ast::Variant, enum_variant_to_def),
    // Generic parameters.
    (crate::TypeParam, ast::TypeParam, type_param_to_def),
    (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
    (crate::ConstParam, ast::ConstParam, const_param_to_def),
    (crate::GenericParam, ast::GenericParam, generic_param_to_def),
    // Macros, locals, and miscellaneous nodes.
    (crate::Macro, ast::Macro, macro_to_def),
    (crate::Local, ast::IdentPat, bind_pat_to_def),
    (crate::Local, ast::SelfParam, self_param_to_def),
    (crate::Label, ast::Label, label_to_def),
    (crate::Adt, ast::Adt, adt_to_def),
    (crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
    (crate::InlineAsmOperand, ast::AsmOperandNamed, asm_operand_to_def),
    (crate::ExternBlock, ast::ExternBlock, extern_block_to_def),
    (MacroCallId, ast::MacroCall, macro_call_to_macro_call),
];
2533
2534fn find_root(node: &SyntaxNode) -> SyntaxNode {
2535 node.ancestors().last().unwrap()
2536}
2537
/// A snapshot of name-resolution state at a particular position: the file it
/// was computed in and a `Resolver` for the scope at that position.
#[derive(Debug)]
pub struct SemanticsScope<'db> {
    pub db: &'db dyn HirDatabase,
    // The file this scope was computed in.
    file_id: HirFileId,
    // Name-resolution state at the requested position.
    resolver: Resolver<'db>,
}
2563
impl<'db> SemanticsScope<'db> {
    /// The file this scope was computed in.
    pub fn file_id(&self) -> HirFileId {
        self.file_id
    }

    /// The module this scope's resolver is rooted at.
    pub fn module(&self) -> Module {
        Module { id: self.resolver.module() }
    }

    /// The crate this scope belongs to.
    pub fn krate(&self) -> Crate {
        Crate { id: self.resolver.krate() }
    }

    /// The enclosing function, if the scope is inside a function body.
    pub fn containing_function(&self) -> Option<Function> {
        self.resolver.expression_store_owner().and_then(|owner| match owner {
            ExpressionStoreOwnerId::Body(DefWithBodyId::FunctionId(id)) => Some(id.into()),
            _ => None,
        })
    }

    /// The body/signature owner this scope sits in, if any.
    pub fn expression_store_owner(&self) -> Option<ExpressionStoreOwner> {
        self.resolver.expression_store_owner().map(Into::into)
    }

    pub(crate) fn resolver(&self) -> &Resolver<'db> {
        &self.resolver
    }

    /// The set of traits in scope at this position.
    pub fn visible_traits(&self) -> VisibleTraits {
        let resolver = &self.resolver;
        VisibleTraits(resolver.traits_in_scope(self.db))
    }

    /// Calls `f` once for each name visible in this scope, converting the
    /// resolver's entries into public `ScopeDef`s.
    pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
        let scope = self.resolver.names_in_scope(self.db);
        for (name, entries) in scope {
            for entry in entries {
                let def = match entry {
                    resolver::ScopeDef::ModuleDef(it) => ScopeDef::ModuleDef(it.into()),
                    resolver::ScopeDef::Unknown => ScopeDef::Unknown,
                    resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
                    resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
                    resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
                    // Locals and labels need the owning body to form a public
                    // handle; skip them when there is none.
                    resolver::ScopeDef::Local(binding_id) => {
                        match self.resolver.expression_store_owner() {
                            Some(parent) => ScopeDef::Local(Local { parent, binding_id }),
                            None => continue,
                        }
                    }
                    resolver::ScopeDef::Label(label_id) => {
                        match self.resolver.expression_store_owner() {
                            Some(parent) => ScopeDef::Label(Label { parent, label_id }),
                            None => continue,
                        }
                    }
                };
                f(name.clone(), def)
            }
        }
    }

    /// Whether trait `t` is in scope, i.e. its methods are callable here.
    pub fn can_use_trait_methods(&self, t: Trait) -> bool {
        self.resolver.traits_in_scope(self.db).contains(&t.id)
    }

    /// Resolves a freestanding AST path (one not attached to any body) in this
    /// scope, ignoring generic arguments and type segments.
    pub fn speculative_resolve(&self, ast_path: &ast::Path) -> Option<PathResolution> {
        let mut kind = PathKind::Plain;
        let mut segments = vec![];
        let mut first = true;
        for segment in ast_path.segments() {
            if first {
                first = false;
                // A leading `::` makes the path absolute.
                if segment.coloncolon_token().is_some() {
                    kind = PathKind::Abs;
                }
            }

            let Some(k) = segment.kind() else { continue };
            match k {
                ast::PathSegmentKind::Name(name_ref) => segments.push(name_ref.as_name()),
                // `<T as Trait>`-style segments are not representable here.
                ast::PathSegmentKind::Type { .. } => continue,
                ast::PathSegmentKind::SelfTypeKw => {
                    segments.push(Name::new_symbol_root(sym::Self_))
                }
                // `self` is modeled as zero levels of `super`.
                ast::PathSegmentKind::SelfKw => kind = PathKind::Super(0),
                ast::PathSegmentKind::SuperKw => match kind {
                    PathKind::Super(s) => kind = PathKind::Super(s + 1),
                    PathKind::Plain => kind = PathKind::Super(1),
                    PathKind::Crate | PathKind::Abs | PathKind::DollarCrate(_) => continue,
                },
                ast::PathSegmentKind::CrateKw => kind = PathKind::Crate,
            }
        }

        resolve_hir_path(
            self.db,
            &self.resolver,
            &Path::BarePath(Interned::new(ModPath::from_segments(kind, segments))),
            HygieneId::ROOT,
            None,
        )
    }

    /// Resolves a module path to all items it names (across namespaces).
    pub fn resolve_mod_path(&self, path: &ModPath) -> impl Iterator<Item = ItemInNs> + use<> {
        let items = self.resolver.resolve_module_path_in_items(self.db, path);
        items.iter_items().map(|(item, _)| item.into())
    }

    /// Calls `cb` for each associated type that could be written as a
    /// shorthand (`Self::Assoc`-style) given `resolution` as the base.
    pub fn assoc_type_shorthand_candidates(
        &self,
        resolution: &PathResolution,
        mut cb: impl FnMut(TypeAlias),
    ) {
        let (Some(def), Some(resolution)) = (self.resolver.generic_def(), resolution.in_type_ns())
        else {
            return;
        };
        hir_ty::associated_type_shorthand_candidates(self.db, def, resolution, |_, id| {
            cb(id.into());
            // Returning `false` keeps the search going so all candidates are
            // reported.
            false
        });
    }

    /// The innermost generic-parameter-carrying definition enclosing the
    /// scope, if any.
    pub fn generic_def(&self) -> Option<crate::GenericDef> {
        self.resolver.generic_def().map(|id| id.into())
    }

    /// Extern crates visible in this scope, as `(name, root module)` pairs.
    pub fn extern_crates(&self) -> impl Iterator<Item = (Name, Module)> + '_ {
        self.resolver.extern_crates_in_scope().map(|(name, id)| (name, Module { id }))
    }

    /// Names introduced by `extern crate` declarations in this scope.
    pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
        self.resolver.extern_crate_decls_in_scope(self.db)
    }

    /// Whether both scopes are inside the same `impl` (and thus share `Self`).
    pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool {
        self.resolver.impl_def() == other.resolver.impl_def()
    }
}
2711
/// The set of traits that are in scope (nameable) at some position.
#[derive(Debug)]
pub struct VisibleTraits(pub FxHashSet<TraitId>);
2714
// Let `&VisibleTraits` be used anywhere a `&FxHashSet<TraitId>` is expected.
impl ops::Deref for VisibleTraits {
    type Target = FxHashSet<TraitId>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
2722
/// Body walker that collects bindings whose resolution would change if
/// `to_be_renamed` were renamed from `old_name` to `new_name`.
struct RenameConflictsVisitor<'a> {
    db: &'a dyn HirDatabase,
    owner: DefWithBodyId,
    resolver: Resolver<'a>,
    body: &'a Body,
    // The binding being renamed.
    to_be_renamed: BindingId,
    // The name it will be renamed to.
    new_name: Symbol,
    // Its current name.
    old_name: Symbol,
    // Bindings found to conflict with the rename.
    conflicts: FxHashSet<BindingId>,
}
2733
impl RenameConflictsVisitor<'_> {
    /// Checks a single path (at `node` in the body's source map) against the
    /// pending rename, recording any conflicting binding in `self.conflicts`.
    /// Only bare single-identifier paths can refer to local bindings.
    fn resolve_path(&mut self, node: ExprOrPatId, path: &Path) {
        if let Path::BarePath(path) = path
            && let Some(name) = path.as_ident()
        {
            // A path already spelling the *new* name may end up capturing the
            // renamed binding.
            if *name.symbol() == self.new_name {
                if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed(
                    self.db,
                    name,
                    path,
                    self.body.expr_or_pat_path_hygiene(node),
                    self.to_be_renamed,
                ) {
                    self.conflicts.insert(conflicting);
                }
            } else if *name.symbol() == self.old_name
                // A path spelling the *old* name may start resolving to a
                // different variable once the binding is renamed away.
                && let Some(conflicting) = self.resolver.rename_will_conflict_with_another_variable(
                    self.db,
                    name,
                    path,
                    self.body.expr_or_pat_path_hygiene(node),
                    &self.new_name,
                    self.to_be_renamed,
                )
            {
                self.conflicts.insert(conflicting);
            }
        }
    }

    /// Recursively walks `expr` and its children, resolving every path in the
    /// exact scope it appears in (via the inner-scope guard).
    fn rename_conflicts(&mut self, expr: ExprId) {
        match &self.body[expr] {
            Expr::Path(path) => {
                let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
                self.resolve_path(expr.into(), path);
                self.resolver.reset_to_guard(guard);
            }
            &Expr::Assignment { target, .. } => {
                let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
                // Assignment targets are patterns; their path patterns can
                // also name bindings.
                self.body.walk_pats(target, &mut |pat| {
                    if let Pat::Path(path) = &self.body[pat] {
                        self.resolve_path(pat.into(), path);
                    }
                });
                self.resolver.reset_to_guard(guard);
            }
            _ => {}
        }

        self.body.walk_child_exprs(expr, |expr| self.rename_conflicts(expr));
    }
}