1mod child_by_source;
4mod source_to_def;
5
6use std::{
7 cell::RefCell,
8 convert::Infallible,
9 fmt, iter, mem,
10 ops::{self, ControlFlow, Not},
11};
12
13use base_db::{FxIndexSet, all_crates, toolchain_channel};
14use either::Either;
15use hir_def::{
16 BuiltinDeriveImplId, DefWithBodyId, ExpressionStoreOwnerId, HasModule, MacroId, StructId,
17 TraitId, VariantId,
18 attrs::parse_extra_crate_attrs,
19 expr_store::{Body, ExprOrPatSource, ExpressionStore, HygieneId, path::Path},
20 hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
21 nameres::{ModuleOrigin, crate_def_map},
22 resolver::{self, HasResolver, Resolver, TypeNs, ValueNs},
23 type_ref::Mutability,
24};
25use hir_expand::{
26 EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
27 attrs::AstPathExt,
28 builtin::{BuiltinFnLikeExpander, EagerExpander},
29 db::ExpandDatabase,
30 files::{FileRangeWrapper, HirFileRange, InRealFile},
31 mod_path::{ModPath, PathKind},
32 name::AsName,
33};
34use hir_ty::{
35 InferenceResult,
36 diagnostics::unsafe_operations,
37 infer_query_with_inspect,
38 next_solver::{
39 AnyImplId, DbInterner, Span,
40 format_proof_tree::{ProofTreeData, dump_proof_tree_structured},
41 },
42};
43use intern::{Interned, Symbol, sym};
44use itertools::Itertools;
45use rustc_hash::{FxHashMap, FxHashSet};
46use rustc_type_ir::inherent::Span as _;
47use smallvec::{SmallVec, smallvec};
48use span::{FileId, SyntaxContext};
49use stdx::{TupleExt, always};
50use syntax::{
51 AstNode, AstToken, Direction, SmolStr, SmolStrBuilder, SyntaxElement, SyntaxKind, SyntaxNode,
52 SyntaxNodePtr, SyntaxToken, T, TextRange, TextSize,
53 algo::skip_trivia_token,
54 ast::{self, HasAttrs as _, HasGenericParams},
55};
56
57use crate::{
58 Adjust, Adjustment, Adt, AnyFunctionId, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const,
59 ConstParam, Crate, DeriveHelper, Enum, EnumVariant, ExpressionStoreOwner, Field, Function,
60 GenericSubstitution, HasSource, Impl, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam,
61 Local, Macro, Module, ModuleDef, Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule,
62 Trait, TupleField, Type, TypeAlias, TypeParam, Union, Variant,
63 db::HirDatabase,
64 semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
65 source_analyzer::{SourceAnalyzer, resolve_hir_path},
66};
67
/// A `ControlFlow` that can never break (`Infallible` break type); used with
/// breakable descend helpers when the caller wants to visit every token.
const CONTINUE_NO_BREAKS: ControlFlow<Infallible, ()> = ControlFlow::Continue(());
69
/// The result of resolving a path in source code.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PathResolution {
    /// An item (module, function, ADT, const, static, trait, type alias, macro, …).
    Def(ModuleDef),
    /// A local binding (only value namespace).
    Local(Local),
    /// A type parameter.
    TypeParam(TypeParam),
    /// A const parameter.
    ConstParam(ConstParam),
    /// `Self` inside an `impl` block.
    SelfType(Impl),
    /// A built-in attribute such as `#[inline]`.
    BuiltinAttr(BuiltinAttr),
    /// A tool module such as `rustfmt` or `clippy` in `#[rustfmt::skip]`.
    ToolModule(ToolModule),
    /// A derive helper attribute registered by a derive macro.
    DeriveHelper(DeriveHelper),
}
85
86impl PathResolution {
87 pub(crate) fn in_type_ns(&self) -> Option<TypeNs> {
88 match self {
89 PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())),
90 PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
91 Some(TypeNs::BuiltinType((*builtin).into()))
92 }
93 PathResolution::Def(
94 ModuleDef::Const(_)
95 | ModuleDef::EnumVariant(_)
96 | ModuleDef::Macro(_)
97 | ModuleDef::Function(_)
98 | ModuleDef::Module(_)
99 | ModuleDef::Static(_)
100 | ModuleDef::Trait(_),
101 ) => None,
102 PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
103 Some(TypeNs::TypeAliasId((*alias).into()))
104 }
105 PathResolution::BuiltinAttr(_)
106 | PathResolution::ToolModule(_)
107 | PathResolution::Local(_)
108 | PathResolution::DeriveHelper(_)
109 | PathResolution::ConstParam(_) => None,
110 PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
111 PathResolution::SelfType(impl_def) => match impl_def.id {
112 AnyImplId::ImplId(id) => Some(TypeNs::SelfType(id)),
113 AnyImplId::BuiltinDeriveImplId(_) => None,
114 },
115 }
116 }
117}
118
/// The resolution of a path, split by namespace: a single path may resolve to
/// different items in the type, value, and macro namespaces simultaneously.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct PathResolutionPerNs {
    pub type_ns: Option<PathResolution>,
    pub value_ns: Option<PathResolution>,
    pub macro_ns: Option<PathResolution>,
}
125
126impl PathResolutionPerNs {
127 pub fn new(
128 type_ns: Option<PathResolution>,
129 value_ns: Option<PathResolution>,
130 macro_ns: Option<PathResolution>,
131 ) -> Self {
132 PathResolutionPerNs { type_ns, value_ns, macro_ns }
133 }
134 pub fn any(&self) -> Option<PathResolution> {
135 self.type_ns.or(self.value_ns).or(self.macro_ns)
136 }
137}
138
/// The type of an expression or pattern, both before and after type adjustments
/// (autoderef, coercions, …) were applied.
#[derive(Debug)]
pub struct TypeInfo<'db> {
    // The unadjusted type.
    pub original: Type<'db>,
    // The adjusted type, if any adjustment was recorded for this node.
    pub adjusted: Option<Type<'db>>,
}
146
147impl<'db> TypeInfo<'db> {
148 pub fn original(self) -> Type<'db> {
149 self.original
150 }
151
152 pub fn has_adjustment(&self) -> bool {
153 self.adjusted.is_some()
154 }
155
156 pub fn adjusted(self) -> Type<'db> {
158 self.adjusted.unwrap_or(self.original)
159 }
160}
161
/// Primary API to get semantic information, like types, from syntax trees.
/// Thin wrapper around [`SemanticsImpl`] that also carries the concrete DB type.
pub struct Semantics<'db, DB: ?Sized> {
    pub db: &'db DB,
    imp: SemanticsImpl<'db>,
}
167
/// Type-erased implementation backing [`Semantics`]; caches source-to-def and
/// macro-call lookups across queries.
pub struct SemanticsImpl<'db> {
    pub db: &'db dyn HirDatabase,
    // Cache for mapping syntax nodes to their HIR definitions.
    s2d_cache: RefCell<SourceToDefCache>,
    // Cache for macro-call resolution of `ast::MacroCall` nodes.
    macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroCallId>>,
}
174
175impl<DB: ?Sized> fmt::Debug for Semantics<'_, DB> {
176 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
177 write!(f, "Semantics {{ ... }}")
178 }
179}
180
// Allow calling all `SemanticsImpl` methods directly on a `Semantics`.
impl<'db, DB: ?Sized> ops::Deref for Semantics<'db, DB> {
    type Target = SemanticsImpl<'db>;

    fn deref(&self) -> &Self::Target {
        &self.imp
    }
}
188
/// The kind of a lint-level attribute (`#[allow(..)]`, `#[warn(..)]`, …).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum LintAttr {
    Allow,
    Expect,
    Warn,
    Deny,
    Forbid,
}
197
198impl Semantics<'_, dyn HirDatabase> {
202 pub fn new_dyn(db: &'_ dyn HirDatabase) -> Semantics<'_, dyn HirDatabase> {
204 let impl_ = SemanticsImpl::new(db);
205 Semantics { db, imp: impl_ }
206 }
207}
208
209impl<DB: HirDatabase> Semantics<'_, DB> {
210 pub fn new(db: &DB) -> Semantics<'_, DB> {
212 let impl_ = SemanticsImpl::new(db);
213 Semantics { db, imp: impl_ }
214 }
215}
216
impl<DB: HirDatabase + ?Sized> Semantics<'_, DB> {
    /// Returns the `HirFileId` (real file or macro expansion) the node belongs to.
    pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
        self.imp.find_file(syntax_node).file_id
    }

    /// Ancestors of the token's parent node, traversing upwards through macro calls.
    pub fn token_ancestors_with_macros(
        &self,
        token: SyntaxToken,
    ) -> impl Iterator<Item = SyntaxNode> + '_ {
        token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
    }

    /// Finds the innermost node of type `N` at `offset`, looking upwards through macro calls.
    pub fn find_node_at_offset_with_macros<N: AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
    }

    /// Finds a node of type `N` at `offset`, descending into macro expansions first.
    pub fn find_node_at_offset_with_descend<N: AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
    }

    /// Like `find_node_at_offset_with_descend`, but yields one match per descended token.
    pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
        &'slf self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = N> + 'slf {
        self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
    }

    /// Name-like nodes at `offset`, descended into macros, smallest text range first.
    pub fn find_namelike_at_offset_with_descend<'slf>(
        &'slf self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = ast::NameLike> + 'slf {
        node.token_at_offset(offset)
            .map(move |token| self.descend_into_macros_no_opaque(token, true))
            .map(|descendants| descendants.into_iter().filter_map(move |it| it.value.parent()))
            // Merge the per-token streams so that smaller ranges come first.
            .kmerge_by(|left, right| left.text_range().len().lt(&right.text_range().len()))
            .filter_map(ast::NameLike::cast)
    }

    /// Collects the lint-level attributes (`allow`/`expect`/`warn`/`deny`/`forbid`)
    /// on `item`, including extra crate-level attributes when `item` is the crate
    /// root source file, with `cfg_attr` expanded lazily against the crate's cfg.
    pub fn lint_attrs(
        &self,
        file_id: FileId,
        krate: Crate,
        item: ast::AnyHasAttrs,
    ) -> impl DoubleEndedIterator<Item = (LintAttr, SmolStr)> {
        // Lazily fetch cfg options only if a `cfg_attr` is actually encountered.
        let mut cfg_options = None;
        let cfg_options = || *cfg_options.get_or_insert_with(|| krate.id.cfg_options(self.db));

        let is_crate_root = file_id == krate.root_file(self.imp.db);
        let is_source_file = ast::SourceFile::can_cast(item.syntax().kind());
        // Attributes injected from outside the file (e.g. via the build system)
        // only apply at the crate root.
        let extra_crate_attrs = (is_crate_root && is_source_file)
            .then(|| {
                parse_extra_crate_attrs(self.imp.db, krate.id)
                    .into_iter()
                    .flat_map(|src| src.attrs())
            })
            .into_iter()
            .flatten();

        let mut result = Vec::new();
        hir_expand::attrs::expand_cfg_attr::<Infallible>(
            extra_crate_attrs.chain(ast::attrs_including_inner(&item)),
            cfg_options,
            |attr, _| {
                // Only token-tree metas like `allow(lint_a, lint_b)` are relevant.
                let ast::Meta::TokenTreeMeta(attr) = attr else {
                    return ControlFlow::Continue(());
                };
                let (Some(segment), Some(tt)) = (attr.path().as_one_segment(), attr.token_tree())
                else {
                    return ControlFlow::Continue(());
                };
                let lint_attr = match &*segment {
                    "allow" => LintAttr::Allow,
                    "expect" => LintAttr::Expect,
                    "warn" => LintAttr::Warn,
                    "deny" => LintAttr::Deny,
                    "forbid" => LintAttr::Forbid,
                    _ => return ControlFlow::Continue(()),
                };
                // Re-assemble each comma-separated lint path (e.g. `clippy::foo`)
                // from the raw tokens of the attribute's token tree.
                let mut lint = SmolStrBuilder::new();
                for token in
                    tt.syntax().children_with_tokens().filter_map(SyntaxElement::into_token)
                {
                    match token.kind() {
                        T![:] | T![::] => lint.push_str(token.text()),
                        kind if kind.is_any_identifier() => lint.push_str(token.text()),
                        T![,] => {
                            // A comma terminates the current lint path.
                            let lint = mem::replace(&mut lint, SmolStrBuilder::new()).finish();
                            if !lint.is_empty() {
                                result.push((lint_attr, lint));
                            }
                        }
                        _ => {}
                    }
                }
                // Flush the trailing lint (no comma after the last one).
                let lint = lint.finish();
                if !lint.is_empty() {
                    result.push((lint_attr, lint));
                }

                ControlFlow::Continue(())
            },
        );
        result.into_iter()
    }

    /// Resolves the `Range`-family struct a range pattern desugars to.
    pub fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<Struct> {
        self.imp.resolve_range_pat(range_pat).map(Struct::from)
    }

    /// Resolves the `Range`-family struct a range expression desugars to.
    pub fn resolve_range_expr(&self, range_expr: &ast::RangeExpr) -> Option<Struct> {
        self.imp.resolve_range_expr(range_expr).map(Struct::from)
    }

    /// Resolves the `poll` function a `.await` expression desugars to.
    pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
        self.imp.resolve_await_to_poll(await_expr)
    }

    /// Resolves the overloaded operator function of a prefix expression (`-`, `!`, `*`).
    pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
        self.imp.resolve_prefix_expr(prefix_expr)
    }

    /// Resolves the `Index`/`IndexMut` impl function used by an index expression.
    pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
        self.imp.resolve_index_expr(index_expr)
    }

    /// Resolves the overloaded operator function of a binary expression.
    pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
        self.imp.resolve_bin_expr(bin_expr)
    }

    /// Resolves the function a `?` expression desugars to.
    pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
        self.imp.resolve_try_expr(try_expr)
    }

    /// Resolves the variant a record literal constructs.
    pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<Variant> {
        self.imp.resolve_variant(record_lit).map(Variant::from)
    }

    /// First module defined by `file`, if any (a file can host several modules).
    pub fn file_to_module_def(&self, file: impl Into<FileId>) -> Option<Module> {
        self.imp.file_to_module_defs(file.into()).next()
    }

    /// All modules defined by `file`.
    pub fn file_to_module_defs(&self, file: impl Into<FileId>) -> impl Iterator<Item = Module> {
        self.imp.file_to_module_defs(file.into())
    }

    /// First module defined by a `HirFileId` (real file or macro expansion).
    pub fn hir_file_to_module_def(&self, file: impl Into<HirFileId>) -> Option<Module> {
        self.imp.hir_file_to_module_defs(file.into()).next()
    }

    /// All modules defined by a `HirFileId`.
    pub fn hir_file_to_module_defs(
        &self,
        file: impl Into<HirFileId>,
    ) -> impl Iterator<Item = Module> {
        self.imp.hir_file_to_module_defs(file.into())
    }

    /// Whether the crate is built with a nightly toolchain. Note: an unknown
    /// toolchain channel (`None`) is treated as nightly here.
    pub fn is_nightly(&self, krate: Crate) -> bool {
        let toolchain = toolchain_channel(self.db.as_dyn_database(), krate.into());
        matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None)
    }

    /// Maps an `ast::Adt` to its HIR definition.
    pub fn to_adt_def(&self, a: &ast::Adt) -> Option<Adt> {
        self.imp.to_def(a)
    }

    /// Maps an `ast::Const` to its HIR definition.
    pub fn to_const_def(&self, c: &ast::Const) -> Option<Const> {
        self.imp.to_def(c)
    }

    /// Maps an `ast::Enum` to its HIR definition.
    pub fn to_enum_def(&self, e: &ast::Enum) -> Option<Enum> {
        self.imp.to_def(e)
    }

    /// Maps an `ast::Variant` to its HIR definition.
    pub fn to_enum_variant_def(&self, v: &ast::Variant) -> Option<EnumVariant> {
        self.imp.to_def(v)
    }

    /// Maps an `ast::Fn` to its HIR definition.
    pub fn to_fn_def(&self, f: &ast::Fn) -> Option<Function> {
        self.imp.to_def(f)
    }

    /// Maps an `ast::Impl` to its HIR definition.
    pub fn to_impl_def(&self, i: &ast::Impl) -> Option<Impl> {
        self.imp.to_def(i)
    }

    /// Maps an `ast::Macro` to its HIR definition.
    pub fn to_macro_def(&self, m: &ast::Macro) -> Option<Macro> {
        self.imp.to_def(m)
    }

    /// Maps an `ast::Module` to its HIR definition.
    pub fn to_module_def(&self, m: &ast::Module) -> Option<Module> {
        self.imp.to_def(m)
    }

    /// Maps an `ast::Static` to its HIR definition.
    pub fn to_static_def(&self, s: &ast::Static) -> Option<Static> {
        self.imp.to_def(s)
    }

    /// Maps an `ast::Struct` to its HIR definition.
    pub fn to_struct_def(&self, s: &ast::Struct) -> Option<Struct> {
        self.imp.to_def(s)
    }

    /// Maps an `ast::Trait` to its HIR definition.
    pub fn to_trait_def(&self, t: &ast::Trait) -> Option<Trait> {
        self.imp.to_def(t)
    }

    /// Maps an `ast::TypeAlias` to its HIR definition.
    pub fn to_type_alias_def(&self, t: &ast::TypeAlias) -> Option<TypeAlias> {
        self.imp.to_def(t)
    }

    /// Maps an `ast::Union` to its HIR definition.
    pub fn to_union_def(&self, u: &ast::Union) -> Option<Union> {
        self.imp.to_def(u)
    }
}
455
456impl<'db> SemanticsImpl<'db> {
457 fn new(db: &'db dyn HirDatabase) -> Self {
458 SemanticsImpl { db, s2d_cache: Default::default(), macro_call_cache: Default::default() }
459 }
460
461 pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
462 let hir_file_id = file_id.into();
463 let tree = file_id.parse(self.db).tree();
464 self.cache(tree.syntax().clone(), hir_file_id);
465 tree
466 }
467
468 pub fn first_crate(&self, file: FileId) -> Option<Crate> {
470 match self.file_to_module_defs(file).next() {
471 Some(module) => Some(module.krate(self.db)),
472 None => all_crates(self.db).last().copied().map(Into::into),
473 }
474 }
475
476 pub fn attach_first_edition_opt(&self, file: FileId) -> Option<EditionedFileId> {
477 let krate = self.file_to_module_defs(file).next()?.krate(self.db);
478 Some(EditionedFileId::new(self.db, file, krate.edition(self.db)))
479 }
480
481 pub fn attach_first_edition(&self, file: FileId) -> EditionedFileId {
482 self.attach_first_edition_opt(file)
483 .unwrap_or_else(|| EditionedFileId::current_edition(self.db, file))
484 }
485
486 pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
487 let file_id = self.attach_first_edition(file_id);
488
489 let tree = file_id.parse(self.db).tree();
490 self.cache(tree.syntax().clone(), file_id.into());
491 tree
492 }
493
494 pub fn adjust_edition(&self, file_id: HirFileId) -> HirFileId {
495 if let Some(editioned_file_id) = file_id.file_id() {
496 self.attach_first_edition_opt(editioned_file_id.file_id(self.db))
497 .map_or(file_id, Into::into)
498 } else {
499 file_id
500 }
501 }
502
    /// Returns the syntax node "containing" this file: for a file module, the
    /// `mod` declaration in the parent file; for a macro file, the macro call.
    /// Returns `None` for the crate root. Caches the parent's syntax root.
    pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
        match file_id {
            HirFileId::FileId(file_id) => {
                let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?;
                let def_map = crate_def_map(self.db, module.krate(self.db).id);
                match def_map[module.id].origin {
                    // The crate root has no parent file.
                    ModuleOrigin::CrateRoot { .. } => None,
                    ModuleOrigin::File { declaration, declaration_tree_id, .. } => {
                        let file_id = declaration_tree_id.file_id();
                        let in_file = InFile::new(file_id, declaration);
                        let node = in_file.to_node(self.db);
                        let root = find_root(node.syntax());
                        self.cache(root, file_id);
                        Some(in_file.with_value(node.syntax().clone()))
                    }
                    _ => unreachable!("FileId can only belong to a file module"),
                }
            }
            HirFileId::MacroFile(macro_file) => {
                // For macro files the "parent" is the macro call node itself.
                let node = self.db.lookup_intern_macro_call(macro_file).to_node(self.db);
                let root = find_root(&node.value);
                self.cache(root, node.file_id);
                Some(node)
            }
        }
    }
529
530 pub fn module_definition_node(&self, module: Module) -> InFile<SyntaxNode> {
533 let def_map = module.id.def_map(self.db);
534 let definition = def_map[module.id].origin.definition_source(self.db);
535 let definition = definition.map(|it| it.node());
536 let root_node = find_root(&definition.value);
537 self.cache(root_node, definition.file_id);
538 definition
539 }
540
    /// Parses a real file or expands a macro file, caching the resulting root.
    pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
        let node = self.db.parse_or_expand(file_id);
        self.cache(node.clone(), file_id);
        node
    }
546
    /// Expands the macro call, returning the expansion's syntax root together
    /// with any expansion error; caches the root.
    pub fn expand(&self, file_id: MacroCallId) -> ExpandResult<SyntaxNode> {
        let res = self.db.parse_macro_expansion(file_id).map(|it| it.0.syntax_node());
        self.cache(res.value.clone(), file_id.into());
        res
    }
552
553 pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<InFile<SyntaxNode>> {
554 let file_id = self.to_def(macro_call)?;
555 let node = self.parse_or_expand(file_id.into());
556 Some(InFile::new(file_id.into(), node))
557 }
558
    /// Expands a macro call, but skips certain built-in macros whose expansion
    /// is not useful to show to users (e.g. `file!`, `asm!`, `format_args!`,
    /// `compile_error!`), returning `None` for those.
    pub fn expand_allowed_builtins(
        &self,
        macro_call: &ast::MacroCall,
    ) -> Option<ExpandResult<SyntaxNode>> {
        let file_id = self.to_def(macro_call)?;
        let macro_call = self.db.lookup_intern_macro_call(file_id);

        // Built-ins whose expansions are opaque tokens or side-effect-only.
        let skip = matches!(
            macro_call.def.kind,
            hir_expand::MacroDefKind::BuiltIn(
                _,
                BuiltinFnLikeExpander::Column
                    | BuiltinFnLikeExpander::File
                    | BuiltinFnLikeExpander::ModulePath
                    | BuiltinFnLikeExpander::Asm
                    | BuiltinFnLikeExpander::GlobalAsm
                    | BuiltinFnLikeExpander::NakedAsm
                    | BuiltinFnLikeExpander::LogSyntax
                    | BuiltinFnLikeExpander::TraceMacros
                    | BuiltinFnLikeExpander::FormatArgs
                    | BuiltinFnLikeExpander::FormatArgsNl
                    | BuiltinFnLikeExpander::ConstFormatArgs,
            ) | hir_expand::MacroDefKind::BuiltInEager(_, EagerExpander::CompileError)
        );
        if skip {
            return None;
        }

        let node = self.expand(file_id);
        Some(node)
    }
594
595 pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<InFile<SyntaxNode>>> {
597 let src = self.wrap_node_infile(item.clone());
598 let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
599 Some(self.expand(macro_call_id).map(|it| InFile::new(macro_call_id.into(), it)))
600 }
601
    /// Treats a `#[derive(..)]` attribute as if it were an attribute macro and
    /// expands it, returning the expansion's syntax root.
    pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Meta) -> Option<SyntaxNode> {
        // The derive must sit directly on an ADT.
        let adt = attr.parent_attr()?.syntax().parent().and_then(ast::Adt::cast)?;
        let src = self.wrap_node_infile(attr.clone());
        let call_id = self.with_ctx(|ctx| {
            ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
        })?;
        Some(self.parse_or_expand(call_id.into()))
    }
610
    /// Resolves each derive in a `#[derive(..)]` attribute to its macro, in
    /// source order; unresolved derives yield `None` entries.
    pub fn resolve_derive_macro(&self, attr: &ast::Meta) -> Option<Vec<Option<Macro>>> {
        let calls = self.derive_macro_calls(attr)?;
        self.with_ctx(|ctx| {
            Some(
                calls
                    .into_iter()
                    .map(|call| {
                        let call = call?;
                        match call {
                            // A regular derive: map the macro call back to its macro.
                            Either::Left(call) => {
                                macro_call_to_macro_id(ctx, call).map(|id| Macro { id })
                            }
                            // A builtin derive impl: look the macro up via lang items.
                            Either::Right(call) => {
                                let call = call.loc(self.db);
                                let krate = call.krate(self.db);
                                let lang_items = hir_def::lang_item::lang_items(self.db, krate);
                                call.trait_.derive_macro(lang_items).map(|id| Macro { id })
                            }
                        }
                    })
                    .collect(),
            )
        })
    }
635
    /// Expands each derive in a `#[derive(..)]` attribute, returning one
    /// expansion per derive (builtin-impl derives yield `None`); caches roots.
    pub fn expand_derive_macro(
        &self,
        attr: &ast::Meta,
    ) -> Option<Vec<Option<ExpandResult<SyntaxNode>>>> {
        let res: Vec<_> = self
            .derive_macro_calls(attr)?
            .into_iter()
            .map(|call| {
                // Only `Either::Left` (real macro calls) have an expansion to show.
                let file_id = call?.left()?;
                let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
                let root_node = value.0.syntax_node();
                self.cache(root_node.clone(), file_id.into());
                Some(ExpandResult { value: root_node, err })
            })
            .collect();
        Some(res)
    }
653
    /// The macro calls (or builtin derive impls) generated by a `#[derive(..)]`
    /// attribute, one entry per derive path, in source order.
    fn derive_macro_calls(
        &self,
        attr: &ast::Meta,
    ) -> Option<Vec<Option<Either<MacroCallId, BuiltinDeriveImplId>>>> {
        let adt = attr.parent_attr()?.syntax().parent().and_then(ast::Adt::cast)?;
        let file_id = self.find_file(adt.syntax()).file_id;
        let adt = InFile::new(file_id, &adt);
        let src = InFile::new(file_id, attr.clone());
        self.with_ctx(|ctx| {
            let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?;
            Some(res.to_vec())
        })
    }
667
    /// Whether the file containing this ADT has any derive attributes on it.
    pub fn is_derive_annotated(&self, adt: InFile<&ast::Adt>) -> bool {
        self.with_ctx(|ctx| ctx.file_of_adt_has_derives(adt))
    }
671
    /// The derive helper attributes in scope for this ADT, as
    /// `(helper_name, derive_macro_name)` symbol pairs.
    pub fn derive_helpers_in_scope(&self, adt: &ast::Adt) -> Option<Vec<(Symbol, Symbol)>> {
        let sa = self.analyze_no_infer(adt.syntax())?;
        let id = self.db.ast_id_map(sa.file_id).ast_id(adt);
        let result = sa
            .resolver
            .def_map()
            .derive_helpers_in_scope(InFile::new(sa.file_id, id))?
            .iter()
            .map(|(name, macro_, _)| {
                let macro_name = Macro::from(*macro_).name(self.db).symbol().clone();
                (name.symbol().clone(), macro_name)
            })
            .collect();
        Some(result)
    }
687
    /// Resolves a helper attribute on an ADT to the derive macro(s) declaring
    /// it, paired with the corresponding derive expansion's macro call id.
    /// Returns `None` when the attribute matches no helper in scope.
    pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroCallId)>> {
        // Find the enclosing ADT item the attribute sits on (directly or on a field/variant).
        let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
            ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
            ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
            ast::Item::Union(it) => Some(ast::Adt::Union(it)),
            _ => None,
        })?;
        let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
        let sa = self.analyze_no_infer(adt.syntax())?;
        let id = self.db.ast_id_map(sa.file_id).ast_id(&adt);
        let res: Vec<_> = sa
            .resolver
            .def_map()
            .derive_helpers_in_scope(InFile::new(sa.file_id, id))?
            .iter()
            .filter(|&(name, _, _)| *name == attr_name)
            // Only real macro-call derives (`left`) carry an expansion id.
            .filter_map(|&(_, macro_, call)| Some((macro_.into(), call.left()?)))
            .collect();
        res.is_empty().not().then_some(res)
    }
709
    /// Whether the item is expanded by an attribute macro.
    pub fn is_attr_macro_call(&self, item: InFile<&ast::Item>) -> bool {
        self.with_ctx(|ctx| ctx.item_to_macro_call(item).is_some())
    }
713
    /// Expands the macro call with hypothetical arguments without updating the
    /// database, mapping `token_to_map` into the resulting expansion.
    pub fn speculative_expand_macro_call(
        &self,
        actual_macro_call: &ast::MacroCall,
        speculative_args: &ast::TokenTree,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        let macro_file = self.to_def(actual_macro_call)?;
        hir_expand::db::expand_speculative(
            self.db,
            macro_file,
            speculative_args.syntax(),
            token_to_map,
        )
    }
730
    /// Speculative expansion for an already-resolved macro call id.
    pub fn speculative_expand_raw(
        &self,
        macro_file: MacroCallId,
        speculative_args: &SyntaxNode,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        hir_expand::db::expand_speculative(self.db, macro_file, speculative_args, token_to_map)
    }
739
    /// Speculatively expands an attribute macro over a hypothetical item,
    /// mapping `token_to_map` into the expansion.
    pub fn speculative_expand_attr_macro(
        &self,
        actual_macro_call: &ast::Item,
        speculative_args: &ast::Item,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        let macro_call = self.wrap_node_infile(actual_macro_call.clone());
        let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?;
        hir_expand::db::expand_speculative(
            self.db,
            macro_call_id,
            speculative_args.syntax(),
            token_to_map,
        )
    }
757
    /// Speculatively expands a derive attribute (treated as an attribute
    /// macro) with hypothetical attribute contents.
    pub fn speculative_expand_derive_as_pseudo_attr_macro(
        &self,
        actual_macro_call: &ast::Attr,
        speculative_args: &ast::Attr,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        let attr = self.wrap_node_infile(actual_macro_call.clone());
        // The derive attribute must sit directly on an ADT.
        let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
        let macro_call_id = self.with_ctx(|ctx| {
            ctx.attr_to_derive_macro_call(
                attr.with_value(&adt),
                attr.with_value(attr.value.meta()?),
            )
            .map(|(_, it, _)| it)
        })?;
        hir_expand::db::expand_speculative(
            self.db,
            macro_call_id,
            speculative_args.syntax(),
            token_to_map,
        )
    }
780
    /// Computes the locals that would conflict (shadow or be shadowed) if
    /// `to_be_renamed` were renamed to `new_name` within its body.
    pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &Name) -> Vec<Local> {
        // Only locals inside bodies can conflict; otherwise nothing to check.
        let Some(def) = to_be_renamed.parent.as_def_with_body() else {
            return Vec::new();
        };
        let body = Body::of(self.db, def);
        let resolver = to_be_renamed.parent.resolver(self.db);
        // Start the walk at the binding's owning expression (or the whole body).
        let starting_expr =
            body.binding_owner(to_be_renamed.binding_id).unwrap_or(body.root_expr());
        let mut visitor = RenameConflictsVisitor {
            body,
            conflicts: FxHashSet::default(),
            db: self.db,
            new_name: new_name.symbol().clone(),
            old_name: to_be_renamed.name(self.db).symbol().clone(),
            owner: def,
            to_be_renamed: to_be_renamed.binding_id,
            resolver,
        };
        visitor.rename_conflicts(starting_expr);
        visitor
            .conflicts
            .into_iter()
            .map(|binding_id| Local { parent: to_be_renamed.parent, binding_id })
            .collect()
    }
809
    /// If the string literal is the template of a `format_args!`-like or `asm!`
    /// expression, returns its captured parts: each part's range (in the
    /// original file) and its resolution (a path for format args, an inline-asm
    /// operand for asm templates).
    pub fn as_format_args_parts(
        &self,
        string: &ast::String,
    ) -> Option<Vec<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)>> {
        let string_start = string.syntax().text_range().start();
        let token = self.wrap_token_infile(string.syntax().clone());
        // Descend into macros; the first token that turns out to be such a
        // template short-circuits via `ControlFlow::Break`.
        self.descend_into_macros_breakable(token, |token, _| {
            (|| {
                let token = token.value;
                let string = ast::String::cast(token)?;
                let literal =
                    string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
                let parent = literal.parent()?;
                if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
                    let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
                    let format_args = self.wrap_node_infile(format_args);
                    let res = source_analyzer
                        .as_format_args_parts(self.db, format_args.as_ref())?
                        // Ranges are relative to the string; shift them into file space.
                        .map(|(range, res)| (range + string_start, res.map(Either::Left)))
                        .collect();
                    Some(res)
                } else {
                    let asm = ast::AsmExpr::cast(parent)?;
                    let source_analyzer = self.analyze_no_infer(asm.syntax())?;
                    // Which template line of the asm block this literal is.
                    let line = asm.template().position(|it| *it.syntax() == literal)?;
                    let asm = self.wrap_node_infile(asm);
                    let (owner, (expr, asm_parts)) = source_analyzer.as_asm_parts(asm.as_ref())?;
                    let res = asm_parts
                        .get(line)?
                        .iter()
                        .map(|&(range, index)| {
                            (
                                range + string_start,
                                Some(Either::Right(InlineAsmOperand { owner, expr, index })),
                            )
                        })
                        .collect();
                    Some(res)
                }
            })()
            .map_or(ControlFlow::Continue(()), ControlFlow::Break)
        })
    }
854
    /// If `offset` lies inside a capture of a format-args/asm template string,
    /// returns the capture's range (original file and expansion file), the
    /// descended string token, and the capture's resolution.
    pub fn check_for_format_args_template(
        &self,
        original_token: SyntaxToken,
        offset: TextSize,
    ) -> Option<(
        TextRange,
        HirFileRange,
        ast::String,
        Option<Either<PathResolution, InlineAsmOperand>>,
    )> {
        let original_token =
            self.wrap_token_infile(original_token).map(ast::String::cast).transpose()?;
        self.check_for_format_args_template_with_file(original_token, offset)
    }
877
    /// As [`Self::check_for_format_args_template`], but the string token is
    /// already wrapped with its file id. `offset` is absolute; it is converted
    /// to an offset relative to the string token before resolution.
    pub fn check_for_format_args_template_with_file(
        &self,
        original_token: InFile<ast::String>,
        offset: TextSize,
    ) -> Option<(
        TextRange,
        HirFileRange,
        ast::String,
        Option<Either<PathResolution, InlineAsmOperand>>,
    )> {
        let relative_offset =
            offset.checked_sub(original_token.value.syntax().text_range().start())?;
        self.descend_into_macros_breakable(
            original_token.as_ref().map(|it| it.syntax().clone()),
            |token, _| {
                (|| {
                    let token = token.map(ast::String::cast).transpose()?;
                    self.resolve_offset_in_format_args(token.as_ref(), relative_offset).map(
                        |(range, res)| {
                            (
                                // Range in the original file…
                                range + original_token.value.syntax().text_range().start(),
                                // …and in the (possibly expanded) file of the descended token.
                                HirFileRange {
                                    file_id: token.file_id,
                                    range: range + token.value.syntax().text_range().start(),
                                },
                                token.value,
                                res,
                            )
                        },
                    )
                })()
                .map_or(ControlFlow::Continue(()), ControlFlow::Break)
            },
        )
    }
920
    /// Resolves what the capture at `offset` (relative to the string token)
    /// inside a format-args or asm template refers to.
    fn resolve_offset_in_format_args(
        &self,
        InFile { value: string, file_id }: InFile<&ast::String>,
        offset: TextSize,
    ) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> {
        debug_assert!(offset <= string.syntax().text_range().len());
        let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
        let parent = literal.parent()?;
        if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
            let source_analyzer =
                &self.analyze_impl(InFile::new(file_id, format_args.syntax()), None, false)?;
            source_analyzer
                .resolve_offset_in_format_args(self.db, InFile::new(file_id, &format_args), offset)
                .map(|(range, res)| (range, res.map(Either::Left)))
        } else {
            let asm = ast::AsmExpr::cast(parent)?;
            let source_analyzer =
                self.analyze_impl(InFile::new(file_id, asm.syntax()), None, false)?;
            // Which template line of the asm block this literal is.
            let line = asm.template().position(|it| *it.syntax() == literal)?;
            source_analyzer
                .resolve_offset_in_asm_template(InFile::new(file_id, &asm), line, offset)
                .map(|(owner, (expr, range, index))| {
                    (range, Some(Either::Right(InlineAsmOperand { owner, expr, index })))
                })
        }
    }
947
    /// Debug aid: pretty-prints the HIR of the body, variant fields, or
    /// signature that contains `token`, if any.
    pub fn debug_hir_at(&self, token: SyntaxToken) -> Option<String> {
        self.analyze_no_infer(&token.parent()?).and_then(|it| {
            Some(match it.body_or_sig.as_ref()? {
                crate::source_analyzer::BodyOrSig::Body { def, body, .. } => {
                    hir_def::expr_store::pretty::print_body_hir(
                        self.db,
                        body,
                        *def,
                        it.file_id.edition(self.db),
                    )
                }
                &crate::source_analyzer::BodyOrSig::VariantFields { def, .. } => {
                    hir_def::expr_store::pretty::print_variant_body_hir(
                        self.db,
                        def,
                        it.file_id.edition(self.db),
                    )
                }
                &crate::source_analyzer::BodyOrSig::Sig { def, .. } => {
                    hir_def::expr_store::pretty::print_signature(
                        self.db,
                        def,
                        it.file_id.edition(self.db),
                    )
                }
            })
        })
    }
976
    /// If the token's file is pulled in via `include!`, maps the token into the
    /// include expansion; otherwise returns the token unchanged.
    pub fn descend_token_into_include_expansion(
        &self,
        tok: InRealFile<SyntaxToken>,
    ) -> InFile<SyntaxToken> {
        let Some(include) =
            self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, tok.file_id)
        else {
            return tok.into();
        };
        let span = self.db.real_span_map(tok.file_id).span_for_range(tok.value.text_range());
        let Some(InMacroFile { file_id, value: mut mapped_tokens }) = self.with_ctx(|ctx| {
            Some(
                ctx.cache
                    .get_or_insert_expansion(ctx.db, include)
                    .map_range_down(span)?
                    .map(SmallVec::<[_; 2]>::from_iter),
            )
        }) else {
            return tok.into();
        };
        // If the span maps to several tokens, take the last; fall back to the original.
        mapped_tokens.pop().map_or_else(|| tok.into(), |(tok, _)| InFile::new(file_id.into(), tok))
    }
1001
    /// Maps a node down into attribute-macro expansions, returning every node
    /// of type `N` that covers the same text. Works by descending the node's
    /// first and last non-trivia tokens and re-finding the covering node in
    /// each expansion.
    pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
        let mut res = smallvec![];
        let tokens = (|| {
            let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
            let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
            Some((first, last))
        })();
        let (first, last) = match tokens {
            Some(it) => it,
            None => return res,
        };
        let file = self.find_file(node.syntax());

        if first == last {
            // Single-token node: descend the token and cast its smallest covering ancestors.
            self.descend_into_macros_all(
                InFile::new(file.file_id, first),
                false,
                &mut |InFile { value, .. }, _ctx| {
                    if let Some(node) = value
                        .parent_ancestors()
                        .take_while(|it| it.text_range() == value.text_range())
                        .find_map(N::cast)
                    {
                        res.push(node)
                    }
                },
            );
        } else {
            // Multi-token node: descend first and last tokens separately, then
            // pair them up positionally (both descents visit expansions in the
            // same order) and look for a node covering the joint range.
            let mut scratch: SmallVec<[_; 1]> = smallvec![];
            self.descend_into_macros_all(
                InFile::new(file.file_id, first),
                false,
                &mut |token, _ctx| scratch.push(token),
            );

            let mut scratch = scratch.into_iter();
            self.descend_into_macros_all(
                InFile::new(file.file_id, last),
                false,
                &mut |InFile { value: last, file_id: last_fid }, _ctx| {
                    if let Some(InFile { value: first, file_id: first_fid }) = scratch.next()
                        && first_fid == last_fid
                        && let Some(p) = first.parent()
                    {
                        let range = first.text_range().cover(last.text_range());
                        let node = find_root(&p)
                            .covering_element(range)
                            .ancestors()
                            .take_while(|it| it.text_range() == range)
                            .find_map(N::cast);
                        if let Some(node) = node {
                            res.push(node);
                        }
                    }
                },
            );
        }
        res
    }
1066
    /// Whether the token sits inside any macro invocation: a fn-like macro
    /// call, an attribute-macro-expanded item, or an ADT whose file carries
    /// derives.
    pub fn is_inside_macro_call(&self, token @ InFile { value, .. }: InFile<&SyntaxToken>) -> bool {
        value.parent_ancestors().any(|ancestor| {
            if ast::MacroCall::can_cast(ancestor.kind()) {
                return true;
            }

            let Some(item) = ast::Item::cast(ancestor) else {
                return false;
            };
            self.with_ctx(|ctx| {
                if ctx.item_to_macro_call(token.with_value(&item)).is_some() {
                    return true;
                }
                // ADTs may be inputs to derive macro expansions.
                let adt = match item {
                    ast::Item::Struct(it) => it.into(),
                    ast::Item::Enum(it) => it.into(),
                    ast::Item::Union(it) => it.into(),
                    _ => return false,
                };
                ctx.file_of_adt_has_derives(token.with_value(&adt))
            })
        })
    }
1094
    /// Calls `cb` for every macro-expanded counterpart of `token`.
    pub fn descend_into_macros_cb(
        &self,
        token: SyntaxToken,
        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext),
    ) {
        self.descend_into_macros_all(self.wrap_token_infile(token), false, &mut |t, ctx| {
            cb(t, ctx)
        });
    }

    /// Returns all macro-expanded counterparts of `token`, or the token
    /// itself when it maps into no expansion.
    pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
        let mut res = smallvec![];
        self.descend_into_macros_all(
            self.wrap_token_infile(token.clone()),
            false,
            &mut |t, _ctx| res.push(t.value),
        );
        if res.is_empty() {
            // Nothing descended: fall back to the input token.
            res.push(token);
        }
        res
    }

    /// Like [`Self::descend_into_macros`], but keeps file information and
    /// skips tokens whose `SyntaxContext` is opaque.
    pub fn descend_into_macros_no_opaque(
        &self,
        token: SyntaxToken,
        always_descend_into_derives: bool,
    ) -> SmallVec<[InFile<SyntaxToken>; 1]> {
        let mut res = smallvec![];
        let token = self.wrap_token_infile(token);
        self.descend_into_macros_all(token.clone(), always_descend_into_derives, &mut |t, ctx| {
            if !ctx.is_opaque(self.db) {
                res.push(t);
            }
        });
        if res.is_empty() {
            res.push(token);
        }
        res
    }

    /// Descends `token` into macros, allowing `cb` to stop the walk early by
    /// returning [`ControlFlow::Break`]; the broken value is returned.
    pub fn descend_into_macros_breakable<T>(
        &self,
        token: InFile<SyntaxToken>,
        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
    ) -> Option<T> {
        self.descend_into_macros_impl(token, false, &mut cb)
    }
1144
1145 pub fn descend_into_macros_exact(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
1148 let mut r = smallvec![];
1149 let text = token.text();
1150 let kind = token.kind();
1151
1152 self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }, ctx| {
1153 let mapped_kind = value.kind();
1154 let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
1155 let matches = (kind == mapped_kind || any_ident_match())
1156 && text == value.text()
1157 && !ctx.is_opaque(self.db);
1158 if matches {
1159 r.push(value);
1160 }
1161 });
1162 if r.is_empty() {
1163 r.push(token);
1164 }
1165 r
1166 }
1167
    /// Descends `token` into all macro-expanded counterparts that keep its
    /// kind (or where both tokens are identifiers) and its text, skipping
    /// opaque contexts; keeps the file id of each result. Falls back to the
    /// input token when nothing matches.
    pub fn descend_into_macros_exact_with_file(
        &self,
        token: SyntaxToken,
    ) -> SmallVec<[InFile<SyntaxToken>; 1]> {
        let mut r = smallvec![];
        let text = token.text();
        let kind = token.kind();

        self.descend_into_macros_cb(token.clone(), |InFile { value, file_id }, ctx| {
            let mapped_kind = value.kind();
            // Any two identifier-like kinds are considered equal.
            let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
            let matches = (kind == mapped_kind || any_ident_match())
                && text == value.text()
                && !ctx.is_opaque(self.db);
            if matches {
                r.push(InFile { value, file_id });
            }
        });
        if r.is_empty() {
            r.push(self.wrap_token_infile(token));
        }
        r
    }
1193
    /// Descends `token` into macros and returns the first counterpart with a
    /// matching kind (or both identifiers) and matching text; falls back to
    /// `token` itself. Unlike the `_exact` variants, opacity is not checked.
    pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken {
        let text = token.text();
        let kind = token.kind();
        self.descend_into_macros_breakable(
            self.wrap_token_infile(token.clone()),
            |InFile { value, file_id: _ }, _ctx| {
                let mapped_kind = value.kind();
                let any_ident_match =
                    || kind.is_any_identifier() && value.kind().is_any_identifier();
                let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
                if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
            },
        )
        .unwrap_or(token)
    }
1211
    /// Non-breaking adapter over [`Self::descend_into_macros_impl`]: calls `f`
    /// for every descended token and never short-circuits.
    fn descend_into_macros_all(
        &self,
        token: InFile<SyntaxToken>,
        always_descend_into_derives: bool,
        f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext),
    ) {
        self.descend_into_macros_impl(token, always_descend_into_derives, &mut |tok, ctx| {
            f(tok, ctx);
            CONTINUE_NO_BREAKS
        });
    }
1223
    /// Core of the `descend_into_macros_*` family.
    ///
    /// Maps `token` through every macro expansion it participates in
    /// (fn-like calls, attribute macros, derives, derive helpers, `include!`)
    /// using a work stack of (file, tokens) pairs. `f` is invoked for each
    /// token that is *not* consumed by a further expansion; returns early with
    /// `Some` when `f` breaks.
    fn descend_into_macros_impl<T>(
        &self,
        InFile { value: token, file_id }: InFile<SyntaxToken>,
        always_descend_into_derives: bool,
        f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
    ) -> Option<T> {
        let _p = tracing::info_span!("descend_into_macros_impl").entered();

        let db = self.db;
        let span = db.span_map(file_id).span_for_range(token.text_range());

        // Maps the token's span down into one expansion and pushes the mapped
        // tokens onto the work stack; yields `None` when nothing mapped.
        let process_expansion_for_token =
            |ctx: &mut SourceToDefCtx<'_, '_>, stack: &mut Vec<_>, macro_file| {
                let InMacroFile { file_id, value: mapped_tokens } = ctx
                    .cache
                    .get_or_insert_expansion(ctx.db, macro_file)
                    .map_range_down(span)?
                    .map(SmallVec::<[_; 2]>::from_iter);
                let res = mapped_tokens.is_empty().not().then_some(());
                stack.push((HirFileId::from(file_id), mapped_tokens));
                res
            };

        let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![];
        // Tokens in an `include!`d file belong to the expansion of the
        // including macro call, so seed the stack from that expansion instead
        // of the file itself.
        let include = file_id
            .file_id()
            .and_then(|file_id| self.s2d_cache.borrow_mut().get_or_insert_include_for(db, file_id));
        match include {
            Some(include) => {
                self.with_ctx(|ctx| process_expansion_for_token(ctx, &mut stack, include))?;
            }
            None => {
                stack.push((file_id, smallvec![(token, span.ctx)]));
            }
        }

        let mut m_cache = self.macro_call_cache.borrow_mut();

        // Drops queued tokens that fall inside `range`, i.e. tokens already
        // covered by an expansion we just pushed.
        let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
            tokens.retain(|(t, _): &mut (SyntaxToken, _)| !range.contains_range(t.text_range()))
        };

        while let Some((expansion, ref mut tokens)) = stack.pop() {
            // Reverse so `pop` yields the tokens in their original order.
            tokens.reverse();
            while let Some((token, ctx)) = tokens.pop() {
                let was_not_remapped = (|| {
                    // First: is the token part of an attribute-macro input?
                    let res = self.with_ctx(|ctx| {
                        token
                            .parent_ancestors()
                            .filter_map(ast::Item::cast)
                            .find_map(|item| {
                                // Items without any attribute cannot be attribute-macro targets.
                                item.attrs().next()?;
                                ctx.item_to_macro_call(InFile::new(expansion, &item))
                                    .zip(Some(item))
                            })
                            .map(|(call_id, item)| {
                                let item_range = item.syntax().text_range();
                                let loc = db.lookup_intern_macro_call(call_id);
                                let text_range = match loc.kind {
                                    hir_expand::MacroCallKind::Attr {
                                        censored_attr_ids: attr_ids,
                                        ..
                                    } => {
                                        // Only tokens from the invoking attribute onwards are
                                        // consumed by the attribute expansion.
                                        let (attr, _) = attr_ids
                                            .invoc_attr()
                                            .find_attr_range_with_source(db, loc.krate, &item);
                                        let start = attr.syntax().text_range().start();
                                        TextRange::new(start, item_range.end())
                                    }
                                    _ => item_range,
                                };
                                filter_duplicates(tokens, text_range);
                                process_expansion_for_token(ctx, &mut stack, call_id)
                            })
                    });

                    if let Some(res) = res {
                        return res;
                    }

                    // Optionally descend into every derive expansion of an
                    // enclosing ADT, regardless of where the token sits.
                    if always_descend_into_derives {
                        let res = self.with_ctx(|ctx| {
                            let (derives, adt) = token
                                .parent_ancestors()
                                .filter_map(ast::Adt::cast)
                                .find_map(|adt| {
                                    Some((
                                        ctx.derive_macro_calls(InFile::new(expansion, &adt))?
                                            .map(|(a, b, c)| (a, b, c.to_owned()))
                                            .collect::<SmallVec<[_; 2]>>(),
                                        adt,
                                    ))
                                })?;
                            for (_, derive_attr, derives) in derives {
                                process_expansion_for_token(ctx, &mut stack, derive_attr);
                                for derive in derives.into_iter().flatten() {
                                    let Either::Left(derive) = derive else { continue };
                                    process_expansion_for_token(ctx, &mut stack, derive);
                                }
                            }
                            filter_duplicates(tokens, adt.syntax().text_range());
                            Some(())
                        });
                        if let Some(()) = res {
                            return None;
                        }
                    }
                    // Then: fn-like macro calls and derive/derive-helper
                    // attributes, recognized by the outermost token-tree or
                    // meta ancestor of the token.
                    let tt = token
                        .parent_ancestors()
                        .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
                        .last()?;

                    match tt {
                        Either::Left(tt) => {
                            let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
                            // The delimiter tokens themselves are not mapped
                            // into the expansion.
                            if tt.left_delimiter_token().map_or(false, |it| it == token) {
                                return None;
                            }
                            if tt.right_delimiter_token().map_or(false, |it| it == token) {
                                return None;
                            }
                            let mcall = InFile::new(expansion, macro_call);
                            // Cache macro-call -> def lookups across tokens.
                            let file_id = match m_cache.get(&mcall) {
                                Some(&it) => it,
                                None => {
                                    let it = ast::MacroCall::to_def(self, mcall.as_ref())?;
                                    m_cache.insert(mcall, it);
                                    it
                                }
                            };
                            let text_range = tt.syntax().text_range();
                            filter_duplicates(tokens, text_range);

                            self.with_ctx(|ctx| {
                                process_expansion_for_token(ctx, &mut stack, file_id).or(file_id
                                    .eager_arg(db)
                                    .and_then(|arg| {
                                        // Also try the eager macro's argument expansion.
                                        process_expansion_for_token(ctx, &mut stack, arg)
                                    }))
                            })
                        }
                        Either::Right(_) if always_descend_into_derives => None,
                        Either::Right(meta) => {
                            // The token is inside an attribute: it may belong
                            // to a `#[derive(...)]` invocation or to a derive
                            // helper attribute.
                            let attr = meta.parent_attr()?;
                            let adt = match attr.syntax().parent().and_then(ast::Adt::cast) {
                                Some(adt) => {
                                    // Attribute is directly on an ADT: try to
                                    // resolve it as a derive invocation.
                                    let res = self.with_ctx(|ctx| {
                                        let derive_call = ctx
                                            .attr_to_derive_macro_call(
                                                InFile::new(expansion, &adt),
                                                InFile::new(expansion, meta.clone()),
                                            )?
                                            .1;

                                        let text_range = attr.syntax().text_range();
                                        tokens.retain(|(t, _)| {
                                            !text_range.contains_range(t.text_range())
                                        });
                                        Some(process_expansion_for_token(
                                            ctx,
                                            &mut stack,
                                            derive_call,
                                        ))
                                    });
                                    if let Some(res) = res {
                                        return res;
                                    }
                                    Some(adt)
                                }
                                None => {
                                    // Attribute nested deeper in an item: find
                                    // the enclosing ADT for derive-helper
                                    // resolution.
                                    attr.syntax().ancestors().find_map(ast::Item::cast).and_then(
                                        |it| match it {
                                            ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
                                            ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
                                            ast::Item::Union(it) => Some(ast::Adt::Union(it)),
                                            _ => None,
                                        },
                                    )
                                }
                            }?;
                            let attr_name =
                                attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
                            let resolver = &token
                                .parent()
                                .and_then(|parent| {
                                    self.analyze_impl(InFile::new(expansion, &parent), None, false)
                                })?
                                .resolver;
                            let id = db.ast_id_map(expansion).ast_id(&adt);
                            let helpers = resolver
                                .def_map()
                                .derive_helpers_in_scope(InFile::new(expansion, id))?;

                            if !helpers.is_empty() {
                                let text_range = attr.syntax().text_range();
                                filter_duplicates(tokens, text_range);
                            }

                            let mut res = None;
                            self.with_ctx(|ctx| {
                                // Descend into every derive that declares this
                                // helper attribute.
                                for (.., derive) in
                                    helpers.iter().filter(|(helper, ..)| *helper == attr_name)
                                {
                                    let Either::Left(derive) = *derive else { continue };
                                    res = res
                                        .or(process_expansion_for_token(ctx, &mut stack, derive));
                                }
                                res
                            })
                        }
                    }
                })()
                .is_none();
                // Tokens that did not map into any further expansion are
                // handed to the caller.
                if was_not_remapped
                    && let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx)
                {
                    return Some(b);
                }
            }
        }
        None
    }
1514
    /// Returns one ancestor stream per macro descendant of the tokens at
    /// `offset`, merged so that nodes with smaller text ranges come first.
    fn descend_node_at_offset(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
        node.token_at_offset(offset)
            .map(move |token| self.descend_into_macros_exact(token))
            .map(|descendants| {
                descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
            })
            // Merge the streams, preferring the more specific (shorter) nodes.
            .kmerge_by(|left, right| {
                left.clone()
                    .map(|node| node.text_range().len())
                    .lt(right.clone().map(|node| node.text_range().len()))
            })
    }
1537
    /// Maps `node` (possibly inside a macro expansion) to a range in the real
    /// file it originates from.
    pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
        let node = self.find_file(node);
        node.original_file_range_rooted(self.db)
    }

    /// Like [`Self::original_range`], but returns `None` when the node does
    /// not map exactly into root (non-macro) context.
    pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
        let node = self.find_file(node);
        node.original_file_range_opt(self.db).filter(|(_, ctx)| ctx.is_root()).map(TupleExt::head)
    }

    /// Maps an AST node out of macro expansions into its originating real
    /// file, caching that file's parse tree.
    pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
        self.wrap_node_infile(node).original_ast_node_rooted(self.db).map(
            |InRealFile { file_id, value }| {
                self.cache(find_root(value.syntax()), file_id.into());
                value
            },
        )
    }

    /// Untyped variant of [`Self::original_ast_node`].
    pub fn original_syntax_node_rooted(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
        let InFile { file_id, .. } = self.find_file(node);
        InFile::new(file_id, node).original_syntax_node_rooted(self.db).map(
            |InRealFile { file_id, value }| {
                self.cache(find_root(&value), file_id.into());
                value
            },
        )
    }
1574
    /// Computes the file range at which to display a diagnostic for `src`,
    /// mapping out of macro files to the originating real file.
    pub fn diagnostics_display_range(
        &self,
        src: InFile<SyntaxNodePtr>,
    ) -> FileRangeWrapper<FileId> {
        let root = self.parse_or_expand(src.file_id);
        let node = src.map(|it| it.to_node(&root));
        let FileRange { file_id, range } = node.as_ref().original_file_range_rooted(self.db);
        FileRangeWrapper { file_id: file_id.file_id(self.db), range }
    }

    /// Range-based variant of [`Self::diagnostics_display_range`].
    pub fn diagnostics_display_range_for_range(
        &self,
        src: InFile<TextRange>,
    ) -> FileRangeWrapper<FileId> {
        let FileRange { file_id, range } = src.original_node_file_range_rooted(self.db);
        FileRangeWrapper { file_id: file_id.file_id(self.db), range }
    }
1592
    /// Ancestors of `token`'s parent node, continuing through macro expansions.
    fn token_ancestors_with_macros(
        &self,
        token: SyntaxToken,
    ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
        token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
    }

    /// Iterates the ancestors of `node`; when the root of a macro expansion
    /// is reached, continues with the ancestors of the macro call's argument.
    pub fn ancestors_with_macros(
        &self,
        node: SyntaxNode,
    ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
        let node = self.find_file(&node);
        self.ancestors_with_macros_file(node.cloned()).map(|it| it.value)
    }

    /// Like [`Self::ancestors_with_macros`], but keeps file ids.
    pub fn ancestors_with_macros_file(
        &self,
        node: InFile<SyntaxNode>,
    ) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
        iter::successors(Some(node), move |&InFile { file_id, ref value }| match value.parent() {
            Some(parent) => Some(InFile::new(file_id, parent)),
            None => {
                // Crossed an expansion root: jump to the macro call's input.
                let macro_file = file_id.macro_file()?;

                self.with_ctx(|ctx| {
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    expansion_info.arg().map(|node| node?.parent()).transpose()
                })
            }
        })
    }

    /// Ancestor streams for all tokens at `offset`, merged smallest-range first.
    pub fn ancestors_at_offset_with_macros(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = SyntaxNode> + '_ {
        node.token_at_offset(offset)
            .map(|token| self.token_ancestors_with_macros(token))
            .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
    }
1637
1638 pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
1639 let text = lifetime.text();
1640 let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
1641 let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
1642 gpl.lifetime_params()
1643 .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
1644 })?;
1645 let src = self.wrap_node_infile(lifetime_param);
1646 ToDef::to_def(self, src.as_ref())
1647 }
1648
    /// Resolves a label reference to its definition.
    pub fn resolve_label(&self, label: &ast::Lifetime) -> Option<Label> {
        let src = self.wrap_node_infile(label.clone());
        let (parent, label_id) = self.with_ctx(|ctx| ctx.label_ref_to_def(src.as_ref()))?;
        Some(Label { parent, label_id })
    }

    /// Resolves a syntactic type to its semantic [`Type`].
    pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type<'db>> {
        let analyze = self.analyze(ty.syntax())?;
        analyze.type_of_type(self.db, ty)
    }

    /// Resolves `path` to a trait, provided it is a trait path in type
    /// position (its parent node must be a type).
    pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
        let parent_ty = path.syntax().parent().and_then(ast::Type::cast)?;
        let analyze = self.analyze(path.syntax())?;
        let ty = analyze.store_sm()?.node_type(InFile::new(analyze.file_id, &parent_ty))?;
        let path = match &analyze.store()?.types[ty] {
            hir_def::type_ref::TypeRef::Path(path) => path,
            _ => return None,
        };
        // Only a type-namespace resolution that hits a trait counts.
        match analyze.resolver.resolve_path_in_type_ns_fully(self.db, path)? {
            TypeNs::TraitId(trait_id) => Some(trait_id.into()),
            _ => None,
        }
    }
1673
    /// Returns the chain of adjustments (derefs, borrows, pointer casts, …)
    /// recorded for `expr`, with a source and target type for each step.
    pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment<'db>>> {
        // Convert the type-system mutability into the AST-level one.
        let mutability = |m| match m {
            hir_ty::next_solver::Mutability::Not => Mutability::Shared,
            hir_ty::next_solver::Mutability::Mut => Mutability::Mut,
        };

        let analyzer = self.analyze(expr.syntax())?;

        let (mut source_ty, _) = analyzer.type_of_expr(self.db, expr)?;

        analyzer.expr_adjustments(expr).map(|it| {
            it.iter()
                .map(|adjust| {
                    let target = Type::new_with_resolver(
                        self.db,
                        &analyzer.resolver,
                        adjust.target.as_ref(),
                    );
                    let kind = match adjust.kind {
                        hir_ty::Adjust::NeverToAny => Adjust::NeverToAny,
                        hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => {
                            Adjust::Deref(Some(OverloadedDeref(
                                m.map(mutability).unwrap_or(Mutability::Shared),
                            )))
                        }
                        hir_ty::Adjust::Deref(None) => Adjust::Deref(None),
                        hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
                            Adjust::Borrow(AutoBorrow::RawPtr(mutability(m)))
                        }
                        hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => {
                            Adjust::Borrow(AutoBorrow::Ref(mutability(m.into())))
                        }
                        hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc),
                    };

                    // Each step's source type is the previous step's target.
                    let source = mem::replace(&mut source_ty, target.clone());

                    Adjustment { source, target, kind }
                })
                .collect()
        })
    }
1719
    /// Type of an expression, both before (`original`) and after (`adjusted`)
    /// adjustments.
    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo<'db>> {
        self.analyze(expr.syntax())?
            .type_of_expr(self.db, expr)
            .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
    }

    /// Type of a pattern, both before and after adjustments.
    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo<'db>> {
        self.analyze(pat.syntax())?
            .type_of_pat(self.db, pat)
            .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
    }

    /// Type of the binding introduced by an identifier pattern.
    pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type<'db>> {
        self.analyze(pat.syntax())?.type_of_binding_in_pat(self.db, pat)
    }

    /// Type of a `self` parameter.
    pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type<'db>> {
        self.analyze(param.syntax())?.type_of_self(self.db, param)
    }

    /// Adjustment types recorded for a pattern; empty when there are none or
    /// no analyzer is available.
    pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type<'db>; 1]> {
        self.analyze(pat.syntax())
            .and_then(|it| it.pattern_adjustments(self.db, pat))
            .unwrap_or_default()
    }

    /// Binding mode inferred for an identifier pattern.
    pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
        self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
    }

    /// Resolves a call-like expression to a [`Callable`] describing its signature.
    pub fn resolve_expr_as_callable(&self, call: &ast::Expr) -> Option<Callable<'db>> {
        self.analyze(call.syntax())?.resolve_expr_as_callable(self.db, call)
    }
1756
    /// Resolves a method call to the function being invoked.
    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
        self.analyze(call.syntax())?.resolve_method_call(self.db, call)
    }

    /// Resolves a method call, falling back to a field of the same name, and
    /// also returns the generic substitution when known.
    pub fn resolve_method_call_fallback(
        &self,
        call: &ast::MethodCallExpr,
    ) -> Option<(Either<Function, Field>, Option<GenericSubstitution<'db>>)> {
        self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
    }
1768
    /// Resolves the impl method that trait method `func` of `trait_` dispatches
    /// to in environment `env`, given the trait's generic arguments.
    ///
    /// # Panics
    /// Panics when `subst` does not provide exactly one type per generic
    /// parameter of the trait, or when a parameter is not a type parameter.
    pub fn resolve_trait_impl_method(
        &self,
        env: Type<'db>,
        trait_: Trait,
        func: Function,
        subst: impl IntoIterator<Item = Type<'db>>,
    ) -> Option<Function> {
        // Only plain trait functions are re-dispatched; other function kinds
        // are returned unchanged.
        let AnyFunctionId::FunctionId(func) = func.id else { return Some(func) };
        let interner = DbInterner::new_no_crate(self.db);
        let mut subst = subst.into_iter();
        let substs =
            hir_ty::next_solver::GenericArgs::for_item(interner, trait_.id.into(), |_, id, _| {
                assert!(matches!(id, hir_def::GenericParamId::TypeParamId(_)), "expected a type");
                subst.next().expect("too few subst").ty.into()
            });
        assert!(subst.next().is_none(), "too many subst");
        Some(match self.db.lookup_impl_method(env.env, func, substs).0 {
            Either::Left(it) => it.into(),
            Either::Right((impl_, method)) => {
                // Builtin-derive impls have synthetic methods.
                Function { id: AnyFunctionId::BuiltinDeriveImplMethod { method, impl_ } }
            }
        })
    }
1794
    /// Resolves the struct a range pattern corresponds to.
    fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<StructId> {
        self.analyze(range_pat.syntax())?.resolve_range_pat(self.db, range_pat)
    }

    /// Resolves the struct a range expression corresponds to.
    fn resolve_range_expr(&self, range_expr: &ast::RangeExpr) -> Option<StructId> {
        self.analyze(range_expr.syntax())?.resolve_range_expr(self.db, range_expr)
    }

    /// Resolves the `poll` function an `.await` expression lowers to.
    fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
        self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr)
    }

    /// Resolves the operator function behind a prefix expression.
    fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
        self.analyze(prefix_expr.syntax())?.resolve_prefix_expr(self.db, prefix_expr)
    }

    /// Resolves the operator function behind an index expression.
    fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
        self.analyze(index_expr.syntax())?.resolve_index_expr(self.db, index_expr)
    }

    /// Resolves the operator function behind a binary expression.
    fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
        self.analyze(bin_expr.syntax())?.resolve_bin_expr(self.db, bin_expr)
    }

    /// Resolves the function a `?` expression dispatches to.
    fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
        self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr)
    }
1822
    /// Returns the type that the scope enclosing `try_expr` produces when `?`
    /// propagates an error: the type of the innermost `try` block, the return
    /// type of the enclosing closure, or the enclosing function's return type.
    pub fn try_expr_returned_type(&self, try_expr: &ast::TryExpr) -> Option<Type<'db>> {
        self.ancestors_with_macros(try_expr.syntax().clone()).find_map(|parent| {
            if let Some(try_block) = ast::BlockExpr::cast(parent.clone())
                && try_block.try_block_modifier().is_some()
            {
                Some(self.type_of_expr(&try_block.into())?.original)
            } else if let Some(closure) = ast::ClosureExpr::cast(parent.clone()) {
                Some(
                    self.type_of_expr(&closure.into())?
                        .original
                        .as_callable(self.db)?
                        .return_type(),
                )
            } else if let Some(function) = ast::Fn::cast(parent) {
                Some(self.to_def(&function)?.ret_type(self.db))
            } else {
                None
            }
        })
    }
1844
    /// Resolves a method call to a [`Callable`] describing its signature.
    pub fn resolve_method_call_as_callable(
        &self,
        call: &ast::MethodCallExpr,
    ) -> Option<Callable<'db>> {
        self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
    }

    /// Resolves a field access to a named or tuple field.
    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Either<Field, TupleField>> {
        self.analyze(field.syntax())?.resolve_field(field)
    }

    /// Resolves a field access, falling back to a method of the same name, and
    /// also returns the generic substitution when known.
    pub fn resolve_field_fallback(
        &self,
        field: &ast::FieldExpr,
    ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution<'db>>)>
    {
        self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
    }

    /// Resolves a record-literal field to its definition, an optionally
    /// associated local, and the field's type.
    pub fn resolve_record_field(
        &self,
        field: &ast::RecordExprField,
    ) -> Option<(Field, Option<Local>, Type<'db>)> {
        self.resolve_record_field_with_substitution(field)
            .map(|(field, local, ty, _)| (field, local, ty))
    }

    /// Like [`Self::resolve_record_field`], but also returns the generic
    /// substitution.
    pub fn resolve_record_field_with_substitution(
        &self,
        field: &ast::RecordExprField,
    ) -> Option<(Field, Option<Local>, Type<'db>, GenericSubstitution<'db>)> {
        self.analyze(field.syntax())?.resolve_record_field(self.db, field)
    }

    /// Resolves a record-pattern field to its definition and type.
    pub fn resolve_record_pat_field(
        &self,
        field: &ast::RecordPatField,
    ) -> Option<(Field, Type<'db>)> {
        self.resolve_record_pat_field_with_subst(field).map(|(field, ty, _)| (field, ty))
    }

    /// Like [`Self::resolve_record_pat_field`], but also returns the generic
    /// substitution.
    pub fn resolve_record_pat_field_with_subst(
        &self,
        field: &ast::RecordPatField,
    ) -> Option<(Field, Type<'db>, GenericSubstitution<'db>)> {
        self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
    }
1894
    /// Resolves a macro call to the macro being invoked.
    pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
        // Attach the file id before delegating.
        let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
        self.resolve_macro_call2(macro_call)
    }

    /// [`Self::resolve_macro_call`] for a node whose file is already known.
    pub fn resolve_macro_call2(&self, macro_call: InFile<&ast::MacroCall>) -> Option<Macro> {
        self.to_def2(macro_call)
            .and_then(|call| self.with_ctx(|ctx| macro_call_to_macro_id(ctx, call)))
            .map(Into::into)
    }

    /// Whether the macro call resolves to a procedural macro.
    pub fn is_proc_macro_call(&self, macro_call: InFile<&ast::MacroCall>) -> bool {
        self.resolve_macro_call2(macro_call)
            .is_some_and(|m| matches!(m.id, MacroId::ProcMacroId(..)))
    }

    /// Returns the index of the macro arm that the call matched on expansion.
    pub fn resolve_macro_call_arm(&self, macro_call: &ast::MacroCall) -> Option<u32> {
        let file_id = self.to_def(macro_call)?;
        self.db.parse_macro_expansion(file_id).value.1.matched_arm
    }
1916
    /// Collects the source nodes of all unsafe operations (as reported by
    /// `unsafe_operations`) inside `def`'s expression store.
    pub fn get_unsafe_ops(&self, def: ExpressionStoreOwner) -> FxHashSet<ExprOrPatSource> {
        let Ok(def) = ExpressionStoreOwnerId::try_from(def) else { return Default::default() };
        let (body, source_map) = ExpressionStore::with_source_map(self.db, def);
        let infer = InferenceResult::of(self.db, def);
        let mut res = FxHashSet::default();
        for root in body.expr_roots() {
            unsafe_operations(self.db, infer, def, body, root, &mut |node, _| {
                // Nodes without a source mapping are silently skipped.
                if let Ok(node) = source_map.expr_or_pat_syntax(node) {
                    res.insert(node);
                }
            });
        }
        res
    }
1931
    /// Collects the source nodes of the unsafe operations found under the
    /// given `unsafe` block expression.
    pub fn get_unsafe_ops_for_unsafe_block(&self, block: ast::BlockExpr) -> Vec<ExprOrPatSource> {
        // Caller contract: the block must actually be an `unsafe` block.
        always!(block.unsafe_token().is_some());
        let Some(sa) = self.analyze(block.syntax()) else { return vec![] };
        let Some((def, store, sm, Some(infer))) = sa.def() else { return vec![] };
        let block = self.wrap_node_infile(ast::Expr::from(block));
        let Some(ExprOrPatId::ExprId(block)) = sm.node_expr(block.as_ref()) else {
            return Vec::new();
        };
        let mut res = Vec::default();
        unsafe_operations(self.db, infer, def, store, block, &mut |node, _| {
            if let Ok(node) = sm.expr_or_pat_syntax(node) {
                res.push(node);
            }
        });
        res
    }
1948
    /// Whether this macro call is considered unsafe: asm-like macros always
    /// are; otherwise defers to `is_unsafe_macro_call_expr` on the enclosing
    /// macro expression.
    pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
        let Some(mac) = self.resolve_macro_call(macro_call) else { return false };
        if mac.is_asm_like(self.db) {
            return true;
        }

        let Some(sa) = self.analyze(macro_call.syntax()) else { return false };
        let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
        // Only calls in expression position can be checked further.
        match macro_call.map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast)).transpose() {
            Some(it) => sa.is_unsafe_macro_call_expr(self.db, it.as_ref()),
            None => false,
        }
    }
1962
1963 pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
1964 let item_in_file = self.wrap_node_infile(item.clone());
1965 let id = self.with_ctx(|ctx| {
1966 let macro_call_id = ctx.item_to_macro_call(item_in_file.as_ref())?;
1967 macro_call_to_macro_id(ctx, macro_call_id)
1968 })?;
1969 Some(Macro { id })
1970 }
1971
    /// Resolves `path` to whatever definition it names.
    pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
        self.resolve_path_with_subst(path).map(|(it, _)| it)
    }

    /// Resolves `path` in every namespace it is valid in.
    pub fn resolve_path_per_ns(&self, path: &ast::Path) -> Option<PathResolutionPerNs> {
        self.analyze(path.syntax())?.resolve_hir_path_per_ns(self.db, path)
    }

    /// Like [`Self::resolve_path`], but also returns the generic substitution
    /// at this use site when known.
    pub fn resolve_path_with_subst(
        &self,
        path: &ast::Path,
    ) -> Option<(PathResolution, Option<GenericSubstitution<'db>>)> {
        self.analyze(path.syntax())?.resolve_path(self.db, path)
    }

    /// Resolves `name` to a type parameter via the analyzer's
    /// `resolve_use_type_arg`.
    pub fn resolve_use_type_arg(&self, name: &ast::NameRef) -> Option<TypeParam> {
        self.analyze(name.syntax())?.resolve_use_type_arg(name)
    }

    /// Resolves a field name referenced inside an `offset_of!` argument.
    pub fn resolve_offset_of_field(
        &self,
        name_ref: &ast::NameRef,
    ) -> Option<(Either<EnumVariant, Field>, GenericSubstitution<'db>)> {
        // NOTE(review): uses the no-infer analyzer — presumably this
        // resolution does not need inference results; confirm before relying on it.
        self.analyze_no_infer(name_ref.syntax())?.resolve_offset_of_field(self.db, name_ref)
    }

    /// Resolves a [`ModPath`] among the items visible at `scope`.
    pub fn resolve_mod_path(
        &self,
        scope: &SyntaxNode,
        path: &ModPath,
    ) -> Option<impl Iterator<Item = ItemInNs>> {
        let analyze = self.analyze(scope)?;
        let items = analyze.resolver.resolve_module_path_in_items(self.db, path);
        Some(items.iter_items().map(|(item, _)| item.into()))
    }

    /// Resolves a record literal to the variant (struct/enum variant/union)
    /// it constructs.
    fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
        self.analyze(record_lit.syntax())?.resolve_variant(record_lit)
    }

    /// Resolves an identifier pattern to the constant it matches against,
    /// when it names a const rather than introducing a binding.
    pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
        self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
    }
2015
    /// Fields of the record's type that the literal does not initialize.
    pub fn record_literal_missing_fields(
        &self,
        literal: &ast::RecordExpr,
    ) -> Vec<(Field, Type<'db>)> {
        self.analyze(literal.syntax())
            .and_then(|it| it.record_literal_missing_fields(self.db, literal))
            .unwrap_or_default()
    }

    /// Fields the record literal does initialize, with their types.
    pub fn record_literal_matched_fields(
        &self,
        literal: &ast::RecordExpr,
    ) -> Vec<(Field, Type<'db>)> {
        self.analyze(literal.syntax())
            .and_then(|it| it.record_literal_matched_fields(self.db, literal))
            .unwrap_or_default()
    }

    /// Fields of the record's type that the pattern does not mention.
    pub fn record_pattern_missing_fields(
        &self,
        pattern: &ast::RecordPat,
    ) -> Vec<(Field, Type<'db>)> {
        self.analyze(pattern.syntax())
            .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
            .unwrap_or_default()
    }

    /// Fields the record pattern does mention, with their types.
    pub fn record_pattern_matched_fields(
        &self,
        pattern: &ast::RecordPat,
    ) -> Vec<(Field, Type<'db>)> {
        self.analyze(pattern.syntax())
            .and_then(|it| it.record_pattern_matched_fields(self.db, pattern))
            .unwrap_or_default()
    }
2051
    /// Runs `f` with a source-to-def context backed by this instance's cache.
    fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
        let mut ctx = SourceToDefCtx { db: self.db, cache: &mut self.s2d_cache.borrow_mut() };
        f(&mut ctx)
    }

    /// Maps a syntax node to its HIR definition.
    pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
        let src = self.find_file(src.syntax()).with_value(src);
        T::to_def(self, src)
    }

    /// [`Self::to_def`] for a node whose file is already known.
    pub fn to_def2<T: ToDef>(&self, src: InFile<&T>) -> Option<T::Def> {
        T::to_def(self, src)
    }

    /// Module definitions associated with `file`.
    fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
        self.with_ctx(|ctx| ctx.file_to_def(file).to_owned()).into_iter().map(Module::from)
    }

    /// [`Self::file_to_module_defs`] for a `HirFileId`, first mapped back to
    /// its originating real file (respecting `include!`s).
    fn hir_file_to_module_defs(&self, file: HirFileId) -> impl Iterator<Item = Module> {
        self.file_to_module_defs(file.original_file_respecting_includes(self.db).file_id(self.db))
    }
2074
    /// Name-resolution scope at `node`.
    pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
        self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
            db: self.db,
            file_id,
            resolver,
        })
    }

    /// Name-resolution scope at `offset` inside `node`.
    pub fn scope_at_offset(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<SemanticsScope<'db>> {
        self.analyze_with_offset_no_infer(node, offset).map(
            |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
                db: self.db,
                file_id,
                resolver,
            },
        )
    }
2096
    /// Returns the source node of `def`, caching the parsed root so later
    /// lookups on nodes from that file succeed.
    pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>> {
        let res = def.source(self.db)?;
        self.cache(find_root(res.value.syntax()), res.file_id);
        Some(res)
    }

    /// Like [`Self::source`], but also returns the definition's text range and
    /// tolerates a missing AST node.
    pub fn source_with_range<Def: HasSource>(
        &self,
        def: Def,
    ) -> Option<InFile<(TextRange, Option<Def::Ast>)>> {
        let res = def.source_with_range(self.db)?;
        // Make sure the file is parsed/expanded (and thereby cached).
        self.parse_or_expand(res.file_id);
        Some(res)
    }

    /// The expression-store owner containing `node`, if its container has one.
    pub fn store_owner_for(&self, node: InFile<&SyntaxNode>) -> Option<ExpressionStoreOwner> {
        let container = self.with_ctx(|ctx| ctx.find_container(node))?;
        container.as_expression_store_owner().map(|id| id.into())
    }
2118
    /// Builds a [`SourceAnalyzer`] for `node`, with inference results.
    fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer<'db>> {
        let node = self.find_file(node);
        self.analyze_impl(node, None, true)
    }

    /// Builds a [`SourceAnalyzer`] for `node` without inference results;
    /// cheaper when only name resolution is needed.
    fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer<'db>> {
        let node = self.find_file(node);
        self.analyze_impl(node, None, false)
    }

    /// Like [`Self::analyze_no_infer`], but targets a specific `offset`
    /// inside `node`.
    fn analyze_with_offset_no_infer(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<SourceAnalyzer<'db>> {
        let node = self.find_file(node);
        self.analyze_impl(node, Some(offset), false)
    }
2139
2140 fn analyze_impl(
2141 &self,
2142 node: InFile<&SyntaxNode>,
2143 offset: Option<TextSize>,
2144 infer: bool,
2146 ) -> Option<SourceAnalyzer<'db>> {
2147 let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered();
2148
2149 let container = self.with_ctx(|ctx| ctx.find_container(node))?;
2150
2151 let resolver = match container {
2152 ChildContainer::DefWithBodyId(def) => {
2153 return Some(if infer {
2154 SourceAnalyzer::new_for_body(self.db, def, node, offset)
2155 } else {
2156 SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
2157 });
2158 }
2159 ChildContainer::VariantId(def) => {
2160 return Some(SourceAnalyzer::new_variant_body(self.db, def, node, offset, infer));
2161 }
2162 ChildContainer::TraitId(it) => {
2163 return Some(if infer {
2164 SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset)
2165 } else {
2166 SourceAnalyzer::new_generic_def_no_infer(self.db, it.into(), node, offset)
2167 });
2168 }
2169 ChildContainer::ImplId(it) => {
2170 return Some(if infer {
2171 SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset)
2172 } else {
2173 SourceAnalyzer::new_generic_def_no_infer(self.db, it.into(), node, offset)
2174 });
2175 }
2176 ChildContainer::EnumId(it) => {
2177 return Some(if infer {
2178 SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset)
2179 } else {
2180 SourceAnalyzer::new_generic_def_no_infer(self.db, it.into(), node, offset)
2181 });
2182 }
2183 ChildContainer::GenericDefId(it) => {
2184 return Some(if infer {
2185 SourceAnalyzer::new_generic_def(self.db, it, node, offset)
2186 } else {
2187 SourceAnalyzer::new_generic_def_no_infer(self.db, it, node, offset)
2188 });
2189 }
2190 ChildContainer::ModuleId(it) => it.resolver(self.db),
2191 };
2192 Some(SourceAnalyzer::new_for_resolver(resolver, node))
2193 }
2194
2195 fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
2196 SourceToDefCache::cache(
2197 &mut self.s2d_cache.borrow_mut().root_to_file_cache,
2198 root_node,
2199 file_id,
2200 );
2201 }
2202
    /// Asserts that `node` comes from a tree known to this `Semantics`;
    /// `find_file` panics (with a diagnostic message) when it does not.
    pub fn assert_contains_node(&self, node: &SyntaxNode) {
        self.find_file(node);
    }
2206
2207 fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
2208 let cache = self.s2d_cache.borrow();
2209 cache.root_to_file_cache.get(root_node).copied()
2210 }
2211
2212 fn wrap_node_infile<N: AstNode>(&self, node: N) -> InFile<N> {
2213 let InFile { file_id, .. } = self.find_file(node.syntax());
2214 InFile::new(file_id, node)
2215 }
2216
2217 fn wrap_token_infile(&self, token: SyntaxToken) -> InFile<SyntaxToken> {
2218 let InFile { file_id, .. } = self.find_file(&token.parent().unwrap());
2219 InFile::new(file_id, token)
2220 }
2221
2222 fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
2224 let root_node = find_root(node);
2225 let file_id = self.lookup(&root_node).unwrap_or_else(|| {
2226 panic!(
2227 "\n\nFailed to lookup {:?} in this Semantics.\n\
2228 Make sure to only query nodes derived from this instance of Semantics.\n\
2229 root node: {:?}\n\
2230 known nodes: {}\n\n",
2231 node,
2232 root_node,
2233 self.s2d_cache
2234 .borrow()
2235 .root_to_file_cache
2236 .keys()
2237 .map(|it| format!("{it:?}"))
2238 .collect::<Vec<_>>()
2239 .join(", ")
2240 )
2241 });
2242 InFile::new(file_id, node)
2243 }
2244
    /// Returns `true` when `expr` sits in an unsafe context: either its
    /// enclosing function is an `unsafe fn`, or some ancestor expression
    /// between `expr` and the enclosing body is an `unsafe { .. }` block.
    pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
        // Find the nearest enclosing item — or enum variant, which can own a
        // body too (DefWithBodyId::VariantId below).
        let Some(enclosing_item) =
            expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast)
        else {
            return false;
        };

        // Map the enclosing item to the body definition owning `expr`.
        let def = match &enclosing_item {
            // An `unsafe fn` makes the entire body an unsafe context.
            Either::Left(ast::Item::Fn(it)) if it.unsafe_token().is_some() => return true,
            Either::Left(ast::Item::Fn(it)) => (|| match self.to_def(it)?.id {
                AnyFunctionId::FunctionId(id) => Some(DefWithBodyId::FunctionId(id)),
                // Builtin-derive methods carry no inspectable body here.
                AnyFunctionId::BuiltinDeriveImplMethod { .. } => None,
            })(),
            Either::Left(ast::Item::Const(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::ConstId)
            }
            Either::Left(ast::Item::Static(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::StaticId)
            }
            Either::Left(_) => None,
            Either::Right(it) => self.to_def(it).map(<_>::into).map(DefWithBodyId::VariantId),
        };
        let Some(def) = def else { return false };
        let enclosing_node = enclosing_item.as_ref().either(|i| i.syntax(), |v| v.syntax());

        let (body, source_map) = Body::with_source_map(self.db, def);

        let file_id = self.find_file(expr.syntax()).file_id;

        // Walk from `expr` up towards the enclosing item; stop early with
        // `true` if any ancestor expression lowers to `Expr::Unsafe`.
        let Some(mut parent) = expr.syntax().parent() else { return false };
        loop {
            if &parent == enclosing_node {
                break false;
            }

            if let Some(parent) = ast::Expr::cast(parent.clone())
                && let Some(ExprOrPatId::ExprId(expr_id)) =
                    source_map.node_expr(InFile { file_id, value: &parent })
                && let Expr::Unsafe { .. } = body[expr_id]
            {
                break true;
            }

            let Some(parent_) = parent.parent() else { break false };
            parent = parent_;
        }
    }
2293
    /// If `impl_` was produced by a `#[derive(..)]` expansion, returns the
    /// ADT the derive was applied to; otherwise `None`.
    pub fn impl_generated_from_derive(&self, impl_: Impl) -> Option<Adt> {
        let id = match impl_.id {
            AnyImplId::ImplId(id) => id,
            // Builtin derives record their target ADT in their location.
            AnyImplId::BuiltinDeriveImplId(id) => return Some(id.loc(self.db).adt.into()),
        };
        let source = hir_def::src::HasSource::ast_ptr(&id.loc(self.db), self.db);
        let mut file_id = source.file_id;
        // Climb the chain of macro expansions containing the impl until we
        // reach a derive expansion (whose ast_id points at the ADT). Hitting
        // a real file first means the impl was hand-written: `macro_file()`
        // returns `None` and we bail out.
        let adt_ast_id = loop {
            let macro_call = file_id.macro_file()?;
            match macro_call.loc(self.db).kind {
                hir_expand::MacroCallKind::Derive { ast_id, .. } => break ast_id,
                hir_expand::MacroCallKind::FnLike { ast_id, .. } => file_id = ast_id.file_id,
                hir_expand::MacroCallKind::Attr { ast_id, .. } => file_id = ast_id.file_id,
            }
        };
        let adt_source = adt_ast_id.to_in_file_node(self.db);
        // Cache the ADT's syntax root so `ToDef` can map the node back.
        self.cache(adt_source.value.syntax().ancestors().last().unwrap(), adt_source.file_id);
        ToDef::to_def(self, adt_source.as_ref())
    }
2313
    /// Collects all local bindings referenced from `element`: either a single
    /// expression, or the statements of a block that lie within `text_range`.
    ///
    /// Returns `None` when `element` cannot be analyzed (no containing body).
    pub fn locals_used(
        &self,
        element: Either<&ast::Expr, &ast::StmtList>,
        text_range: TextRange,
    ) -> Option<FxIndexSet<Local>> {
        let sa = self.analyze(element.either(|e| e.syntax(), |s| s.syntax()))?;
        let store = sa.store()?;
        let mut resolver = sa.resolver.clone();
        let def = resolver.expression_store_owner()?;

        // Ignore names synthesized during lowering rather than user-written.
        let is_not_generated = |path: &Path| {
            !path.mod_path().and_then(|path| path.as_ident()).is_some_and(Name::is_generated)
        };

        // Gather root expressions to walk: the expression itself, or — for a
        // statement list — the expressions of every in-range statement
        // (expr-statements, let initializers, let-else blocks, tail expr).
        let exprs = element.either(
            |e| vec![e.clone()],
            |stmts| {
                let mut exprs: Vec<_> = stmts
                    .statements()
                    .filter(|stmt| text_range.contains_range(stmt.syntax().text_range()))
                    .filter_map(|stmt| match stmt {
                        ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr().map(|e| vec![e]),
                        ast::Stmt::Item(_) => None,
                        ast::Stmt::LetStmt(let_stmt) => {
                            let init = let_stmt.initializer();
                            let let_else = let_stmt
                                .let_else()
                                .and_then(|le| le.block_expr())
                                .map(ast::Expr::BlockExpr);

                            match (init, let_else) {
                                (Some(i), Some(le)) => Some(vec![i, le]),
                                (Some(i), _) => Some(vec![i]),
                                (_, Some(le)) => Some(vec![le]),
                                _ => None,
                            }
                        }
                    })
                    .flatten()
                    .collect();

                if let Some(tail_expr) = stmts.tail_expr()
                    && text_range.contains_range(tail_expr.syntax().text_range())
                {
                    exprs.push(tail_expr);
                }
                exprs
            },
        );
        // Lower the collected AST expressions to HIR expression ids.
        let mut exprs: Vec<_> =
            exprs.into_iter().filter_map(|e| sa.expr_id(e).and_then(|e| e.as_expr())).collect();

        let mut locals: FxIndexSet<Local> = FxIndexSet::default();
        // Resolve a path (expression or pattern) in the scope that is in
        // force at `parent_expr`, recording it when it is a local binding.
        let mut add_to_locals_used = |id, parent_expr| {
            let path = match id {
                ExprOrPatId::ExprId(expr_id) => {
                    if let Expr::Path(path) = &store[expr_id] {
                        Some(path)
                    } else {
                        None
                    }
                }
                ExprOrPatId::PatId(pat_id) => {
                    if let Pat::Path(path) = &store[pat_id] {
                        Some(path)
                    } else {
                        None
                    }
                }
            };

            if let Some(path) = path
                && is_not_generated(path)
            {
                // Narrow the resolver to the inner scope before resolving;
                // the guard is deliberately dropped (scope is re-narrowed on
                // every call, so no reset is needed here).
                let _ = resolver.update_to_inner_scope(self.db, def, parent_expr);
                let hygiene = store.expr_or_pat_path_hygiene(id);
                resolver.resolve_path_in_value_ns_fully(self.db, path, hygiene).inspect(|value| {
                    if let ValueNs::LocalBinding(id) = value {
                        locals.insert((def, *id).into());
                    }
                });
            }
        };

        // Depth-first walk over the expression trees; assignment targets are
        // patterns and are walked separately for paths like `(a, b) = ..`.
        while let Some(expr_id) = exprs.pop() {
            if let Expr::Assignment { target, .. } = store[expr_id] {
                store.walk_pats(target, &mut |id| {
                    add_to_locals_used(ExprOrPatId::PatId(id), expr_id)
                });
            };
            store.walk_child_exprs(expr_id, |id| {
                exprs.push(id);
            });

            add_to_locals_used(ExprOrPatId::ExprId(expr_id), expr_id)
        }

        Some(locals)
    }
2413
    /// Re-runs type inference for the body containing `token` with a solver
    /// inspector attached, and returns all *failed* trait obligations as a
    /// pretty-printed JSON array of proof trees.
    ///
    /// Returns `None` when the token is not inside a body.
    pub fn get_failed_obligations(&self, token: SyntaxToken) -> Option<String> {
        let node = token.parent()?;
        let node = self.find_file(&node);

        let container = self.with_ctx(|ctx| ctx.find_container(node))?;

        match container {
            ChildContainer::DefWithBodyId(def) => {
                // The inspector closure cannot carry state out of the
                // inference query, so results are accumulated in a
                // thread-local and drained right after the query returns.
                thread_local! {
                    static RESULT: RefCell<Vec<ProofTreeData>> = const { RefCell::new(Vec::new()) };
                }
                infer_query_with_inspect(
                    self.db,
                    def,
                    Some(|infer_ctxt, _obligation, result, proof_tree| {
                        // Record only obligations that failed and have a
                        // proof tree to dump.
                        if result.is_err()
                            && let Some(tree) = proof_tree
                        {
                            let data = dump_proof_tree_structured(tree, Span::dummy(), infer_ctxt);
                            RESULT.with(|ctx| ctx.borrow_mut().push(data));
                        }
                    }),
                );
                // Drain (not just read) so the thread-local is empty for the
                // next invocation on this thread.
                let data: Vec<ProofTreeData> =
                    RESULT.with(|data| data.borrow_mut().drain(..).collect());
                let data = serde_json::to_string_pretty(&data).unwrap_or_else(|_| "[]".to_owned());
                Some(data)
            }
            _ => None,
        }
    }
2445}
2446
/// Resolves the macro invoked by `macro_call_id` back to its defining
/// `MacroId`, by loading the definition's AST node and mapping it through
/// the source-to-def machinery.
fn macro_call_to_macro_id(
    ctx: &mut SourceToDefCtx<'_, '_>,
    macro_call_id: MacroCallId,
) -> Option<MacroId> {
    let db: &dyn ExpandDatabase = ctx.db;
    let loc = db.lookup_intern_macro_call(macro_call_id);

    // `Left` holds declarative macro definitions, `Right` procedural ones;
    // the two arms differ only in node type and the final mapping call.
    match loc.def.ast_id() {
        Either::Left(it) => {
            let node = match it.file_id {
                HirFileId::FileId(file_id) => {
                    it.to_ptr(db).to_node(&file_id.parse(db).syntax_node())
                }
                // The definition itself lives inside a macro expansion:
                // resolve the pointer within the expanded tree.
                HirFileId::MacroFile(macro_file) => {
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    it.to_ptr(db).to_node(&expansion_info.expanded().value)
                }
            };
            ctx.macro_to_def(InFile::new(it.file_id, &node))
        }
        Either::Right(it) => {
            let node = match it.file_id {
                HirFileId::FileId(file_id) => {
                    it.to_ptr(db).to_node(&file_id.parse(db).syntax_node())
                }
                HirFileId::MacroFile(macro_file) => {
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    it.to_ptr(db).to_node(&expansion_info.expanded().value)
                }
            };
            ctx.proc_macro_to_def(InFile::new(it.file_id, &node))
        }
    }
}
2482
/// Maps an AST node (together with its file) to the HIR definition it
/// corresponds to. Implementations for the definition-like AST nodes are
/// generated by the `to_def_impls!` macro below.
pub trait ToDef: AstNode + Clone {
    /// The HIR definition type this AST node maps to.
    type Def;
    fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def>;
}
2487
/// Generates a `ToDef` impl per `(def, ast, method)` triple: the AST type
/// `$ast` is mapped to the HIR type `$def` by calling the `SourceToDefCtx`
/// method `$meth` and converting its result with `From`.
macro_rules! to_def_impls {
    ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
        impl ToDef for $ast {
            type Def = $def;
            fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def> {
                sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
            }
        }
    )*}
}
2498
// The full table of AST-node → HIR-definition mappings; each row expands to
// one `ToDef` impl via `to_def_impls!`.
to_def_impls![
    (crate::Module, ast::Module, module_to_def),
    (crate::Module, ast::SourceFile, source_file_to_def),
    (crate::Struct, ast::Struct, struct_to_def),
    (crate::Enum, ast::Enum, enum_to_def),
    (crate::Union, ast::Union, union_to_def),
    (crate::Trait, ast::Trait, trait_to_def),
    (crate::Impl, ast::Impl, impl_to_def),
    (crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
    (crate::Const, ast::Const, const_to_def),
    (crate::Static, ast::Static, static_to_def),
    (crate::Function, ast::Fn, fn_to_def),
    (crate::Field, ast::RecordField, record_field_to_def),
    (crate::Field, ast::TupleField, tuple_field_to_def),
    (crate::EnumVariant, ast::Variant, enum_variant_to_def),
    (crate::TypeParam, ast::TypeParam, type_param_to_def),
    (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
    (crate::ConstParam, ast::ConstParam, const_param_to_def),
    (crate::GenericParam, ast::GenericParam, generic_param_to_def),
    (crate::Macro, ast::Macro, macro_to_def),
    (crate::Local, ast::IdentPat, bind_pat_to_def),
    (crate::Local, ast::SelfParam, self_param_to_def),
    (crate::Label, ast::Label, label_to_def),
    (crate::Adt, ast::Adt, adt_to_def),
    (crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
    (crate::InlineAsmOperand, ast::AsmOperandNamed, asm_operand_to_def),
    (crate::ExternBlock, ast::ExternBlock, extern_block_to_def),
    (MacroCallId, ast::MacroCall, macro_call_to_macro_call),
];
2528
2529fn find_root(node: &SyntaxNode) -> SyntaxNode {
2530 node.ancestors().last().unwrap()
2531}
2532
/// A lexical scope at a particular position in a particular file: the
/// captured resolver knows every name visible there.
#[derive(Debug)]
pub struct SemanticsScope<'db> {
    pub db: &'db dyn HirDatabase,
    // File (possibly a macro file) the scope was computed in.
    file_id: HirFileId,
    // Name-resolution state frozen at the scope's position.
    resolver: Resolver<'db>,
}
2558
impl<'db> SemanticsScope<'db> {
    /// The file (possibly a macro file) this scope was computed in.
    pub fn file_id(&self) -> HirFileId {
        self.file_id
    }

    /// The module this scope belongs to.
    pub fn module(&self) -> Module {
        Module { id: self.resolver.module() }
    }

    /// The crate this scope belongs to.
    pub fn krate(&self) -> Crate {
        Crate { id: self.resolver.krate() }
    }

    /// The function whose body contains this scope, if any.
    pub fn containing_function(&self) -> Option<Function> {
        self.resolver.expression_store_owner().and_then(|owner| match owner {
            ExpressionStoreOwnerId::Body(DefWithBodyId::FunctionId(id)) => Some(id.into()),
            _ => None,
        })
    }

    /// The owner of the expression store this scope is inside of, if any.
    pub fn expression_store_owner(&self) -> Option<ExpressionStoreOwner> {
        self.resolver.expression_store_owner().map(Into::into)
    }

    pub(crate) fn resolver(&self) -> &Resolver<'db> {
        &self.resolver
    }

    /// The set of traits in scope at this position.
    pub fn visible_traits(&self) -> VisibleTraits {
        let resolver = &self.resolver;
        VisibleTraits(resolver.traits_in_scope(self.db))
    }

    /// Invokes `f` once for every visible name together with what it
    /// resolves to. Locals and labels are skipped when the scope has no
    /// surrounding body to attach them to.
    pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
        let scope = self.resolver.names_in_scope(self.db);
        for (name, entries) in scope {
            for entry in entries {
                let def = match entry {
                    resolver::ScopeDef::ModuleDef(it) => ScopeDef::ModuleDef(it.into()),
                    resolver::ScopeDef::Unknown => ScopeDef::Unknown,
                    resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
                    resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
                    resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
                    resolver::ScopeDef::Local(binding_id) => {
                        match self.resolver.expression_store_owner() {
                            Some(parent) => ScopeDef::Local(Local { parent, binding_id }),
                            None => continue,
                        }
                    }
                    resolver::ScopeDef::Label(label_id) => {
                        match self.resolver.expression_store_owner() {
                            Some(parent) => ScopeDef::Label(Label { parent, label_id }),
                            None => continue,
                        }
                    }
                };
                f(name.clone(), def)
            }
        }
    }

    /// Whether trait `t` is in scope here (so its methods are callable).
    pub fn can_use_trait_methods(&self, t: Trait) -> bool {
        self.resolver.traits_in_scope(self.db).contains(&t.id)
    }

    /// Resolves an AST path — which need not exist in the real source — by
    /// lowering it to a bare `ModPath` and resolving that in this scope.
    ///
    /// NOTE(review): generic-argument and type-qualified segments are
    /// skipped during lowering, so resolution is approximate for such paths.
    pub fn speculative_resolve(&self, ast_path: &ast::Path) -> Option<PathResolution> {
        let mut kind = PathKind::Plain;
        let mut segments = vec![];
        let mut first = true;
        for segment in ast_path.segments() {
            if first {
                first = false;
                // A leading `::` marks the path absolute.
                if segment.coloncolon_token().is_some() {
                    kind = PathKind::Abs;
                }
            }

            let Some(k) = segment.kind() else { continue };
            match k {
                ast::PathSegmentKind::Name(name_ref) => segments.push(name_ref.as_name()),
                ast::PathSegmentKind::Type { .. } => continue,
                ast::PathSegmentKind::SelfTypeKw => {
                    segments.push(Name::new_symbol_root(sym::Self_))
                }
                // `self::` is encoded as zero levels of `super`.
                ast::PathSegmentKind::SelfKw => kind = PathKind::Super(0),
                ast::PathSegmentKind::SuperKw => match kind {
                    PathKind::Super(s) => kind = PathKind::Super(s + 1),
                    PathKind::Plain => kind = PathKind::Super(1),
                    PathKind::Crate | PathKind::Abs | PathKind::DollarCrate(_) => continue,
                },
                ast::PathSegmentKind::CrateKw => kind = PathKind::Crate,
            }
        }

        resolve_hir_path(
            self.db,
            &self.resolver,
            &Path::BarePath(Interned::new(ModPath::from_segments(kind, segments))),
            HygieneId::ROOT,
            None,
        )
    }

    /// Resolves `path` in the item namespace, yielding every matching item.
    pub fn resolve_mod_path(&self, path: &ModPath) -> impl Iterator<Item = ItemInNs> + use<> {
        let items = self.resolver.resolve_module_path_in_items(self.db, path);
        items.iter_items().map(|(item, _)| item.into())
    }

    /// Calls `cb` for each associated type usable with the `T::Assoc`
    /// shorthand, where `resolution` plays the role of `T`.
    pub fn assoc_type_shorthand_candidates(
        &self,
        resolution: &PathResolution,
        mut cb: impl FnMut(TypeAlias),
    ) {
        let (Some(def), Some(resolution)) = (self.resolver.generic_def(), resolution.in_type_ns())
        else {
            return;
        };
        hir_ty::associated_type_shorthand_candidates(self.db, def, resolution, |_, id| {
            cb(id.into());
            // `false` keeps the candidate iteration going.
            false
        });
    }

    /// The innermost generic-parameter-carrying definition around this scope.
    pub fn generic_def(&self) -> Option<crate::GenericDef> {
        self.resolver.generic_def().map(|id| id.into())
    }

    /// The extern crates nameable here, as (name, crate root module) pairs.
    pub fn extern_crates(&self) -> impl Iterator<Item = (Name, Module)> + '_ {
        self.resolver.extern_crates_in_scope().map(|(name, id)| (name, Module { id }))
    }

    /// The names introduced by `extern crate` declarations in scope.
    pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
        self.resolver.extern_crate_decls_in_scope(self.db)
    }

    /// Whether both scopes sit inside the same `impl` (share a `Self` type).
    pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool {
        self.resolver.impl_def() == other.resolver.impl_def()
    }
}
2706
/// Newtype over the set of traits visible in some scope; derefs to the
/// underlying set for convenient queries.
#[derive(Debug)]
pub struct VisibleTraits(pub FxHashSet<TraitId>);

impl ops::Deref for VisibleTraits {
    type Target = FxHashSet<TraitId>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
2717
/// Walks a body looking for resolution conflicts that renaming
/// `to_be_renamed` from `old_name` to `new_name` would introduce.
struct RenameConflictsVisitor<'a> {
    db: &'a dyn HirDatabase,
    // The body being scanned.
    owner: DefWithBodyId,
    resolver: Resolver<'a>,
    body: &'a Body,
    // The binding the user is renaming.
    to_be_renamed: BindingId,
    new_name: Symbol,
    old_name: Symbol,
    // Bindings whose resolution would change after the rename.
    conflicts: FxHashSet<BindingId>,
}
2728
impl RenameConflictsVisitor<'_> {
    /// Checks a single path (coming from expression or pattern `node`)
    /// against the pending rename and records any conflict found.
    fn resolve_path(&mut self, node: ExprOrPatId, path: &Path) {
        // Only bare single-ident paths can refer to local bindings.
        if let Path::BarePath(path) = path
            && let Some(name) = path.as_ident()
        {
            if *name.symbol() == self.new_name {
                // A use of the *new* name that would resolve differently once
                // the renamed binding exists under that name.
                if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed(
                    self.db,
                    name,
                    path,
                    self.body.expr_or_pat_path_hygiene(node),
                    self.to_be_renamed,
                ) {
                    self.conflicts.insert(conflicting);
                }
            } else if *name.symbol() == self.old_name
                && let Some(conflicting) = self.resolver.rename_will_conflict_with_another_variable(
                    self.db,
                    name,
                    path,
                    self.body.expr_or_pat_path_hygiene(node),
                    &self.new_name,
                    self.to_be_renamed,
                )
            {
                // A use of the *old* name that would start resolving to a
                // different variable after the rename.
                self.conflicts.insert(conflicting);
            }
        }
    }

    /// Recursively visits `expr` and its children, resolving every path in
    /// the scope actually in force at that expression.
    fn rename_conflicts(&mut self, expr: ExprId) {
        match &self.body[expr] {
            Expr::Path(path) => {
                // Narrow the resolver to `expr`'s scope, then restore it.
                let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
                self.resolve_path(expr.into(), path);
                self.resolver.reset_to_guard(guard);
            }
            &Expr::Assignment { target, .. } => {
                let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
                // Assignment targets are patterns and may contain paths.
                self.body.walk_pats(target, &mut |pat| {
                    if let Pat::Path(path) = &self.body[pat] {
                        self.resolve_path(pat.into(), path);
                    }
                });
                self.resolver.reset_to_guard(guard);
            }
            _ => {}
        }

        self.body.walk_child_exprs(expr, |expr| self.rename_conflicts(expr));
    }
}