mod child_by_source;
mod source_to_def;

use std::{
    cell::RefCell,
    convert::Infallible,
    fmt, iter, mem,
    ops::{self, ControlFlow, Not},
};

use either::Either;
use hir_def::{
    DefWithBodyId, FunctionId, MacroId, StructId, TraitId, VariantId,
    expr_store::{Body, ExprOrPatSource, HygieneId, path::Path},
    hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
    nameres::{ModuleOrigin, crate_def_map},
    resolver::{self, HasResolver, Resolver, TypeNs},
    type_ref::Mutability,
};
use hir_expand::{
    EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
    builtin::{BuiltinFnLikeExpander, EagerExpander},
    db::ExpandDatabase,
    files::{FileRangeWrapper, HirFileRange, InRealFile},
    mod_path::{ModPath, PathKind},
    name::AsName,
};
use hir_ty::{
    InferenceResult,
    diagnostics::{unsafe_operations, unsafe_operations_for_body},
    next_solver::DbInterner,
};
use intern::{Interned, Symbol, sym};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{SmallVec, smallvec};
use span::{FileId, SyntaxContext};
use stdx::{TupleExt, always};
use syntax::{
    AstNode, AstToken, Direction, SmolStr, SmolStrBuilder, SyntaxElement, SyntaxKind, SyntaxNode,
    SyntaxNodePtr, SyntaxToken, T, TextRange, TextSize,
    algo::skip_trivia_token,
    ast::{self, HasAttrs as _, HasGenericParams},
};

use crate::{
    Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam,
    Crate, DefWithBody, DeriveHelper, Enum, Field, Function, GenericSubstitution, HasSource, Impl,
    InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
    Name, OverloadedDeref, ScopeDef, Static, Struct, ToolModule, Trait, TupleField, Type,
    TypeAlias, TypeParam, Union, Variant, VariantDef,
    db::HirDatabase,
    semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
    source_analyzer::{SourceAnalyzer, resolve_hir_path},
};

const CONTINUE_NO_BREAKS: ControlFlow<Infallible, ()> = ControlFlow::Continue(());

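/// What a path resolves to: an item, a local, a generic parameter, the `Self` type of an
/// `impl`, or an attribute-related entity (builtin attribute, tool module, derive helper).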
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PathResolution {
    Def(ModuleDef),
    Local(Local),
    TypeParam(TypeParam),
    ConstParam(ConstParam),
    SelfType(Impl),
    BuiltinAttr(BuiltinAttr),
    ToolModule(ToolModule),
    DeriveHelper(DeriveHelper),
}

impl PathResolution {
    pub(crate) fn in_type_ns(&self) -> Option<TypeNs> {
        match self {
            PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())),
            PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
                Some(TypeNs::BuiltinType((*builtin).into()))
            }
            PathResolution::Def(
                ModuleDef::Const(_)
                | ModuleDef::Variant(_)
                | ModuleDef::Macro(_)
                | ModuleDef::Function(_)
                | ModuleDef::Module(_)
                | ModuleDef::Static(_)
                | ModuleDef::Trait(_),
            ) => None,
            PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
                Some(TypeNs::TypeAliasId((*alias).into()))
            }
            PathResolution::BuiltinAttr(_)
            | PathResolution::ToolModule(_)
            | PathResolution::Local(_)
            | PathResolution::DeriveHelper(_)
            | PathResolution::ConstParam(_) => None,
            PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
            PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
        }
    }
}

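/// The resolutions of a path in each of the three namespaces: types, values, and macros.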
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct PathResolutionPerNs {
    pub type_ns: Option<PathResolution>,
    pub value_ns: Option<PathResolution>,
    pub macro_ns: Option<PathResolution>,
}

impl PathResolutionPerNs {
    pub fn new(
        type_ns: Option<PathResolution>,
        value_ns: Option<PathResolution>,
        macro_ns: Option<PathResolution>,
    ) -> Self {
        PathResolutionPerNs { type_ns, value_ns, macro_ns }
    }
    pub fn any(&self) -> Option<PathResolution> {
        self.type_ns.or(self.value_ns).or(self.macro_ns)
    }
}

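/// The type of an expression or pattern, both before and after type adjustments
/// (autoderef, autoref, coercions) were applied.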
#[derive(Debug)]
pub struct TypeInfo<'db> {
    pub original: Type<'db>,
    pub adjusted: Option<Type<'db>>,
}

impl<'db> TypeInfo<'db> {
    pub fn original(self) -> Type<'db> {
        self.original
    }

    pub fn has_adjustment(&self) -> bool {
        self.adjusted.is_some()
    }

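    /// The adjusted type, or the original one if no adjustments were applied.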
    pub fn adjusted(self) -> Type<'db> {
        self.adjusted.unwrap_or(self.original)
    }
}

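/// Primary API to get semantic information, like types, from syntax trees.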
pub struct Semantics<'db, DB: ?Sized> {
    pub db: &'db DB,
    imp: SemanticsImpl<'db>,
}

pub struct SemanticsImpl<'db> {
    pub db: &'db dyn HirDatabase,
    s2d_cache: RefCell<SourceToDefCache>,
    macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroCallId>>,
}

impl<DB: ?Sized> fmt::Debug for Semantics<'_, DB> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Semantics {{ ... }}")
    }
}

impl<'db, DB: ?Sized> ops::Deref for Semantics<'db, DB> {
    type Target = SemanticsImpl<'db>;

    fn deref(&self) -> &Self::Target {
        &self.imp
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum LintAttr {
    Allow,
    Expect,
    Warn,
    Deny,
    Forbid,
}

impl Semantics<'_, dyn HirDatabase> {
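    /// Creates a new [`Semantics`] that is type-erased over the database (`dyn HirDatabase`).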
    pub fn new_dyn(db: &'_ dyn HirDatabase) -> Semantics<'_, dyn HirDatabase> {
        let impl_ = SemanticsImpl::new(db);
        Semantics { db, imp: impl_ }
    }
}

impl<DB: HirDatabase> Semantics<'_, DB> {
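    /// Creates a new [`Semantics`] that is generic over the concrete database type.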
    pub fn new(db: &DB) -> Semantics<'_, DB> {
        let impl_ = SemanticsImpl::new(db);
        Semantics { db, imp: impl_ }
    }
}

impl<DB: HirDatabase + ?Sized> Semantics<'_, DB> {
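    /// Returns the `HirFileId` of the file (real or macro expansion) this syntax node is part of.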
    pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
        self.imp.find_file(syntax_node).file_id
    }

    pub fn token_ancestors_with_macros(
        &self,
        token: SyntaxToken,
    ) -> impl Iterator<Item = SyntaxNode> + '_ {
        token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
    }

    pub fn find_node_at_offset_with_macros<N: AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
    }

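    /// Finds a node of type `N` at `offset`, descending into the macro expansions that the
    /// surrounding tokens map into.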
    pub fn find_node_at_offset_with_descend<N: AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
    }

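    /// Like `find_node_at_offset_with_descend`, but returns a match from every macro expansion
    /// the offset descends into, not just the first one.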
    pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
        &'slf self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = N> + 'slf {
        self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
    }

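    /// Returns the name-like nodes (names, name refs, lifetimes) at `offset`, descending into
    /// non-opaque macro expansions and preferring smaller nodes first.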
    pub fn find_namelike_at_offset_with_descend<'slf>(
        &'slf self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = ast::NameLike> + 'slf {
        node.token_at_offset(offset)
            .map(move |token| self.descend_into_macros_no_opaque(token, true))
            .map(|descendants| descendants.into_iter().filter_map(move |it| it.value.parent()))
            .kmerge_by(|left, right| left.text_range().len().lt(&right.text_range().len()))
            .filter_map(ast::NameLike::cast)
    }

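    /// Collects the lint attributes (`allow`, `expect`, `warn`, `deny`, `forbid`) on `item`,
    /// expanding `cfg_attr` against `krate`'s cfg options, and yields the lint level together
    /// with each lint path listed in the attribute.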
    pub fn lint_attrs(
        &self,
        krate: Crate,
        item: ast::AnyHasAttrs,
    ) -> impl DoubleEndedIterator<Item = (LintAttr, SmolStr)> {
        let mut cfg_options = None;
        let cfg_options = || *cfg_options.get_or_insert_with(|| krate.id.cfg_options(self.db));
        let mut result = Vec::new();
        hir_expand::attrs::expand_cfg_attr::<Infallible>(
            ast::attrs_including_inner(&item),
            cfg_options,
            |attr, _, _, _| {
                let hir_expand::attrs::Meta::TokenTree { path, tt } = attr else {
                    return ControlFlow::Continue(());
                };
                if path.segments.len() != 1 {
                    return ControlFlow::Continue(());
                }
                let lint_attr = match path.segments[0].text() {
                    "allow" => LintAttr::Allow,
                    "expect" => LintAttr::Expect,
                    "warn" => LintAttr::Warn,
                    "deny" => LintAttr::Deny,
                    "forbid" => LintAttr::Forbid,
                    _ => return ControlFlow::Continue(()),
                };
                let mut lint = SmolStrBuilder::new();
                for token in
                    tt.syntax().children_with_tokens().filter_map(SyntaxElement::into_token)
                {
                    match token.kind() {
                        T![:] | T![::] => lint.push_str(token.text()),
                        kind if kind.is_any_identifier() => lint.push_str(token.text()),
                        T![,] => {
                            let lint = mem::replace(&mut lint, SmolStrBuilder::new()).finish();
                            if !lint.is_empty() {
                                result.push((lint_attr, lint));
                            }
                        }
                        _ => {}
                    }
                }
                let lint = lint.finish();
                if !lint.is_empty() {
                    result.push((lint_attr, lint));
                }

                ControlFlow::Continue(())
            },
        );
        result.into_iter()
    }

    pub fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<Struct> {
        self.imp.resolve_range_pat(range_pat).map(Struct::from)
    }

    pub fn resolve_range_expr(&self, range_expr: &ast::RangeExpr) -> Option<Struct> {
        self.imp.resolve_range_expr(range_expr).map(Struct::from)
    }

    pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
        self.imp.resolve_await_to_poll(await_expr).map(Function::from)
    }

    pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
        self.imp.resolve_prefix_expr(prefix_expr).map(Function::from)
    }

    pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
        self.imp.resolve_index_expr(index_expr).map(Function::from)
    }

    pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
        self.imp.resolve_bin_expr(bin_expr).map(Function::from)
    }

    pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
        self.imp.resolve_try_expr(try_expr).map(Function::from)
    }

    pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
        self.imp.resolve_variant(record_lit).map(VariantDef::from)
    }

    pub fn file_to_module_def(&self, file: impl Into<FileId>) -> Option<Module> {
        self.imp.file_to_module_defs(file.into()).next()
    }

    pub fn file_to_module_defs(&self, file: impl Into<FileId>) -> impl Iterator<Item = Module> {
        self.imp.file_to_module_defs(file.into())
    }

    pub fn hir_file_to_module_def(&self, file: impl Into<HirFileId>) -> Option<Module> {
        self.imp.hir_file_to_module_defs(file.into()).next()
    }

    pub fn hir_file_to_module_defs(
        &self,
        file: impl Into<HirFileId>,
    ) -> impl Iterator<Item = Module> {
        self.imp.hir_file_to_module_defs(file.into())
    }

    pub fn is_nightly(&self, krate: Crate) -> bool {
        let toolchain = self.db.toolchain_channel(krate.into());
        matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None)
    }

    pub fn to_adt_def(&self, a: &ast::Adt) -> Option<Adt> {
        self.imp.to_def(a)
    }

    pub fn to_const_def(&self, c: &ast::Const) -> Option<Const> {
        self.imp.to_def(c)
    }

    pub fn to_enum_def(&self, e: &ast::Enum) -> Option<Enum> {
        self.imp.to_def(e)
    }

    pub fn to_enum_variant_def(&self, v: &ast::Variant) -> Option<Variant> {
        self.imp.to_def(v)
    }

    pub fn to_fn_def(&self, f: &ast::Fn) -> Option<Function> {
        self.imp.to_def(f)
    }

    pub fn to_impl_def(&self, i: &ast::Impl) -> Option<Impl> {
        self.imp.to_def(i)
    }

    pub fn to_macro_def(&self, m: &ast::Macro) -> Option<Macro> {
        self.imp.to_def(m)
    }

    pub fn to_module_def(&self, m: &ast::Module) -> Option<Module> {
        self.imp.to_def(m)
    }

    pub fn to_static_def(&self, s: &ast::Static) -> Option<Static> {
        self.imp.to_def(s)
    }

    pub fn to_struct_def(&self, s: &ast::Struct) -> Option<Struct> {
        self.imp.to_def(s)
    }

    pub fn to_trait_def(&self, t: &ast::Trait) -> Option<Trait> {
        self.imp.to_def(t)
    }

    pub fn to_type_alias_def(&self, t: &ast::TypeAlias) -> Option<TypeAlias> {
        self.imp.to_def(t)
    }

    pub fn to_union_def(&self, u: &ast::Union) -> Option<Union> {
        self.imp.to_def(u)
    }
}

impl<'db> SemanticsImpl<'db> {
    fn new(db: &'db dyn HirDatabase) -> Self {
        SemanticsImpl { db, s2d_cache: Default::default(), macro_call_cache: Default::default() }
    }

    pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
        let hir_file_id = file_id.into();
        let tree = self.db.parse(file_id).tree();
        self.cache(tree.syntax().clone(), hir_file_id);
        tree
    }

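    /// Returns the crate of the first module that owns `file`, falling back to the last crate
    /// in the crate graph if the file is not part of any module.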
    pub fn first_crate(&self, file: FileId) -> Option<Crate> {
        match self.file_to_module_defs(file).next() {
            Some(module) => Some(module.krate(self.db)),
            None => self.db.all_crates().last().copied().map(Into::into),
        }
    }

    pub fn attach_first_edition_opt(&self, file: FileId) -> Option<EditionedFileId> {
        let krate = self.file_to_module_defs(file).next()?.krate(self.db);
        Some(EditionedFileId::new(self.db, file, krate.edition(self.db), krate.id))
    }

    pub fn attach_first_edition(&self, file: FileId) -> EditionedFileId {
        self.attach_first_edition_opt(file)
            .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(self.db, file))
    }

    pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
        let file_id = self.attach_first_edition(file_id);

        let tree = self.db.parse(file_id).tree();
        self.cache(tree.syntax().clone(), file_id.into());
        tree
    }

    pub fn adjust_edition(&self, file_id: HirFileId) -> HirFileId {
        if let Some(editioned_file_id) = file_id.file_id() {
            self.attach_first_edition_opt(editioned_file_id.file_id(self.db))
                .map_or(file_id, Into::into)
        } else {
            file_id
        }
    }

    pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
        match file_id {
            HirFileId::FileId(file_id) => {
                let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?;
                let def_map = crate_def_map(self.db, module.krate(self.db).id);
                match def_map[module.id].origin {
                    ModuleOrigin::CrateRoot { .. } => None,
                    ModuleOrigin::File { declaration, declaration_tree_id, .. } => {
                        let file_id = declaration_tree_id.file_id();
                        let in_file = InFile::new(file_id, declaration);
                        let node = in_file.to_node(self.db);
                        let root = find_root(node.syntax());
                        self.cache(root, file_id);
                        Some(in_file.with_value(node.syntax().clone()))
                    }
                    _ => unreachable!("FileId can only belong to a file module"),
                }
            }
            HirFileId::MacroFile(macro_file) => {
                let node = self.db.lookup_intern_macro_call(macro_file).to_node(self.db);
                let root = find_root(&node.value);
                self.cache(root, node.file_id);
                Some(node)
            }
        }
    }

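    /// Returns the syntax node that defines `module`: the source file root for file modules,
    /// or the `mod name { ... }` item for inline modules, caching the containing parse tree.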
    pub fn module_definition_node(&self, module: Module) -> InFile<SyntaxNode> {
        let def_map = module.id.def_map(self.db);
        let definition = def_map[module.id].origin.definition_source(self.db);
        let definition = definition.map(|it| it.node());
        let root_node = find_root(&definition.value);
        self.cache(root_node, definition.file_id);
        definition
    }

    pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
        let node = self.db.parse_or_expand(file_id);
        self.cache(node.clone(), file_id);
        node
    }

    pub fn expand(&self, file_id: MacroCallId) -> ExpandResult<SyntaxNode> {
        let res = self.db.parse_macro_expansion(file_id).map(|it| it.0.syntax_node());
        self.cache(res.value.clone(), file_id.into());
        res
    }

    pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<InFile<SyntaxNode>> {
        let file_id = self.to_def(macro_call)?;
        let node = self.parse_or_expand(file_id.into());
        Some(InFile::new(file_id.into(), node))
    }

    pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
        let file_id = self.find_file(attr.syntax()).file_id;
        let krate = match file_id {
            HirFileId::FileId(file_id) => {
                self.file_to_module_defs(file_id.file_id(self.db)).next()?.krate(self.db).id
            }
            HirFileId::MacroFile(macro_file) => self.db.lookup_intern_macro_call(macro_file).krate,
        };
        hir_expand::check_cfg_attr_value(self.db, attr, krate)
    }

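    /// Expands the given macro call, returning `None` for builtin macros whose expansion is
    /// not interesting to show (e.g. `file!`, `asm!`, `format_args!`, `compile_error!`).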
    pub fn expand_allowed_builtins(
        &self,
        macro_call: &ast::MacroCall,
    ) -> Option<ExpandResult<SyntaxNode>> {
        let file_id = self.to_def(macro_call)?;
        let macro_call = self.db.lookup_intern_macro_call(file_id);

        let skip = matches!(
            macro_call.def.kind,
            hir_expand::MacroDefKind::BuiltIn(
                _,
                BuiltinFnLikeExpander::Column
                    | BuiltinFnLikeExpander::File
                    | BuiltinFnLikeExpander::ModulePath
                    | BuiltinFnLikeExpander::Asm
                    | BuiltinFnLikeExpander::GlobalAsm
                    | BuiltinFnLikeExpander::NakedAsm
                    | BuiltinFnLikeExpander::LogSyntax
                    | BuiltinFnLikeExpander::TraceMacros
                    | BuiltinFnLikeExpander::FormatArgs
                    | BuiltinFnLikeExpander::FormatArgsNl
                    | BuiltinFnLikeExpander::ConstFormatArgs,
            ) | hir_expand::MacroDefKind::BuiltInEager(_, EagerExpander::CompileError)
        );
        if skip {
            return None;
        }

        let node = self.expand(file_id);
        Some(node)
    }

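    /// If `item` has an attribute macro attached to it, expands it.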
    pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<InFile<SyntaxNode>>> {
        let src = self.wrap_node_infile(item.clone());
        let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
        Some(self.expand(macro_call_id).map(|it| InFile::new(macro_call_id.into(), it)))
    }

    pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
        let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
        let src = self.wrap_node_infile(attr.clone());
        let call_id = self.with_ctx(|ctx| {
            ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
        })?;
        Some(self.parse_or_expand(call_id.into()))
    }

    pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
        let calls = self.derive_macro_calls(attr)?;
        self.with_ctx(|ctx| {
            Some(
                calls
                    .into_iter()
                    .map(|call| macro_call_to_macro_id(ctx, call?).map(|id| Macro { id }))
                    .collect(),
            )
        })
    }

    pub fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<ExpandResult<SyntaxNode>>> {
        let res: Vec<_> = self
            .derive_macro_calls(attr)?
            .into_iter()
            .flat_map(|call| {
                let file_id = call?;
                let ExpandResult { value, err } = self.db.parse_macro_expansion(file_id);
                let root_node = value.0.syntax_node();
                self.cache(root_node.clone(), file_id.into());
                Some(ExpandResult { value: root_node, err })
            })
            .collect();
        Some(res)
    }

    fn derive_macro_calls(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroCallId>>> {
        let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
        let file_id = self.find_file(adt.syntax()).file_id;
        let adt = InFile::new(file_id, &adt);
        let src = InFile::new(file_id, attr.clone());
        self.with_ctx(|ctx| {
            let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?;
            Some(res.to_vec())
        })
    }

    pub fn is_derive_annotated(&self, adt: InFile<&ast::Adt>) -> bool {
        self.with_ctx(|ctx| ctx.file_of_adt_has_derives(adt))
    }

    pub fn derive_helpers_in_scope(&self, adt: &ast::Adt) -> Option<Vec<(Symbol, Symbol)>> {
        let sa = self.analyze_no_infer(adt.syntax())?;
        let id = self.db.ast_id_map(sa.file_id).ast_id(adt);
        let result = sa
            .resolver
            .def_map()
            .derive_helpers_in_scope(InFile::new(sa.file_id, id))?
            .iter()
            .map(|(name, macro_, _)| {
                let macro_name = Macro::from(*macro_).name(self.db).symbol().clone();
                (name.symbol().clone(), macro_name)
            })
            .collect();
        Some(result)
    }

    pub fn derive_helper(&self, attr: &ast::Attr) -> Option<Vec<(Macro, MacroCallId)>> {
        let adt = attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
            ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
            ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
            ast::Item::Union(it) => Some(ast::Adt::Union(it)),
            _ => None,
        })?;
        let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
        let sa = self.analyze_no_infer(adt.syntax())?;
        let id = self.db.ast_id_map(sa.file_id).ast_id(&adt);
        let res: Vec<_> = sa
            .resolver
            .def_map()
            .derive_helpers_in_scope(InFile::new(sa.file_id, id))?
            .iter()
            .filter(|&(name, _, _)| *name == attr_name)
            .map(|&(_, macro_, call)| (macro_.into(), call))
            .collect();
        res.is_empty().not().then_some(res)
    }

    pub fn is_attr_macro_call(&self, item: InFile<&ast::Item>) -> bool {
        self.with_ctx(|ctx| ctx.item_to_macro_call(item).is_some())
    }

    pub fn speculative_expand_macro_call(
        &self,
        actual_macro_call: &ast::MacroCall,
        speculative_args: &ast::TokenTree,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        let macro_file = self.to_def(actual_macro_call)?;
        hir_expand::db::expand_speculative(
            self.db,
            macro_file,
            speculative_args.syntax(),
            token_to_map,
        )
    }

    pub fn speculative_expand_raw(
        &self,
        macro_file: MacroCallId,
        speculative_args: &SyntaxNode,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        hir_expand::db::expand_speculative(self.db, macro_file, speculative_args, token_to_map)
    }

    pub fn speculative_expand_attr_macro(
        &self,
        actual_macro_call: &ast::Item,
        speculative_args: &ast::Item,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        let macro_call = self.wrap_node_infile(actual_macro_call.clone());
        let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?;
        hir_expand::db::expand_speculative(
            self.db,
            macro_call_id,
            speculative_args.syntax(),
            token_to_map,
        )
    }

    pub fn speculative_expand_derive_as_pseudo_attr_macro(
        &self,
        actual_macro_call: &ast::Attr,
        speculative_args: &ast::Attr,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
        let attr = self.wrap_node_infile(actual_macro_call.clone());
        let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
        let macro_call_id = self.with_ctx(|ctx| {
            ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
        })?;
        hir_expand::db::expand_speculative(
            self.db,
            macro_call_id,
            speculative_args.syntax(),
            token_to_map,
        )
    }

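    /// Returns the locals in the same body whose resolution would conflict (shadow or be
    /// shadowed) if `to_be_renamed` were renamed to `new_name`.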
    pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &Name) -> Vec<Local> {
        let body = self.db.body(to_be_renamed.parent);
        let resolver = to_be_renamed.parent.resolver(self.db);
        let starting_expr = body.binding_owner(to_be_renamed.binding_id).unwrap_or(body.body_expr);
        let mut visitor = RenameConflictsVisitor {
            body: &body,
            conflicts: FxHashSet::default(),
            db: self.db,
            new_name: new_name.symbol().clone(),
            old_name: to_be_renamed.name(self.db).symbol().clone(),
            owner: to_be_renamed.parent,
            to_be_renamed: to_be_renamed.binding_id,
            resolver,
        };
        visitor.rename_conflicts(starting_expr);
        visitor
            .conflicts
            .into_iter()
            .map(|binding_id| Local { parent: to_be_renamed.parent, binding_id })
            .collect()
    }

    pub fn as_format_args_parts(
        &self,
        string: &ast::String,
    ) -> Option<Vec<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)>> {
        let string_start = string.syntax().text_range().start();
        let token = self.wrap_token_infile(string.syntax().clone());
        self.descend_into_macros_breakable(token, |token, _| {
            (|| {
                let token = token.value;
                let string = ast::String::cast(token)?;
                let literal =
                    string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
                let parent = literal.parent()?;
                if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
                    let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
                    let format_args = self.wrap_node_infile(format_args);
                    let res = source_analyzer
                        .as_format_args_parts(self.db, format_args.as_ref())?
                        .map(|(range, res)| (range + string_start, res.map(Either::Left)))
                        .collect();
                    Some(res)
                } else {
                    let asm = ast::AsmExpr::cast(parent)?;
                    let source_analyzer = self.analyze_no_infer(asm.syntax())?;
                    let line = asm.template().position(|it| *it.syntax() == literal)?;
                    let asm = self.wrap_node_infile(asm);
                    let (owner, (expr, asm_parts)) = source_analyzer.as_asm_parts(asm.as_ref())?;
                    let res = asm_parts
                        .get(line)?
                        .iter()
                        .map(|&(range, index)| {
                            (
                                range + string_start,
                                Some(Either::Right(InlineAsmOperand { owner, expr, index })),
                            )
                        })
                        .collect();
                    Some(res)
                }
            })()
            .map_or(ControlFlow::Continue(()), ControlFlow::Break)
        })
    }

    pub fn check_for_format_args_template(
        &self,
        original_token: SyntaxToken,
        offset: TextSize,
    ) -> Option<(
        TextRange,
        HirFileRange,
        ast::String,
        Option<Either<PathResolution, InlineAsmOperand>>,
    )> {
        let original_token =
            self.wrap_token_infile(original_token).map(ast::String::cast).transpose()?;
        self.check_for_format_args_template_with_file(original_token, offset)
    }

    pub fn check_for_format_args_template_with_file(
        &self,
        original_token: InFile<ast::String>,
        offset: TextSize,
    ) -> Option<(
        TextRange,
        HirFileRange,
        ast::String,
        Option<Either<PathResolution, InlineAsmOperand>>,
    )> {
        let relative_offset =
            offset.checked_sub(original_token.value.syntax().text_range().start())?;
        self.descend_into_macros_breakable(
            original_token.as_ref().map(|it| it.syntax().clone()),
            |token, _| {
                (|| {
                    let token = token.map(ast::String::cast).transpose()?;
                    self.resolve_offset_in_format_args(token.as_ref(), relative_offset).map(
                        |(range, res)| {
                            (
                                range + original_token.value.syntax().text_range().start(),
                                HirFileRange {
                                    file_id: token.file_id,
                                    range: range + token.value.syntax().text_range().start(),
                                },
                                token.value,
                                res,
                            )
                        },
                    )
                })()
                .map_or(ControlFlow::Continue(()), ControlFlow::Break)
            },
        )
    }

    fn resolve_offset_in_format_args(
        &self,
        InFile { value: string, file_id }: InFile<&ast::String>,
        offset: TextSize,
    ) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> {
        debug_assert!(offset <= string.syntax().text_range().len());
        let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
        let parent = literal.parent()?;
        if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
            let source_analyzer =
                &self.analyze_impl(InFile::new(file_id, format_args.syntax()), None, false)?;
            source_analyzer
                .resolve_offset_in_format_args(self.db, InFile::new(file_id, &format_args), offset)
                .map(|(range, res)| (range, res.map(Either::Left)))
        } else {
            let asm = ast::AsmExpr::cast(parent)?;
            let source_analyzer =
                self.analyze_impl(InFile::new(file_id, asm.syntax()), None, false)?;
            let line = asm.template().position(|it| *it.syntax() == literal)?;
            source_analyzer
                .resolve_offset_in_asm_template(InFile::new(file_id, &asm), line, offset)
                .map(|(owner, (expr, range, index))| {
                    (range, Some(Either::Right(InlineAsmOperand { owner, expr, index })))
                })
        }
    }

    pub fn debug_hir_at(&self, token: SyntaxToken) -> Option<String> {
        self.analyze_no_infer(&token.parent()?).and_then(|it| {
            Some(match it.body_or_sig.as_ref()? {
                crate::source_analyzer::BodyOrSig::Body { def, body, .. } => {
                    hir_def::expr_store::pretty::print_body_hir(
                        self.db,
                        body,
                        *def,
                        it.file_id.edition(self.db),
                    )
                }
                &crate::source_analyzer::BodyOrSig::VariantFields { def, .. } => {
                    hir_def::expr_store::pretty::print_variant_body_hir(
                        self.db,
                        def,
                        it.file_id.edition(self.db),
                    )
                }
                &crate::source_analyzer::BodyOrSig::Sig { def, .. } => {
                    hir_def::expr_store::pretty::print_signature(
                        self.db,
                        def,
                        it.file_id.edition(self.db),
                    )
                }
            })
        })
    }

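    /// If `tok` comes from a file that is pulled in via `include!`, maps it to the
    /// corresponding token inside the `include!` expansion; otherwise returns it unchanged.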
    pub fn descend_token_into_include_expansion(
        &self,
        tok: InRealFile<SyntaxToken>,
    ) -> InFile<SyntaxToken> {
        let Some(include) =
            self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, tok.file_id)
        else {
            return tok.into();
        };
        let span = self.db.real_span_map(tok.file_id).span_for_range(tok.value.text_range());
        let Some(InMacroFile { file_id, value: mut mapped_tokens }) = self.with_ctx(|ctx| {
            Some(
                ctx.cache
                    .get_or_insert_expansion(ctx.db, include)
                    .map_range_down(span)?
                    .map(SmallVec::<[_; 2]>::from_iter),
            )
        }) else {
            return tok.into();
        };
        mapped_tokens.pop().map_or_else(|| tok.into(), |(tok, _)| InFile::new(file_id.into(), tok))
    }

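    /// Maps `node` down into attribute-macro (and derive) expansions, returning the
    /// corresponding nodes of type `N` found in the expanded files.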
    pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
        let mut res = smallvec![];
        let tokens = (|| {
            let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
            let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
            Some((first, last))
        })();
        let (first, last) = match tokens {
            Some(it) => it,
            None => return res,
        };
        let file = self.find_file(node.syntax());

        if first == last {
            self.descend_into_macros_all(
                InFile::new(file.file_id, first),
                false,
                &mut |InFile { value, .. }, _ctx| {
                    if let Some(node) = value
                        .parent_ancestors()
                        .take_while(|it| it.text_range() == value.text_range())
                        .find_map(N::cast)
                    {
                        res.push(node)
                    }
                },
            );
        } else {
            let mut scratch: SmallVec<[_; 1]> = smallvec![];
            self.descend_into_macros_all(
                InFile::new(file.file_id, first),
                false,
                &mut |token, _ctx| scratch.push(token),
            );

            let mut scratch = scratch.into_iter();
            self.descend_into_macros_all(
                InFile::new(file.file_id, last),
                false,
                &mut |InFile { value: last, file_id: last_fid }, _ctx| {
                    if let Some(InFile { value: first, file_id: first_fid }) = scratch.next()
                        && first_fid == last_fid
                        && let Some(p) = first.parent()
                    {
                        let range = first.text_range().cover(last.text_range());
                        let node = find_root(&p)
                            .covering_element(range)
                            .ancestors()
                            .take_while(|it| it.text_range() == range)
                            .find_map(N::cast);
                        if let Some(node) = node {
                            res.push(node);
                        }
                    }
                },
            );
        }
        res
    }

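    /// Returns `true` if the token sits inside a macro call, or inside an item that has an
    /// attribute macro or `derive`s applied, i.e. if descending it could map it into an
    /// expansion.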
    pub fn is_inside_macro_call(&self, token @ InFile { value, .. }: InFile<&SyntaxToken>) -> bool {
        value.parent_ancestors().any(|ancestor| {
            if ast::MacroCall::can_cast(ancestor.kind()) {
                return true;
            }

            let Some(item) = ast::Item::cast(ancestor) else {
                return false;
            };
            self.with_ctx(|ctx| {
                if ctx.item_to_macro_call(token.with_value(&item)).is_some() {
                    return true;
                }
                let adt = match item {
                    ast::Item::Struct(it) => it.into(),
                    ast::Item::Enum(it) => it.into(),
                    ast::Item::Union(it) => it.into(),
                    _ => return false,
                };
                ctx.file_of_adt_has_derives(token.with_value(&adt))
            })
        })
    }

    pub fn descend_into_macros_cb(
        &self,
        token: SyntaxToken,
        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext),
    ) {
        self.descend_into_macros_all(self.wrap_token_infile(token), false, &mut |t, ctx| {
            cb(t, ctx)
        });
    }

    pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
        let mut res = smallvec![];
        self.descend_into_macros_all(
            self.wrap_token_infile(token.clone()),
            false,
            &mut |t, _ctx| res.push(t.value),
        );
        if res.is_empty() {
            res.push(token);
        }
        res
    }

    pub fn descend_into_macros_no_opaque(
        &self,
        token: SyntaxToken,
        always_descend_into_derives: bool,
    ) -> SmallVec<[InFile<SyntaxToken>; 1]> {
        let mut res = smallvec![];
        let token = self.wrap_token_infile(token);
        self.descend_into_macros_all(token.clone(), always_descend_into_derives, &mut |t, ctx| {
            if !ctx.is_opaque(self.db) {
                res.push(t);
            }
        });
        if res.is_empty() {
            res.push(token);
        }
        res
    }

    pub fn descend_into_macros_breakable<T>(
        &self,
        token: InFile<SyntaxToken>,
        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
    ) -> Option<T> {
        self.descend_into_macros_impl(token, false, &mut cb)
    }

    pub fn descend_into_macros_exact(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
        let mut r = smallvec![];
        let text = token.text();
        let kind = token.kind();

        self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }, ctx| {
            let mapped_kind = value.kind();
            let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
            let matches = (kind == mapped_kind || any_ident_match())
                && text == value.text()
                && !ctx.is_opaque(self.db);
            if matches {
                r.push(value);
            }
        });
        if r.is_empty() {
            r.push(token);
        }
        r
    }

    pub fn descend_into_macros_exact_with_file(
        &self,
        token: SyntaxToken,
    ) -> SmallVec<[InFile<SyntaxToken>; 1]> {
        let mut r = smallvec![];
        let text = token.text();
        let kind = token.kind();

        self.descend_into_macros_cb(token.clone(), |InFile { value, file_id }, ctx| {
            let mapped_kind = value.kind();
            let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
            let matches = (kind == mapped_kind || any_ident_match())
                && text == value.text()
                && !ctx.is_opaque(self.db);
            if matches {
                r.push(InFile { value, file_id });
            }
        });
        if r.is_empty() {
            r.push(self.wrap_token_infile(token));
        }
        r
    }

    pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken {
        let text = token.text();
        let kind = token.kind();
        self.descend_into_macros_breakable(
            self.wrap_token_infile(token.clone()),
            |InFile { value, file_id: _ }, _ctx| {
                let mapped_kind = value.kind();
                let any_ident_match =
                    || kind.is_any_identifier() && value.kind().is_any_identifier();
                let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
                if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
            },
        )
        .unwrap_or(token)
    }

    fn descend_into_macros_all(
        &self,
        token: InFile<SyntaxToken>,
        always_descend_into_derives: bool,
        f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext),
    ) {
        self.descend_into_macros_impl(token, always_descend_into_derives, &mut |tok, ctx| {
            f(tok, ctx);
            CONTINUE_NO_BREAKS
        });
    }

    fn descend_into_macros_impl<T>(
        &self,
        InFile { value: token, file_id }: InFile<SyntaxToken>,
        always_descend_into_derives: bool,
        f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
    ) -> Option<T> {
        let _p = tracing::info_span!("descend_into_macros_impl").entered();

        let db = self.db;
        let span = db.span_map(file_id).span_for_range(token.text_range());

        let process_expansion_for_token =
            |ctx: &mut SourceToDefCtx<'_, '_>, stack: &mut Vec<_>, macro_file| {
                let InMacroFile { file_id, value: mapped_tokens } = ctx
                    .cache
                    .get_or_insert_expansion(ctx.db, macro_file)
                    .map_range_down(span)?
                    .map(SmallVec::<[_; 2]>::from_iter);
                let res = mapped_tokens.is_empty().not().then_some(());
                stack.push((HirFileId::from(file_id), mapped_tokens));
                res
            };

        let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![];
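        // If the token comes from a file pulled in via `include!`, seed the stack with the
        // corresponding tokens inside the include expansion instead of the file itself.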
        let include = file_id
            .file_id()
            .and_then(|file_id| self.s2d_cache.borrow_mut().get_or_insert_include_for(db, file_id));
        match include {
            Some(include) => {
                self.with_ctx(|ctx| process_expansion_for_token(ctx, &mut stack, include))?;
            }
            None => {
                stack.push((file_id, smallvec![(token, span.ctx)]));
            }
        }

        let mut m_cache = self.macro_call_cache.borrow_mut();

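        // Drops tokens that lie inside `range`; once an expansion covering that range has been
        // queued, the remaining tokens inside it would only map to the same expansion again.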
        let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
            tokens.retain(|(t, _): &mut (SyntaxToken, _)| !range.contains_range(t.text_range()))
        };

        while let Some((expansion, ref mut tokens)) = stack.pop() {
            tokens.reverse();
            while let Some((token, ctx)) = tokens.pop() {
                let was_not_remapped = (|| {
                    let res = self.with_ctx(|ctx| {
                        token
                            .parent_ancestors()
                            .filter_map(ast::Item::cast)
                            .find_map(|item| {
                                item.attrs().next()?;
                                ctx.item_to_macro_call(InFile::new(expansion, &item))
                                    .zip(Some(item))
                            })
                            .map(|(call_id, item)| {
                                let item_range = item.syntax().text_range();
                                let loc = db.lookup_intern_macro_call(call_id);
                                let text_range = match loc.kind {
                                    hir_expand::MacroCallKind::Attr {
                                        censored_attr_ids: attr_ids,
                                        ..
                                    } => {
                                        let (attr, _, _, _) = attr_ids
                                            .invoc_attr()
                                            .find_attr_range_with_source(db, loc.krate, &item);
                                        let start = attr.syntax().text_range().start();
                                        TextRange::new(start, item_range.end())
                                    }
                                    _ => item_range,
                                };
                                filter_duplicates(tokens, text_range);
                                process_expansion_for_token(ctx, &mut stack, call_id)
                            })
                    });

                    if let Some(res) = res {
                        return res;
                    }

                    if always_descend_into_derives {
                        let res = self.with_ctx(|ctx| {
                            let (derives, adt) = token
                                .parent_ancestors()
                                .filter_map(ast::Adt::cast)
                                .find_map(|adt| {
                                    Some((
                                        ctx.derive_macro_calls(InFile::new(expansion, &adt))?
                                            .map(|(a, b, c)| (a, b, c.to_owned()))
                                            .collect::<SmallVec<[_; 2]>>(),
                                        adt,
                                    ))
                                })?;
                            for (_, derive_attr, derives) in derives {
                                process_expansion_for_token(ctx, &mut stack, derive_attr);
                                for derive in derives.into_iter().flatten() {
                                    process_expansion_for_token(ctx, &mut stack, derive);
                                }
                            }
                            filter_duplicates(tokens, adt.syntax().text_range());
                            Some(())
                        });
                        if let Some(()) = res {
                            return None;
                        }
                    }
                    let tt = token
                        .parent_ancestors()
                        .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
                        .last()?;

                    match tt {
                        Either::Left(tt) => {
                            let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
                            if tt.left_delimiter_token().map_or(false, |it| it == token) {
                                return None;
                            }
                            if tt.right_delimiter_token().map_or(false, |it| it == token) {
                                return None;
                            }
                            let mcall = InFile::new(expansion, macro_call);
                            let file_id = match m_cache.get(&mcall) {
                                Some(&it) => it,
                                None => {
                                    let it = ast::MacroCall::to_def(self, mcall.as_ref())?;
                                    m_cache.insert(mcall, it);
                                    it
                                }
                            };
                            let text_range = tt.syntax().text_range();
                            filter_duplicates(tokens, text_range);

                            self.with_ctx(|ctx| {
                                process_expansion_for_token(ctx, &mut stack, file_id).or(file_id
                                    .eager_arg(db)
                                    .and_then(|arg| {
                                        process_expansion_for_token(ctx, &mut stack, arg)
                                    }))
                            })
                        }
                        Either::Right(_) if always_descend_into_derives => None,
                        Either::Right(meta) => {
                            let attr = meta.parent_attr()?;
                            let adt = match attr.syntax().parent().and_then(ast::Adt::cast) {
                                Some(adt) => {
                                    let res = self.with_ctx(|ctx| {
                                        let derive_call = ctx
                                            .attr_to_derive_macro_call(
                                                InFile::new(expansion, &adt),
                                                InFile::new(expansion, attr.clone()),
                                            )?
                                            .1;

                                        let text_range = attr.syntax().text_range();
                                        tokens.retain(|(t, _)| {
                                            !text_range.contains_range(t.text_range())
                                        });
                                        Some(process_expansion_for_token(
                                            ctx,
                                            &mut stack,
                                            derive_call,
                                        ))
                                    });
                                    if let Some(res) = res {
                                        return res;
                                    }
                                    Some(adt)
                                }
                                None => {
                                    attr.syntax().ancestors().find_map(ast::Item::cast).and_then(
                                        |it| match it {
                                            ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
                                            ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
                                            ast::Item::Union(it) => Some(ast::Adt::Union(it)),
                                            _ => None,
                                        },
                                    )
                                }
                            }?;
                            let attr_name =
                                attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
                            let resolver = &token
                                .parent()
                                .and_then(|parent| {
                                    self.analyze_impl(InFile::new(expansion, &parent), None, false)
                                })?
                                .resolver;
                            let id = db.ast_id_map(expansion).ast_id(&adt);
                            let helpers = resolver
                                .def_map()
                                .derive_helpers_in_scope(InFile::new(expansion, id))?;

                            if !helpers.is_empty() {
                                let text_range = attr.syntax().text_range();
                                filter_duplicates(tokens, text_range);
                            }

                            let mut res = None;
                            self.with_ctx(|ctx| {
                                for (.., derive) in
                                    helpers.iter().filter(|(helper, ..)| *helper == attr_name)
                                {
                                    res = res
                                        .or(process_expansion_for_token(ctx, &mut stack, *derive));
                                }
                                res
                            })
                        }
                    }
                })()
                .is_none();
                if was_not_remapped
                    && let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx)
                {
                    return Some(b);
                }
            }
        }
        None
    }

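    /// For each token at `offset`, descends into macro expansions and returns the ancestor
    /// chains of the mapped tokens, merged so that smaller nodes are yielded first.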
    fn descend_node_at_offset(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
        node.token_at_offset(offset)
            .map(move |token| self.descend_into_macros_exact(token))
            .map(|descendants| {
                descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
            })
            .kmerge_by(|left, right| {
                left.clone()
                    .map(|node| node.text_range().len())
                    .lt(right.clone().map(|node| node.text_range().len()))
            })
    }

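    /// Maps the node out of macro expansions and returns its range in the original file,
    /// falling back to the range of the macro call if it cannot be fully mapped up.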
    pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
        let node = self.find_file(node);
        node.original_file_range_rooted(self.db)
    }

    pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
        let node = self.find_file(node);
        node.original_file_range_opt(self.db).filter(|(_, ctx)| ctx.is_root()).map(TupleExt::head)
    }

    pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
        self.wrap_node_infile(node).original_ast_node_rooted(self.db).map(
            |InRealFile { file_id, value }| {
                self.cache(find_root(value.syntax()), file_id.into());
                value
            },
        )
    }

    pub fn original_syntax_node_rooted(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
        let InFile { file_id, .. } = self.find_file(node);
        InFile::new(file_id, node).original_syntax_node_rooted(self.db).map(
            |InRealFile { file_id, value }| {
                self.cache(find_root(&value), file_id.into());
                value
            },
        )
    }

    pub fn diagnostics_display_range(
        &self,
        src: InFile<SyntaxNodePtr>,
    ) -> FileRangeWrapper<FileId> {
        let root = self.parse_or_expand(src.file_id);
        let node = src.map(|it| it.to_node(&root));
        let FileRange { file_id, range } = node.as_ref().original_file_range_rooted(self.db);
        FileRangeWrapper { file_id: file_id.file_id(self.db), range }
    }

    pub fn diagnostics_display_range_for_range(
        &self,
        src: InFile<TextRange>,
    ) -> FileRangeWrapper<FileId> {
        let FileRange { file_id, range } = src.original_node_file_range_rooted(self.db);
        FileRangeWrapper { file_id: file_id.file_id(self.db), range }
    }

    fn token_ancestors_with_macros(
        &self,
        token: SyntaxToken,
    ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
        token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
    }

    pub fn ancestors_with_macros(
        &self,
        node: SyntaxNode,
    ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
        let node = self.find_file(&node);
        self.ancestors_with_macros_file(node.cloned()).map(|it| it.value)
    }

    pub fn ancestors_with_macros_file(
        &self,
        node: InFile<SyntaxNode>,
    ) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
        iter::successors(Some(node), move |&InFile { file_id, ref value }| match value.parent() {
            Some(parent) => Some(InFile::new(file_id, parent)),
            None => {
                let macro_file = file_id.macro_file()?;

                self.with_ctx(|ctx| {
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    expansion_info.arg().map(|node| node?.parent()).transpose()
                })
            }
        })
    }

    pub fn ancestors_at_offset_with_macros(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = SyntaxNode> + '_ {
        node.token_at_offset(offset)
            .map(|token| self.token_ancestors_with_macros(token))
            .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
    }

    pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
        let text = lifetime.text();
        let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
            let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
            gpl.lifetime_params()
                .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
        })?;
        let src = self.wrap_node_infile(lifetime_param);
        ToDef::to_def(self, src.as_ref())
    }

    pub fn resolve_label(&self, label: &ast::Lifetime) -> Option<Label> {
        let src = self.wrap_node_infile(label.clone());
        let (parent, label_id) = self.with_ctx(|ctx| ctx.label_ref_to_def(src.as_ref()))?;
        Some(Label { parent, label_id })
    }

    pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type<'db>> {
        let analyze = self.analyze(ty.syntax())?;
        analyze.type_of_type(self.db, ty)
    }

    pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
        let parent_ty = path.syntax().parent().and_then(ast::Type::cast)?;
        let analyze = self.analyze(path.syntax())?;
        let ty = analyze.store_sm()?.node_type(InFile::new(analyze.file_id, &parent_ty))?;
        let path = match &analyze.store()?.types[ty] {
            hir_def::type_ref::TypeRef::Path(path) => path,
            _ => return None,
        };
        match analyze.resolver.resolve_path_in_type_ns_fully(self.db, path)? {
            TypeNs::TraitId(trait_id) => Some(trait_id.into()),
            _ => None,
        }
    }

    pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment<'db>>> {
        let mutability = |m| match m {
            hir_ty::next_solver::Mutability::Not => Mutability::Shared,
            hir_ty::next_solver::Mutability::Mut => Mutability::Mut,
        };

        let analyzer = self.analyze(expr.syntax())?;

        let (mut source_ty, _) = analyzer.type_of_expr(self.db, expr)?;

        analyzer.expr_adjustments(expr).map(|it| {
            it.iter()
                .map(|adjust| {
                    let target =
                        Type::new_with_resolver(self.db, &analyzer.resolver, adjust.target);
                    let kind = match adjust.kind {
                        hir_ty::Adjust::NeverToAny => Adjust::NeverToAny,
                        hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => {
                            Adjust::Deref(Some(OverloadedDeref(
                                m.map(mutability).unwrap_or(Mutability::Shared),
                            )))
                        }
                        hir_ty::Adjust::Deref(None) => Adjust::Deref(None),
                        hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
                            Adjust::Borrow(AutoBorrow::RawPtr(mutability(m)))
                        }
                        hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => {
                            Adjust::Borrow(AutoBorrow::Ref(mutability(m.into())))
                        }
                        hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc),
                    };

                    let source = mem::replace(&mut source_ty, target.clone());

                    Adjustment { source, target, kind }
                })
                .collect()
        })
    }

    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo<'db>> {
        self.analyze(expr.syntax())?
            .type_of_expr(self.db, expr)
            .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
    }

    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo<'db>> {
        self.analyze(pat.syntax())?
            .type_of_pat(self.db, pat)
            .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
    }

    pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type<'db>> {
        self.analyze(pat.syntax())?.type_of_binding_in_pat(self.db, pat)
    }

    pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type<'db>> {
        self.analyze(param.syntax())?.type_of_self(self.db, param)
    }

    pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type<'db>; 1]> {
        self.analyze(pat.syntax())
            .and_then(|it| it.pattern_adjustments(self.db, pat))
            .unwrap_or_default()
    }

    pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
        self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
    }

    pub fn resolve_expr_as_callable(&self, call: &ast::Expr) -> Option<Callable<'db>> {
        self.analyze(call.syntax())?.resolve_expr_as_callable(self.db, call)
    }

    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
        self.analyze(call.syntax())?.resolve_method_call(self.db, call)
    }

    pub fn resolve_method_call_fallback(
        &self,
        call: &ast::MethodCallExpr,
    ) -> Option<(Either<Function, Field>, Option<GenericSubstitution<'db>>)> {
        self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
    }

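    /// Resolves `func` (a method of `trait_`) to the concrete impl method that would be called
    /// for the given substitution, in the trait environment of `env`. Panics if the
    /// substitution does not match the trait's type parameters.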
1720 pub fn resolve_trait_impl_method(
1723 &self,
1724 env: Type<'db>,
1725 trait_: Trait,
1726 func: Function,
1727 subst: impl IntoIterator<Item = Type<'db>>,
1728 ) -> Option<Function> {
1729 let interner = DbInterner::new_no_crate(self.db);
1730 let mut subst = subst.into_iter();
1731 let substs =
1732 hir_ty::next_solver::GenericArgs::for_item(interner, trait_.id.into(), |_, id, _| {
1733 assert!(matches!(id, hir_def::GenericParamId::TypeParamId(_)), "expected a type");
1734 subst.next().expect("too few subst").ty.into()
1735 });
1736 assert!(subst.next().is_none(), "too many subst");
1737 Some(self.db.lookup_impl_method(env.env, func.into(), substs).0.into())
1738 }
1739
1740 fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<StructId> {
1741 self.analyze(range_pat.syntax())?.resolve_range_pat(self.db, range_pat)
1742 }
1743
1744 fn resolve_range_expr(&self, range_expr: &ast::RangeExpr) -> Option<StructId> {
1745 self.analyze(range_expr.syntax())?.resolve_range_expr(self.db, range_expr)
1746 }
1747
1748 fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<FunctionId> {
1749 self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr)
1750 }
1751
1752 fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<FunctionId> {
1753 self.analyze(prefix_expr.syntax())?.resolve_prefix_expr(self.db, prefix_expr)
1754 }
1755
1756 fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<FunctionId> {
1757 self.analyze(index_expr.syntax())?.resolve_index_expr(self.db, index_expr)
1758 }
1759
1760 fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<FunctionId> {
1761 self.analyze(bin_expr.syntax())?.resolve_bin_expr(self.db, bin_expr)
1762 }
1763
1764 fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<FunctionId> {
1765 self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr)
1766 }
1767
1768 pub fn resolve_method_call_as_callable(
1771 &self,
1772 call: &ast::MethodCallExpr,
1773 ) -> Option<Callable<'db>> {
1774 self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
1775 }
1776
1777 pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Either<Field, TupleField>> {
1778 self.analyze(field.syntax())?.resolve_field(field)
1779 }
1780
1781 pub fn resolve_field_fallback(
1782 &self,
1783 field: &ast::FieldExpr,
1784 ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution<'db>>)>
1785 {
1786 self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
1787 }
1788
1789 pub fn resolve_record_field(
1790 &self,
1791 field: &ast::RecordExprField,
1792 ) -> Option<(Field, Option<Local>, Type<'db>)> {
1793 self.resolve_record_field_with_substitution(field)
1794 .map(|(field, local, ty, _)| (field, local, ty))
1795 }
1796
1797 pub fn resolve_record_field_with_substitution(
1798 &self,
1799 field: &ast::RecordExprField,
1800 ) -> Option<(Field, Option<Local>, Type<'db>, GenericSubstitution<'db>)> {
1801 self.analyze(field.syntax())?.resolve_record_field(self.db, field)
1802 }
1803
1804 pub fn resolve_record_pat_field(
1805 &self,
1806 field: &ast::RecordPatField,
1807 ) -> Option<(Field, Type<'db>)> {
1808 self.resolve_record_pat_field_with_subst(field).map(|(field, ty, _)| (field, ty))
1809 }
1810
1811 pub fn resolve_record_pat_field_with_subst(
1812 &self,
1813 field: &ast::RecordPatField,
1814 ) -> Option<(Field, Type<'db>, GenericSubstitution<'db>)> {
1815 self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
1816 }
1817
1818 pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
1820 let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
1821 self.resolve_macro_call2(macro_call)
1822 }
1823
1824 pub fn resolve_macro_call2(&self, macro_call: InFile<&ast::MacroCall>) -> Option<Macro> {
1825 self.to_def2(macro_call)
1826 .and_then(|call| self.with_ctx(|ctx| macro_call_to_macro_id(ctx, call)))
1827 .map(Into::into)
1828 }
1829
1830 pub fn is_proc_macro_call(&self, macro_call: InFile<&ast::MacroCall>) -> bool {
1831 self.resolve_macro_call2(macro_call)
1832 .is_some_and(|m| matches!(m.id, MacroId::ProcMacroId(..)))
1833 }
1834
1835 pub fn resolve_macro_call_arm(&self, macro_call: &ast::MacroCall) -> Option<u32> {
1836 let file_id = self.to_def(macro_call)?;
1837 self.db.parse_macro_expansion(file_id).value.1.matched_arm
1838 }
1839
1840 pub fn get_unsafe_ops(&self, def: DefWithBody) -> FxHashSet<ExprOrPatSource> {
1841 let def = DefWithBodyId::from(def);
1842 let (body, source_map) = self.db.body_with_source_map(def);
1843 let infer = InferenceResult::for_body(self.db, def);
1844 let mut res = FxHashSet::default();
1845 unsafe_operations_for_body(self.db, infer, def, &body, &mut |node| {
1846 if let Ok(node) = source_map.expr_or_pat_syntax(node) {
1847 res.insert(node);
1848 }
1849 });
1850 res
1851 }
1852
    pub fn get_unsafe_ops_for_unsafe_block(&self, block: ast::BlockExpr) -> Vec<ExprOrPatSource> {
        always!(block.unsafe_token().is_some());
        let block = self.wrap_node_infile(ast::Expr::from(block));
        let Some(def) = self.body_for(block.syntax()) else { return Vec::new() };
        let def = def.into();
        let (body, source_map) = self.db.body_with_source_map(def);
        let infer = InferenceResult::for_body(self.db, def);
        let Some(ExprOrPatId::ExprId(block)) = source_map.node_expr(block.as_ref()) else {
            return Vec::new();
        };
        let mut res = Vec::default();
        unsafe_operations(self.db, infer, def, &body, block, &mut |node, _| {
            if let Ok(node) = source_map.expr_or_pat_syntax(node) {
                res.push(node);
            }
        });
        res
    }

    pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
        let Some(mac) = self.resolve_macro_call(macro_call) else { return false };
        if mac.is_asm_like(self.db) {
            return true;
        }

        let Some(sa) = self.analyze(macro_call.syntax()) else { return false };
        let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
        match macro_call.map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast)).transpose() {
            Some(it) => sa.is_unsafe_macro_call_expr(self.db, it.as_ref()),
            None => false,
        }
    }

    pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
        let item_in_file = self.wrap_node_infile(item.clone());
        let id = self.with_ctx(|ctx| {
            let macro_call_id = ctx.item_to_macro_call(item_in_file.as_ref())?;
            macro_call_to_macro_id(ctx, macro_call_id)
        })?;
        Some(Macro { id })
    }

    pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
        self.resolve_path_with_subst(path).map(|(it, _)| it)
    }

    pub fn resolve_path_per_ns(&self, path: &ast::Path) -> Option<PathResolutionPerNs> {
        self.analyze(path.syntax())?.resolve_hir_path_per_ns(self.db, path)
    }

    pub fn resolve_path_with_subst(
        &self,
        path: &ast::Path,
    ) -> Option<(PathResolution, Option<GenericSubstitution<'db>>)> {
        self.analyze(path.syntax())?.resolve_path(self.db, path)
    }

    pub fn resolve_use_type_arg(&self, name: &ast::NameRef) -> Option<TypeParam> {
        self.analyze(name.syntax())?.resolve_use_type_arg(name)
    }

    pub fn resolve_offset_of_field(
        &self,
        name_ref: &ast::NameRef,
    ) -> Option<(Either<Variant, Field>, GenericSubstitution<'db>)> {
        self.analyze_no_infer(name_ref.syntax())?.resolve_offset_of_field(self.db, name_ref)
    }

    pub fn resolve_mod_path(
        &self,
        scope: &SyntaxNode,
        path: &ModPath,
    ) -> Option<impl Iterator<Item = ItemInNs>> {
        let analyze = self.analyze(scope)?;
        let items = analyze.resolver.resolve_module_path_in_items(self.db, path);
        Some(items.iter_items().map(|(item, _)| item.into()))
    }

    fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
        self.analyze(record_lit.syntax())?.resolve_variant(record_lit)
    }

    pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
        self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
    }

    pub fn record_literal_missing_fields(
        &self,
        literal: &ast::RecordExpr,
    ) -> Vec<(Field, Type<'db>)> {
        self.analyze(literal.syntax())
            .and_then(|it| it.record_literal_missing_fields(self.db, literal))
            .unwrap_or_default()
    }

    pub fn record_pattern_missing_fields(
        &self,
        pattern: &ast::RecordPat,
    ) -> Vec<(Field, Type<'db>)> {
        self.analyze(pattern.syntax())
            .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
            .unwrap_or_default()
    }

    fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
        let mut ctx = SourceToDefCtx { db: self.db, cache: &mut self.s2d_cache.borrow_mut() };
        f(&mut ctx)
    }

    pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
        let src = self.find_file(src.syntax()).with_value(src);
        T::to_def(self, src)
    }

    pub fn to_def2<T: ToDef>(&self, src: InFile<&T>) -> Option<T::Def> {
        T::to_def(self, src)
    }

    fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
        self.with_ctx(|ctx| ctx.file_to_def(file).to_owned()).into_iter().map(Module::from)
    }

    fn hir_file_to_module_defs(&self, file: HirFileId) -> impl Iterator<Item = Module> {
        self.file_to_module_defs(file.original_file_respecting_includes(self.db).file_id(self.db))
    }

    pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
        self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
            db: self.db,
            file_id,
            resolver,
        })
    }

    pub fn scope_at_offset(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<SemanticsScope<'db>> {
        self.analyze_with_offset_no_infer(node, offset).map(
            |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
                db: self.db,
                file_id,
                resolver,
            },
        )
    }

    pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
    where
        Def::Ast: AstNode,
    {
        let res = def.source(self.db)?;
        self.cache(find_root(res.value.syntax()), res.file_id);
        Some(res)
    }

    pub fn body_for(&self, node: InFile<&SyntaxNode>) -> Option<DefWithBody> {
        let container = self.with_ctx(|ctx| ctx.find_container(node))?;

        match container {
            ChildContainer::DefWithBodyId(def) => Some(def.into()),
            _ => None,
        }
    }

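    // The `analyze*` helpers build a `SourceAnalyzer` for `node`: `analyze`
    // runs type inference for the enclosing body, while the `*_no_infer`
    // variants skip inference and therefore cannot answer type questions.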
    fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer<'db>> {
        let node = self.find_file(node);
        self.analyze_impl(node, None, true)
    }

    fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer<'db>> {
        let node = self.find_file(node);
        self.analyze_impl(node, None, false)
    }

    fn analyze_with_offset_no_infer(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<SourceAnalyzer<'db>> {
        let node = self.find_file(node);
        self.analyze_impl(node, Some(offset), false)
    }

    fn analyze_impl(
        &self,
        node: InFile<&SyntaxNode>,
        offset: Option<TextSize>,
        infer_body: bool,
    ) -> Option<SourceAnalyzer<'db>> {
        let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered();

        let container = self.with_ctx(|ctx| ctx.find_container(node))?;

        let resolver = match container {
            ChildContainer::DefWithBodyId(def) => {
                return Some(if infer_body {
                    SourceAnalyzer::new_for_body(self.db, def, node, offset)
                } else {
                    SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
                });
            }
            ChildContainer::VariantId(def) => {
                return Some(SourceAnalyzer::new_variant_body(self.db, def, node, offset));
            }
            ChildContainer::TraitId(it) => {
                return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
            }
            ChildContainer::ImplId(it) => {
                return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
            }
            ChildContainer::EnumId(it) => {
                return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
            }
            ChildContainer::GenericDefId(it) => {
                return Some(SourceAnalyzer::new_generic_def(self.db, it, node, offset));
            }
            ChildContainer::ModuleId(it) => it.resolver(self.db),
        };
        Some(SourceAnalyzer::new_for_resolver(resolver, node))
    }

    fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
        SourceToDefCache::cache(
            &mut self.s2d_cache.borrow_mut().root_to_file_cache,
            root_node,
            file_id,
        );
    }

    pub fn assert_contains_node(&self, node: &SyntaxNode) {
        self.find_file(node);
    }

    fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
        let cache = self.s2d_cache.borrow();
        cache.root_to_file_cache.get(root_node).copied()
    }

    fn wrap_node_infile<N: AstNode>(&self, node: N) -> InFile<N> {
        let InFile { file_id, .. } = self.find_file(node.syntax());
        InFile::new(file_id, node)
    }

    fn wrap_token_infile(&self, token: SyntaxToken) -> InFile<SyntaxToken> {
        let InFile { file_id, .. } = self.find_file(&token.parent().unwrap());
        InFile::new(file_id, token)
    }

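    /// Wraps `node` in an `InFile` carrying the `HirFileId` of the syntax tree
    /// it belongs to. Panics if the tree was not produced by (or cached in)
    /// this `Semantics` instance.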
    fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
        let root_node = find_root(node);
        let file_id = self.lookup(&root_node).unwrap_or_else(|| {
            panic!(
                "\n\nFailed to lookup {:?} in this Semantics.\n\
                Make sure to only query nodes derived from this instance of Semantics.\n\
                root node: {:?}\n\
                known nodes: {}\n\n",
                node,
                root_node,
                self.s2d_cache
                    .borrow()
                    .root_to_file_cache
                    .keys()
                    .map(|it| format!("{it:?}"))
                    .collect::<Vec<_>>()
                    .join(", ")
            )
        });
        InFile::new(file_id, node)
    }

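    /// Returns `true` if `expr` is located inside an `unsafe fn` or inside an
    /// `unsafe {}` block of its enclosing body.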
    pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
        let Some(enclosing_item) =
            expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast)
        else {
            return false;
        };

        let def = match &enclosing_item {
            Either::Left(ast::Item::Fn(it)) if it.unsafe_token().is_some() => return true,
            Either::Left(ast::Item::Fn(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::FunctionId)
            }
            Either::Left(ast::Item::Const(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::ConstId)
            }
            Either::Left(ast::Item::Static(it)) => {
                self.to_def(it).map(<_>::into).map(DefWithBodyId::StaticId)
            }
            Either::Left(_) => None,
            Either::Right(it) => self.to_def(it).map(<_>::into).map(DefWithBodyId::VariantId),
        };
        let Some(def) = def else { return false };
        let enclosing_node = enclosing_item.as_ref().either(|i| i.syntax(), |v| v.syntax());

        let (body, source_map) = self.db.body_with_source_map(def);

        let file_id = self.find_file(expr.syntax()).file_id;

        let Some(mut parent) = expr.syntax().parent() else { return false };
        loop {
            if &parent == enclosing_node {
                break false;
            }

            if let Some(parent) = ast::Expr::cast(parent.clone())
                && let Some(ExprOrPatId::ExprId(expr_id)) =
                    source_map.node_expr(InFile { file_id, value: &parent })
                && let Expr::Unsafe { .. } = body[expr_id]
            {
                break true;
            }

            let Some(parent_) = parent.parent() else { break false };
            parent = parent_;
        }
    }

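    /// If `impl_` was produced by a `derive` expansion (possibly through
    /// further nested macro calls), returns the ADT the derive was applied to.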
    pub fn impl_generated_from_derive(&self, impl_: Impl) -> Option<Adt> {
        let source = hir_def::src::HasSource::ast_ptr(&impl_.id.loc(self.db), self.db);
        let mut file_id = source.file_id;
        let adt_ast_id = loop {
            let macro_call = file_id.macro_file()?;
            match macro_call.loc(self.db).kind {
                hir_expand::MacroCallKind::Derive { ast_id, .. } => break ast_id,
                hir_expand::MacroCallKind::FnLike { ast_id, .. } => file_id = ast_id.file_id,
                hir_expand::MacroCallKind::Attr { ast_id, .. } => file_id = ast_id.file_id,
            }
        };
        let adt_source = adt_ast_id.to_in_file_node(self.db);
        self.cache(adt_source.value.syntax().ancestors().last().unwrap(), adt_source.file_id);
        ToDef::to_def(self, adt_source.as_ref())
    }
}

fn macro_call_to_macro_id(
    ctx: &mut SourceToDefCtx<'_, '_>,
    macro_call_id: MacroCallId,
) -> Option<MacroId> {
    let db: &dyn ExpandDatabase = ctx.db;
    let loc = db.lookup_intern_macro_call(macro_call_id);

    match loc.def.ast_id() {
        Either::Left(it) => {
            let node = match it.file_id {
                HirFileId::FileId(file_id) => {
                    it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
                }
                HirFileId::MacroFile(macro_file) => {
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    it.to_ptr(db).to_node(&expansion_info.expanded().value)
                }
            };
            ctx.macro_to_def(InFile::new(it.file_id, &node))
        }
        Either::Right(it) => {
            let node = match it.file_id {
                HirFileId::FileId(file_id) => {
                    it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
                }
                HirFileId::MacroFile(macro_file) => {
                    let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
                    it.to_ptr(db).to_node(&expansion_info.expanded().value)
                }
            };
            ctx.proc_macro_to_def(InFile::new(it.file_id, &node))
        }
    }
}

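/// Maps a syntax node to the HIR definition it corresponds to (e.g. an
/// `ast::Fn` to a [`crate::Function`]). Implementations are generated by the
/// `to_def_impls!` macro below and are used through [`SemanticsImpl::to_def`].
///
/// Illustrative sketch (not a doctest; `sema` is a `Semantics` built over the
/// file that `fn_node` came from):
///
/// ```ignore
/// let func: Option<crate::Function> = sema.to_def(&fn_node);
/// ```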
pub trait ToDef: AstNode + Clone {
    type Def;
    fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def>;
}

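// Generates the `ToDef` impls: each `(Def, Ast, method)` triple forwards to the
// corresponding `SourceToDefCtx` method and converts the returned raw id into
// the public wrapper type.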
macro_rules! to_def_impls {
    ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
        impl ToDef for $ast {
            type Def = $def;
            fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def> {
                sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
            }
        }
    )*}
}

to_def_impls![
    (crate::Module, ast::Module, module_to_def),
    (crate::Module, ast::SourceFile, source_file_to_def),
    (crate::Struct, ast::Struct, struct_to_def),
    (crate::Enum, ast::Enum, enum_to_def),
    (crate::Union, ast::Union, union_to_def),
    (crate::Trait, ast::Trait, trait_to_def),
    (crate::Impl, ast::Impl, impl_to_def),
    (crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
    (crate::Const, ast::Const, const_to_def),
    (crate::Static, ast::Static, static_to_def),
    (crate::Function, ast::Fn, fn_to_def),
    (crate::Field, ast::RecordField, record_field_to_def),
    (crate::Field, ast::TupleField, tuple_field_to_def),
    (crate::Variant, ast::Variant, enum_variant_to_def),
    (crate::TypeParam, ast::TypeParam, type_param_to_def),
    (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
    (crate::ConstParam, ast::ConstParam, const_param_to_def),
    (crate::GenericParam, ast::GenericParam, generic_param_to_def),
    (crate::Macro, ast::Macro, macro_to_def),
    (crate::Local, ast::IdentPat, bind_pat_to_def),
    (crate::Local, ast::SelfParam, self_param_to_def),
    (crate::Label, ast::Label, label_to_def),
    (crate::Adt, ast::Adt, adt_to_def),
    (crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
    (crate::InlineAsmOperand, ast::AsmOperandNamed, asm_operand_to_def),
    (crate::ExternBlock, ast::ExternBlock, extern_block_to_def),
    (MacroCallId, ast::MacroCall, macro_call_to_macro_call),
];

fn find_root(node: &SyntaxNode) -> SyntaxNode {
    node.ancestors().last().unwrap()
}

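/// A name-resolution scope at a particular point in a particular file,
/// obtained via [`SemanticsImpl::scope`] or [`SemanticsImpl::scope_at_offset`].
/// It exposes what is visible from that point: the containing module and
/// crate, the names and traits in scope, generic parameters, and so on.
///
/// Illustrative sketch (not a doctest): listing every name visible at `node`.
///
/// ```ignore
/// if let Some(scope) = sema.scope(&node) {
///     scope.process_all_names(&mut |name, def| {
///         // e.g. collect `(name, def)` pairs for completion
///     });
/// }
/// ```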
#[derive(Debug)]
pub struct SemanticsScope<'db> {
    pub db: &'db dyn HirDatabase,
    file_id: HirFileId,
    resolver: Resolver<'db>,
}

impl<'db> SemanticsScope<'db> {
    pub fn file_id(&self) -> HirFileId {
        self.file_id
    }

    pub fn module(&self) -> Module {
        Module { id: self.resolver.module() }
    }

    pub fn krate(&self) -> Crate {
        Crate { id: self.resolver.krate() }
    }

    pub fn containing_function(&self) -> Option<Function> {
        self.resolver.body_owner().and_then(|owner| match owner {
            DefWithBodyId::FunctionId(id) => Some(id.into()),
            _ => None,
        })
    }

    pub(crate) fn resolver(&self) -> &Resolver<'db> {
        &self.resolver
    }

    pub fn visible_traits(&self) -> VisibleTraits {
        let resolver = &self.resolver;
        VisibleTraits(resolver.traits_in_scope(self.db))
    }

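    /// Calls `f` once for every name visible in this scope together with what
    /// that name resolves to (module definitions, locals, generic parameters,
    /// labels, ...).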
    pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
        let scope = self.resolver.names_in_scope(self.db);
        for (name, entries) in scope {
            for entry in entries {
                let def = match entry {
                    resolver::ScopeDef::ModuleDef(it) => ScopeDef::ModuleDef(it.into()),
                    resolver::ScopeDef::Unknown => ScopeDef::Unknown,
                    resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
                    resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
                    resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
                    resolver::ScopeDef::Local(binding_id) => match self.resolver.body_owner() {
                        Some(parent) => ScopeDef::Local(Local { parent, binding_id }),
                        None => continue,
                    },
                    resolver::ScopeDef::Label(label_id) => match self.resolver.body_owner() {
                        Some(parent) => ScopeDef::Label(Label { parent, label_id }),
                        None => continue,
                    },
                };
                f(name.clone(), def)
            }
        }
    }

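    /// Whether the methods of trait `t` are callable in this scope, i.e.
    /// whether the trait is in scope here.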
    pub fn can_use_trait_methods(&self, t: Trait) -> bool {
        self.resolver.traits_in_scope(self.db).contains(&t.id)
    }

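    /// Resolves `ast_path` as if it were written at this scope: the path does
    /// not have to come from the file this scope was created for. It is
    /// lowered to a `ModPath` (dropping generic arguments) and resolved
    /// against the scope's resolver.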
    pub fn speculative_resolve(&self, ast_path: &ast::Path) -> Option<PathResolution> {
        let mut kind = PathKind::Plain;
        let mut segments = vec![];
        let mut first = true;
        for segment in ast_path.segments() {
            if first {
                first = false;
                if segment.coloncolon_token().is_some() {
                    kind = PathKind::Abs;
                }
            }

            let Some(k) = segment.kind() else { continue };
            match k {
                ast::PathSegmentKind::Name(name_ref) => segments.push(name_ref.as_name()),
                ast::PathSegmentKind::Type { .. } => continue,
                ast::PathSegmentKind::SelfTypeKw => {
                    segments.push(Name::new_symbol_root(sym::Self_))
                }
                ast::PathSegmentKind::SelfKw => kind = PathKind::Super(0),
                ast::PathSegmentKind::SuperKw => match kind {
                    PathKind::Super(s) => kind = PathKind::Super(s + 1),
                    PathKind::Plain => kind = PathKind::Super(1),
                    PathKind::Crate | PathKind::Abs | PathKind::DollarCrate(_) => continue,
                },
                ast::PathSegmentKind::CrateKw => kind = PathKind::Crate,
            }
        }

        resolve_hir_path(
            self.db,
            &self.resolver,
            &Path::BarePath(Interned::new(ModPath::from_segments(kind, segments))),
            HygieneId::ROOT,
            None,
        )
    }

    pub fn resolve_mod_path(&self, path: &ModPath) -> impl Iterator<Item = ItemInNs> + use<> {
        let items = self.resolver.resolve_module_path_in_items(self.db, path);
        items.iter_items().map(|(item, _)| item.into())
    }

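    /// Invokes `cb` for every associated type that a `T::Assoc` shorthand
    /// could refer to, given the type-namespace `resolution` of `T` and the
    /// generic context of this scope; the wrapper always returns `false` to
    /// the underlying walker, so it never requests an early stop.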
    pub fn assoc_type_shorthand_candidates(
        &self,
        resolution: &PathResolution,
        mut cb: impl FnMut(TypeAlias),
    ) {
        let (Some(def), Some(resolution)) = (self.resolver.generic_def(), resolution.in_type_ns())
        else {
            return;
        };
        hir_ty::associated_type_shorthand_candidates(self.db, def, resolution, |_, id| {
            cb(id.into());
            false
        });
    }

    pub fn generic_def(&self) -> Option<crate::GenericDef> {
        self.resolver.generic_def().map(|id| id.into())
    }

    pub fn extern_crates(&self) -> impl Iterator<Item = (Name, Module)> + '_ {
        self.resolver.extern_crates_in_scope().map(|(name, id)| (name, Module { id }))
    }

    pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
        self.resolver.extern_crate_decls_in_scope(self.db)
    }

    pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool {
        self.resolver.impl_def() == other.resolver.impl_def()
    }
}

#[derive(Debug)]
pub struct VisibleTraits(pub FxHashSet<TraitId>);

impl ops::Deref for VisibleTraits {
    type Target = FxHashSet<TraitId>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

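/// Walks a body looking for name-resolution conflicts that renaming
/// `to_be_renamed` from `old_name` to `new_name` would introduce; every
/// conflicting binding is accumulated in `conflicts`.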
struct RenameConflictsVisitor<'a> {
    db: &'a dyn HirDatabase,
    owner: DefWithBodyId,
    resolver: Resolver<'a>,
    body: &'a Body,
    to_be_renamed: BindingId,
    new_name: Symbol,
    old_name: Symbol,
    conflicts: FxHashSet<BindingId>,
}

impl RenameConflictsVisitor<'_> {
    fn resolve_path(&mut self, node: ExprOrPatId, path: &Path) {
        if let Path::BarePath(path) = path
            && let Some(name) = path.as_ident()
        {
            if *name.symbol() == self.new_name {
                if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed(
                    self.db,
                    name,
                    path,
                    self.body.expr_or_pat_path_hygiene(node),
                    self.to_be_renamed,
                ) {
                    self.conflicts.insert(conflicting);
                }
            } else if *name.symbol() == self.old_name
                && let Some(conflicting) = self.resolver.rename_will_conflict_with_another_variable(
                    self.db,
                    name,
                    path,
                    self.body.expr_or_pat_path_hygiene(node),
                    &self.new_name,
                    self.to_be_renamed,
                )
            {
                self.conflicts.insert(conflicting);
            }
        }
    }

    fn rename_conflicts(&mut self, expr: ExprId) {
        match &self.body[expr] {
            Expr::Path(path) => {
                let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
                self.resolve_path(expr.into(), path);
                self.resolver.reset_to_guard(guard);
            }
            &Expr::Assignment { target, .. } => {
                let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
                self.body.walk_pats(target, &mut |pat| {
                    if let Pat::Path(path) = &self.body[pat] {
                        self.resolve_path(pat.into(), path);
                    }
                });
                self.resolver.reset_to_guard(guard);
            }
            _ => {}
        }

        self.body.walk_child_exprs(expr, |expr| self.rename_conflicts(expr));
    }
}