ide_completion/context/analysis.rs

//! Module responsible for analyzing the code surrounding the cursor for completion.
use std::iter;

use hir::{ExpandResult, InFile, Semantics, Type, TypeInfo, Variant};
use ide_db::{RootDatabase, active_parameter::ActiveParameter};
use itertools::Either;
use stdx::always;
use syntax::{
    AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
    T, TextRange, TextSize,
    algo::{
        self, ancestors_at_offset, find_node_at_offset, non_trivia_sibling,
        previous_non_trivia_token,
    },
    ast::{
        self, AttrKind, HasArgList, HasGenericArgs, HasGenericParams, HasLoopBody, HasName,
        NameOrNameRef,
    },
    match_ast,
};

use crate::{
    completions::postfix::is_in_condition,
    context::{
        AttrCtx, BreakableKind, COMPLETION_MARKER, CompletionAnalysis, DotAccess, DotAccessExprCtx,
        DotAccessKind, ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind,
        NameRefContext, NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathExprCtx,
        PathKind, PatternContext, PatternRefutability, Qualified, QualifierCtx,
        TypeAscriptionTarget, TypeLocation,
    },
};

#[derive(Debug)]
struct ExpansionResult {
    original_file: SyntaxNode,
    speculative_file: SyntaxNode,
    /// The offset in the original file.
    original_offset: TextSize,
    /// The offset in the speculatively expanded file.
    speculative_offset: TextSize,
    fake_ident_token: SyntaxToken,
    derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
}

pub(super) struct AnalysisResult<'db> {
    pub(super) analysis: CompletionAnalysis<'db>,
    pub(super) expected: (Option<Type<'db>>, Option<ast::NameOrNameRef>),
    pub(super) qualifier_ctx: QualifierCtx,
    /// The original token of the expanded file.
    pub(super) token: SyntaxToken,
    /// The offset in the original file.
    pub(super) original_offset: TextSize,
}

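/// Expands macros at the cursor position in both the original and the speculative file and then
/// analyzes the position that results from the expansion.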
pub(super) fn expand_and_analyze<'db>(
    sema: &Semantics<'db, RootDatabase>,
    original_file: InFile<SyntaxNode>,
    speculative_file: SyntaxNode,
    offset: TextSize,
    original_token: &SyntaxToken,
) -> Option<AnalysisResult<'db>> {
    // As we insert after the offset, right-biased will *always* pick the identifier,
    // whether or not an ident has already been typed.
    let fake_ident_token = speculative_file.token_at_offset(offset).right_biased()?;
    // The relative offset between the cursor and the *identifier* token we are completing on.
    let relative_offset = offset - fake_ident_token.text_range().start();
    // Make the offset point to the start of the original token, as that is what the
    // intermediate offsets calculated during expansion always point to.
    let offset = offset - relative_offset;
    let expansion = expand_maybe_stop(
        sema,
        original_file.clone(),
        speculative_file.clone(),
        offset,
        fake_ident_token.clone(),
        relative_offset,
    )
    .unwrap_or(ExpansionResult {
        original_file: original_file.value,
        speculative_file,
        original_offset: offset,
        speculative_offset: fake_ident_token.text_range().start(),
        fake_ident_token,
        derive_ctx: None,
    });

    // add the relative offset back, so that left_biased finds the proper token
    let original_offset = expansion.original_offset + relative_offset;
    let token = expansion.original_file.token_at_offset(original_offset).left_biased()?;

    hir::attach_db(sema.db, || analyze(sema, expansion, original_token, &token)).map(
        |(analysis, expected, qualifier_ctx)| AnalysisResult {
            analysis,
            expected,
            qualifier_ctx,
            token,
            original_offset,
        },
    )
}

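/// Returns the nearest non-whitespace token at or before `offset` in `file`.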
fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Option<SyntaxToken> {
    let token = file.token_at_offset(offset).left_biased()?;
    algo::skip_whitespace_token(token, Direction::Prev)
}

/// Expand attributes and macro calls at the current cursor position for both the original file
/// and the fake file repeatedly. As soon as one of the two expansions fails we stop, so that the
/// original and speculative states stay in sync.
///
/// We do this by recursively expanding all macros and picking the best possible match. We cannot
/// just choose the first expansion each time, because macros can expand to something that does not
/// include our completion marker, e.g.:
///
/// ```ignore
/// macro_rules! helper { ($v:ident) => {} }
/// macro_rules! my_macro {
///     ($v:ident) => {
///         helper!($v);
///         $v
///     };
/// }
///
/// my_macro!(complete_me_here);
/// ```
/// If we expanded only the first thing we encountered (which this method in fact used to do), we
/// would be unable to complete here, because we would be walking directly into the void. So we
/// instead try *every* possible path.
///
/// This can also create discrepancies between the speculative and real expansions: because we
/// insert tokens, we insert characters, which means that if we try the second occurrence it may
/// not be at the same position in the original and speculative file. We take an educated guess
/// here, and for each token that we check, we subtract `COMPLETION_MARKER.len()`. This may not be
/// accurate because proc macros can insert the text of the completion marker in other places while
/// removing the span, but this is the best we can do.
fn expand_maybe_stop(
    sema: &Semantics<'_, RootDatabase>,
    original_file: InFile<SyntaxNode>,
    speculative_file: SyntaxNode,
    original_offset: TextSize,
    fake_ident_token: SyntaxToken,
    relative_offset: TextSize,
) -> Option<ExpansionResult> {
    if let result @ Some(_) = expand(
        sema,
        original_file.clone(),
        speculative_file.clone(),
        original_offset,
        fake_ident_token.clone(),
        relative_offset,
    ) {
        return result;
    }

    // We can't check whether the fake expansion is inside a macro call, because that requires
    // semantic info, but hopefully checking just the real one should be enough.
    if token_at_offset_ignore_whitespace(&original_file.value, original_offset + relative_offset)
        .is_some_and(|original_token| {
            !sema.is_inside_macro_call(original_file.with_value(&original_token))
        })
    {
        // Recursion base case.
        Some(ExpansionResult {
            original_file: original_file.value,
            speculative_file,
            original_offset,
            speculative_offset: fake_ident_token.text_range().start(),
            fake_ident_token,
            derive_ctx: None,
        })
    } else {
        None
    }
}

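/// Attempts one round of expansion at the cursor: attribute macros on the ancestor items first,
/// then derives (including derive helpers), and finally fn-like macro calls. Every successful
/// expansion recurses via [`expand_maybe_stop`], and the candidate token with the lowest rank wins.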
fn expand(
    sema: &Semantics<'_, RootDatabase>,
    original_file: InFile<SyntaxNode>,
    speculative_file: SyntaxNode,
    original_offset: TextSize,
    fake_ident_token: SyntaxToken,
    relative_offset: TextSize,
) -> Option<ExpansionResult> {
    let _p = tracing::info_span!("CompletionContext::expand").entered();

    let parent_item =
        |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
    let original_node = token_at_offset_ignore_whitespace(&original_file.value, original_offset)
        .and_then(|token| token.parent_ancestors().find_map(ast::Item::cast));
    let ancestor_items = iter::successors(
        Option::zip(
            original_node,
            find_node_at_offset::<ast::Item>(
                &speculative_file,
                fake_ident_token.text_range().start(),
            ),
        ),
        |(a, b)| parent_item(a).zip(parent_item(b)),
    );

    // first try to expand attributes as these are always the outermost macro calls
    'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
        match (
            sema.expand_attr_macro(&actual_item),
            sema.speculative_expand_attr_macro(
                &actual_item,
                &item_with_fake_ident,
                fake_ident_token.clone(),
            ),
        ) {
            // maybe parent items have attributes, so continue walking the ancestors
            (None, None) => continue 'ancestors,
            // successful expansions
            (
                Some(ExpandResult { value: actual_expansion, err: _ }),
                Some((fake_expansion, fake_mapped_tokens)),
            ) => {
                let mut accumulated_offset_from_fake_tokens = 0;
                let actual_range = actual_expansion.text_range().end();
                let result = fake_mapped_tokens
                    .into_iter()
                    .filter_map(|(fake_mapped_token, rank)| {
                        let accumulated_offset = accumulated_offset_from_fake_tokens;
                        if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
                            // Proc macros can emit the same span with different text; we don't
                            // want such tokens to participate in completion because the macro
                            // author probably didn't intend them to.
                            return None;
                        }
                        accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();

                        let new_offset = fake_mapped_token.text_range().start()
                            - TextSize::new(accumulated_offset as u32);
                        if new_offset + relative_offset > actual_range {
                            // offset outside of bounds from the original expansion,
                            // stop here to prevent problems from happening
                            return None;
                        }
                        let result = expand_maybe_stop(
                            sema,
                            actual_expansion.clone(),
                            fake_expansion.clone(),
                            new_offset,
                            fake_mapped_token,
                            relative_offset,
                        )?;
                        Some((result, rank))
                    })
                    .min_by_key(|(_, rank)| *rank)
                    .map(|(result, _)| result);
                if result.is_some() {
                    return result;
                }
            }
            // exactly one expansion failed, inconsistent state so stop expanding completely
            _ => break 'ancestors,
        }
    }

    // No attributes have been expanded, so look for macro_call! token trees or derive token trees
    let orig_tt = ancestors_at_offset(&original_file.value, original_offset)
        .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
        .last()?;
    let spec_tt = ancestors_at_offset(&speculative_file, fake_ident_token.text_range().start())
        .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
        .last()?;

    let (tts, attrs) = match (orig_tt, spec_tt) {
        (Either::Left(orig_tt), Either::Left(spec_tt)) => {
            let attrs = orig_tt
                .syntax()
                .parent()
                .and_then(ast::Meta::cast)
                .and_then(|it| it.parent_attr())
                .zip(
                    spec_tt
                        .syntax()
                        .parent()
                        .and_then(ast::Meta::cast)
                        .and_then(|it| it.parent_attr()),
                );
            (Some((orig_tt, spec_tt)), attrs)
        }
        (Either::Right(orig_path), Either::Right(spec_path)) => {
            (None, orig_path.parent_attr().zip(spec_path.parent_attr()))
        }
        _ => return None,
    };

    // Expand pseudo-derives, e.g. `derive(Debug$0)`
    if let Some((orig_attr, spec_attr)) = attrs {
        if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) = (
            sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
            sema.speculative_expand_derive_as_pseudo_attr_macro(
                &orig_attr,
                &spec_attr,
                fake_ident_token.clone(),
            ),
        ) && let Some((fake_mapped_token, _)) =
            fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank)
        {
            return Some(ExpansionResult {
                original_file: original_file.value,
                speculative_file,
                original_offset,
                speculative_offset: fake_ident_token.text_range().start(),
                fake_ident_token,
                derive_ctx: Some((
                    actual_expansion,
                    fake_expansion,
                    fake_mapped_token.text_range().start(),
                    orig_attr,
                )),
            });
        }

        if let Some(spec_adt) =
            spec_attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
                ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
                ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
                ast::Item::Union(it) => Some(ast::Adt::Union(it)),
                _ => None,
            })
        {
            // might be the path of a derive helper or a token tree inside of one
            if let Some(helpers) = sema.derive_helper(&orig_attr) {
                for (_mac, file) in helpers {
                    if let Some((fake_expansion, fake_mapped_tokens)) = sema.speculative_expand_raw(
                        file,
                        spec_adt.syntax(),
                        fake_ident_token.clone(),
                    ) {
                        // we are inside a derive helper token tree, treat this as being inside
                        // the derive expansion
                        let actual_expansion = sema.parse_or_expand(file.into());
                        let mut accumulated_offset_from_fake_tokens = 0;
                        let actual_range = actual_expansion.text_range().end();
                        let result = fake_mapped_tokens
                            .into_iter()
                            .filter_map(|(fake_mapped_token, rank)| {
                                let accumulated_offset = accumulated_offset_from_fake_tokens;
                                if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
                                    // Proc macros can emit the same span with different text; we
                                    // don't want such tokens to participate in completion because
                                    // the macro author probably didn't intend them to.
                                    return None;
                                }
                                accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();

                                let new_offset = fake_mapped_token.text_range().start()
                                    - TextSize::new(accumulated_offset as u32);
                                if new_offset + relative_offset > actual_range {
                                    // offset outside of bounds from the original expansion,
                                    // stop here to prevent problems from happening
                                    return None;
                                }
                                let result = expand_maybe_stop(
                                    sema,
                                    InFile::new(file.into(), actual_expansion.clone()),
                                    fake_expansion.clone(),
                                    new_offset,
                                    fake_mapped_token,
                                    relative_offset,
                                )?;
                                Some((result, rank))
                            })
                            .min_by_key(|(_, rank)| *rank)
                            .map(|(result, _)| result);
                        if result.is_some() {
                            return result;
                        }
                    }
                }
            }
        }
        // at this point we won't have any more successful expansions, so stop
        return None;
    }

    // Expand fn-like macro calls
    let (orig_tt, spec_tt) = tts?;
    let (actual_macro_call, macro_call_with_fake_ident) = (
        orig_tt.syntax().parent().and_then(ast::MacroCall::cast)?,
        spec_tt.syntax().parent().and_then(ast::MacroCall::cast)?,
    );
    let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
    let mac_call_path1 = macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());

    // inconsistent state, stop expanding
    if mac_call_path0 != mac_call_path1 {
        return None;
    }
    let speculative_args = macro_call_with_fake_ident.token_tree()?;

    match (
        sema.expand_macro_call(&actual_macro_call),
        sema.speculative_expand_macro_call(&actual_macro_call, &speculative_args, fake_ident_token),
    ) {
        // successful expansions
        (Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) => {
            let mut accumulated_offset_from_fake_tokens = 0;
            let actual_range = actual_expansion.text_range().end();
            fake_mapped_tokens
                .into_iter()
                .filter_map(|(fake_mapped_token, rank)| {
                    let accumulated_offset = accumulated_offset_from_fake_tokens;
                    if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
                        // Proc macros can emit the same span with different text; we don't
                        // want such tokens to participate in completion because the macro author
                        // probably didn't intend them to.
                        return None;
                    }
                    accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();

                    let new_offset = fake_mapped_token.text_range().start()
                        - TextSize::new(accumulated_offset as u32);
                    if new_offset + relative_offset > actual_range {
                        // offset outside of bounds from the original expansion,
                        // stop here to prevent problems from happening
                        return None;
                    }
                    let result = expand_maybe_stop(
                        sema,
                        actual_expansion.clone(),
                        fake_expansion.clone(),
                        new_offset,
                        fake_mapped_token,
                        relative_offset,
                    )?;
                    Some((result, rank))
                })
                .min_by_key(|(_, rank)| *rank)
                .map(|(result, _)| result)
        }
        // at least one expansion failed, we won't have anything to expand from this point
        // onwards so break out
        _ => None,
    }
}

/// Fill the completion context; this is where the semantic reasoning about the surrounding
/// context of the completion location happens.
fn analyze<'db>(
    sema: &Semantics<'db, RootDatabase>,
    expansion_result: ExpansionResult,
    original_token: &SyntaxToken,
    self_token: &SyntaxToken,
) -> Option<(CompletionAnalysis<'db>, (Option<Type<'db>>, Option<ast::NameOrNameRef>), QualifierCtx)>
{
    let _p = tracing::info_span!("CompletionContext::analyze").entered();
    let ExpansionResult {
        original_file,
        speculative_file,
        original_offset: _,
        speculative_offset,
        fake_ident_token,
        derive_ctx,
    } = expansion_result;

    if original_token.kind() != self_token.kind()
        // FIXME: This check can be removed once we use speculative database forking for completions
        && !(original_token.kind().is_punct() || original_token.kind().is_trivia())
        && !(SyntaxKind::is_any_identifier(original_token.kind())
            && SyntaxKind::is_any_identifier(self_token.kind()))
    {
        return None;
    }

    // Overwrite the path kind for derives
    if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
        if let Some(ast::NameLike::NameRef(name_ref)) =
            find_node_at_offset(&file_with_fake_ident, offset)
        {
            let parent = name_ref.syntax().parent()?;
            let (mut nameref_ctx, _) =
                classify_name_ref(sema, &original_file, name_ref, offset, parent)?;
            if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
                path_ctx.kind = PathKind::Derive {
                    existing_derives: sema
                        .resolve_derive_macro(&origin_attr)
                        .into_iter()
                        .flatten()
                        .flatten()
                        .collect(),
                };
            }
            return Some((
                CompletionAnalysis::NameRef(nameref_ctx),
                (None, None),
                QualifierCtx::default(),
            ));
        }
        return None;
    }

    let Some(name_like) = find_node_at_offset(&speculative_file, speculative_offset) else {
        let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
            CompletionAnalysis::String { original, expanded: ast::String::cast(self_token.clone()) }
        } else {
            // Fix up trailing whitespace problem
            // #[attr(foo = $0
            let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
            let p = token.parent()?;
            if p.kind() == SyntaxKind::TOKEN_TREE
                && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
            {
                let colon_prefix = previous_non_trivia_token(self_token.clone())
                    .is_some_and(|it| T![:] == it.kind());

                CompletionAnalysis::UnexpandedAttrTT {
                    fake_attribute_under_caret: fake_ident_token
                        .parent_ancestors()
                        .find_map(ast::Attr::cast),
                    colon_prefix,
                    extern_crate: p.ancestors().find_map(ast::ExternCrate::cast),
                }
            } else {
                return None;
            }
        };
        return Some((analysis, (None, None), QualifierCtx::default()));
    };

    let expected = expected_type_and_name(sema, self_token, &name_like);
    let mut qual_ctx = QualifierCtx::default();
    let analysis = match name_like {
        ast::NameLike::Lifetime(lifetime) => {
            CompletionAnalysis::Lifetime(classify_lifetime(sema, &original_file, lifetime)?)
        }
        ast::NameLike::NameRef(name_ref) => {
            let parent = name_ref.syntax().parent()?;
            let (nameref_ctx, qualifier_ctx) = classify_name_ref(
                sema,
                &original_file,
                name_ref,
                expansion_result.original_offset,
                parent,
            )?;

            if let NameRefContext {
                kind:
                    NameRefKind::Path(PathCompletionCtx { kind: PathKind::Expr { .. }, path, .. }, ..),
                ..
            } = &nameref_ctx
                && is_in_token_of_for_loop(path)
            {
                // for pat $0
                // there is nothing to complete here except `in` keyword
                // don't bother populating the context
                // Ideally this special casing wouldn't be needed, but the parser recovers
                return None;
            }

            qual_ctx = qualifier_ctx;
            CompletionAnalysis::NameRef(nameref_ctx)
        }
        ast::NameLike::Name(name) => {
            let name_ctx = classify_name(sema, &original_file, name)?;
            CompletionAnalysis::Name(name_ctx)
        }
    };
    Some((analysis, expected, qual_ctx))
}

/// Calculate the expected type and name at the cursor position.
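///
/// For example (illustrative):
///
/// ```ignore
/// let foo: u32 = $0; // expected type: `u32`, expected name: `foo`
/// ```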
fn expected_type_and_name<'db>(
    sema: &Semantics<'db, RootDatabase>,
    self_token: &SyntaxToken,
    name_like: &ast::NameLike,
) -> (Option<Type<'db>>, Option<NameOrNameRef>) {
    let token = prev_special_biased_token_at_trivia(self_token.clone());
    let mut node = match token.parent() {
        Some(it) => it,
        None => return (None, None),
    };

    let strip_refs = |mut ty: Type<'db>| match name_like {
        ast::NameLike::NameRef(n) => {
            let p = match n.syntax().parent() {
                Some(it) => it,
                None => return ty,
            };
            let top_syn = match_ast! {
                match p {
                    ast::FieldExpr(e) => e
                        .syntax()
                        .ancestors()
                        .take_while(|it| ast::FieldExpr::can_cast(it.kind()))
                        .last(),
                    ast::PathSegment(e) => e
                        .syntax()
                        .ancestors()
                        .skip(1)
                        .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
                        .find(|it| ast::PathExpr::can_cast(it.kind())),
                    _ => None
                }
            };
            let top_syn = match top_syn {
                Some(it) => it,
                None => return ty,
            };
            let refs_level = top_syn
                .ancestors()
                .skip(1)
                .map_while(Either::<ast::RefExpr, ast::PrefixExpr>::cast)
                .take_while(|it| match it {
                    Either::Left(_) => true,
                    Either::Right(prefix) => prefix.op_kind() == Some(ast::UnaryOp::Deref),
                })
                .fold(0i32, |level, expr| match expr {
                    Either::Left(_) => level + 1,
                    Either::Right(_) => level - 1,
                });
            for _ in 0..refs_level {
                cov_mark::hit!(expected_type_fn_param_ref);
                ty = ty.strip_reference();
            }
            for _ in refs_level..0 {
                cov_mark::hit!(expected_type_fn_param_deref);
                ty = ty.add_reference(hir::Mutability::Shared);
            }
            ty
        }
        _ => ty,
    };

    let (ty, name) = loop {
        break match_ast! {
            match node {
                ast::LetStmt(it) => {
                    cov_mark::hit!(expected_type_let_with_leading_char);
                    cov_mark::hit!(expected_type_let_without_leading_char);
                    let ty = it.pat()
                        .and_then(|pat| sema.type_of_pat(&pat))
                        .or_else(|| it.initializer().and_then(|it| sema.type_of_expr(&it)))
                        .map(TypeInfo::original)
                        .filter(|ty| {
                            // don't infer the let type if the expr is a function,
                            // preventing parentheses from vanishing
                            it.ty().is_some() || !ty.is_fn()
                        });
                    let name = match it.pat() {
                        Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
                        Some(_) | None => None,
                    };

                    (ty, name)
                },
                ast::LetExpr(it) => {
                    cov_mark::hit!(expected_type_if_let_without_leading_char);
                    let ty = it.pat()
                        .and_then(|pat| sema.type_of_pat(&pat))
                        .or_else(|| it.expr().and_then(|it| sema.type_of_expr(&it)))
                        .map(TypeInfo::original);
                    (ty, None)
                },
                ast::BinExpr(it) => {
                    if let Some(ast::BinaryOp::Assignment { op: None }) = it.op_kind() {
                        let ty = it.lhs()
                            .and_then(|lhs| sema.type_of_expr(&lhs))
                            .or_else(|| it.rhs().and_then(|rhs| sema.type_of_expr(&rhs)))
                            .map(TypeInfo::original);
                        (ty, None)
                    } else if let Some(ast::BinaryOp::LogicOp(_)) = it.op_kind() {
                        let ty = sema.type_of_expr(&it.clone().into()).map(TypeInfo::original);
                        (ty, None)
                    } else {
                        (None, None)
                    }
                },
                ast::ArgList(_) => {
                    cov_mark::hit!(expected_type_fn_param);
                    ActiveParameter::at_token(
                        sema,
                        token.clone(),
                    ).map(|ap| {
                        let name = ap.ident().map(NameOrNameRef::Name);
                        (Some(ap.ty), name)
                    })
                    .unwrap_or((None, None))
                },
                ast::RecordExprFieldList(it) => {
                    // wouldn't try {} be nice...
                    (|| {
                        if token.kind() == T![..]
                            || token.prev_token().map(|t| t.kind()) == Some(T![..])
                        {
                            cov_mark::hit!(expected_type_struct_func_update);
                            let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
                            let ty = sema.type_of_expr(&record_expr.into())?;
                            Some((
                                Some(ty.original),
                                None
                            ))
                        } else {
                            cov_mark::hit!(expected_type_struct_field_without_leading_char);
                            cov_mark::hit!(expected_type_struct_field_followed_by_comma);
                            let expr_field = previous_non_trivia_token(token.clone())?.parent().and_then(ast::RecordExprField::cast)?;
                            let (_, _, ty) = sema.resolve_record_field(&expr_field)?;
                            Some((
                                Some(ty),
                                expr_field.field_name().map(NameOrNameRef::NameRef),
                            ))
                        }
                    })().unwrap_or((None, None))
                },
                ast::RecordExprField(it) => {
                    let field_ty = sema.resolve_record_field(&it).map(|(_, _, ty)| ty);
                    let field_name = it.field_name().map(NameOrNameRef::NameRef);
                    if let Some(expr) = it.expr() {
                        cov_mark::hit!(expected_type_struct_field_with_leading_char);
                        let ty = field_ty
                            .or_else(|| sema.type_of_expr(&expr).map(TypeInfo::original));
                        (ty, field_name)
                    } else {
                        (field_ty, field_name)
                    }
                },
                // match foo { $0 }
                // match foo { ..., pat => $0 }
                ast::MatchExpr(it) => {
                    let on_arrow = previous_non_trivia_token(token.clone()).is_some_and(|it| T![=>] == it.kind());

                    let ty = if on_arrow {
                        // match foo { ..., pat => $0 }
                        cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
                        cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
                        sema.type_of_expr(&it.into())
                    } else {
                        // match foo { $0 }
                        cov_mark::hit!(expected_type_match_arm_without_leading_char);
                        it.expr().and_then(|e| sema.type_of_expr(&e))
                    }.map(TypeInfo::original);
                    (ty, None)
                },
                ast::IfExpr(it) => {
                    let ty = if let Some(body) = it.then_branch()
                        && token.text_range().end() > body.syntax().text_range().start()
                    {
                        sema.type_of_expr(&body.into())
                    } else {
                        it.condition().and_then(|e| sema.type_of_expr(&e))
                    }.map(TypeInfo::original);
                    (ty, None)
                },
                ast::IdentPat(it) => {
                    cov_mark::hit!(expected_type_if_let_with_leading_char);
                    cov_mark::hit!(expected_type_match_arm_with_leading_char);
                    let ty = sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
                    (ty, None)
                },
                ast::Fn(it) => {
                    cov_mark::hit!(expected_type_fn_ret_with_leading_char);
                    cov_mark::hit!(expected_type_fn_ret_without_leading_char);
                    let def = sema.to_def(&it);
                    (def.map(|def| def.ret_type(sema.db)), None)
                },
                ast::ReturnExpr(it) => {
                    let fn_ = sema.ancestors_with_macros(it.syntax().clone())
                        .find_map(Either::<ast::Fn, ast::ClosureExpr>::cast);
                    let ty = fn_.and_then(|f| match f {
                        Either::Left(f) => Some(sema.to_def(&f)?.ret_type(sema.db)),
                        Either::Right(f) => {
                            let ty = sema.type_of_expr(&f.into())?.original.as_callable(sema.db)?;
                            Some(ty.return_type())
                        },
                    });
                    (ty, None)
                },
                ast::ClosureExpr(it) => {
                    let ty = sema.type_of_expr(&it.into());
                    ty.and_then(|ty| ty.original.as_callable(sema.db))
                        .map(|c| (Some(c.return_type()), None))
                        .unwrap_or((None, None))
                },
                ast::ParamList(it) => {
                    let closure = it.syntax().parent().and_then(ast::ClosureExpr::cast);
                    let ty = closure
                        .filter(|_| it.syntax().text_range().end() <= self_token.text_range().start())
                        .and_then(|it| sema.type_of_expr(&it.into()));
                    ty.and_then(|ty| ty.original.as_callable(sema.db))
                        .map(|c| (Some(c.return_type()), None))
                        .unwrap_or((None, None))
                },
                ast::Stmt(_) => (None, None),
                ast::Item(_) => (None, None),
                _ => {
                    match node.parent() {
                        Some(n) => {
                            node = n;
                            continue;
                        },
                        None => (None, None),
                    }
                },
            }
        };
    };
    (ty.map(strip_refs), name)
}

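/// Classifies the completion position when the cursor is on a lifetime token, distinguishing
/// lifetime parameters, label definitions, label references, and plain lifetime references.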
fn classify_lifetime(
    sema: &Semantics<'_, RootDatabase>,
    original_file: &SyntaxNode,
    lifetime: ast::Lifetime,
) -> Option<LifetimeContext> {
    let parent = lifetime.syntax().parent()?;
    if parent.kind() == SyntaxKind::ERROR {
        return None;
    }

    let lifetime =
        find_node_at_offset::<ast::Lifetime>(original_file, lifetime.syntax().text_range().start());
    let kind = match_ast! {
        match parent {
            ast::LifetimeParam(_) => LifetimeKind::LifetimeParam,
            ast::BreakExpr(_) => LifetimeKind::LabelRef,
            ast::ContinueExpr(_) => LifetimeKind::LabelRef,
            ast::Label(_) => LifetimeKind::LabelDef,
            _ => {
                let def = lifetime.as_ref().and_then(|lt| sema.scope(lt.syntax())?.generic_def());
                LifetimeKind::Lifetime { in_lifetime_param_bound: ast::TypeBound::can_cast(parent.kind()), def }
            },
        }
    };

    Some(LifetimeContext { kind })
}

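/// Classifies the completion position when the cursor is on a name definition, e.g. the name of
/// a function, struct, module, or binding pattern.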
fn classify_name(
    sema: &Semantics<'_, RootDatabase>,
    original_file: &SyntaxNode,
    name: ast::Name,
) -> Option<NameContext> {
    let parent = name.syntax().parent()?;
    let kind = match_ast! {
        match parent {
            ast::Const(_) => NameKind::Const,
            ast::ConstParam(_) => NameKind::ConstParam,
            ast::Enum(_) => NameKind::Enum,
            ast::Fn(_) => NameKind::Function,
            ast::IdentPat(bind_pat) => {
                let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
                if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
                    pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
                }

                NameKind::IdentPat(pat_ctx)
            },
            ast::MacroDef(_) => NameKind::MacroDef,
            ast::MacroRules(_) => NameKind::MacroRules,
            ast::Module(module) => NameKind::Module(module),
            ast::RecordField(_) => NameKind::RecordField,
            ast::Rename(_) => NameKind::Rename,
            ast::SelfParam(_) => NameKind::SelfParam,
            ast::Static(_) => NameKind::Static,
            ast::Struct(_) => NameKind::Struct,
            ast::Trait(_) => NameKind::Trait,
            ast::TypeAlias(_) => NameKind::TypeAlias,
            ast::TypeParam(_) => NameKind::TypeParam,
            ast::Union(_) => NameKind::Union,
            ast::Variant(_) => NameKind::Variant,
            _ => return None,
        }
    };
    let name = find_node_at_offset(original_file, name.syntax().text_range().start());
    Some(NameContext { name, kind })
}

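/// Classifies the completion position when the cursor is on a name reference, e.g. a path
/// segment, a field or method access, or a record field name.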
fn classify_name_ref<'db>(
    sema: &Semantics<'db, RootDatabase>,
    original_file: &SyntaxNode,
    name_ref: ast::NameRef,
    original_offset: TextSize,
    parent: SyntaxNode,
) -> Option<(NameRefContext<'db>, QualifierCtx)> {
    let nameref = find_node_at_offset(original_file, original_offset);

    let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());

    if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
        let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
            .is_some_and(|it| T![.] == it.kind());

        return find_node_in_file_compensated(
            sema,
            original_file,
            &record_field.parent_record_lit(),
        )
        .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix })
        .map(make_res);
    }
    if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
        let kind = NameRefKind::Pattern(PatternContext {
            param_ctx: None,
            has_type_ascription: false,
            ref_token: None,
            mut_token: None,
            record_pat: find_node_in_file_compensated(
                sema,
                original_file,
                &record_field.parent_record_pat(),
            ),
            ..pattern_context_for(sema, original_file, record_field.parent_record_pat().into())
        });
        return Some(make_res(kind));
    }

    let field_expr_handle = |receiver, node| {
        let receiver = find_opt_node_in_file(original_file, receiver);
        let receiver_is_ambiguous_float_literal = match &receiver {
            Some(ast::Expr::Literal(l)) => matches! {
                l.kind(),
                ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().is_some_and(|it| it.text().ends_with('.'))
            },
            _ => false,
        };

        let receiver_is_part_of_indivisible_expression = match &receiver {
            Some(ast::Expr::IfExpr(_)) => {
                let next_token_kind =
                    next_non_trivia_token(name_ref.syntax().clone()).map(|t| t.kind());
                next_token_kind == Some(SyntaxKind::ELSE_KW)
            }
            _ => false,
        };
        if receiver_is_part_of_indivisible_expression {
            return None;
        }

        let mut receiver_ty = receiver.as_ref().and_then(|it| sema.type_of_expr(it));
        if receiver_is_ambiguous_float_literal {
            // `123.|` is parsed as a float but should actually be an integer.
            always!(receiver_ty.as_ref().is_none_or(|receiver_ty| receiver_ty.original.is_float()));
            receiver_ty =
                Some(TypeInfo { original: hir::BuiltinType::i32().ty(sema.db), adjusted: None });
        }

        let kind = NameRefKind::DotAccess(DotAccess {
            receiver_ty,
            kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
            receiver,
            ctx: DotAccessExprCtx {
                in_block_expr: is_in_block(node),
                in_breakable: is_in_breakable(node).unzip().0,
            },
        });
        Some(make_res(kind))
    };

    let segment = match_ast! {
        match parent {
            ast::PathSegment(segment) => segment,
            ast::FieldExpr(field) => {
                return field_expr_handle(field.expr(), field.syntax());
            },
            ast::ExternCrate(_) => {
                let kind = NameRefKind::ExternCrate;
                return Some(make_res(kind));
            },
            ast::MethodCallExpr(method) => {
                let receiver = find_opt_node_in_file(original_file, method.receiver());
                let has_parens = has_parens(&method);
                if !has_parens && let Some(res) = field_expr_handle(method.receiver(), method.syntax()) {
                    return Some(res)
                }
                let kind = NameRefKind::DotAccess(DotAccess {
                    receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
                    kind: DotAccessKind::Method,
                    receiver,
                    ctx: DotAccessExprCtx { in_block_expr: is_in_block(method.syntax()), in_breakable: is_in_breakable(method.syntax()).unzip().0 }
                });
                return Some(make_res(kind));
            },
            _ => return None,
        }
    };

    let path = segment.parent_path();
    let original_path = find_node_in_file_compensated(sema, original_file, &path);

    let mut path_ctx = PathCompletionCtx {
        has_call_parens: false,
        has_macro_bang: false,
        qualified: Qualified::No,
        parent: None,
        path: path.clone(),
        original_path,
        kind: PathKind::Item { kind: ItemListKind::SourceFile },
        has_type_args: false,
        use_tree_parent: false,
    };

    let func_update_record = |syn: &SyntaxNode| {
        if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
            find_node_in_file_compensated(sema, original_file, &record_expr)
        } else {
            None
        }
    };
    let prev_expr = |node: SyntaxNode| {
        let node = match node.parent().and_then(ast::ExprStmt::cast) {
            Some(stmt) => stmt.syntax().clone(),
            None => node,
        };
        let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;

        match_ast! {
            match prev_sibling {
                ast::ExprStmt(stmt) => stmt.expr().filter(|_| stmt.semicolon_token().is_none()),
                ast::LetStmt(stmt) => stmt.initializer().filter(|_| stmt.semicolon_token().is_none()),
                ast::Expr(expr) => Some(expr),
                _ => None,
            }
        }
    };
    let after_incomplete_let = |node: SyntaxNode| {
        prev_expr(node).and_then(|it| it.syntax().parent()).and_then(ast::LetStmt::cast)
    };
    let before_else_kw = |node: &SyntaxNode| {
        node.parent()
            .and_then(ast::ExprStmt::cast)
            .filter(|stmt| stmt.semicolon_token().is_none())
            .and_then(|stmt| non_trivia_sibling(stmt.syntax().clone().into(), Direction::Next))
            .and_then(NodeOrToken::into_node)
            .filter(|next| next.kind() == SyntaxKind::ERROR)
            .and_then(|next| next.first_token())
            .is_some_and(|token| token.kind() == SyntaxKind::ELSE_KW)
    };
    let is_in_value = |it: &SyntaxNode| {
        let Some(node) = it.parent() else { return false };
        let kind = node.kind();
        ast::LetStmt::can_cast(kind)
            || ast::ArgList::can_cast(kind)
            || ast::ArrayExpr::can_cast(kind)
            || ast::ParenExpr::can_cast(kind)
            || ast::BreakExpr::can_cast(kind)
            || ast::ReturnExpr::can_cast(kind)
            || ast::PrefixExpr::can_cast(kind)
            || ast::FormatArgsArg::can_cast(kind)
            || ast::RecordExprField::can_cast(kind)
            || ast::BinExpr::cast(node.clone())
                .and_then(|expr| expr.rhs())
                .is_some_and(|expr| expr.syntax() == it)
            || ast::IndexExpr::cast(node)
                .and_then(|expr| expr.index())
                .is_some_and(|expr| expr.syntax() == it)
    };

    // We do not want to generate path completions when we are sandwiched between an item decl signature and its body,
    // e.g. `trait Foo $0 {}`.
    // In these cases parser recovery usually kicks in for our inserted identifier, causing it
    // to be parsed as either an ExprStmt or an ItemRecovery, depending on whether it is in a block
    // expression or an item list.
    // The following code checks whether the body is missing; if it is, either our insertion cut the
    // body off from the item or the body was missing in the first place.
    let inbetween_body_and_decl_check = |node: SyntaxNode| {
        if let Some(NodeOrToken::Node(n)) =
            syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev)
            && let Some(item) = ast::Item::cast(n)
        {
            let is_inbetween = match &item {
                ast::Item::Const(it) => it.body().is_none() && it.semicolon_token().is_none(),
                ast::Item::Enum(it) => it.variant_list().is_none(),
                ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
                ast::Item::Fn(it) => it.body().is_none() && it.semicolon_token().is_none(),
                ast::Item::Impl(it) => it.assoc_item_list().is_none(),
                ast::Item::Module(it) => it.item_list().is_none() && it.semicolon_token().is_none(),
                ast::Item::Static(it) => it.body().is_none(),
                ast::Item::Struct(it) => {
                    it.field_list().is_none() && it.semicolon_token().is_none()
                }
                ast::Item::Trait(it) => it.assoc_item_list().is_none(),
                ast::Item::TypeAlias(it) => it.ty().is_none() && it.semicolon_token().is_none(),
                ast::Item::Union(it) => it.record_field_list().is_none(),
                _ => false,
            };
            if is_inbetween {
                return Some(item);
            }
        }
        None
    };

    let generic_arg_location = |arg: ast::GenericArg| {
        let mut override_location = None;
        let location = find_opt_node_in_file_compensated(
            sema,
            original_file,
            arg.syntax().parent().and_then(ast::GenericArgList::cast),
        )
        .map(|args| {
            let mut in_trait = None;
            let param = (|| {
                let parent = args.syntax().parent()?;
                let params = match_ast! {
                    match parent {
                        ast::PathSegment(segment) => {
                            match sema.resolve_path(&segment.parent_path().top_path())? {
                                hir::PathResolution::Def(def) => match def {
                                    hir::ModuleDef::Function(func) => {
                                        sema.source(func)?.value.generic_param_list()
                                    }
                                    hir::ModuleDef::Adt(adt) => {
                                        sema.source(adt)?.value.generic_param_list()
                                    }
                                    hir::ModuleDef::Variant(variant) => {
                                        sema.source(variant.parent_enum(sema.db))?.value.generic_param_list()
                                    }
                                    hir::ModuleDef::Trait(trait_) => {
                                        if let ast::GenericArg::AssocTypeArg(arg) = &arg {
                                            let arg_name = arg.name_ref()?;
                                            let arg_name = arg_name.text();
                                            for item in trait_.items_with_supertraits(sema.db) {
                                                match item {
                                                    hir::AssocItem::TypeAlias(assoc_ty) => {
                                                        if assoc_ty.name(sema.db).as_str() == arg_name {
                                                            override_location = Some(TypeLocation::AssocTypeEq);
                                                            return None;
                                                        }
                                                    },
                                                    hir::AssocItem::Const(const_) => {
                                                        if const_.name(sema.db)?.as_str() == arg_name {
                                                            override_location = Some(TypeLocation::AssocConstEq);
                                                            return None;
                                                        }
                                                    },
                                                    _ => (),
                                                }
                                            }
                                            return None;
                                        } else {
                                            in_trait = Some(trait_);
                                            sema.source(trait_)?.value.generic_param_list()
                                        }
                                    }
                                    hir::ModuleDef::TypeAlias(ty_) => {
                                        sema.source(ty_)?.value.generic_param_list()
                                    }
                                    _ => None,
                                },
                                _ => None,
                            }
                        },
                        ast::MethodCallExpr(call) => {
                            let func = sema.resolve_method_call(&call)?;
                            sema.source(func)?.value.generic_param_list()
                        },
                        ast::AssocTypeArg(arg) => {
                            let trait_ = ast::PathSegment::cast(arg.syntax().parent()?.parent()?)?;
                            match sema.resolve_path(&trait_.parent_path().top_path())? {
                                hir::PathResolution::Def(hir::ModuleDef::Trait(trait_)) => {
                                    let arg_name = arg.name_ref()?;
                                    let arg_name = arg_name.text();
                                    let trait_items = trait_.items_with_supertraits(sema.db);
                                    let assoc_ty = trait_items.iter().find_map(|item| match item {
                                        hir::AssocItem::TypeAlias(assoc_ty) => {
                                            (assoc_ty.name(sema.db).as_str() == arg_name)
                                                .then_some(assoc_ty)
                                        },
                                        _ => None,
                                    })?;
                                    sema.source(*assoc_ty)?.value.generic_param_list()
                                }
                                _ => None,
                            }
                        },
                        _ => None,
                    }
                }?;
                // Determine the index of the argument in the `GenericArgList` and match it with
                // the corresponding parameter in the `GenericParamList`. Since lifetime parameters
                // are often omitted, ignore them for the purposes of matching the argument with
                // its parameter unless a lifetime argument is provided explicitly. That is, for
                // `struct S<'a, 'b, T>`, match `S::<$0>` to `T` and `S::<'a, $0, _>` to `'b`.
                // FIXME: This operates on the syntax tree and will produce incorrect results when
                // generic parameters are disabled by `#[cfg]` directives. It should operate on the
                // HIR, but the functionality necessary to do so is not exposed at the moment.
                let mut explicit_lifetime_arg = false;
                let arg_idx = arg
                    .syntax()
                    .siblings(Direction::Prev)
                    // Skip the node itself
                    .skip(1)
                    .map(|arg| if ast::LifetimeArg::can_cast(arg.kind()) { explicit_lifetime_arg = true })
                    .count();
                let param_idx = if explicit_lifetime_arg {
                    arg_idx
                } else {
1191                    // Lifetime parameters always precede type and const parameters,
1192                    // so offset the argument index by the total number of lifetime params
1193                    arg_idx + params.lifetime_params().count()
1194                };
1195                params.generic_params().nth(param_idx)
1196            })();
1197            (args, in_trait, param)
1198        });
1199        let (arg_list, of_trait, corresponding_param) = match location {
1200            Some((arg_list, of_trait, param)) => (Some(arg_list), of_trait, param),
1201            _ => (None, None, None),
1202        };
1203        override_location.unwrap_or(TypeLocation::GenericArg {
1204            args: arg_list,
1205            of_trait,
1206            corresponding_param,
1207        })
1208    };
1209
1210    let type_location = |node: &SyntaxNode| {
1211        let parent = node.parent()?;
1212        let res = match_ast! {
1213            match parent {
1214                ast::Const(it) => {
1215                    let name = find_opt_node_in_file(original_file, it.name())?;
1216                    let original = ast::Const::cast(name.syntax().parent()?)?;
1217                    TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
1218                },
1219                ast::RetType(it) => {
1220                    it.thin_arrow_token()?;
1221                    let parent = match ast::Fn::cast(parent.parent()?) {
1222                        Some(it) => it.param_list(),
1223                        None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
1224                    };
1225
1226                    let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?;
1227                    TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! {
1228                        match parent {
1229                            ast::ClosureExpr(it) => {
1230                                it.body()
1231                            },
1232                            ast::Fn(it) => {
1233                                it.body().map(ast::Expr::BlockExpr)
1234                            },
1235                            _ => return None,
1236                        }
1237                    }))
1238                },
1239                ast::Param(it) => {
1240                    it.colon_token()?;
1241                    TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat())))
1242                },
1243                ast::LetStmt(it) => {
1244                    it.colon_token()?;
1245                    TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat())))
1246                },
1247                ast::Impl(it) => {
1248                    match it.trait_() {
1249                        Some(t) if t.syntax() == node => TypeLocation::ImplTrait,
1250                        _ => match it.self_ty() {
1251                            Some(t) if t.syntax() == node => TypeLocation::ImplTarget,
1252                            _ => return None,
1253                        },
1254                    }
1255                },
1256                ast::TypeBound(_) => TypeLocation::TypeBound,
1257                // is this case needed?
1258                ast::TypeBoundList(_) => TypeLocation::TypeBound,
1259                ast::GenericArg(it) => generic_arg_location(it),
1260                // is this case needed?
1261                ast::GenericArgList(it) => {
1262                    let args = find_opt_node_in_file_compensated(sema, original_file, Some(it));
1263                    TypeLocation::GenericArg { args, of_trait: None, corresponding_param: None }
1264                },
1265                ast::TupleField(_) => TypeLocation::TupleField,
1266                _ => return None,
1267            }
1268        };
1269        Some(res)
1270    };
1271
1272    let make_path_kind_expr = |expr: ast::Expr| {
1273        let it = expr.syntax();
1274        let in_block_expr = is_in_block(it);
1275        let (in_loop_body, innermost_breakable) = is_in_breakable(it).unzip();
1276        let after_if_expr = is_after_if_expr(it.clone());
1277        let ref_expr_parent =
1278            path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
1279        let after_amp = non_trivia_sibling(it.clone().into(), Direction::Prev)
1280            .map(|it| it.kind() == SyntaxKind::AMP)
1281            .unwrap_or(false);
1282        let (innermost_ret_ty, self_param) = {
1283            let find_ret_ty = |it: SyntaxNode| {
1284                if let Some(item) = ast::Item::cast(it.clone()) {
1285                    match item {
1286                        ast::Item::Fn(f) => Some(sema.to_def(&f).map(|it| it.ret_type(sema.db))),
1287                        ast::Item::MacroCall(_) => None,
1288                        _ => Some(None),
1289                    }
1290                } else {
1291                    let expr = ast::Expr::cast(it)?;
1292                    let callable = match expr {
1293                        // FIXME
1294                        // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
1295                        ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
1296                        _ => return None,
1297                    };
1298                    Some(
1299                        callable
1300                            .and_then(|c| c.adjusted().as_callable(sema.db))
1301                            .map(|it| it.return_type()),
1302                    )
1303                }
1304            };
1305            let find_fn_self_param = |it| match it {
1306                ast::Item::Fn(fn_) => Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db))),
1307                ast::Item::MacroCall(_) => None,
1308                _ => Some(None),
1309            };
1310
1311            match find_node_in_file_compensated(sema, original_file, &expr) {
1312                Some(it) => {
1313                    // buggy
1314                    let innermost_ret_ty = sema
1315                        .ancestors_with_macros(it.syntax().clone())
1316                        .find_map(find_ret_ty)
1317                        .flatten();
1318
1319                    let self_param = sema
1320                        .ancestors_with_macros(it.syntax().clone())
1321                        .filter_map(ast::Item::cast)
1322                        .find_map(find_fn_self_param)
1323                        .flatten();
1324                    (innermost_ret_ty, self_param)
1325                }
1326                None => (None, None),
1327            }
1328        };
1329        let innermost_breakable_ty = innermost_breakable
1330            .and_then(ast::Expr::cast)
1331            .and_then(|expr| find_node_in_file_compensated(sema, original_file, &expr))
1332            .and_then(|expr| sema.type_of_expr(&expr))
1333            .map(|ty| if ty.original.is_never() { ty.adjusted() } else { ty.original() });
1334        let is_func_update = func_update_record(it);
1335        let in_condition = is_in_condition(&expr);
1336        let after_incomplete_let = after_incomplete_let(it.clone()).is_some();
1337        let incomplete_expr_stmt =
1338            it.parent().and_then(ast::ExprStmt::cast).map(|it| it.semicolon_token().is_none());
1339        let before_else_kw = before_else_kw(it);
1340        let incomplete_let = left_ancestors(it.parent())
1341            .find_map(ast::LetStmt::cast)
1342            .is_some_and(|it| it.semicolon_token().is_none())
1343            || after_incomplete_let && incomplete_expr_stmt.unwrap_or(true) && !before_else_kw;
1344        let in_value = is_in_value(it);
1345        let impl_ = fetch_immediate_impl_or_trait(sema, original_file, expr.syntax())
1346            .and_then(Either::left);
1347
1348        let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) {
1349            Some(arm) => arm
1350                .fat_arrow_token()
1351                .is_none_or(|arrow| it.text_range().start() < arrow.text_range().start()),
1352            None => false,
1353        };
1354
1355        PathKind::Expr {
1356            expr_ctx: PathExprCtx {
1357                in_block_expr,
1358                in_breakable: in_loop_body,
1359                after_if_expr,
1360                before_else_kw,
1361                in_condition,
1362                ref_expr_parent,
1363                after_amp,
1364                is_func_update,
1365                innermost_ret_ty,
1366                innermost_breakable_ty,
1367                self_param,
1368                in_value,
1369                incomplete_let,
1370                after_incomplete_let,
1371                impl_,
1372                in_match_guard,
1373            },
1374        }
1375    };
1376    let make_path_kind_type = |ty: ast::Type| {
1377        let location = type_location(ty.syntax());
1378        PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
1379    };
1380
1381    let kind_item = |it: &SyntaxNode| {
1382        let parent = it.parent()?;
1383        let kind = match_ast! {
1384            match parent {
1385                ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
1386                ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
1387                    Some(it) => match_ast! {
1388                        match it {
1389                            ast::Trait(_) => ItemListKind::Trait,
1390                            ast::Impl(it) => if it.trait_().is_some() {
1391                                ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
1392                            } else {
1393                                ItemListKind::Impl
1394                            },
1395                            _ => return None
1396                        }
1397                    },
1398                    None => return None,
1399                } },
1400                ast::ExternItemList(it) => {
1401                    let exn_blk = it.syntax().parent().and_then(ast::ExternBlock::cast);
1402                    PathKind::Item {
1403                        kind: ItemListKind::ExternBlock {
1404                            is_unsafe: exn_blk.and_then(|it| it.unsafe_token()).is_some(),
1405                        }
1406                    }
1407                },
1408                ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
1409                _ => return None,
1410            }
1411        };
1412        Some(kind)
1413    };
1414
1415    let mut kind_macro_call = |it: ast::MacroCall| {
1416        path_ctx.has_macro_bang = it.excl_token().is_some();
1417        let parent = it.syntax().parent()?;
1418        if let Some(kind) = kind_item(it.syntax()) {
1419            return Some(kind);
1420        }
1421        let kind = match_ast! {
1422            match parent {
1423                ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
1424                ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
1425                ast::MacroType(ty) => make_path_kind_type(ty.into()),
1426                _ => return None,
1427            }
1428        };
1429        Some(kind)
1430    };
1431    let make_path_kind_attr = |meta: ast::Meta| {
1432        let attr = meta.parent_attr()?;
1433        let kind = attr.kind();
1434        let attached = attr.syntax().parent()?;
1435        let is_trailing_outer_attr = kind != AttrKind::Inner
1436            && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none();
1437        let annotated_item_kind = if is_trailing_outer_attr { None } else { Some(attached.kind()) };
1438        let derive_helpers = annotated_item_kind
1439            .filter(|kind| {
1440                matches!(
1441                    kind,
1442                    SyntaxKind::STRUCT
1443                        | SyntaxKind::ENUM
1444                        | SyntaxKind::UNION
1445                        | SyntaxKind::VARIANT
1446                        | SyntaxKind::TUPLE_FIELD
1447                        | SyntaxKind::RECORD_FIELD
1448                )
1449            })
1450            .and_then(|_| nameref.as_ref()?.syntax().ancestors().find_map(ast::Adt::cast))
1451            .and_then(|adt| sema.derive_helpers_in_scope(&adt))
1452            .unwrap_or_default();
1453        Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind, derive_helpers } })
1454    };
1455
1456    // Infer the path kind
1457    let parent = path.syntax().parent()?;
1458    let kind = 'find_kind: {
1459        if parent.kind() == SyntaxKind::ERROR {
1460            if let Some(kind) = inbetween_body_and_decl_check(parent.clone()) {
1461                return Some(make_res(NameRefKind::Keyword(kind)));
1462            }
1463
1464            break 'find_kind kind_item(&parent)?;
1465        }
1466        match_ast! {
1467            match parent {
1468                ast::PathType(it) => make_path_kind_type(it.into()),
1469                ast::PathExpr(it) => {
1470                    if let Some(p) = it.syntax().parent() {
1471                        let p_kind = p.kind();
1472                        // The syntax node of interest, for which we want to check whether
1473                        // it is sandwiched between an item decl signature and its body.
1474                        let probe = if ast::ExprStmt::can_cast(p_kind) {
1475                            Some(p)
1476                        } else if ast::StmtList::can_cast(p_kind) {
1477                            Some(it.syntax().clone())
1478                        } else {
1479                            None
1480                        };
1481                        if let Some(kind) = probe.and_then(inbetween_body_and_decl_check) {
1482                            return Some(make_res(NameRefKind::Keyword(kind)));
1483                        }
1484                    }
1485
1486                    path_ctx.has_call_parens = it.syntax().parent().is_some_and(|it| ast::CallExpr::cast(it).is_some_and(|it| has_parens(&it)));
1487
1488                    make_path_kind_expr(it.into())
1489                },
1490                ast::TupleStructPat(it) => {
1491                    path_ctx.has_call_parens = true;
1492                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
1493                },
1494                ast::RecordPat(it) => {
1495                    path_ctx.has_call_parens = true;
1496                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
1497                },
1498                ast::PathPat(it) => {
1499                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
1500                },
1501                ast::MacroCall(it) => {
1502                    kind_macro_call(it)?
1503                },
1504                ast::Meta(meta) => make_path_kind_attr(meta)?,
1505                ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
1506                ast::UseTree(_) => PathKind::Use,
1507                // completing inside a qualifier
1508                ast::Path(parent) => {
1509                    path_ctx.parent = Some(parent.clone());
1510                    let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
1511                    match_ast! {
1512                        match parent {
1513                            ast::PathType(it) => make_path_kind_type(it.into()),
1514                            ast::PathExpr(it) => {
1515                                path_ctx.has_call_parens = it.syntax().parent().is_some_and(|it| ast::CallExpr::cast(it).is_some_and(|it| has_parens(&it)));
1516
1517                                make_path_kind_expr(it.into())
1518                            },
1519                            ast::TupleStructPat(it) => {
1520                                path_ctx.has_call_parens = true;
1521                                PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
1522                            },
1523                            ast::RecordPat(it) => {
1524                                path_ctx.has_call_parens = true;
1525                                PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
1526                            },
1527                            ast::PathPat(it) => {
1528                                PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
1529                            },
1530                            ast::MacroCall(it) => {
1531                                kind_macro_call(it)?
1532                            },
1533                            ast::Meta(meta) => make_path_kind_attr(meta)?,
1534                            ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
1535                            ast::UseTree(_) => PathKind::Use,
1536                            ast::RecordExpr(it) => make_path_kind_expr(it.into()),
1537                            _ => return None,
1538                        }
1539                    }
1540                },
1541                ast::RecordExpr(it) => {
1542                    // A record expression in this position is usually a result of parsing recovery, so check whether we are between an item's declaration and its body
1543                    if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
1544                        return Some(make_res(NameRefKind::Keyword(kind)));
1545                    }
1546                    make_path_kind_expr(it.into())
1547                },
1548                _ => return None,
1549            }
1550        }
1551    };
1552
1553    path_ctx.kind = kind;
1554    path_ctx.has_type_args = segment.generic_arg_list().is_some();
1555
1556    // calculate the qualifier context
1557    if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
1558        path_ctx.use_tree_parent = use_tree_parent;
1559        if !use_tree_parent && segment.coloncolon_token().is_some() {
1560            path_ctx.qualified = Qualified::Absolute;
1561        } else {
1562            let qualifier = qualifier
1563                .segment()
1564                .and_then(|it| find_node_in_file(original_file, &it))
1565                .map(|it| it.parent_path());
1566            if let Some(qualifier) = qualifier {
1567                let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
1568                    Some(ast::PathSegmentKind::Type { type_ref: Some(type_ref), trait_ref })
1569                        if qualifier.qualifier().is_none() =>
1570                    {
1571                        Some((type_ref, trait_ref))
1572                    }
1573                    _ => None,
1574                };
1575
1576                path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor {
1577                    let ty = match ty {
1578                        ast::Type::InferType(_) => None,
1579                        ty => sema.resolve_type(&ty),
1580                    };
1581                    let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?));
1582                    Qualified::TypeAnchor { ty, trait_ }
1583                } else {
1584                    let res = sema.resolve_path(&qualifier);
1585
1586                    // To understand how and why `super_chain_len` is calculated the way it
1587                    // is, check the documentation at its definition.
1588                    let mut segment_count = 0;
1589                    let super_count = iter::successors(Some(qualifier.clone()), |p| p.qualifier())
1590                        .take_while(|p| {
1591                            p.segment()
1592                                .and_then(|s| {
1593                                    segment_count += 1;
1594                                    s.super_token()
1595                                })
1596                                .is_some()
1597                        })
1598                        .count();
1599
1600                    let super_chain_len =
1601                        if segment_count > super_count { None } else { Some(super_count) };
1602
1603                    Qualified::With { path: qualifier, resolution: res, super_chain_len }
1604                }
1605            };
1606        }
1607    } else if let Some(segment) = path.segment()
1608        && segment.coloncolon_token().is_some()
1609    {
1610        path_ctx.qualified = Qualified::Absolute;
1611    }
1612
1613    let mut qualifier_ctx = QualifierCtx::default();
1614    if path_ctx.is_trivial_path() {
1615        // fetch the full expression that may have qualifiers attached to it
1616        let top_node = match path_ctx.kind {
1617            PathKind::Expr { expr_ctx: PathExprCtx { in_block_expr: true, .. } } => {
1618                parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| {
1619                    let parent = p.parent()?;
1620                    if ast::StmtList::can_cast(parent.kind()) {
1621                        Some(p)
1622                    } else if ast::ExprStmt::can_cast(parent.kind()) {
1623                        Some(parent)
1624                    } else {
1625                        None
1626                    }
1627                })
1628            }
1629            PathKind::Item { .. } => parent.ancestors().find(|it| it.kind() == SyntaxKind::ERROR),
1630            _ => None,
1631        };
1632        if let Some(top) = top_node {
1633            if let Some(NodeOrToken::Node(error_node)) =
1634                syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev)
1635                && error_node.kind() == SyntaxKind::ERROR
1636            {
1637                for token in error_node.children_with_tokens().filter_map(NodeOrToken::into_token) {
1638                    match token.kind() {
1639                        SyntaxKind::UNSAFE_KW => qualifier_ctx.unsafe_tok = Some(token),
1640                        SyntaxKind::ASYNC_KW => qualifier_ctx.async_tok = Some(token),
1641                        SyntaxKind::SAFE_KW => qualifier_ctx.safe_tok = Some(token),
1642                        _ => {}
1643                    }
1644                }
1645                qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast);
1646                qualifier_ctx.abi_node = error_node.children().find_map(ast::Abi::cast);
1647            }
1648
1649            if let PathKind::Item { .. } = path_ctx.kind
1650                && qualifier_ctx.none()
1651                && let Some(t) = top.first_token()
1652                && let Some(prev) =
1653                    t.prev_token().and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
1654                && ![T![;], T!['}'], T!['{'], T![']']].contains(&prev.kind())
1655            {
1656                // This was inferred to be an item position path, but it seems
1657                // to be part of some other broken node which leaked into an item
1658                // list
1659                return None;
1660            }
1661        }
1662    }
1663    Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
1664}
1665
1666/// When writing in the middle of some code the following situation commonly occurs (`|` denotes the cursor):
1667/// ```ignore
1668/// value.method|
1669/// (1, 2, 3)
1670/// ```
1671/// Here, we want to complete the method parentheses & arguments (if the corresponding settings are on),
1672/// but the code is parsed as a single method call whose argument list is the `(1, 2, 3)` on the next line.
1673/// Therefore we use a heuristic: if the parentheses are on the next line, consider them non-existent.
1674fn has_parens(node: &dyn HasArgList) -> bool {
1675    let Some(arg_list) = node.arg_list() else { return false };
1676    if arg_list.l_paren_token().is_none() {
1677        return false;
1678    }
1679    let prev_siblings = iter::successors(arg_list.syntax().prev_sibling_or_token(), |it| {
1680        it.prev_sibling_or_token()
1681    });
1682    prev_siblings
1683        .take_while(|syntax| syntax.kind().is_trivia())
1684        .filter_map(|syntax| {
1685            syntax.into_token().filter(|token| token.kind() == SyntaxKind::WHITESPACE)
1686        })
1687        .all(|whitespace| !whitespace.text().contains('\n'))
1688}
1689
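/// Computes the [`PatternContext`] for the pattern under the cursor: its refutability, whether it
/// carries a type ascription, the parameter it belongs to (if any) and, for `match` arms, the
/// enum variants not yet covered by the surrounding arms.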
1690fn pattern_context_for(
1691    sema: &Semantics<'_, RootDatabase>,
1692    original_file: &SyntaxNode,
1693    pat: ast::Pat,
1694) -> PatternContext {
1695    let mut param_ctx = None;
1696
1697    let mut missing_variants = vec![];
1698    let is_pat_like = |kind| {
1699        ast::Pat::can_cast(kind)
1700            || ast::RecordPatField::can_cast(kind)
1701            || ast::RecordPatFieldList::can_cast(kind)
1702    };
1703
1704    let (refutability, has_type_ascription) = pat
1705        .syntax()
1706        .ancestors()
1707        .find(|it| !is_pat_like(it.kind()))
1708        .map_or((PatternRefutability::Irrefutable, false), |node| {
1709            let refutability = match_ast! {
1710                match node {
1711                    ast::LetStmt(let_) => return (PatternRefutability::Refutable, let_.ty().is_some()),
1712                    ast::Param(param) => {
1713                        let has_type_ascription = param.ty().is_some();
1714                        param_ctx = (|| {
1715                            let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
1716                            let param_list = find_node_in_file_compensated(sema, original_file, &fake_param_list)?;
1717                            let param_list_owner = param_list.syntax().parent()?;
1718                            let kind = match_ast! {
1719                                match param_list_owner {
1720                                    ast::ClosureExpr(closure) => ParamKind::Closure(closure),
1721                                    ast::Fn(fn_) => ParamKind::Function(fn_),
1722                                    _ => return None,
1723                                }
1724                            };
1725                            Some(ParamContext {
1726                                param_list, param, kind
1727                            })
1728                        })();
1729                        return (PatternRefutability::Irrefutable, has_type_ascription)
1730                    },
1731                    ast::MatchArm(match_arm) => {
1732                       let missing_variants_opt = match_arm
1733                            .syntax()
1734                            .parent()
1735                            .and_then(ast::MatchArmList::cast)
1736                            .and_then(|match_arm_list| {
1737                                match_arm_list
1738                                .syntax()
1739                                .parent()
1740                                .and_then(ast::MatchExpr::cast)
1741                                .and_then(|match_expr| {
1742                                    let expr_opt = find_opt_node_in_file(original_file, match_expr.expr());
1743
1744                                    expr_opt.and_then(|expr| {
1745                                        sema.type_of_expr(&expr)?
1746                                        .adjusted()
1747                                        .autoderef(sema.db)
1748                                        .find_map(|ty| match ty.as_adt() {
1749                                            Some(hir::Adt::Enum(e)) => Some(e),
1750                                            _ => None,
1751                                        }).map(|enum_| enum_.variants(sema.db))
1752                                    })
1753                                }).map(|variants| variants.iter().filter_map(|variant| {
1754                                        let variant_name = variant.name(sema.db);
1755
1756                                        let variant_already_present = match_arm_list.arms().any(|arm| {
1757                                            arm.pat().and_then(|pat| {
1758                                                let pat_already_present = pat.syntax().to_string().contains(variant_name.as_str());
1759                                                pat_already_present.then_some(pat_already_present)
1760                                            }).is_some()
1761                                        });
1762
1763                                        (!variant_already_present).then_some(*variant)
1764                                    }).collect::<Vec<Variant>>())
1765                        });
1766
1767                        if let Some(missing_variants_) = missing_variants_opt {
1768                            missing_variants = missing_variants_;
1769                        };
1770
1771                        PatternRefutability::Refutable
1772                    },
1773                    ast::LetExpr(_) => PatternRefutability::Refutable,
1774                    ast::ForExpr(_) => PatternRefutability::Irrefutable,
1775                    _ => PatternRefutability::Irrefutable,
1776                }
1777            };
1778            (refutability, false)
1779        });
1780    let (ref_token, mut_token) = match &pat {
1781        ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()),
1782        _ => (None, None),
1783    };
1784
1785    // Only suggest name in let-stmt or fn param
1786    let should_suggest_name = matches!(
1787            &pat,
1788            ast::Pat::IdentPat(it)
1789                if it.syntax()
1790                .parent().is_some_and(|node| {
1791                    let kind = node.kind();
1792                    ast::LetStmt::can_cast(kind) || ast::Param::can_cast(kind)
1793                })
1794    );
1795
1796    PatternContext {
1797        refutability,
1798        param_ctx,
1799        has_type_ascription,
1800        should_suggest_name,
1801        after_if_expr: is_after_if_expr(pat.syntax().clone()),
1802        parent_pat: pat.syntax().parent().and_then(ast::Pat::cast),
1803        mut_token,
1804        ref_token,
1805        record_pat: None,
1806        impl_or_trait: fetch_immediate_impl_or_trait(sema, original_file, pat.syntax()),
1807        missing_variants,
1808    }
1809}
1810
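/// Returns the `impl` or `trait` in the original file that immediately encloses `node`, looking
/// through at most one intervening associated item (`const`, `fn` or type alias).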
1811fn fetch_immediate_impl_or_trait(
1812    sema: &Semantics<'_, RootDatabase>,
1813    original_file: &SyntaxNode,
1814    node: &SyntaxNode,
1815) -> Option<Either<ast::Impl, ast::Trait>> {
1816    let mut ancestors = ancestors_in_file_compensated(sema, original_file, node)?
1817        .filter_map(ast::Item::cast)
1818        .filter(|it| !matches!(it, ast::Item::MacroCall(_)));
1819
1820    match ancestors.next()? {
1821        ast::Item::Const(_) | ast::Item::Fn(_) | ast::Item::TypeAlias(_) => (),
1822        ast::Item::Impl(it) => return Some(Either::Left(it)),
1823        ast::Item::Trait(it) => return Some(Either::Right(it)),
1824        _ => return None,
1825    }
1826    match ancestors.next()? {
1827        ast::Item::Impl(it) => Some(Either::Left(it)),
1828        ast::Item::Trait(it) => Some(Either::Right(it)),
1829        _ => None,
1830    }
1831}
1832
1833/// Attempts to find `node` inside `syntax` via `node`'s text range.
1834/// If the fake identifier has been inserted after or inside this node, use the `_compensated` version instead.
1835fn find_opt_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: Option<N>) -> Option<N> {
1836    find_node_in_file(syntax, &node?)
1837}
1838
1839/// Attempts to find `node` inside `syntax` via `node`'s text range.
1840/// If the fake identifier has been inserted after or inside this node, use the `_compensated` version instead.
1841fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
1842    let syntax_range = syntax.text_range();
1843    let range = node.syntax().text_range();
1844    let intersection = range.intersect(syntax_range)?;
1845    syntax.covering_element(intersection).ancestors().find_map(N::cast)
1846}
1847
1848/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
1849/// for the offset introduced by the fake ident.
1850/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
1851fn find_node_in_file_compensated<N: AstNode>(
1852    sema: &Semantics<'_, RootDatabase>,
1853    in_file: &SyntaxNode,
1854    node: &N,
1855) -> Option<N> {
1856    ancestors_in_file_compensated(sema, in_file, node.syntax())?.find_map(N::cast)
1857}
1858
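/// Maps `node` back into `in_file` via its text range, compensating for the inserted completion
/// marker, and returns the macro-aware ancestors of the covering node.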
1859fn ancestors_in_file_compensated<'sema>(
1860    sema: &'sema Semantics<'_, RootDatabase>,
1861    in_file: &SyntaxNode,
1862    node: &SyntaxNode,
1863) -> Option<impl Iterator<Item = SyntaxNode> + 'sema> {
1864    let syntax_range = in_file.text_range();
1865    let range = node.text_range();
1866    let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
1867    if end < range.start() {
1868        return None;
1869    }
1870    let range = TextRange::new(range.start(), end);
1871    // our inserted ident could cause `range` to go outside of the original syntax, so cap it
1872    let intersection = range.intersect(syntax_range)?;
1873    let node = match in_file.covering_element(intersection) {
1874        NodeOrToken::Node(node) => node,
1875        NodeOrToken::Token(tok) => tok.parent()?,
1876    };
1877    Some(sema.ancestors_with_macros(node))
1878}
1879
1880/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
1881/// for the offset introduced by the fake ident.
1882/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
1883fn find_opt_node_in_file_compensated<N: AstNode>(
1884    sema: &Semantics<'_, RootDatabase>,
1885    syntax: &SyntaxNode,
1886    node: Option<N>,
1887) -> Option<N> {
1888    find_node_in_file_compensated(sema, syntax, &node?)
1889}
1890
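/// Returns the qualifier of `path` or, for a path inside a use-tree list, the path of the parent
/// use tree. The boolean is `true` in the use-tree case.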
1891fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
1892    if let Some(qual) = path.qualifier() {
1893        return Some((qual, false));
1894    }
1895    let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
1896    let use_tree = use_tree_list.syntax().parent().and_then(ast::UseTree::cast)?;
1897    Some((use_tree.path()?, true))
1898}
1899
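/// Returns the ancestors of `node` that end at the same offset as `node`, that is, the ancestors
/// of which `node` is the rightmost descendant.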
1900fn left_ancestors(node: Option<SyntaxNode>) -> impl Iterator<Item = SyntaxNode> {
1901    node.into_iter().flat_map(|node| {
1902        let end = node.text_range().end();
1903        node.ancestors().take_while(move |it| it.text_range().end() == end)
1904    })
1905}
1906
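/// Checks whether `path` is written where the `in` token of a `for` loop is expected, i.e. right
/// after the loop pattern of a `for` expression that has no `in` token yet.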
1907fn is_in_token_of_for_loop(path: &ast::Path) -> bool {
1908    // oh my ...
1909    (|| {
1910        let expr = path.syntax().parent().and_then(ast::PathExpr::cast)?;
1911        let for_expr = expr.syntax().parent().and_then(ast::ForExpr::cast)?;
1912        if for_expr.in_token().is_some() {
1913            return Some(false);
1914        }
1915        let pat = for_expr.pat()?;
1916        let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?;
1917        Some(match next_sibl {
1918            syntax::NodeOrToken::Node(n) => {
1919                n.text_range().start() == path.syntax().text_range().start()
1920            }
1921            syntax::NodeOrToken::Token(t) => {
1922                t.text_range().start() == path.syntax().text_range().start()
1923            }
1924        })
1925    })()
1926    .unwrap_or(false)
1927}
1928
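/// Returns the kind and syntax node of the innermost breakable construct (`for`, `while`, `loop`
/// or labelled block) whose body contains `node`, without looking past function or closure
/// boundaries.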
1929fn is_in_breakable(node: &SyntaxNode) -> Option<(BreakableKind, SyntaxNode)> {
1930    node.ancestors()
1931        .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR)
1932        .find_map(|it| {
1933            let (breakable, loop_body) = match_ast! {
1934                match it {
1935                    ast::ForExpr(it) => (BreakableKind::For, it.loop_body()?),
1936                    ast::WhileExpr(it) => (BreakableKind::While, it.loop_body()?),
1937                    ast::LoopExpr(it) => (BreakableKind::Loop, it.loop_body()?),
1938                    ast::BlockExpr(it) => return it.label().map(|_| (BreakableKind::Block, it.syntax().clone())),
1939                    _ => return None,
1940                }
1941            };
1942            loop_body.syntax().text_range().contains_range(node.text_range())
1943                .then_some((breakable, it))
1944        })
1945}
1946
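/// Whether `node` is in block/statement position: directly inside a statement list or expression
/// statement, or heuristically the start of a new statement (see `has_in_newline_expr_first`).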
1947fn is_in_block(node: &SyntaxNode) -> bool {
1948    if has_in_newline_expr_first(node) {
1949        return true;
1950    };
1951    node.parent()
1952        .map(|node| ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind()))
1953        .unwrap_or(false)
1954}
1955
1956/// Similar to `has_parens`: a heuristic for detecting an incomplete statement before an ambiguous `Expr`.
1957///
1958/// Heuristic:
1959///
1960/// If the `PathExpr` is the leftmost part of the `Expr` and there is a newline after the `PathExpr`,
1961/// the `PathExpr` is considered not to be part of the `Expr`.
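///
/// For example (`|` denotes the cursor):
/// ```ignore
/// value|
/// [0].iter();
/// ```
/// Without the heuristic, the two lines would parse as a single index expression `value[0]`.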
1962fn has_in_newline_expr_first(node: &SyntaxNode) -> bool {
1963    if ast::PathExpr::can_cast(node.kind())
1964        && let Some(NodeOrToken::Token(next)) = node.next_sibling_or_token()
1965        && next.kind() == SyntaxKind::WHITESPACE
1966        && next.text().contains('\n')
1967        && let Some(stmt_like) = node
1968            .ancestors()
1969            .take_while(|it| it.text_range().start() == node.text_range().start())
1970            .filter_map(Either::<ast::ExprStmt, ast::Expr>::cast)
1971            .last()
1972    {
1973        stmt_like.syntax().parent().and_then(ast::StmtList::cast).is_some()
1974    } else {
1975        false
1976    }
1977}
1978
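/// Whether the previous non-trivia sibling of `node` (or of its enclosing statement or match arm)
/// ends in an `if` expression, as in `if cond { } $0`.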
1979fn is_after_if_expr(node: SyntaxNode) -> bool {
1980    let node = match node.parent().and_then(Either::<ast::ExprStmt, ast::MatchArm>::cast) {
1981        Some(stmt) => stmt.syntax().clone(),
1982        None => node,
1983    };
1984    let prev_sibling =
1985        non_trivia_sibling(node.into(), Direction::Prev).and_then(NodeOrToken::into_node);
1986    iter::successors(prev_sibling, |it| it.last_child_or_token()?.into_node())
1987        .find_map(ast::IfExpr::cast)
1988        .is_some()
1989}
1990
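/// Returns the first non-trivia token following `e` (for a node, following its last token).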
1991fn next_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
1992    let mut token = match e.into() {
1993        SyntaxElement::Node(n) => n.last_token()?,
1994        SyntaxElement::Token(t) => t,
1995    }
1996    .next_token();
1997    while let Some(inner) = token {
1998        if !inner.kind().is_trivia() {
1999            return Some(inner);
2000        } else {
2001            token = inner.next_token();
2002        }
2003    }
2004    None
2005}
2006
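/// Returns the first non-trivia sibling element following `ele`.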
2007fn next_non_trivia_sibling(ele: SyntaxElement) -> Option<SyntaxElement> {
2008    let mut e = ele.next_sibling_or_token();
2009    while let Some(inner) = e {
2010        if !inner.kind().is_trivia() {
2011            return Some(inner);
2012        } else {
2013            e = inner.next_sibling_or_token();
2014        }
2015    }
2016    None
2017}
2018
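/// If `token` is a trivia token directly preceded by an assignment operator, `|`, `return`,
/// `break` or `continue`, returns that preceding token instead; otherwise returns `token`
/// unchanged.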
2019fn prev_special_biased_token_at_trivia(mut token: SyntaxToken) -> SyntaxToken {
2020    while token.kind().is_trivia()
2021        && let Some(prev) = token.prev_token()
2022        && let T![=]
2023        | T![+=]
2024        | T![/=]
2025        | T![*=]
2026        | T![%=]
2027        | T![>>=]
2028        | T![<<=]
2029        | T![-=]
2030        | T![|=]
2031        | T![&=]
2032        | T![^=]
2033        | T![|]
2034        | T![return]
2035        | T![break]
2036        | T![continue] = prev.kind()
2037    {
2038        token = prev
2039    }
2040    token
2041}