ide_completion/context/analysis.rs

//! Module responsible for analyzing the code surrounding the cursor for completion.
use std::iter;

use hir::{ExpandResult, InFile, Semantics, Type, TypeInfo, Variant};
use ide_db::{
    RootDatabase, active_parameter::ActiveParameter, syntax_helpers::node_ext::find_loops,
};
use itertools::{Either, Itertools};
use stdx::always;
use syntax::{
    AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
    T, TextRange, TextSize,
    algo::{
        self, ancestors_at_offset, find_node_at_offset, non_trivia_sibling,
        previous_non_trivia_token,
    },
    ast::{
        self, AttrKind, HasArgList, HasGenericArgs, HasGenericParams, HasLoopBody, HasName,
        NameOrNameRef,
    },
    match_ast,
};

use crate::{
    completions::postfix::is_in_condition,
    context::{
        AttrCtx, BreakableKind, COMPLETION_MARKER, CompletionAnalysis, DotAccess, DotAccessExprCtx,
        DotAccessKind, ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind,
        NameRefContext, NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathExprCtx,
        PathKind, PatternContext, PatternRefutability, Qualified, QualifierCtx,
        TypeAscriptionTarget, TypeLocation,
    },
};

#[derive(Debug)]
struct ExpansionResult {
    original_file: SyntaxNode,
    speculative_file: SyntaxNode,
    /// The offset in the original file.
    original_offset: TextSize,
    /// The offset in the speculatively expanded file.
    speculative_offset: TextSize,
    fake_ident_token: SyntaxToken,
    derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
}

pub(super) struct AnalysisResult<'db> {
    pub(super) analysis: CompletionAnalysis<'db>,
    pub(super) expected: (Option<Type<'db>>, Option<ast::NameOrNameRef>),
    pub(super) qualifier_ctx: QualifierCtx,
    /// the original token of the expanded file
    pub(super) token: SyntaxToken,
    /// The offset in the original file.
    pub(super) original_offset: TextSize,
}

pub(super) fn expand_and_analyze<'db>(
    sema: &Semantics<'db, RootDatabase>,
    original_file: InFile<SyntaxNode>,
    speculative_file: SyntaxNode,
    offset: TextSize,
    original_token: &SyntaxToken,
) -> Option<AnalysisResult<'db>> {
    // as we insert after the offset, right-biased will *always* pick the identifier, no matter
    // whether an ident has already been typed or not
    let fake_ident_token = speculative_file.token_at_offset(offset).right_biased()?;
    // the relative offset between the cursor and the *identifier* token we are completing on
    let relative_offset = offset - fake_ident_token.text_range().start();
    // make the offset point to the start of the original token, as that is what the
    // intermediate offsets calculated in expansion always point to
    let offset = offset - relative_offset;
    let expansion = expand_maybe_stop(
        sema,
        original_file.clone(),
        speculative_file.clone(),
        offset,
        fake_ident_token.clone(),
        relative_offset,
    )
    .unwrap_or(ExpansionResult {
        original_file: original_file.value,
        speculative_file,
        original_offset: offset,
        speculative_offset: fake_ident_token.text_range().start(),
        fake_ident_token,
        derive_ctx: None,
    });

    // add the relative offset back, so that left_biased finds the proper token
    let original_offset = expansion.original_offset + relative_offset;
    let token = expansion.original_file.token_at_offset(original_offset).left_biased()?;

    analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| {
        AnalysisResult { analysis, expected, qualifier_ctx, token, original_offset }
    })
}

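/// Returns the token at `offset` (left-biased), skipping backwards over whitespace.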
fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Option<SyntaxToken> {
    let token = file.token_at_offset(offset).left_biased()?;
    algo::skip_whitespace_token(token, Direction::Prev)
}

/// Expand attributes and macro calls at the current cursor position for both the original file
/// and fake file repeatedly. As soon as one of the two expansions fails we stop, so that the original
/// and speculative states stay in sync.
///
/// We do this by recursively expanding all macros and picking the best possible match. We cannot just
/// choose the first expansion each time because macros can expand to something that does not include
/// our completion marker, e.g.:
///
/// ```ignore
/// macro_rules! helper { ($v:ident) => {} }
/// macro_rules! my_macro {
///     ($v:ident) => {
///         helper!($v);
///         $v
///     };
/// }
///
/// my_macro!(complete_me_here);
/// ```
/// If we only expanded the first thing we encountered (which is in fact what this method used to do), we would
/// be unable to complete here, because we would be walking directly into the void. So we instead try
/// *every* possible path.
///
/// This can also create discrepancies between the speculative and real expansions: because we insert
/// tokens, we insert characters, which means that if we try the second occurrence it may not be at the same
/// position in the original and speculative files. We take an educated guess here, and for each token
/// that we check, we subtract `COMPLETION_MARKER.len()`. This may not be accurate because proc macros
/// can insert the text of the completion marker in other places while removing the span, but this is
/// the best we can do.
fn expand_maybe_stop(
    sema: &Semantics<'_, RootDatabase>,
    original_file: InFile<SyntaxNode>,
    speculative_file: SyntaxNode,
    original_offset: TextSize,
    fake_ident_token: SyntaxToken,
    relative_offset: TextSize,
) -> Option<ExpansionResult> {
    if let result @ Some(_) = expand(
        sema,
        original_file.clone(),
        speculative_file.clone(),
        original_offset,
        fake_ident_token.clone(),
        relative_offset,
    ) {
        return result;
    }

    // We can't check whether the fake expansion is inside a macro call, because that requires semantic info.
    // But hopefully checking just the real one should be enough.
    if token_at_offset_ignore_whitespace(&original_file.value, original_offset + relative_offset)
        .is_some_and(|original_token| {
            !sema.is_inside_macro_call(original_file.with_value(&original_token))
        })
    {
        // Recursion base case.
        Some(ExpansionResult {
            original_file: original_file.value,
            speculative_file,
            original_offset,
            speculative_offset: fake_ident_token.text_range().start(),
            fake_ident_token,
            derive_ctx: None,
        })
    } else {
        None
    }
}

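/// Performs a single round of expansion at the cursor: attribute macros first, then derives and
/// derive helpers, then fn-like macro calls, recursing into successful expansions via
/// [`expand_maybe_stop`].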
fn expand(
    sema: &Semantics<'_, RootDatabase>,
    original_file: InFile<SyntaxNode>,
    speculative_file: SyntaxNode,
    original_offset: TextSize,
    fake_ident_token: SyntaxToken,
    relative_offset: TextSize,
) -> Option<ExpansionResult> {
    let _p = tracing::info_span!("CompletionContext::expand").entered();

    let parent_item =
        |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
    let original_node = token_at_offset_ignore_whitespace(&original_file.value, original_offset)
        .and_then(|token| token.parent_ancestors().find_map(ast::Item::cast));
    let ancestor_items = iter::successors(
        Option::zip(
            original_node,
            find_node_at_offset::<ast::Item>(
                &speculative_file,
                fake_ident_token.text_range().start(),
            ),
        ),
        |(a, b)| parent_item(a).zip(parent_item(b)),
    );

    // first try to expand attributes as these are always the outermost macro calls
    'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
        match (
            sema.expand_attr_macro(&actual_item),
            sema.speculative_expand_attr_macro(
                &actual_item,
                &item_with_fake_ident,
                fake_ident_token.clone(),
            ),
        ) {
            // maybe parent items have attributes, so continue walking the ancestors
            (None, None) => continue 'ancestors,
            // successful expansions
            (
                Some(ExpandResult { value: actual_expansion, err: _ }),
                Some((fake_expansion, fake_mapped_tokens)),
            ) => {
                let mut accumulated_offset_from_fake_tokens = 0;
                let actual_range = actual_expansion.text_range().end();
                let result = fake_mapped_tokens
                    .into_iter()
                    .filter_map(|(fake_mapped_token, rank)| {
                        let accumulated_offset = accumulated_offset_from_fake_tokens;
                        if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
                            // Proc macros can make the same span with different text; we don't
                            // want them to participate in completion because the macro author probably
                            // didn't intend them to.
                            return None;
                        }
                        accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();

                        let new_offset = fake_mapped_token.text_range().start()
                            - TextSize::new(accumulated_offset as u32);
                        if new_offset + relative_offset > actual_range {
                            // offset is out of bounds of the original expansion,
                            // stop here to prevent problems from happening
                            return None;
                        }
                        let result = expand_maybe_stop(
                            sema,
                            actual_expansion.clone(),
                            fake_expansion.clone(),
                            new_offset,
                            fake_mapped_token,
                            relative_offset,
                        )?;
                        Some((result, rank))
                    })
                    .min_by_key(|(_, rank)| *rank)
                    .map(|(result, _)| result);
                if result.is_some() {
                    return result;
                }
            }
            // exactly one expansion failed; inconsistent state, so stop expanding completely
            _ => break 'ancestors,
        }
    }

    // No attributes have been expanded, so look for macro_call! token trees or derive token trees
    let orig_tt = ancestors_at_offset(&original_file.value, original_offset)
        .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
        .last()?;
    let spec_tt = ancestors_at_offset(&speculative_file, fake_ident_token.text_range().start())
        .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
        .last()?;

    let (tts, attrs) = match (orig_tt, spec_tt) {
        (Either::Left(orig_tt), Either::Left(spec_tt)) => {
            let attrs = orig_tt
                .syntax()
                .parent()
                .and_then(ast::Meta::cast)
                .and_then(|it| it.parent_attr())
                .zip(
                    spec_tt
                        .syntax()
                        .parent()
                        .and_then(ast::Meta::cast)
                        .and_then(|it| it.parent_attr()),
                );
            (Some((orig_tt, spec_tt)), attrs)
        }
        (Either::Right(orig_path), Either::Right(spec_path)) => {
            (None, orig_path.parent_attr().zip(spec_path.parent_attr()))
        }
        _ => return None,
    };

    // Expand pseudo-derive expansions, aka `derive(Debug$0)`
    if let Some((orig_attr, spec_attr)) = attrs {
        if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) = (
            sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
            sema.speculative_expand_derive_as_pseudo_attr_macro(
                &orig_attr,
                &spec_attr,
                fake_ident_token.clone(),
            ),
        ) && let Some((fake_mapped_token, _)) =
            fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank)
        {
            return Some(ExpansionResult {
                original_file: original_file.value,
                speculative_file,
                original_offset,
                speculative_offset: fake_ident_token.text_range().start(),
                fake_ident_token,
                derive_ctx: Some((
                    actual_expansion,
                    fake_expansion,
                    fake_mapped_token.text_range().start(),
                    orig_attr,
                )),
            });
        }

        if let Some(spec_adt) =
            spec_attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
                ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
                ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
                ast::Item::Union(it) => Some(ast::Adt::Union(it)),
                _ => None,
            })
        {
            // might be the path of a derive helper or a token tree inside of one
            if let Some(helpers) = sema.derive_helper(&orig_attr) {
                for (_mac, file) in helpers {
                    if let Some((fake_expansion, fake_mapped_tokens)) = sema.speculative_expand_raw(
                        file,
                        spec_adt.syntax(),
                        fake_ident_token.clone(),
                    ) {
                        // we are inside a derive helper token tree, treat this as being inside
                        // the derive expansion
                        let actual_expansion = sema.parse_or_expand(file.into());
                        let mut accumulated_offset_from_fake_tokens = 0;
                        let actual_range = actual_expansion.text_range().end();
                        let result = fake_mapped_tokens
                            .into_iter()
                            .filter_map(|(fake_mapped_token, rank)| {
                                let accumulated_offset = accumulated_offset_from_fake_tokens;
                                if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
                                    // Proc macros can make the same span with different text; we don't
                                    // want them to participate in completion because the macro author probably
                                    // didn't intend them to.
                                    return None;
                                }
                                accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();

                                let new_offset = fake_mapped_token.text_range().start()
                                    - TextSize::new(accumulated_offset as u32);
                                if new_offset + relative_offset > actual_range {
                                    // offset is out of bounds of the original expansion,
                                    // stop here to prevent problems from happening
                                    return None;
                                }
                                let result = expand_maybe_stop(
                                    sema,
                                    InFile::new(file.into(), actual_expansion.clone()),
                                    fake_expansion.clone(),
                                    new_offset,
                                    fake_mapped_token,
                                    relative_offset,
                                )?;
                                Some((result, rank))
                            })
                            .min_by_key(|(_, rank)| *rank)
                            .map(|(result, _)| result);
                        if result.is_some() {
                            return result;
                        }
                    }
                }
            }
        }
        // at this point we won't have any more successful expansions, so stop
        return None;
    }

    // Expand fn-like macro calls
    let (orig_tt, spec_tt) = tts?;
    let (actual_macro_call, macro_call_with_fake_ident) = (
        orig_tt.syntax().parent().and_then(ast::MacroCall::cast)?,
        spec_tt.syntax().parent().and_then(ast::MacroCall::cast)?,
    );
    let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
    let mac_call_path1 = macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());

    // inconsistent state, stop expanding
    if mac_call_path0 != mac_call_path1 {
        return None;
    }
    let speculative_args = macro_call_with_fake_ident.token_tree()?;

    match (
        sema.expand_macro_call(&actual_macro_call),
        sema.speculative_expand_macro_call(&actual_macro_call, &speculative_args, fake_ident_token),
    ) {
        // successful expansions
        (Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) => {
            let mut accumulated_offset_from_fake_tokens = 0;
            let actual_range = actual_expansion.text_range().end();
            fake_mapped_tokens
                .into_iter()
                .filter_map(|(fake_mapped_token, rank)| {
                    let accumulated_offset = accumulated_offset_from_fake_tokens;
                    if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
                        // Proc macros can make the same span with different text; we don't
                        // want them to participate in completion because the macro author probably
                        // didn't intend them to.
                        return None;
                    }
                    accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();

                    let new_offset = fake_mapped_token.text_range().start()
                        - TextSize::new(accumulated_offset as u32);
                    if new_offset + relative_offset > actual_range {
                        // offset is out of bounds of the original expansion,
                        // stop here to prevent problems from happening
                        return None;
                    }
                    let result = expand_maybe_stop(
                        sema,
                        actual_expansion.clone(),
                        fake_expansion.clone(),
                        new_offset,
                        fake_mapped_token,
                        relative_offset,
                    )?;
                    Some((result, rank))
                })
                .min_by_key(|(_, rank)| *rank)
                .map(|(result, _)| result)
        }
        // at least one expansion failed; we won't have anything to expand from this point
        // onwards, so break out
        _ => None,
    }
}

/// Fill the completion context; this is what does the semantic reasoning about the surrounding context
/// of the completion location.
fn analyze<'db>(
    sema: &Semantics<'db, RootDatabase>,
    expansion_result: ExpansionResult,
    original_token: &SyntaxToken,
    self_token: &SyntaxToken,
) -> Option<(CompletionAnalysis<'db>, (Option<Type<'db>>, Option<ast::NameOrNameRef>), QualifierCtx)>
{
    let _p = tracing::info_span!("CompletionContext::analyze").entered();
    let ExpansionResult {
        original_file,
        speculative_file,
        original_offset: _,
        speculative_offset,
        fake_ident_token,
        derive_ctx,
    } = expansion_result;

    if original_token.kind() != self_token.kind()
        // FIXME: This check can be removed once we use speculative database forking for completions
        && !(original_token.kind().is_punct() || original_token.kind().is_trivia())
        && !(SyntaxKind::is_any_identifier(original_token.kind())
            && SyntaxKind::is_any_identifier(self_token.kind()))
    {
        return None;
    }

    // Overwrite the path kind for derives
    if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
        if let Some(ast::NameLike::NameRef(name_ref)) =
            find_node_at_offset(&file_with_fake_ident, offset)
        {
            let parent = name_ref.syntax().parent()?;
            let (mut nameref_ctx, _) =
                classify_name_ref(sema, &original_file, name_ref, offset, parent)?;
            if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
                path_ctx.kind = PathKind::Derive {
                    existing_derives: sema
                        .resolve_derive_macro(&origin_attr)
                        .into_iter()
                        .flatten()
                        .flatten()
                        .collect(),
                };
            }
            return Some((
                CompletionAnalysis::NameRef(nameref_ctx),
                (None, None),
                QualifierCtx::default(),
            ));
        }
        return None;
    }

    let Some(name_like) = find_node_at_offset(&speculative_file, speculative_offset) else {
        let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
            CompletionAnalysis::String { original, expanded: ast::String::cast(self_token.clone()) }
        } else {
            // Fix up trailing whitespace problem
            // #[attr(foo = $0
            let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
            let p = token.parent()?;
            if p.kind() == SyntaxKind::TOKEN_TREE
                && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
            {
                let colon_prefix = previous_non_trivia_token(self_token.clone())
                    .is_some_and(|it| T![:] == it.kind());

                CompletionAnalysis::UnexpandedAttrTT {
                    fake_attribute_under_caret: fake_ident_token
                        .parent_ancestors()
                        .find_map(ast::Attr::cast),
                    colon_prefix,
                    extern_crate: p.ancestors().find_map(ast::ExternCrate::cast),
                }
            } else if p.kind() == SyntaxKind::TOKEN_TREE
                && p.ancestors().any(|it| ast::Macro::can_cast(it.kind()))
            {
                if let Some([_ident, colon, _name, dollar]) = fake_ident_token
                    .siblings_with_tokens(Direction::Prev)
                    .filter(|it| !it.kind().is_trivia())
                    .take(4)
                    .collect_array()
                    && dollar.kind() == T![$]
                    && colon.kind() == T![:]
                {
                    CompletionAnalysis::MacroSegment
                } else {
                    return None;
                }
            } else {
                return None;
            }
        };
        return Some((analysis, (None, None), QualifierCtx::default()));
    };

    let expected = expected_type_and_name(sema, self_token, &name_like);
    let mut qual_ctx = QualifierCtx::default();
    let analysis = match name_like {
        ast::NameLike::Lifetime(lifetime) => {
            CompletionAnalysis::Lifetime(classify_lifetime(sema, &original_file, lifetime)?)
        }
        ast::NameLike::NameRef(name_ref) => {
            let parent = name_ref.syntax().parent()?;
            let (nameref_ctx, qualifier_ctx) = classify_name_ref(
                sema,
                &original_file,
                name_ref,
                expansion_result.original_offset,
                parent,
            )?;

            if let NameRefContext {
                kind:
                    NameRefKind::Path(PathCompletionCtx { kind: PathKind::Expr { .. }, path, .. }, ..),
                ..
            } = &nameref_ctx
                && is_in_token_of_for_loop(path)
            {
                // for pat $0
                // there is nothing to complete here except the `in` keyword,
                // so don't bother populating the context.
                // Ideally this special casing wouldn't be needed, but the parser recovers
                return None;
            }

            qual_ctx = qualifier_ctx;
            CompletionAnalysis::NameRef(nameref_ctx)
        }
        ast::NameLike::Name(name) => {
            let name_ctx = classify_name(sema, &original_file, name)?;
            CompletionAnalysis::Name(name_ctx)
        }
    };
    Some((analysis, expected, qual_ctx))
}

/// Calculate the expected type and name at the cursor position.
fn expected_type_and_name<'db>(
    sema: &Semantics<'db, RootDatabase>,
    self_token: &SyntaxToken,
    name_like: &ast::NameLike,
) -> (Option<Type<'db>>, Option<NameOrNameRef>) {
    let token = prev_special_biased_token_at_trivia(self_token.clone());
    let mut node = match token.parent() {
        Some(it) => it,
        None => return (None, None),
    };

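    // Adjust the expected type for the `&` and `*` operators wrapping the completed expression:
    // strip one layer of reference for each enclosing `&`, and add one back for each enclosing `*`.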
    let strip_refs = |mut ty: Type<'db>| match name_like {
        ast::NameLike::NameRef(n) => {
            let p = match n.syntax().parent() {
                Some(it) => it,
                None => return ty,
            };
            let top_syn = match_ast! {
                match p {
                    ast::FieldExpr(e) => e
                        .syntax()
                        .ancestors()
                        .take_while(|it| ast::FieldExpr::can_cast(it.kind()))
                        .last(),
                    ast::PathSegment(e) => e
                        .syntax()
                        .ancestors()
                        .skip(1)
                        .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
                        .find(|it| ast::PathExpr::can_cast(it.kind())),
                    _ => None
                }
            };
            let top_syn = match top_syn {
                Some(it) => it,
                None => return ty,
            };
            let refs_level = top_syn
                .ancestors()
                .skip(1)
                .map_while(Either::<ast::RefExpr, ast::PrefixExpr>::cast)
                .take_while(|it| match it {
                    Either::Left(_) => true,
                    Either::Right(prefix) => prefix.op_kind() == Some(ast::UnaryOp::Deref),
                })
                .fold(0i32, |level, expr| match expr {
                    Either::Left(_) => level + 1,
                    Either::Right(_) => level - 1,
                });
            for _ in 0..refs_level {
                cov_mark::hit!(expected_type_fn_param_ref);
                ty = ty.strip_reference();
            }
            for _ in refs_level..0 {
                cov_mark::hit!(expected_type_fn_param_deref);
                ty = ty.add_reference(hir::Mutability::Shared);
            }
            ty
        }
        _ => ty,
    };

    let (ty, name) = loop {
        break match_ast! {
            match node {
                ast::LetStmt(it) => {
                    cov_mark::hit!(expected_type_let_with_leading_char);
                    cov_mark::hit!(expected_type_let_without_leading_char);
                    let ty = it.pat()
                        .and_then(|pat| sema.type_of_pat(&pat))
                        .or_else(|| it.initializer().and_then(|it| sema.type_of_expr(&it)))
                        .map(TypeInfo::original)
                        .filter(|ty| {
                            // don't infer the let type if the expr is a function,
                            // preventing parentheses from vanishing
                            it.ty().is_some() || !ty.is_fn()
                        });
                    let name = match it.pat() {
                        Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
                        Some(_) | None => None,
                    };

                    (ty, name)
                },
                ast::LetExpr(it) => {
                    cov_mark::hit!(expected_type_if_let_without_leading_char);
                    let ty = it.pat()
                        .and_then(|pat| sema.type_of_pat(&pat))
                        .or_else(|| it.expr().and_then(|it| sema.type_of_expr(&it)))
                        .map(TypeInfo::original);
                    (ty, None)
                },
                ast::BinExpr(it) => {
                    if let Some(ast::BinaryOp::Assignment { op: None }) = it.op_kind() {
                        let ty = it.lhs()
                            .and_then(|lhs| sema.type_of_expr(&lhs))
                            .or_else(|| it.rhs().and_then(|rhs| sema.type_of_expr(&rhs)))
                            .map(TypeInfo::original);
                        (ty, None)
                    } else if let Some(ast::BinaryOp::LogicOp(_)) = it.op_kind() {
                        let ty = sema.type_of_expr(&it.clone().into()).map(TypeInfo::original);
                        (ty, None)
                    } else {
                        (None, None)
                    }
                },
                ast::ArgList(_) => {
                    cov_mark::hit!(expected_type_fn_param);
                    ActiveParameter::at_token(
                        sema,
                        token.clone(),
                    ).map(|ap| {
                        let name = ap.ident().map(NameOrNameRef::Name);
                        (Some(ap.ty), name)
                    })
                    .unwrap_or((None, None))
                },
                ast::RecordExprFieldList(it) => {
                    // wouldn't try {} be nice...
                    (|| {
                        if token.kind() == T![..]
                            || token.prev_token().map(|t| t.kind()) == Some(T![..])
                        {
                            cov_mark::hit!(expected_type_struct_func_update);
                            let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
                            let ty = sema.type_of_expr(&record_expr.into())?;
                            Some((
                                Some(ty.original),
                                None
                            ))
                        } else {
                            cov_mark::hit!(expected_type_struct_field_without_leading_char);
                            cov_mark::hit!(expected_type_struct_field_followed_by_comma);
                            let expr_field = previous_non_trivia_token(token.clone())?.parent().and_then(ast::RecordExprField::cast)?;
                            let (_, _, ty) = sema.resolve_record_field(&expr_field)?;
                            Some((
                                Some(ty),
                                expr_field.field_name().map(NameOrNameRef::NameRef),
                            ))
                        }
                    })().unwrap_or((None, None))
                },
                ast::RecordExprField(it) => {
                    let field_ty = sema.resolve_record_field(&it).map(|(_, _, ty)| ty);
                    let field_name = it.field_name().map(NameOrNameRef::NameRef);
                    if let Some(expr) = it.expr() {
                        cov_mark::hit!(expected_type_struct_field_with_leading_char);
                        let ty = field_ty
                            .or_else(|| sema.type_of_expr(&expr).map(TypeInfo::original));
                        (ty, field_name)
                    } else {
                        (field_ty, field_name)
                    }
                },
                // match foo { $0 }
                // match foo { ..., pat => $0 }
                ast::MatchExpr(it) => {
                    let on_arrow = previous_non_trivia_token(token.clone()).is_some_and(|it| T![=>] == it.kind());

                    let ty = if on_arrow {
                        // match foo { ..., pat => $0 }
                        cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
                        cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
                        sema.type_of_expr(&it.into())
                    } else {
                        // match foo { $0 }
                        cov_mark::hit!(expected_type_match_arm_without_leading_char);
                        it.expr().and_then(|e| sema.type_of_expr(&e))
                    }.map(TypeInfo::original);
                    (ty, None)
                },
                ast::MatchArm(it) => {
                    let on_arrow = previous_non_trivia_token(token.clone()).is_some_and(|it| T![=>] == it.kind());
                    let in_body = it.expr().is_some_and(|it| it.syntax().text_range().contains_range(token.text_range()));
                    let match_expr = it.parent_match();

                    let ty = if on_arrow || in_body {
                        // match foo { ..., pat => $0 }
                        cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
                        cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
                        sema.type_of_expr(&match_expr.into())
                    } else {
                        // match foo { $0 }
                        cov_mark::hit!(expected_type_match_arm_without_leading_char);
                        match_expr.expr().and_then(|e| sema.type_of_expr(&e))
                    }.map(TypeInfo::original);
                    (ty, None)
                },
                ast::IfExpr(it) => {
                    let ty = if let Some(body) = it.then_branch()
                        && token.text_range().end() > body.syntax().text_range().start()
                    {
                        sema.type_of_expr(&body.into())
                    } else {
                        it.condition().and_then(|e| sema.type_of_expr(&e))
                    }.map(TypeInfo::original);
                    (ty, None)
                },
                ast::IdentPat(it) => {
                    cov_mark::hit!(expected_type_if_let_with_leading_char);
                    cov_mark::hit!(expected_type_match_arm_with_leading_char);
                    let ty = sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
                    (ty, None)
                },
                ast::Fn(it) => {
                    cov_mark::hit!(expected_type_fn_ret_with_leading_char);
                    cov_mark::hit!(expected_type_fn_ret_without_leading_char);
                    let def = sema.to_def(&it);
                    (def.map(|def| def.ret_type(sema.db)), None)
                },
                ast::ReturnExpr(it) => {
                    let fn_ = sema.ancestors_with_macros(it.syntax().clone())
                        .find_map(Either::<ast::Fn, ast::ClosureExpr>::cast);
                    let ty = fn_.and_then(|f| match f {
                        Either::Left(f) => Some(sema.to_def(&f)?.ret_type(sema.db)),
                        Either::Right(f) => {
                            let ty = sema.type_of_expr(&f.into())?.original.as_callable(sema.db)?;
                            Some(ty.return_type())
                        },
                    });
                    (ty, None)
                },
                ast::BreakExpr(it) => {
                    let ty = it.break_token()
                        .and_then(|it| find_loops(sema, &it)?.next())
                        .and_then(|expr| sema.type_of_expr(&expr));
                    (ty.map(TypeInfo::original), None)
                },
                ast::ClosureExpr(it) => {
                    let ty = sema.type_of_expr(&it.into());
                    ty.and_then(|ty| ty.original.as_callable(sema.db))
                        .map(|c| (Some(c.return_type()), None))
                        .unwrap_or((None, None))
                },
                ast::ParamList(it) => {
                    let closure = it.syntax().parent().and_then(ast::ClosureExpr::cast);
                    let ty = closure
                        .filter(|_| it.syntax().text_range().end() <= self_token.text_range().start())
                        .and_then(|it| sema.type_of_expr(&it.into()));
                    ty.and_then(|ty| ty.original.as_callable(sema.db))
                        .map(|c| (Some(c.return_type()), None))
                        .unwrap_or((None, None))
                },
                ast::Stmt(_) => (None, None),
                ast::Item(_) => (None, None),
                _ => {
                    match node.parent() {
                        Some(n) => {
                            node = n;
                            continue;
                        },
                        None => (None, None),
                    }
                },
            }
        };
    };
    (ty.map(strip_refs), name)
}

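/// Classify the completion context of a lifetime or label token: a lifetime parameter, a label
/// definition, a label reference, or a plain lifetime reference.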
fn classify_lifetime(
    sema: &Semantics<'_, RootDatabase>,
    original_file: &SyntaxNode,
    lifetime: ast::Lifetime,
) -> Option<LifetimeContext> {
    let parent = lifetime.syntax().parent()?;
    if parent.kind() == SyntaxKind::ERROR {
        return None;
    }

    let lifetime =
        find_node_at_offset::<ast::Lifetime>(original_file, lifetime.syntax().text_range().start());
    let kind = match_ast! {
        match parent {
            ast::LifetimeParam(_) => LifetimeKind::LifetimeParam,
            ast::BreakExpr(_) => LifetimeKind::LabelRef,
            ast::ContinueExpr(_) => LifetimeKind::LabelRef,
            ast::Label(_) => LifetimeKind::LabelDef,
            _ => {
                let def = lifetime.as_ref().and_then(|lt| sema.scope(lt.syntax())?.generic_def());
                LifetimeKind::Lifetime { in_lifetime_param_bound: ast::TypeBound::can_cast(parent.kind()), def }
            },
        }
    };

    Some(LifetimeContext { kind })
}

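/// Classify the completion context of an `ast::Name`, i.e. the name of a definition such as an
/// item, a field, or a binding pattern.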
fn classify_name(
    sema: &Semantics<'_, RootDatabase>,
    original_file: &SyntaxNode,
    name: ast::Name,
) -> Option<NameContext> {
    let parent = name.syntax().parent()?;
    let kind = match_ast! {
        match parent {
            ast::Const(_) => NameKind::Const,
            ast::ConstParam(_) => NameKind::ConstParam,
            ast::Enum(_) => NameKind::Enum,
            ast::Fn(_) => NameKind::Function,
            ast::IdentPat(bind_pat) => {
                let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
                if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
                    pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
                }

                NameKind::IdentPat(pat_ctx)
            },
            ast::MacroDef(_) => NameKind::MacroDef,
            ast::MacroRules(_) => NameKind::MacroRules,
            ast::Module(module) => NameKind::Module(module),
            ast::RecordField(_) => NameKind::RecordField,
            ast::Rename(_) => NameKind::Rename,
            ast::SelfParam(_) => NameKind::SelfParam,
            ast::Static(_) => NameKind::Static,
            ast::Struct(_) => NameKind::Struct,
            ast::Trait(_) => NameKind::Trait,
            ast::TypeAlias(_) => NameKind::TypeAlias,
            ast::TypeParam(_) => NameKind::TypeParam,
            ast::Union(_) => NameKind::Union,
            ast::Variant(_) => NameKind::Variant,
            _ => return None,
        }
    };
    let name = find_node_at_offset(original_file, name.syntax().text_range().start());
    Some(NameContext { name, kind })
}

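/// Classify the completion context of an `ast::NameRef`: record fields in expressions and
/// patterns, field and method accesses, extern crate names, and path segments.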
fn classify_name_ref<'db>(
    sema: &Semantics<'db, RootDatabase>,
    original_file: &SyntaxNode,
    name_ref: ast::NameRef,
    original_offset: TextSize,
    parent: SyntaxNode,
) -> Option<(NameRefContext<'db>, QualifierCtx)> {
    let nameref = find_node_at_offset(original_file, original_offset);

    let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());

    if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
        let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
            .is_some_and(|it| T![.] == it.kind());

        return find_node_in_file_compensated(
            sema,
            original_file,
            &record_field.parent_record_lit(),
        )
        .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix })
        .map(make_res);
    }
    if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
        let kind = NameRefKind::Pattern(PatternContext {
            param_ctx: None,
            has_type_ascription: false,
            ref_token: None,
            mut_token: None,
            record_pat: find_node_in_file_compensated(
                sema,
                original_file,
                &record_field.parent_record_pat(),
            ),
            ..pattern_context_for(sema, original_file, record_field.parent_record_pat().into())
        });
        return Some(make_res(kind));
    }

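    // Handle completion on a field access (`receiver.$0`): determine the receiver in the original
    // file, its type, and whether the receiver is an ambiguous float literal like `1.$0`.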
    let field_expr_handle = |receiver, node| {
        let receiver = find_opt_node_in_file(original_file, receiver);
        let receiver_is_ambiguous_float_literal = match &receiver {
            Some(ast::Expr::Literal(l)) => matches! {
                l.kind(),
                ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().is_some_and(|it| it.text().ends_with('.'))
            },
            _ => false,
        };

        let receiver_is_part_of_indivisible_expression = match &receiver {
            Some(ast::Expr::IfExpr(_)) => {
                let next_token_kind =
                    next_non_trivia_token(name_ref.syntax().clone()).map(|t| t.kind());
                next_token_kind == Some(SyntaxKind::ELSE_KW)
            }
            _ => false,
        };
        if receiver_is_part_of_indivisible_expression {
            return None;
        }

        let mut receiver_ty = receiver.as_ref().and_then(|it| sema.type_of_expr(it));
        if receiver_is_ambiguous_float_literal {
            // `123.|` is parsed as a float but should actually be an integer.
            always!(receiver_ty.as_ref().is_none_or(|receiver_ty| receiver_ty.original.is_float()));
            receiver_ty =
                Some(TypeInfo { original: hir::BuiltinType::i32().ty(sema.db), adjusted: None });
        }

        let kind = NameRefKind::DotAccess(DotAccess {
            receiver_ty,
            kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
            receiver,
            ctx: DotAccessExprCtx {
                in_block_expr: is_in_block(node),
                in_breakable: is_in_breakable(node).unzip().0,
            },
        });
        Some(make_res(kind))
    };

    let segment = match_ast! {
        match parent {
            ast::PathSegment(segment) => segment,
            ast::FieldExpr(field) => {
                return field_expr_handle(field.expr(), field.syntax());
            },
            ast::ExternCrate(_) => {
                let kind = NameRefKind::ExternCrate;
                return Some(make_res(kind));
            },
            ast::MethodCallExpr(method) => {
                let receiver = find_opt_node_in_file(original_file, method.receiver());
                let has_parens = has_parens(&method);
                if !has_parens && let Some(res) = field_expr_handle(method.receiver(), method.syntax()) {
                    return Some(res)
                }
                let kind = NameRefKind::DotAccess(DotAccess {
                    receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
                    kind: DotAccessKind::Method,
                    receiver,
                    ctx: DotAccessExprCtx { in_block_expr: is_in_block(method.syntax()), in_breakable: is_in_breakable(method.syntax()).unzip().0 }
                });
                return Some(make_res(kind));
            },
            _ => return None,
        }
    };

    let path = segment.parent_path();
    let original_path = find_node_in_file_compensated(sema, original_file, &path);

    let mut path_ctx = PathCompletionCtx {
        has_call_parens: false,
        has_macro_bang: false,
        qualified: Qualified::No,
        parent: None,
        path: path.clone(),
        original_path,
        kind: PathKind::Item { kind: ItemListKind::SourceFile },
        has_type_args: false,
        use_tree_parent: false,
    };

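    // The enclosing record expression of the given node, mapped back into the original file
    // (functional update syntax, e.g. `Foo { ..$0 }`).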
    let func_update_record = |syn: &SyntaxNode| {
        if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
            find_node_in_file_compensated(sema, original_file, &record_expr)
        } else {
            None
        }
    };
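    // The expression ending the previous non-trivia sibling, provided that sibling is an
    // expression or a statement missing its semicolon.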
    let prev_expr = |node: SyntaxNode| {
        let node = match node.parent().and_then(ast::ExprStmt::cast) {
            Some(stmt) => stmt.syntax().clone(),
            None => node,
        };
        let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;

        match_ast! {
            match prev_sibling {
                ast::ExprStmt(stmt) => stmt.expr().filter(|_| stmt.semicolon_token().is_none()),
                ast::LetStmt(stmt) => stmt.initializer().filter(|_| stmt.semicolon_token().is_none()),
                ast::Expr(expr) => Some(expr),
                _ => None,
            }
        }
    };
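    // The `let` statement missing its semicolon that precedes the node, if any.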
    let after_incomplete_let = |node: SyntaxNode| {
        prev_expr(node).and_then(|it| it.syntax().parent()).and_then(ast::LetStmt::cast)
    };
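    // Whether the statement containing the node is missing its semicolon and is directly followed
    // by a stray `else` keyword that the parser could not attach.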
    let before_else_kw = |node: &SyntaxNode| {
        node.parent()
            .and_then(ast::ExprStmt::cast)
            .filter(|stmt| stmt.semicolon_token().is_none())
            .and_then(|stmt| non_trivia_sibling(stmt.syntax().clone().into(), Direction::Next))
            .and_then(NodeOrToken::into_node)
            .filter(|next| next.kind() == SyntaxKind::ERROR)
            .and_then(|next| next.first_token())
            .is_some_and(|token| token.kind() == SyntaxKind::ELSE_KW)
    };
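    // Whether the node sits in a value (operand) position of its parent, e.g. a `let` initializer,
    // an argument, an array element, an index, or the right-hand side of a binary expression.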
    let is_in_value = |it: &SyntaxNode| {
        let Some(node) = it.parent() else { return false };
        let kind = node.kind();
        ast::LetStmt::can_cast(kind)
            || ast::ArgList::can_cast(kind)
            || ast::ArrayExpr::can_cast(kind)
            || ast::ParenExpr::can_cast(kind)
            || ast::BreakExpr::can_cast(kind)
            || ast::ReturnExpr::can_cast(kind)
            || ast::PrefixExpr::can_cast(kind)
            || ast::FormatArgsArg::can_cast(kind)
            || ast::RecordExprField::can_cast(kind)
            || ast::BinExpr::cast(node.clone())
                .and_then(|expr| expr.rhs())
                .is_some_and(|expr| expr.syntax() == it)
            || ast::IndexExpr::cast(node)
                .and_then(|expr| expr.index())
                .is_some_and(|expr| expr.syntax() == it)
    };

    // We do not want to generate path completions when we are sandwiched between an item decl signature and its body.
    // e.g. trait Foo $0 {}
    // In these cases parser recovery usually kicks in for our inserted identifier, causing it
    // to be parsed as either an ExprStmt or an ItemRecovery, depending on whether it is in a block
    // expression or an item list.
    // The following code checks whether the body is missing; if it is, we either cut the body off
    // from the item, or it was missing in the first place.
    let inbetween_body_and_decl_check = |node: SyntaxNode| {
        if let Some(NodeOrToken::Node(n)) =
            syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev)
            && let Some(item) = ast::Item::cast(n)
        {
            let is_inbetween = match &item {
                ast::Item::Const(it) => it.body().is_none() && it.semicolon_token().is_none(),
                ast::Item::Enum(it) => it.variant_list().is_none(),
                ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
                ast::Item::Fn(it) => it.body().is_none() && it.semicolon_token().is_none(),
                ast::Item::Impl(it) => it.assoc_item_list().is_none(),
                ast::Item::Module(it) => it.item_list().is_none() && it.semicolon_token().is_none(),
                ast::Item::Static(it) => it.body().is_none(),
                ast::Item::Struct(it) => {
                    it.field_list().is_none() && it.semicolon_token().is_none()
                }
                ast::Item::Trait(it) => it.assoc_item_list().is_none(),
                ast::Item::TypeAlias(it) => it.ty().is_none() && it.semicolon_token().is_none(),
                ast::Item::Union(it) => it.record_field_list().is_none(),
                _ => false,
            };
            if is_inbetween {
                return Some(item);
            }
        }
        None
    };

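    // Compute the `TypeLocation` for a completion inside a generic argument list, resolving the
    // generic parameter the argument at the cursor corresponds to (or an associated type/const
    // binding such as `Trait<Assoc = $0>`).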
1120    let generic_arg_location = |arg: ast::GenericArg| {
1121        let mut override_location = None;
1122        let location = find_opt_node_in_file_compensated(
1123            sema,
1124            original_file,
1125            arg.syntax().parent().and_then(ast::GenericArgList::cast),
1126        )
1127        .map(|args| {
1128            let mut in_trait = None;
1129            let param = (|| {
1130                let parent = args.syntax().parent()?;
1131                let params = match_ast! {
1132                    match parent {
1133                        ast::PathSegment(segment) => {
1134                            match sema.resolve_path(&segment.parent_path().top_path())? {
1135                                hir::PathResolution::Def(def) => match def {
1136                                    hir::ModuleDef::Function(func) => {
1137                                         sema.source(func)?.value.generic_param_list()
1138                                    }
1139                                    hir::ModuleDef::Adt(adt) => {
1140                                        sema.source(adt)?.value.generic_param_list()
1141                                    }
1142                                    hir::ModuleDef::Variant(variant) => {
1143                                        sema.source(variant.parent_enum(sema.db))?.value.generic_param_list()
1144                                    }
1145                                    hir::ModuleDef::Trait(trait_) => {
1146                                        if let ast::GenericArg::AssocTypeArg(arg) = &arg {
1147                                            let arg_name = arg.name_ref()?;
1148                                            let arg_name = arg_name.text();
1149                                            for item in trait_.items_with_supertraits(sema.db) {
1150                                                match item {
1151                                                    hir::AssocItem::TypeAlias(assoc_ty) => {
1152                                                        if assoc_ty.name(sema.db).as_str() == arg_name {
1153                                                            override_location = Some(TypeLocation::AssocTypeEq);
1154                                                            return None;
1155                                                        }
1156                                                    },
1157                                                    hir::AssocItem::Const(const_) => {
1158                                                        if const_.name(sema.db)?.as_str() == arg_name {
1159                                                            override_location =  Some(TypeLocation::AssocConstEq);
1160                                                            return None;
1161                                                        }
1162                                                    },
1163                                                    _ => (),
1164                                                }
1165                                            }
1166                                            return None;
1167                                        } else {
1168                                            in_trait = Some(trait_);
1169                                            sema.source(trait_)?.value.generic_param_list()
1170                                        }
1171                                    }
1172                                    hir::ModuleDef::TypeAlias(ty_) => {
1173                                        sema.source(ty_)?.value.generic_param_list()
1174                                    }
1175                                    _ => None,
1176                                },
1177                                _ => None,
1178                            }
1179                        },
1180                        ast::MethodCallExpr(call) => {
1181                            let func = sema.resolve_method_call(&call)?;
1182                            sema.source(func)?.value.generic_param_list()
1183                        },
1184                        ast::AssocTypeArg(arg) => {
1185                            let trait_ = ast::PathSegment::cast(arg.syntax().parent()?.parent()?)?;
1186                            match sema.resolve_path(&trait_.parent_path().top_path())? {
1187                                hir::PathResolution::Def(hir::ModuleDef::Trait(trait_)) => {
1188                                    let arg_name = arg.name_ref()?;
1189                                    let arg_name = arg_name.text();
1190                                    let trait_items = trait_.items_with_supertraits(sema.db);
1191                                    let assoc_ty = trait_items.iter().find_map(|item| match item {
1192                                        hir::AssocItem::TypeAlias(assoc_ty) => {
1193                                            (assoc_ty.name(sema.db).as_str() == arg_name)
1194                                                .then_some(assoc_ty)
1195                                        },
1196                                        _ => None,
1197                                    })?;
1198                                    sema.source(*assoc_ty)?.value.generic_param_list()
1199                                }
1200                                _ => None,
1201                            }
1202                        },
1203                        _ => None,
1204                    }
1205                }?;
1206                // Determine the index of the argument in the `GenericArgList` and match it with
1207                // the corresponding parameter in the `GenericParamList`. Since lifetime parameters
1208                // are often omitted, ignore them for the purposes of matching the argument with
1209                // its parameter unless a lifetime argument is provided explicitly. That is, for
1210                // `struct S<'a, 'b, T>`, match `S::<$0>` to `T` and `S::<'a, $0, _>` to `'b`.
1211                // FIXME: This operates on the syntax tree and will produce incorrect results when
1212                // generic parameters are disabled by `#[cfg]` directives. It should operate on the
1213                // HIR, but the functionality necessary to do so is not exposed at the moment.
1214                let mut explicit_lifetime_arg = false;
1215                let arg_idx = arg
1216                    .syntax()
1217                    .siblings(Direction::Prev)
1218                    // Skip the node itself
1219                    .skip(1)
1220                    .map(|arg| if ast::LifetimeArg::can_cast(arg.kind()) { explicit_lifetime_arg = true })
1221                    .count();
1222                let param_idx = if explicit_lifetime_arg {
1223                    arg_idx
1224                } else {
1225                    // Lifetime parameters always precede type and const parameters,
1226                    // so offset the argument index by the total number of lifetime params
1227                    arg_idx + params.lifetime_params().count()
1228                };
1229                params.generic_params().nth(param_idx)
1230            })();
1231            (args, in_trait, param)
1232        });
1233        let (arg_list, of_trait, corresponding_param) = match location {
1234            Some((arg_list, of_trait, param)) => (Some(arg_list), of_trait, param),
1235            _ => (None, None, None),
1236        };
1237        override_location.unwrap_or(TypeLocation::GenericArg {
1238            args: arg_list,
1239            of_trait,
1240            corresponding_param,
1241        })
1242    };
1243
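    // A sketch of what `type_location` computes: it classifies *where* a type is being written by
    // looking at the parent of the type node. Roughly (illustrative, `$0` marks the completion cursor):
    //
    //     const C: $0 = 0;    // TypeAscription(Const)
    //     fn f(x: $0) {}      // TypeAscription(FnParam)
    //     fn f() -> $0 {}     // TypeAscription(RetType)
    //     let x: $0 = 0;      // TypeAscription(Let)
    //     impl $0 {}          // ImplTarget
    //     fn f<T: $0>() {}    // TypeBound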
1244    let type_location = |node: &SyntaxNode| {
1245        let parent = node.parent()?;
1246        let res = match_ast! {
1247            match parent {
1248                ast::Const(it) => {
1249                    let name = find_opt_node_in_file(original_file, it.name())?;
1250                    let original = ast::Const::cast(name.syntax().parent()?)?;
1251                    TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
1252                },
1253                ast::RetType(it) => {
1254                    it.thin_arrow_token()?;
1255                    let parent = match ast::Fn::cast(parent.parent()?) {
1256                        Some(it) => it.param_list(),
1257                        None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
1258                    };
1259
1260                    let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?;
1261                    TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! {
1262                        match parent {
1263                            ast::ClosureExpr(it) => {
1264                                it.body()
1265                            },
1266                            ast::Fn(it) => {
1267                                it.body().map(ast::Expr::BlockExpr)
1268                            },
1269                            _ => return None,
1270                        }
1271                    }))
1272                },
1273                ast::Param(it) => {
1274                    it.colon_token()?;
1275                    TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat())))
1276                },
1277                ast::LetStmt(it) => {
1278                    it.colon_token()?;
1279                    TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat())))
1280                },
1281                ast::Impl(it) => {
1282                    match it.trait_() {
1283                        Some(t) if t.syntax() == node => TypeLocation::ImplTrait,
1284                        _ => match it.self_ty() {
1285                            Some(t) if t.syntax() == node => TypeLocation::ImplTarget,
1286                            _ => return None,
1287                        },
1288                    }
1289                },
1290                ast::TypeBound(_) => TypeLocation::TypeBound,
1291                // is this case needed?
1292                ast::TypeBoundList(_) => TypeLocation::TypeBound,
1293                ast::GenericArg(it) => generic_arg_location(it),
1294                // is this case needed?
1295                ast::GenericArgList(it) => {
1296                    let args = find_opt_node_in_file_compensated(sema, original_file, Some(it));
1297                    TypeLocation::GenericArg { args, of_trait: None, corresponding_param: None }
1298                },
1299                ast::TupleField(_) => TypeLocation::TupleField,
1300                _ => return None,
1301            }
1302        };
1303        Some(res)
1304    };
1305
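    // `make_path_kind_expr` below gathers the `PathExprCtx` for a path in expression position by
    // inspecting the surrounding syntax: whether we are in a block, loop body or condition, after
    // an `if` expression, behind `&`, inside a match guard, what the innermost return type and
    // breakable expression type are, and so on. A rough example (`$0` marks the completion cursor):
    //
    //     fn f() -> u32 {
    //         if cond {}
    //         el$0
    //     }
    //
    // should end up with `in_block_expr` and `after_if_expr` set and `innermost_ret_ty` being `u32`.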
1306    let make_path_kind_expr = |expr: ast::Expr| {
1307        let it = expr.syntax();
1308        let in_block_expr = is_in_block(it);
1309        let (in_loop_body, innermost_breakable) = is_in_breakable(it).unzip();
1310        let after_if_expr = is_after_if_expr(it.clone());
1311        let ref_expr_parent =
1312            path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
1313        let after_amp = non_trivia_sibling(it.clone().into(), Direction::Prev)
1314            .map(|it| it.kind() == SyntaxKind::AMP)
1315            .unwrap_or(false);
1316        let (innermost_ret_ty, self_param) = {
1317            let find_ret_ty = |it: SyntaxNode| {
1318                if let Some(item) = ast::Item::cast(it.clone()) {
1319                    match item {
1320                        ast::Item::Fn(f) => Some(sema.to_def(&f).map(|it| it.ret_type(sema.db))),
1321                        ast::Item::MacroCall(_) => None,
1322                        _ => Some(None),
1323                    }
1324                } else {
1325                    let expr = ast::Expr::cast(it)?;
1326                    let callable = match expr {
1327                        // FIXME
1328                        // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
1329                        ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
1330                        _ => return None,
1331                    };
1332                    Some(
1333                        callable
1334                            .and_then(|c| c.adjusted().as_callable(sema.db))
1335                            .map(|it| it.return_type()),
1336                    )
1337                }
1338            };
1339            let fn_self_param =
1340                |fn_: ast::Fn| sema.to_def(&fn_).and_then(|it| it.self_param(sema.db));
1341            let closure_this_param = |closure: ast::ClosureExpr| {
1342                if closure.param_list()?.params().next()?.pat()?.syntax().text() != "this" {
1343                    return None;
1344                }
1345                sema.type_of_expr(&closure.into())
1346                    .and_then(|it| it.original.as_callable(sema.db))
1347                    .and_then(|it| it.params().into_iter().next())
1348            };
1349            let find_fn_self_param = |it: SyntaxNode| {
1350                match_ast! {
1351                    match it {
1352                        ast::Fn(fn_) => Some(fn_self_param(fn_).map(Either::Left)),
1353                        ast::ClosureExpr(f) => closure_this_param(f).map(Either::Right).map(Some),
1354                        ast::MacroCall(_) => None,
1355                        ast::Item(_) => Some(None),
1356                        _ => None,
1357                    }
1358                }
1359            };
1360
1361            match find_node_in_file_compensated(sema, original_file, &expr) {
1362                Some(it) => {
1363                    // buggy
1364                    let innermost_ret_ty = sema
1365                        .ancestors_with_macros(it.syntax().clone())
1366                        .find_map(find_ret_ty)
1367                        .flatten();
1368
1369                    let self_param = sema
1370                        .ancestors_with_macros(it.syntax().clone())
1371                        .find_map(find_fn_self_param)
1372                        .flatten();
1373                    (innermost_ret_ty, self_param)
1374                }
1375                None => (None, None),
1376            }
1377        };
1378        let innermost_breakable_ty = innermost_breakable
1379            .and_then(ast::Expr::cast)
1380            .and_then(|expr| find_node_in_file_compensated(sema, original_file, &expr))
1381            .and_then(|expr| sema.type_of_expr(&expr))
1382            .map(|ty| if ty.original.is_never() { ty.adjusted() } else { ty.original() });
1383        let is_func_update = func_update_record(it);
1384        let in_condition = is_in_condition(&expr);
1385        let after_incomplete_let = after_incomplete_let(it.clone()).is_some();
1386        let incomplete_expr_stmt =
1387            it.parent().and_then(ast::ExprStmt::cast).map(|it| it.semicolon_token().is_none());
1388        let before_else_kw = before_else_kw(it);
1389        let incomplete_let = left_ancestors(it.parent())
1390            .find_map(ast::LetStmt::cast)
1391            .is_some_and(|it| it.semicolon_token().is_none())
1392            || after_incomplete_let && incomplete_expr_stmt.unwrap_or(true) && !before_else_kw;
1393        let in_value = is_in_value(it);
1394        let impl_ = fetch_immediate_impl_or_trait(sema, original_file, expr.syntax())
1395            .and_then(Either::left);
1396
1397        let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) {
1398            Some(arm) => arm
1399                .fat_arrow_token()
1400                .is_none_or(|arrow| it.text_range().start() < arrow.text_range().start()),
1401            None => false,
1402        };
1403
1404        PathKind::Expr {
1405            expr_ctx: PathExprCtx {
1406                in_block_expr,
1407                in_breakable: in_loop_body,
1408                after_if_expr,
1409                before_else_kw,
1410                in_condition,
1411                ref_expr_parent,
1412                after_amp,
1413                is_func_update,
1414                innermost_ret_ty,
1415                innermost_breakable_ty,
1416                self_param,
1417                in_value,
1418                incomplete_let,
1419                after_incomplete_let,
1420                impl_,
1421                in_match_guard,
1422            },
1423        }
1424    };
1425    let make_path_kind_type = |ty: ast::Type| {
1426        let location = type_location(ty.syntax());
1427        PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
1428    };
1429
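    // `kind_item` below determines the `ItemListKind` for a path in item position from the list it
    // appears in; roughly (illustrative, `$0` marks the completion cursor):
    //
    //     mod m { $0 }           // ItemListKind::Module
    //     trait T { $0 }         // ItemListKind::Trait
    //     impl T for S { $0 }    // ItemListKind::TraitImpl(..)
    //     extern "C" { $0 }      // ItemListKind::ExternBlock { is_unsafe: false }
    //     $0                     // at file top level: ItemListKind::SourceFile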
1430    let kind_item = |it: &SyntaxNode| {
1431        let parent = it.parent()?;
1432        let kind = match_ast! {
1433            match parent {
1434                ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
1435                ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
1436                    Some(it) => match_ast! {
1437                        match it {
1438                            ast::Trait(_) => ItemListKind::Trait,
1439                            ast::Impl(it) => if it.trait_().is_some() {
1440                                ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
1441                            } else {
1442                                ItemListKind::Impl
1443                            },
1444                            _ => return None
1445                        }
1446                    },
1447                    None => return None,
1448                } },
1449                ast::ExternItemList(it) => {
1450                    let exn_blk = it.syntax().parent().and_then(ast::ExternBlock::cast);
1451                    PathKind::Item {
1452                        kind: ItemListKind::ExternBlock {
1453                            is_unsafe: exn_blk.and_then(|it| it.unsafe_token()).is_some(),
1454                        }
1455                    }
1456                },
1457                ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
1458                _ => return None,
1459            }
1460        };
1461        Some(kind)
1462    };
1463
1464    let mut kind_macro_call = |it: ast::MacroCall| {
1465        path_ctx.has_macro_bang = it.excl_token().is_some();
1466        let parent = it.syntax().parent()?;
1467        if let Some(kind) = kind_item(it.syntax()) {
1468            return Some(kind);
1469        }
1470        let kind = match_ast! {
1471            match parent {
1472                ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
1473                ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
1474                ast::MacroType(ty) => make_path_kind_type(ty.into()),
1475                _ => return None,
1476            }
1477        };
1478        Some(kind)
1479    };
1480    let make_path_kind_attr = |meta: ast::Meta| {
1481        let attr = meta.parent_attr()?;
1482        let kind = attr.kind();
1483        let attached = attr.syntax().parent()?;
1484        let is_trailing_outer_attr = kind != AttrKind::Inner
1485            && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none();
1486        let annotated_item_kind = if is_trailing_outer_attr { None } else { Some(attached.kind()) };
1487        let derive_helpers = annotated_item_kind
1488            .filter(|kind| {
1489                matches!(
1490                    kind,
1491                    SyntaxKind::STRUCT
1492                        | SyntaxKind::ENUM
1493                        | SyntaxKind::UNION
1494                        | SyntaxKind::VARIANT
1495                        | SyntaxKind::TUPLE_FIELD
1496                        | SyntaxKind::RECORD_FIELD
1497                )
1498            })
1499            .and_then(|_| nameref.as_ref()?.syntax().ancestors().find_map(ast::Adt::cast))
1500            .and_then(|adt| sema.derive_helpers_in_scope(&adt))
1501            .unwrap_or_default();
1502        Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind, derive_helpers } })
1503    };
1504
1505    // Infer the path kind
1506    let parent = path.syntax().parent()?;
1507    let kind = 'find_kind: {
1508        if parent.kind() == SyntaxKind::ERROR {
1509            if let Some(kind) = inbetween_body_and_decl_check(parent.clone()) {
1510                return Some(make_res(NameRefKind::Keyword(kind)));
1511            }
1512
1513            break 'find_kind kind_item(&parent)?;
1514        }
1515        match_ast! {
1516            match parent {
1517                ast::PathType(it) => make_path_kind_type(it.into()),
1518                ast::PathExpr(it) => {
1519                    if let Some(p) = it.syntax().parent() {
1520                        let p_kind = p.kind();
1521                        // The syntax node of interest, for which we want to check whether
1522                        // it is sandwiched between an item decl signature and its body.
1523                        let probe = if ast::ExprStmt::can_cast(p_kind) {
1524                            Some(p)
1525                        } else if ast::StmtList::can_cast(p_kind) {
1526                            Some(it.syntax().clone())
1527                        } else {
1528                            None
1529                        };
1530                        if let Some(kind) = probe.and_then(inbetween_body_and_decl_check) {
1531                            return Some(make_res(NameRefKind::Keyword(kind)));
1532                        }
1533                    }
1534
1535                    path_ctx.has_call_parens = it.syntax().parent().is_some_and(|it| ast::CallExpr::cast(it).is_some_and(|it| has_parens(&it)));
1536
1537                    make_path_kind_expr(it.into())
1538                },
1539                ast::TupleStructPat(it) => {
1540                    path_ctx.has_call_parens = true;
1541                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
1542                },
1543                ast::RecordPat(it) => {
1544                    path_ctx.has_call_parens = true;
1545                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
1546                },
1547                ast::PathPat(it) => {
1548                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
1549                },
1550                ast::MacroCall(it) => {
1551                    kind_macro_call(it)?
1552                },
1553                ast::Meta(meta) => make_path_kind_attr(meta)?,
1554                ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
1555                ast::UseTree(_) => PathKind::Use,
1556                // completing inside a qualifier
1557                ast::Path(parent) => {
1558                    path_ctx.parent = Some(parent.clone());
1559                    let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
1560                    match_ast! {
1561                        match parent {
1562                            ast::PathType(it) => make_path_kind_type(it.into()),
1563                            ast::PathExpr(it) => {
1564                                path_ctx.has_call_parens = it.syntax().parent().is_some_and(|it| ast::CallExpr::cast(it).is_some_and(|it| has_parens(&it)));
1565
1566                                make_path_kind_expr(it.into())
1567                            },
1568                            ast::TupleStructPat(it) => {
1569                                path_ctx.has_call_parens = true;
1570                                PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
1571                            },
1572                            ast::RecordPat(it) => {
1573                                path_ctx.has_call_parens = true;
1574                                PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
1575                            },
1576                            ast::PathPat(it) => {
1577                                PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
1578                            },
1579                            ast::MacroCall(it) => {
1580                                kind_macro_call(it)?
1581                            },
1582                            ast::Meta(meta) => make_path_kind_attr(meta)?,
1583                            ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
1584                            ast::UseTree(_) => PathKind::Use,
1585                            ast::RecordExpr(it) => make_path_kind_expr(it.into()),
1586                            _ => return None,
1587                        }
1588                    }
1589                },
1590                ast::RecordExpr(it) => {
1591                    // A record expression in this position is usually a result of parsing recovery, so check whether we are actually between an item's signature and its body
1592                    if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
1593                        return Some(make_res(NameRefKind::Keyword(kind)));
1594                    }
1595                    make_path_kind_expr(it.into())
1596                },
1597                _ => return None,
1598            }
1599        }
1600    };
1601
1602    path_ctx.kind = kind;
1603    path_ctx.has_type_args = segment.generic_arg_list().is_some();
1604
1605    // calculate the qualifier context
1606    if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
1607        path_ctx.use_tree_parent = use_tree_parent;
1608        if !use_tree_parent && segment.coloncolon_token().is_some() {
1609            path_ctx.qualified = Qualified::Absolute;
1610        } else {
1611            let qualifier = qualifier
1612                .segment()
1613                .and_then(|it| find_node_in_file(original_file, &it))
1614                .map(|it| it.parent_path());
1615            if let Some(qualifier) = qualifier {
1616                let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
1617                    Some(ast::PathSegmentKind::Type { type_ref: Some(type_ref), trait_ref })
1618                        if qualifier.qualifier().is_none() =>
1619                    {
1620                        Some((type_ref, trait_ref))
1621                    }
1622                    _ => None,
1623                };
1624
1625                path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor {
1626                    let ty = match ty {
1627                        ast::Type::InferType(_) => None,
1628                        ty => sema.resolve_type(&ty),
1629                    };
1630                    let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?));
1631                    Qualified::TypeAnchor { ty, trait_ }
1632                } else {
1633                    let res = sema.resolve_path(&qualifier);
1634
1635                    // For understanding how and why super_chain_len is calculated the way it
1636                    // is, check the documentation at its definition
1637                    let mut segment_count = 0;
1638                    let super_count = iter::successors(Some(qualifier.clone()), |p| p.qualifier())
1639                        .take_while(|p| {
1640                            p.segment()
1641                                .and_then(|s| {
1642                                    segment_count += 1;
1643                                    s.super_token()
1644                                })
1645                                .is_some()
1646                        })
1647                        .count();
1648
1649                    let super_chain_len =
1650                        if segment_count > super_count { None } else { Some(super_count) };
1651
1652                    Qualified::With { path: qualifier, resolution: res, super_chain_len }
1653                }
1654            };
1655        }
1656    } else if let Some(segment) = path.segment()
1657        && segment.coloncolon_token().is_some()
1658    {
1659        path_ctx.qualified = Qualified::Absolute;
1660    }
1661
1662    let mut qualifier_ctx = QualifierCtx::default();
1663    if path_ctx.is_trivial_path() {
1664        // fetch the full expression that may have qualifiers attached to it
1665        let top_node = match path_ctx.kind {
1666            PathKind::Expr { expr_ctx: PathExprCtx { in_block_expr: true, .. } } => {
1667                parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| {
1668                    let parent = p.parent()?;
1669                    if ast::StmtList::can_cast(parent.kind()) {
1670                        Some(p)
1671                    } else if ast::ExprStmt::can_cast(parent.kind()) {
1672                        Some(parent)
1673                    } else {
1674                        None
1675                    }
1676                })
1677            }
1678            PathKind::Item { .. } => parent.ancestors().find(|it| it.kind() == SyntaxKind::ERROR),
1679            _ => None,
1680        };
1681        if let Some(top) = top_node {
1682            if let Some(NodeOrToken::Node(error_node)) =
1683                syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev)
1684                && error_node.kind() == SyntaxKind::ERROR
1685            {
1686                for token in error_node.children_with_tokens().filter_map(NodeOrToken::into_token) {
1687                    match token.kind() {
1688                        SyntaxKind::UNSAFE_KW => qualifier_ctx.unsafe_tok = Some(token),
1689                        SyntaxKind::ASYNC_KW => qualifier_ctx.async_tok = Some(token),
1690                        SyntaxKind::SAFE_KW => qualifier_ctx.safe_tok = Some(token),
1691                        _ => {}
1692                    }
1693                }
1694                qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast);
1695                qualifier_ctx.abi_node = error_node.children().find_map(ast::Abi::cast);
1696            }
1697
1698            if let PathKind::Item { .. } = path_ctx.kind
1699                && qualifier_ctx.none()
1700                && let Some(t) = top.first_token()
1701                && let Some(prev) =
1702                    t.prev_token().and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
1703                && ![T![;], T!['}'], T!['{'], T![']']].contains(&prev.kind())
1704            {
1705                // This was inferred to be an item position path, but it seems
1706                // to be part of some other broken node which leaked into an item
1707                // list
1708                return None;
1709            }
1710        }
1711    }
1712    Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
1713}
1714
1715/// When writing in the middle of some code the following situation commonly occurs (`|` denotes the cursor):
1716/// ```ignore
1717/// value.method|
1718/// (1, 2, 3)
1719/// ```
1720/// Here, we want to complete the method's parentheses & arguments (if the corresponding settings are on),
1721/// but the code is parsed as a method call that already has an argument list. Therefore we use a heuristic:
1722/// if the parentheses are on the next line, we consider them non-existent.
1723fn has_parens(node: &dyn HasArgList) -> bool {
1724    let Some(arg_list) = node.arg_list() else { return false };
1725    if arg_list.l_paren_token().is_none() {
1726        return false;
1727    }
1728    let prev_siblings = iter::successors(arg_list.syntax().prev_sibling_or_token(), |it| {
1729        it.prev_sibling_or_token()
1730    });
1731    prev_siblings
1732        .take_while(|syntax| syntax.kind().is_trivia())
1733        .filter_map(|syntax| {
1734            syntax.into_token().filter(|token| token.kind() == SyntaxKind::WHITESPACE)
1735        })
1736        .all(|whitespace| !whitespace.text().contains('\n'))
1737}
1738
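/// Computes the `PatternContext` for a pattern under the cursor: its refutability, whether it has
/// a type ascription, the surrounding param/impl/trait, and, for match arms, which enum variants
/// are not covered yet. A rough example (`$0` marks the completion cursor):
/// ```ignore
/// match foo {
///     Foo::Bar => (),
///     $0
/// }
/// ```
/// Here the position is refutable and, assuming `foo` is a value of the illustrative enum `Foo`,
/// `missing_variants` would hold the variants of `Foo` other than `Bar`.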
1739fn pattern_context_for(
1740    sema: &Semantics<'_, RootDatabase>,
1741    original_file: &SyntaxNode,
1742    pat: ast::Pat,
1743) -> PatternContext {
1744    let mut param_ctx = None;
1745
1746    let mut missing_variants = vec![];
1747    let is_pat_like = |kind| {
1748        ast::Pat::can_cast(kind)
1749            || ast::RecordPatField::can_cast(kind)
1750            || ast::RecordPatFieldList::can_cast(kind)
1751    };
1752
1753    let (refutability, has_type_ascription) = pat
1754        .syntax()
1755        .ancestors()
1756        .find(|it| !is_pat_like(it.kind()))
1757        .map_or((PatternRefutability::Irrefutable, false), |node| {
1758            let refutability = match_ast! {
1759                match node {
1760                    ast::LetStmt(let_) => return (PatternRefutability::Irrefutable, let_.ty().is_some()),
1761                    ast::Param(param) => {
1762                        let has_type_ascription = param.ty().is_some();
1763                        param_ctx = (|| {
1764                            let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
1765                            let param_list = find_node_in_file_compensated(sema, original_file, &fake_param_list)?;
1766                            let param_list_owner = param_list.syntax().parent()?;
1767                            let kind = match_ast! {
1768                                match param_list_owner {
1769                                    ast::ClosureExpr(closure) => ParamKind::Closure(closure),
1770                                    ast::Fn(fn_) => ParamKind::Function(fn_),
1771                                    _ => return None,
1772                                }
1773                            };
1774                            Some(ParamContext {
1775                                param_list, param, kind
1776                            })
1777                        })();
1778                        return (PatternRefutability::Irrefutable, has_type_ascription)
1779                    },
1780                    ast::MatchArm(match_arm) => {
1781                       let missing_variants_opt = match_arm
1782                            .syntax()
1783                            .parent()
1784                            .and_then(ast::MatchArmList::cast)
1785                            .and_then(|match_arm_list| {
1786                                match_arm_list
1787                                .syntax()
1788                                .parent()
1789                                .and_then(ast::MatchExpr::cast)
1790                                .and_then(|match_expr| {
1791                                    let expr_opt = find_opt_node_in_file(original_file, match_expr.expr());
1792
1793                                    expr_opt.and_then(|expr| {
1794                                        sema.type_of_expr(&expr)?
1795                                        .adjusted()
1796                                        .autoderef(sema.db)
1797                                        .find_map(|ty| match ty.as_adt() {
1798                                            Some(hir::Adt::Enum(e)) => Some(e),
1799                                            _ => None,
1800                                        }).map(|enum_| enum_.variants(sema.db))
1801                                    })
1802                                }).map(|variants| variants.iter().filter_map(|variant| {
1803                                        let variant_name = variant.name(sema.db);
1804
1805                                        let variant_already_present = match_arm_list.arms().any(|arm| {
1806                                            arm.pat().and_then(|pat| {
1807                                                let pat_already_present = pat.syntax().to_string().contains(variant_name.as_str());
1808                                                pat_already_present.then_some(pat_already_present)
1809                                            }).is_some()
1810                                        });
1811
1812                                        (!variant_already_present).then_some(*variant)
1813                                    }).collect::<Vec<Variant>>())
1814                        });
1815
1816                        if let Some(missing_variants_) = missing_variants_opt {
1817                            missing_variants = missing_variants_;
1818                        };
1819
1820                        PatternRefutability::Refutable
1821                    },
1822                    ast::LetExpr(_) => PatternRefutability::Refutable,
1823                    ast::ForExpr(_) => PatternRefutability::Irrefutable,
1824                    _ => PatternRefutability::Irrefutable,
1825                }
1826            };
1827            (refutability, false)
1828        });
1829    let (ref_token, mut_token) = match &pat {
1830        ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()),
1831        _ => (None, None),
1832    };
1833
1834    // Only suggest name in let-stmt or fn param
1835    let should_suggest_name = matches!(
1836            &pat,
1837            ast::Pat::IdentPat(it)
1838                if it.syntax()
1839                .parent().is_some_and(|node| {
1840                    let kind = node.kind();
1841                    ast::LetStmt::can_cast(kind) || ast::Param::can_cast(kind)
1842                })
1843    );
1844
1845    PatternContext {
1846        refutability,
1847        param_ctx,
1848        has_type_ascription,
1849        should_suggest_name,
1850        after_if_expr: is_after_if_expr(pat.syntax().clone()),
1851        parent_pat: pat.syntax().parent().and_then(ast::Pat::cast),
1852        mut_token,
1853        ref_token,
1854        record_pat: None,
1855        impl_or_trait: fetch_immediate_impl_or_trait(sema, original_file, pat.syntax()),
1856        missing_variants,
1857    }
1858}
1859
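/// Returns the `impl` or `trait` that immediately encloses `node` in the original file, if any,
/// skipping over at most one intermediate `const`/`fn`/`type` item. An illustrative sketch:
/// ```ignore
/// impl S {
///     fn method(&self) { $0 }  // -> Some(Either::Left(impl S))
/// }
/// fn free() { $0 }             // -> None
/// ```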
1860fn fetch_immediate_impl_or_trait(
1861    sema: &Semantics<'_, RootDatabase>,
1862    original_file: &SyntaxNode,
1863    node: &SyntaxNode,
1864) -> Option<Either<ast::Impl, ast::Trait>> {
1865    let mut ancestors = ancestors_in_file_compensated(sema, original_file, node)?
1866        .filter_map(ast::Item::cast)
1867        .filter(|it| !matches!(it, ast::Item::MacroCall(_)));
1868
1869    match ancestors.next()? {
1870        ast::Item::Const(_) | ast::Item::Fn(_) | ast::Item::TypeAlias(_) => (),
1871        ast::Item::Impl(it) => return Some(Either::Left(it)),
1872        ast::Item::Trait(it) => return Some(Either::Right(it)),
1873        _ => return None,
1874    }
1875    match ancestors.next()? {
1876        ast::Item::Impl(it) => Some(Either::Left(it)),
1877        ast::Item::Trait(it) => Some(Either::Right(it)),
1878        _ => None,
1879    }
1880}
1881
1882/// Attempts to find `node` inside `syntax` via `node`'s text range.
1883/// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
1884fn find_opt_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: Option<N>) -> Option<N> {
1885    find_node_in_file(syntax, &node?)
1886}
1887
1888/// Attempts to find `node` inside `syntax` via `node`'s text range.
1889/// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
1890fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
1891    let syntax_range = syntax.text_range();
1892    let range = node.syntax().text_range();
1893    let intersection = range.intersect(syntax_range)?;
1894    syntax.covering_element(intersection).ancestors().find_map(N::cast)
1895}
1896
1897/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
1898/// for the offset introduced by the fake ident.
1899/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
1900fn find_node_in_file_compensated<N: AstNode>(
1901    sema: &Semantics<'_, RootDatabase>,
1902    in_file: &SyntaxNode,
1903    node: &N,
1904) -> Option<N> {
1905    ancestors_in_file_compensated(sema, in_file, node.syntax())?.find_map(N::cast)
1906}
1907
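/// Shrinks `node`'s text range by the length of the inserted `COMPLETION_MARKER`, maps it back
/// into `in_file` via the covering element and yields that node's ancestors (expanding through
/// macros). Like the other `_compensated` helpers, this is only meaningful for nodes located
/// after the fake-ident insertion point.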
1908fn ancestors_in_file_compensated<'sema>(
1909    sema: &'sema Semantics<'_, RootDatabase>,
1910    in_file: &SyntaxNode,
1911    node: &SyntaxNode,
1912) -> Option<impl Iterator<Item = SyntaxNode> + 'sema> {
1913    let syntax_range = in_file.text_range();
1914    let range = node.text_range();
1915    let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
1916    if end < range.start() {
1917        return None;
1918    }
1919    let range = TextRange::new(range.start(), end);
1920    // our inserted ident could cause `range` to go outside of the original syntax, so cap it
1921    let intersection = range.intersect(syntax_range)?;
1922    let node = match in_file.covering_element(intersection) {
1923        NodeOrToken::Node(node) => node,
1924        NodeOrToken::Token(tok) => tok.parent()?,
1925    };
1926    Some(sema.ancestors_with_macros(node))
1927}
1928
1929/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
1930/// for the offset introduced by the fake ident.
1931/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
1932fn find_opt_node_in_file_compensated<N: AstNode>(
1933    sema: &Semantics<'_, RootDatabase>,
1934    syntax: &SyntaxNode,
1935    node: Option<N>,
1936) -> Option<N> {
1937    find_node_in_file_compensated(sema, syntax, &node?)
1938}
1939
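/// Returns the qualifier of `path`, plus a flag indicating whether the qualifier comes from a
/// surrounding use tree rather than from the path itself. Illustrative examples:
/// ```ignore
/// foo::bar$0          // -> Some((foo, false))
/// use foo::{bar$0};   // -> Some((foo, true))
/// bar$0               // -> None
/// ```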
1940fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
1941    if let Some(qual) = path.qualifier() {
1942        return Some((qual, false));
1943    }
1944    let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
1945    let use_tree = use_tree_list.syntax().parent().and_then(ast::UseTree::cast)?;
1946    Some((use_tree.path()?, true))
1947}
1948
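/// Yields the ancestors of `node` that end at the same text offset as `node` itself, i.e. those
/// ancestors for which `node` is the rightmost descendant. Used above to spot e.g. an incomplete
/// `let` statement that the cursor is still logically inside of.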
1949fn left_ancestors(node: Option<SyntaxNode>) -> impl Iterator<Item = SyntaxNode> {
1950    node.into_iter().flat_map(|node| {
1951        let end = node.text_range().end();
1952        node.ancestors().take_while(move |it| it.text_range().end() == end)
1953    })
1954}
1955
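/// Checks whether `path` sits where the `in` keyword of a `for` loop is expected, e.g.
/// (illustrative, `$0` marks the completion cursor):
/// ```ignore
/// for elem i$0
/// ```
/// The typed `i` parses as a path expression right after the loop pattern; once a real `in`
/// token is present this returns `false`.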
1956fn is_in_token_of_for_loop(path: &ast::Path) -> bool {
1957    // oh my ...
1958    (|| {
1959        let expr = path.syntax().parent().and_then(ast::PathExpr::cast)?;
1960        let for_expr = expr.syntax().parent().and_then(ast::ForExpr::cast)?;
1961        if for_expr.in_token().is_some() {
1962            return Some(false);
1963        }
1964        let pat = for_expr.pat()?;
1965        let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?;
1966        Some(match next_sibl {
1967            syntax::NodeOrToken::Node(n) => {
1968                n.text_range().start() == path.syntax().text_range().start()
1969            }
1970            syntax::NodeOrToken::Token(t) => {
1971                t.text_range().start() == path.syntax().text_range().start()
1972            }
1973        })
1974    })()
1975    .unwrap_or(false)
1976}
1977
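/// Finds the innermost breakable construct (`for`/`while`/`loop`/labeled block) whose body
/// contains `node`, without crossing function or closure boundaries. A rough sketch:
/// ```ignore
/// loop {
///     let f = || { $0 };  // None: the closure boundary is in between
///     $0                  // Some((BreakableKind::Loop, ..))
/// }
/// ```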
1978fn is_in_breakable(node: &SyntaxNode) -> Option<(BreakableKind, SyntaxNode)> {
1979    node.ancestors()
1980        .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR)
1981        .find_map(|it| {
1982            let (breakable, loop_body) = match_ast! {
1983                match it {
1984                    ast::ForExpr(it) => (BreakableKind::For, it.loop_body()?),
1985                    ast::WhileExpr(it) => (BreakableKind::While, it.loop_body()?),
1986                    ast::LoopExpr(it) => (BreakableKind::Loop, it.loop_body()?),
1987                    ast::BlockExpr(it) => return it.label().map(|_| (BreakableKind::Block, it.syntax().clone())),
1988                    _ => return None,
1989                }
1990            };
1991            loop_body.syntax().text_range().contains_range(node.text_range())
1992                .then_some((breakable, it))
1993        })
1994}
1995
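/// Whether `node` is in statement position, i.e. its parent is a statement list or an expression
/// statement, or the newline heuristic below applies. Illustrative examples:
/// ```ignore
/// fn f() { foo$0 }       // true
/// fn f() { bar(foo$0) }  // false: the path is an argument, not a statement
/// ```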
1996fn is_in_block(node: &SyntaxNode) -> bool {
1997    if has_in_newline_expr_first(node) {
1998        return true;
1999    }
2000    node.parent()
2001        .map(|node| ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind()))
2002        .unwrap_or(false)
2003}
2004
2005/// Similar to `has_parens`, a heuristic for detecting an incomplete statement in front of an ambiguous `Expr`.
2006///
2007/// Heuristic:
2008///
2009/// If the `PathExpr` is the leftmost part of the `Expr` and a newline follows the `PathExpr`,
2010/// the `PathExpr` is considered not to be part of the `Expr`.
2011fn has_in_newline_expr_first(node: &SyntaxNode) -> bool {
2012    if ast::PathExpr::can_cast(node.kind())
2013        && let Some(NodeOrToken::Token(next)) = node.next_sibling_or_token()
2014        && next.kind() == SyntaxKind::WHITESPACE
2015        && next.text().contains('\n')
2016        && let Some(stmt_like) = node
2017            .ancestors()
2018            .take_while(|it| it.text_range().start() == node.text_range().start())
2019            .filter_map(Either::<ast::ExprStmt, ast::Expr>::cast)
2020            .last()
2021    {
2022        stmt_like.syntax().parent().and_then(ast::StmtList::cast).is_some()
2023    } else {
2024        false
2025    }
2026}
2027
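/// Whether the previous non-trivia sibling of `node` (or of its enclosing statement/match arm)
/// ends in an `if` expression, e.g. (illustrative, `$0` marks the completion cursor):
/// ```ignore
/// if cond { }
/// el$0
/// ```
/// This is used e.g. to decide whether offering the `else` keyword makes sense here.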
2028fn is_after_if_expr(node: SyntaxNode) -> bool {
2029    let node = match node.parent().and_then(Either::<ast::ExprStmt, ast::MatchArm>::cast) {
2030        Some(stmt) => stmt.syntax().clone(),
2031        None => node,
2032    };
2033    let prev_sibling =
2034        non_trivia_sibling(node.into(), Direction::Prev).and_then(NodeOrToken::into_node);
2035    iter::successors(prev_sibling, |it| it.last_child_or_token()?.into_node())
2036        .find_map(ast::IfExpr::cast)
2037        .is_some()
2038}
2039
2040fn next_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
2041    let mut token = match e.into() {
2042        SyntaxElement::Node(n) => n.last_token()?,
2043        SyntaxElement::Token(t) => t,
2044    }
2045    .next_token();
2046    while let Some(inner) = token {
2047        if !inner.kind().is_trivia() {
2048            return Some(inner);
2049        } else {
2050            token = inner.next_token();
2051        }
2052    }
2053    None
2054}
2055
2056fn next_non_trivia_sibling(ele: SyntaxElement) -> Option<SyntaxElement> {
2057    let mut e = ele.next_sibling_or_token();
2058    while let Some(inner) = e {
2059        if !inner.kind().is_trivia() {
2060            return Some(inner);
2061        } else {
2062            e = inner.next_sibling_or_token();
2063        }
2064    }
2065    None
2066}
2067
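/// If `token` is trivia and the token before it is one that syntactically expects an expression
/// to follow (an assignment operator, `|`, `return`, `break`, `continue` or a lifetime/label
/// token), walks back to that previous token so the analysis anchors on it. Illustrative example:
/// ```ignore
/// let x = $0   // the whitespace after `=` is biased back to the `=` token
/// ```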
2068fn prev_special_biased_token_at_trivia(mut token: SyntaxToken) -> SyntaxToken {
2069    while token.kind().is_trivia()
2070        && let Some(prev) = token.prev_token()
2071        && let T![=]
2072        | T![+=]
2073        | T![/=]
2074        | T![*=]
2075        | T![%=]
2076        | T![>>=]
2077        | T![<<=]
2078        | T![-=]
2079        | T![|=]
2080        | T![&=]
2081        | T![^=]
2082        | T![|]
2083        | T![return]
2084        | T![break]
2085        | T![continue]
2086        | T![lifetime_ident] = prev.kind()
2087    {
2088        token = prev
2089    }
2090    token
2091}