1use std::iter;
3
4use hir::{EnumVariant, ExpandResult, InFile, Semantics, Type, TypeInfo};
5use ide_db::{
6 RootDatabase, active_parameter::ActiveParameter, syntax_helpers::node_ext::find_loops,
7};
8use itertools::{Either, Itertools};
9use stdx::always;
10use syntax::{
11 AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
12 T, TextRange, TextSize,
13 algo::{
14 self, ancestors_at_offset, find_node_at_offset, non_trivia_sibling,
15 previous_non_trivia_token,
16 },
17 ast::{
18 self, AttrKind, HasArgList, HasGenericArgs, HasGenericParams, HasLoopBody, HasName,
19 NameOrNameRef,
20 },
21 match_ast,
22};
23
24use crate::{
25 completions::postfix::is_in_condition,
26 context::{
27 AttrCtx, BreakableKind, COMPLETION_MARKER, CompletionAnalysis, DotAccess, DotAccessExprCtx,
28 DotAccessKind, ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind,
29 NameRefContext, NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathExprCtx,
30 PathKind, PatternContext, PatternRefutability, Qualified, QualifierCtx,
31 TypeAscriptionTarget, TypeLocation,
32 },
33};
34
/// Result of expanding the original and the speculative file (the copy with the
/// completion marker inserted) in lockstep, pointing at where completion should
/// be performed inside the (possibly macro-expanded) trees.
#[derive(Debug)]
struct ExpansionResult {
    // The file (or macro expansion thereof) completion is performed in.
    original_file: SyntaxNode,
    // Copy of `original_file` with the fake completion identifier inserted.
    speculative_file: SyntaxNode,
    // Offset of the completion position inside `original_file`.
    original_offset: TextSize,
    // Offset of the fake identifier inside `speculative_file`.
    speculative_offset: TextSize,
    // The inserted fake identifier token inside `speculative_file`.
    fake_ident_token: SyntaxToken,
    // Set when completing inside a `#[derive(...)]`-like attribute: the actual
    // expansion, the speculative expansion, the offset of the mapped fake
    // identifier inside the speculative expansion, and the originating attribute.
    derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
}
46
/// Everything `expand_and_analyze` hands to the completion engine proper.
pub(super) struct AnalysisResult<'db> {
    // The classified completion position.
    pub(super) analysis: CompletionAnalysis<'db>,
    // Expected type and name of the entity being completed, when inferable.
    pub(super) expected: (Option<Type<'db>>, Option<ast::NameOrNameRef>),
    pub(super) qualifier_ctx: QualifierCtx,
    // The token in the (possibly expanded) original file the completion anchors to.
    pub(super) token: SyntaxToken,
    // The completion offset inside the (possibly expanded) original file.
    pub(super) original_offset: TextSize,
}
56
57pub(super) fn expand_and_analyze<'db>(
58 sema: &Semantics<'db, RootDatabase>,
59 original_file: InFile<SyntaxNode>,
60 speculative_file: SyntaxNode,
61 offset: TextSize,
62 original_token: &SyntaxToken,
63) -> Option<AnalysisResult<'db>> {
64 let fake_ident_token = speculative_file.token_at_offset(offset).right_biased()?;
67 let relative_offset = offset - fake_ident_token.text_range().start();
69 let offset = offset - relative_offset;
72 let expansion = expand_maybe_stop(
73 sema,
74 original_file.clone(),
75 speculative_file.clone(),
76 offset,
77 fake_ident_token.clone(),
78 relative_offset,
79 )
80 .unwrap_or(ExpansionResult {
81 original_file: original_file.value,
82 speculative_file,
83 original_offset: offset,
84 speculative_offset: fake_ident_token.text_range().start(),
85 fake_ident_token,
86 derive_ctx: None,
87 });
88
89 let original_offset = expansion.original_offset + relative_offset;
91 let token = expansion.original_file.token_at_offset(original_offset).left_biased()?;
92
93 analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| {
94 AnalysisResult { analysis, expected, qualifier_ctx, token, original_offset }
95 })
96}
97
98fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Option<SyntaxToken> {
99 let token = file.token_at_offset(offset).left_biased()?;
100 algo::skip_whitespace_token(token, Direction::Prev)
101}
102
103fn expand_maybe_stop(
133 sema: &Semantics<'_, RootDatabase>,
134 original_file: InFile<SyntaxNode>,
135 speculative_file: SyntaxNode,
136 original_offset: TextSize,
137 fake_ident_token: SyntaxToken,
138 relative_offset: TextSize,
139) -> Option<ExpansionResult> {
140 if let result @ Some(_) = expand(
141 sema,
142 original_file.clone(),
143 speculative_file.clone(),
144 original_offset,
145 fake_ident_token.clone(),
146 relative_offset,
147 ) {
148 return result;
149 }
150
151 if token_at_offset_ignore_whitespace(&original_file.value, original_offset + relative_offset)
154 .is_some_and(|original_token| {
155 !sema.is_inside_macro_call(original_file.with_value(&original_token))
156 })
157 {
158 Some(ExpansionResult {
160 original_file: original_file.value,
161 speculative_file,
162 original_offset,
163 speculative_offset: fake_ident_token.text_range().start(),
164 fake_ident_token,
165 derive_ctx: None,
166 })
167 } else {
168 None
169 }
170}
171
/// Performs one step of macro expansion for the completion position, trying in
/// order: attribute macros on ancestor items, derive (and derive-helper)
/// pseudo-expansions, and finally fn-like macro calls. Each successful step
/// recurses through `expand_maybe_stop` to expand nested macros as well.
fn expand(
    sema: &Semantics<'_, RootDatabase>,
    original_file: InFile<SyntaxNode>,
    speculative_file: SyntaxNode,
    original_offset: TextSize,
    fake_ident_token: SyntaxToken,
    relative_offset: TextSize,
) -> Option<ExpansionResult> {
    let _p = tracing::info_span!("CompletionContext::expand").entered();

    let parent_item =
        |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
    let original_node = token_at_offset_ignore_whitespace(&original_file.value, original_offset)
        .and_then(|token| token.parent_ancestors().find_map(ast::Item::cast));
    // Walk outwards over pairs of (original item, speculative item) covering the
    // completion position, innermost first.
    let ancestor_items = iter::successors(
        Option::zip(
            original_node,
            find_node_at_offset::<ast::Item>(
                &speculative_file,
                fake_ident_token.text_range().start(),
            ),
        ),
        |(a, b)| parent_item(a).zip(parent_item(b)),
    );

    // First try to expand attribute macros on the ancestor items.
    'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
        match (
            sema.expand_attr_macro(&actual_item),
            sema.speculative_expand_attr_macro(
                &actual_item,
                &item_with_fake_ident,
                fake_ident_token.clone(),
            ),
        ) {
            // Maybe an outer item has an attribute macro; keep walking out.
            (None, None) => continue 'ancestors,
            (
                Some(ExpandResult { value: actual_expansion, err: _ }),
                Some((fake_expansion, fake_mapped_tokens)),
            ) => {
                // The fake identifier may map to several tokens in the
                // expansion; recurse into each candidate and keep the
                // best-ranked one that yields a result.
                let mut accumulated_offset_from_fake_tokens = 0;
                let actual_range = actual_expansion.text_range().end();
                let result = fake_mapped_tokens
                    .into_iter()
                    .filter_map(|(fake_mapped_token, rank)| {
                        let accumulated_offset = accumulated_offset_from_fake_tokens;
                        if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
                            // Not a token produced by the inserted marker.
                            return None;
                        }
                        accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();

                        // Subtract the marker text inserted by earlier mapped
                        // tokens to recover the offset in the real expansion.
                        let new_offset = fake_mapped_token.text_range().start()
                            - TextSize::new(accumulated_offset as u32);
                        if new_offset + relative_offset > actual_range {
                            // Mapped outside the bounds of the real expansion.
                            return None;
                        }
                        let result = expand_maybe_stop(
                            sema,
                            actual_expansion.clone(),
                            fake_expansion.clone(),
                            new_offset,
                            fake_mapped_token,
                            relative_offset,
                        )?;
                        Some((result, rank))
                    })
                    .min_by_key(|(_, rank)| *rank)
                    .map(|(result, _)| result);
                if result.is_some() {
                    return result;
                }
            }
            // Expansions disagree between real and speculative file; give up on
            // attribute expansion.
            _ => break 'ancestors,
        }
    }

    // Not inside an attribute-expanded item. Check for a surrounding token tree
    // or attribute meta (innermost-to-outermost, take the outermost).
    let orig_tt = ancestors_at_offset(&original_file.value, original_offset)
        .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
        .last()?;
    let spec_tt = ancestors_at_offset(&speculative_file, fake_ident_token.text_range().start())
        .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
        .last()?;

    let (tts, attrs) = match (orig_tt, spec_tt) {
        (Either::Left(orig_tt), Either::Left(spec_tt)) => {
            // Token trees: may still belong to an attribute via their meta parent.
            let attrs = orig_tt
                .syntax()
                .parent()
                .and_then(ast::Meta::cast)
                .and_then(|it| it.parent_attr())
                .zip(
                    spec_tt
                        .syntax()
                        .parent()
                        .and_then(ast::Meta::cast)
                        .and_then(|it| it.parent_attr()),
                );
            (Some((orig_tt, spec_tt)), attrs)
        }
        (Either::Right(orig_path), Either::Right(spec_path)) => {
            (None, orig_path.parent_attr().zip(spec_path.parent_attr()))
        }
        // Original and speculative file disagree on the structure.
        _ => return None,
    };

    // Inside an attribute: try the derive pseudo-expansion, then derive helpers.
    if let Some((orig_attr, spec_attr)) = attrs
        && let Some(orig_meta) = orig_attr.meta()
    {
        if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) = (
            sema.expand_derive_as_pseudo_attr_macro(&orig_meta),
            sema.speculative_expand_derive_as_pseudo_attr_macro(
                &orig_attr,
                &spec_attr,
                fake_ident_token.clone(),
            ),
        ) && let Some((fake_mapped_token, _)) =
            fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank)
        {
            // Completing inside `#[derive(...)]` itself: record the derive
            // context, no further recursion happens here.
            return Some(ExpansionResult {
                original_file: original_file.value,
                speculative_file,
                original_offset,
                speculative_offset: fake_ident_token.text_range().start(),
                fake_ident_token,
                derive_ctx: Some((
                    actual_expansion,
                    fake_expansion,
                    fake_mapped_token.text_range().start(),
                    orig_attr,
                )),
            });
        }

        // Possibly a derive helper attribute on an ADT: expand each helper's
        // derive expansion and recurse into it.
        if let Some(spec_adt) =
            spec_attr.syntax().ancestors().find_map(ast::Item::cast).and_then(|it| match it {
                ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
                ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
                ast::Item::Union(it) => Some(ast::Adt::Union(it)),
                _ => None,
            })
        {
            if let Some(helpers) = sema.derive_helper(&orig_attr) {
                for (_mac, file) in helpers {
                    if let Some((fake_expansion, fake_mapped_tokens)) = sema.speculative_expand_raw(
                        file,
                        spec_adt.syntax(),
                        fake_ident_token.clone(),
                    ) {
                        let actual_expansion = sema.parse_or_expand(file.into());
                        // Same marker-offset accounting as for attribute macros
                        // above; recurse into the best-ranked mapped token.
                        let mut accumulated_offset_from_fake_tokens = 0;
                        let actual_range = actual_expansion.text_range().end();
                        let result = fake_mapped_tokens
                            .into_iter()
                            .filter_map(|(fake_mapped_token, rank)| {
                                let accumulated_offset = accumulated_offset_from_fake_tokens;
                                if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
                                    // Not a token produced by the inserted marker.
                                    return None;
                                }
                                accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();

                                let new_offset = fake_mapped_token.text_range().start()
                                    - TextSize::new(accumulated_offset as u32);
                                if new_offset + relative_offset > actual_range {
                                    // Mapped outside the real expansion's bounds.
                                    return None;
                                }
                                let result = expand_maybe_stop(
                                    sema,
                                    InFile::new(file.into(), actual_expansion.clone()),
                                    fake_expansion.clone(),
                                    new_offset,
                                    fake_mapped_token,
                                    relative_offset,
                                )?;
                                Some((result, rank))
                            })
                            .min_by_key(|(_, rank)| *rank)
                            .map(|(result, _)| result);
                        if result.is_some() {
                            return result;
                        }
                    }
                }
            }
        }
        return None;
    }

    // Otherwise this must be a fn-like macro call; both files need to agree on
    // the macro being called.
    let (orig_tt, spec_tt) = tts?;
    let (actual_macro_call, macro_call_with_fake_ident) = (
        orig_tt.syntax().parent().and_then(ast::MacroCall::cast)?,
        spec_tt.syntax().parent().and_then(ast::MacroCall::cast)?,
    );
    let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
    let mac_call_path1 = macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());

    if mac_call_path0 != mac_call_path1 {
        return None;
    }
    let speculative_args = macro_call_with_fake_ident.token_tree()?;

    match (
        sema.expand_macro_call(&actual_macro_call),
        sema.speculative_expand_macro_call(&actual_macro_call, &speculative_args, fake_ident_token),
    ) {
        (Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) => {
            // Same marker-offset accounting as above; the best-ranked mapped
            // token that yields a recursive result wins.
            let mut accumulated_offset_from_fake_tokens = 0;
            let actual_range = actual_expansion.text_range().end();
            fake_mapped_tokens
                .into_iter()
                .filter_map(|(fake_mapped_token, rank)| {
                    let accumulated_offset = accumulated_offset_from_fake_tokens;
                    if !fake_mapped_token.text().contains(COMPLETION_MARKER) {
                        // Not a token produced by the inserted marker.
                        return None;
                    }
                    accumulated_offset_from_fake_tokens += COMPLETION_MARKER.len();

                    let new_offset = fake_mapped_token.text_range().start()
                        - TextSize::new(accumulated_offset as u32);
                    if new_offset + relative_offset > actual_range {
                        // Mapped outside the real expansion's bounds.
                        return None;
                    }
                    let result = expand_maybe_stop(
                        sema,
                        actual_expansion.clone(),
                        fake_expansion.clone(),
                        new_offset,
                        fake_mapped_token,
                        relative_offset,
                    )?;
                    Some((result, rank))
                })
                .min_by_key(|(_, rank)| *rank)
                .map(|(result, _)| result)
        }
        _ => None,
    }
}
439
/// Classifies the completion position inside the expanded trees, producing the
/// completion analysis plus expected type/name and qualifier context.
fn analyze<'db>(
    sema: &Semantics<'db, RootDatabase>,
    expansion_result: ExpansionResult,
    original_token: &SyntaxToken,
    self_token: &SyntaxToken,
) -> Option<(CompletionAnalysis<'db>, (Option<Type<'db>>, Option<ast::NameOrNameRef>), QualifierCtx)>
{
    let _p = tracing::info_span!("CompletionContext::analyze").entered();
    // Partial destructure: `original_offset` is intentionally left in place and
    // read through `expansion_result` further down.
    let ExpansionResult {
        original_file,
        speculative_file,
        original_offset: _,
        speculative_offset,
        fake_ident_token,
        derive_ctx,
    } = expansion_result;

    // Bail if the mapped token's kind is incompatible with the original token's
    // (punctuation, trivia, and identifier-likes are treated as compatible).
    if original_token.kind() != self_token.kind()
        && !(original_token.kind().is_punct() || original_token.kind().is_trivia())
        && !(SyntaxKind::is_any_identifier(original_token.kind())
            && SyntaxKind::is_any_identifier(self_token.kind()))
    {
        return None;
    }

    // Completion inside a `#[derive(...)]` attribute: classify the name ref and
    // rewrite its path kind to `PathKind::Derive` with the already-present derives.
    if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx
        && let Some(origin_meta) = origin_attr.meta()
    {
        if let Some(ast::NameLike::NameRef(name_ref)) =
            find_node_at_offset(&file_with_fake_ident, offset)
        {
            let parent = name_ref.syntax().parent()?;
            let (mut nameref_ctx, _) =
                classify_name_ref(sema, &original_file, name_ref, offset, parent)?;
            if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
                path_ctx.kind = PathKind::Derive {
                    existing_derives: sema
                        .resolve_derive_macro(&origin_meta)
                        .into_iter()
                        .flatten()
                        .flatten()
                        .collect(),
                };
            }
            return Some((
                CompletionAnalysis::NameRef(nameref_ctx),
                (None, None),
                QualifierCtx::default(),
            ));
        }
        return None;
    }

    // No name-like node at the position: fall back to special-cased contexts
    // (string literals, unexpanded attribute/macro token trees, cfg predicates).
    let Some(name_like) = find_node_at_offset(&speculative_file, speculative_offset) else {
        let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
            CompletionAnalysis::String { original, expanded: ast::String::cast(self_token.clone()) }
        } else {
            let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
            let p = token.parent()?;
            if p.kind() == SyntaxKind::TOKEN_TREE
                && p.ancestors().any(|it| it.kind() == SyntaxKind::TOKEN_TREE_META)
            {
                // Inside the token tree of an attribute that did not expand.
                let colon_prefix = previous_non_trivia_token(self_token.clone())
                    .is_some_and(|it| T![:] == it.kind());

                CompletionAnalysis::UnexpandedAttrTT {
                    fake_attribute_under_caret: fake_ident_token
                        .parent_ancestors()
                        .find_map(ast::TokenTreeMeta::cast),
                    colon_prefix,
                    extern_crate: p.ancestors().find_map(ast::ExternCrate::cast),
                }
            } else if p.kind() == SyntaxKind::TOKEN_TREE
                && p.ancestors().any(|it| ast::Macro::can_cast(it.kind()))
            {
                // Inside a macro definition: detect a `$name:` fragment position
                // by looking at the previous non-trivia sibling tokens.
                if let Some([_ident, colon, _name, dollar]) = fake_ident_token
                    .siblings_with_tokens(Direction::Prev)
                    .filter(|it| !it.kind().is_trivia())
                    .take(4)
                    .collect_array()
                    && dollar.kind() == T![$]
                    && colon.kind() == T![:]
                {
                    CompletionAnalysis::MacroSegment
                } else {
                    return None;
                }
            } else if find_node_at_offset::<ast::CfgPredicate>(
                &speculative_file,
                speculative_offset,
            )
            .is_some()
            {
                CompletionAnalysis::CfgPredicate
            } else {
                return None;
            }
        };
        return Some((analysis, (None, None), QualifierCtx::default()));
    };

    let expected = expected_type_and_name(sema, self_token, &name_like);
    let mut qual_ctx = QualifierCtx::default();
    let analysis = match name_like {
        ast::NameLike::Lifetime(lifetime) => {
            CompletionAnalysis::Lifetime(classify_lifetime(sema, &original_file, lifetime)?)
        }
        ast::NameLike::NameRef(name_ref) => {
            let parent = name_ref.syntax().parent()?;
            let (nameref_ctx, qualifier_ctx) = classify_name_ref(
                sema,
                &original_file,
                name_ref,
                expansion_result.original_offset,
                parent,
            )?;

            // Do not complete on the `for` token position of a for-loop.
            if let NameRefContext {
                kind:
                    NameRefKind::Path(PathCompletionCtx { kind: PathKind::Expr { .. }, path, .. }, ..),
                ..
            } = &nameref_ctx
                && is_in_token_of_for_loop(path)
            {
                return None;
            }

            qual_ctx = qualifier_ctx;
            CompletionAnalysis::NameRef(nameref_ctx)
        }
        ast::NameLike::Name(name) => {
            let name_ctx = classify_name(sema, &original_file, name)?;
            CompletionAnalysis::Name(name_ctx)
        }
    };
    Some((analysis, expected, qual_ctx))
}
587
/// Walks up from the token before the completion position to infer the expected
/// type at the position and, where meaningful, the name of the entity it would
/// be bound to (let pattern name, record field name, parameter name, ...).
fn expected_type_and_name<'db>(
    sema: &Semantics<'db, RootDatabase>,
    self_token: &SyntaxToken,
    name_like: &ast::NameLike,
) -> (Option<Type<'db>>, Option<NameOrNameRef>) {
    let token = prev_special_biased_token_at_trivia(self_token.clone());
    let mut node = match token.parent() {
        Some(it) => it,
        None => return (None, None),
    };

    // Adjusts the inferred type for surrounding `&`/`*` chains around the
    // completed expression: each enclosing `&` strips one reference from the
    // expectation, each enclosing `*` (deref) adds one back.
    let strip_refs = |mut ty: Type<'db>| match name_like {
        ast::NameLike::NameRef(n) => {
            let p = match n.syntax().parent() {
                Some(it) => it,
                None => return ty,
            };
            // Find the outermost expression node the name ref participates in
            // (the top of a field-access chain, or the enclosing path expr).
            let top_syn = match_ast! {
                match p {
                    ast::FieldExpr(e) => e
                        .syntax()
                        .ancestors()
                        .take_while(|it| ast::FieldExpr::can_cast(it.kind()))
                        .last(),
                    ast::PathSegment(e) => e
                        .syntax()
                        .ancestors()
                        .skip(1)
                        .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
                        .find(|it| ast::PathExpr::can_cast(it.kind())),
                    _ => None
                }
            };
            let top_syn = match top_syn {
                Some(it) => it,
                None => return ty,
            };
            // Net reference level of the enclosing `&`/`*` chain: `&` counts
            // +1, `*` (deref only) counts -1; anything else stops the walk.
            let refs_level = top_syn
                .ancestors()
                .skip(1)
                .map_while(Either::<ast::RefExpr, ast::PrefixExpr>::cast)
                .take_while(|it| match it {
                    Either::Left(_) => true,
                    Either::Right(prefix) => prefix.op_kind() == Some(ast::UnaryOp::Deref),
                })
                .fold(0i32, |level, expr| match expr {
                    Either::Left(_) => level + 1,
                    Either::Right(_) => level - 1,
                });
            for _ in 0..refs_level {
                cov_mark::hit!(expected_type_fn_param_ref);
                ty = ty.strip_reference();
            }
            // Negative level: more derefs than refs, add shared references back.
            for _ in refs_level..0 {
                cov_mark::hit!(expected_type_fn_param_deref);
                ty = ty.add_reference(hir::Mutability::Shared);
            }
            ty
        }
        _ => ty,
    };

    // Walk up the ancestors until a node determines the expectation (the `_`
    // arm continues the loop with the parent).
    let (ty, name) = loop {
        break match_ast! {
            match node {
                ast::LetStmt(it) => {
                    cov_mark::hit!(expected_type_let_with_leading_char);
                    cov_mark::hit!(expected_type_let_without_leading_char);
                    let ty = it.pat()
                        .and_then(|pat| sema.type_of_pat(&pat))
                        .or_else(|| it.initializer().and_then(|it| sema.type_of_expr(&it)))
                        .map(TypeInfo::original)
                        .filter(|ty| {
                            // Unless there is an explicit ascription, a bare fn
                            // type is not a useful expectation.
                            it.ty().is_some() || !ty.is_fn()
                        });
                    let name = match it.pat() {
                        Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
                        Some(_) | None => None,
                    };

                    (ty, name)
                },
                ast::LetExpr(it) => {
                    cov_mark::hit!(expected_type_if_let_without_leading_char);
                    let ty = it.pat()
                        .and_then(|pat| sema.type_of_pat(&pat))
                        .or_else(|| it.expr().and_then(|it| sema.type_of_expr(&it)))
                        .map(TypeInfo::original);
                    (ty, None)
                },
                ast::BinExpr(it) => {
                    if let Some(ast::BinaryOp::Assignment { op: None }) = it.op_kind() {
                        // Plain assignment: expect the type of the other side.
                        let ty = it.lhs()
                            .and_then(|lhs| sema.type_of_expr(&lhs))
                            .or_else(|| it.rhs().and_then(|rhs| sema.type_of_expr(&rhs)))
                            .map(TypeInfo::original);
                        (ty, None)
                    } else if let Some(ast::BinaryOp::LogicOp(_)) = it.op_kind() {
                        let ty = sema.type_of_expr(&it.clone().into()).map(TypeInfo::original);
                        (ty, None)
                    } else {
                        (None, None)
                    }
                },
                ast::ArgList(_) => {
                    cov_mark::hit!(expected_type_fn_param);
                    // Inside a call: expect the active parameter's type/name.
                    ActiveParameter::at_token(
                        sema,
                        token.clone(),
                    ).map(|ap| {
                        let name = ap.ident().map(NameOrNameRef::Name);
                        (Some(ap.ty), name)
                    })
                    .unwrap_or((None, None))
                },
                ast::RecordExprFieldList(it) => {
                    (|| {
                        if token.kind() == T![..]
                            || token.prev_token().map(|t| t.kind()) == Some(T![..])
                        {
                            // Functional-update syntax `..base`: expect the
                            // record's own type.
                            cov_mark::hit!(expected_type_struct_func_update);
                            let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
                            let ty = sema.type_of_expr(&record_expr.into())?;
                            Some((
                                Some(ty.original),
                                None
                            ))
                        } else {
                            cov_mark::hit!(expected_type_struct_field_without_leading_char);
                            cov_mark::hit!(expected_type_struct_field_followed_by_comma);
                            let expr_field = previous_non_trivia_token(token.clone())?.parent().and_then(ast::RecordExprField::cast)?;
                            let (_, _, ty) = sema.resolve_record_field(&expr_field)?;
                            Some((
                                Some(ty),
                                expr_field.field_name().map(NameOrNameRef::NameRef),
                            ))
                        }
                    })().unwrap_or((None, None))
                },
                ast::RecordExprField(it) => {
                    let field_ty = sema.resolve_record_field(&it).map(|(_, _, ty)| ty);
                    let field_name = it.field_name().map(NameOrNameRef::NameRef);
                    if let Some(expr) = it.expr() {
                        cov_mark::hit!(expected_type_struct_field_with_leading_char);
                        let ty = field_ty
                            .or_else(|| sema.type_of_expr(&expr).map(TypeInfo::original));
                        (ty, field_name)
                    } else {
                        (field_ty, field_name)
                    }
                },
                ast::MatchExpr(it) => {
                    // After `=>` the arm body is expected to have the match's
                    // type; before it, the arm pattern position expects the
                    // scrutinee's type.
                    let on_arrow = previous_non_trivia_token(token.clone()).is_some_and(|it| T![=>] == it.kind());

                    let ty = if on_arrow {
                        cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
                        cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
                        sema.type_of_expr(&it.into())
                    } else {
                        cov_mark::hit!(expected_type_match_arm_without_leading_char);
                        it.expr().and_then(|e| sema.type_of_expr(&e))
                    }.map(TypeInfo::original);
                    (ty, None)
                },
                ast::MatchArm(it) => {
                    let on_arrow = previous_non_trivia_token(token.clone()).is_some_and(|it| T![=>] == it.kind());
                    let in_body = it.expr().is_some_and(|it| it.syntax().text_range().contains_range(token.text_range()));
                    let match_expr = it.parent_match();

                    let ty = if on_arrow || in_body {
                        cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
                        cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
                        sema.type_of_expr(&match_expr.into())
                    } else {
                        cov_mark::hit!(expected_type_match_arm_without_leading_char);
                        match_expr.expr().and_then(|e| sema.type_of_expr(&e))
                    }.map(TypeInfo::original);
                    (ty, None)
                },
                ast::IfExpr(it) => {
                    // Past the then-branch start we're in a branch body;
                    // otherwise in the condition.
                    let ty = if let Some(body) = it.then_branch()
                        && token.text_range().end() > body.syntax().text_range().start()
                    {
                        sema.type_of_expr(&body.into())
                    } else {
                        it.condition().and_then(|e| sema.type_of_expr(&e))
                    }.map(TypeInfo::original);
                    (ty, None)
                },
                ast::IdentPat(it) => {
                    cov_mark::hit!(expected_type_if_let_with_leading_char);
                    cov_mark::hit!(expected_type_match_arm_with_leading_char);
                    let ty = sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
                    (ty, None)
                },
                ast::TupleStructPat(it) => {
                    // Expect the type of the field at the cursor's position
                    // within the tuple pattern.
                    let fields = it.path().and_then(|path| match sema.resolve_path(&path)? {
                        hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) => Some(adt.as_struct()?.fields(sema.db)),
                        hir::PathResolution::Def(hir::ModuleDef::EnumVariant(variant)) => Some(variant.fields(sema.db)),
                        _ => None,
                    });
                    let nr = it.fields().take_while(|it| it.syntax().text_range().end() <= token.text_range().start()).count();
                    let ty = fields.and_then(|fields| Some(fields.get(nr)?.ty(sema.db).to_type(sema.db)));
                    (ty, None)
                },
                ast::Fn(it) => {
                    cov_mark::hit!(expected_type_fn_ret_with_leading_char);
                    cov_mark::hit!(expected_type_fn_ret_without_leading_char);
                    let def = sema.to_def(&it);
                    (def.map(|def| def.ret_type(sema.db)), None)
                },
                ast::ReturnExpr(it) => {
                    // `return` expects the enclosing fn's or closure's return type.
                    let fn_ = sema.ancestors_with_macros(it.syntax().clone())
                        .find_map(Either::<ast::Fn, ast::ClosureExpr>::cast);
                    let ty = fn_.and_then(|f| match f {
                        Either::Left(f) => Some(sema.to_def(&f)?.ret_type(sema.db)),
                        Either::Right(f) => {
                            let ty = sema.type_of_expr(&f.into())?.original.as_callable(sema.db)?;
                            Some(ty.return_type())
                        },
                    });
                    (ty, None)
                },
                ast::BreakExpr(it) => {
                    // `break` expects the type of the loop it targets.
                    let ty = it.break_token()
                        .and_then(|it| find_loops(sema, &it)?.next())
                        .and_then(|expr| sema.type_of_expr(&expr));
                    (ty.map(TypeInfo::original), None)
                },
                ast::ClosureExpr(it) => {
                    let ty = sema.type_of_expr(&it.into());
                    ty.and_then(|ty| ty.original.as_callable(sema.db))
                        .map(|c| (Some(c.return_type()), None))
                        .unwrap_or((None, None))
                },
                ast::ParamList(it) => {
                    // Only past the closure's param list does the position
                    // expect the closure's return type.
                    let closure = it.syntax().parent().and_then(ast::ClosureExpr::cast);
                    let ty = closure
                        .filter(|_| it.syntax().text_range().end() <= self_token.text_range().start())
                        .and_then(|it| sema.type_of_expr(&it.into()));
                    ty.and_then(|ty| ty.original.as_callable(sema.db))
                        .map(|c| (Some(c.return_type()), None))
                        .unwrap_or((None, None))
                },
                ast::Variant(it) => {
                    // Only offer the variant name for "simple" tuple variants
                    // (single plain path field, no commas yet).
                    let is_simple_field = |field: ast::TupleField| {
                        let Some(ty) = field.ty() else { return true };
                        matches!(ty, ast::Type::PathType(_)) && ty.generic_arg_list().is_none()
                    };
                    let is_simple_variant = matches!(
                        it.field_list(),
                        Some(ast::FieldList::TupleFieldList(list))
                            if list.syntax().children_with_tokens().all(|it| it.kind() != T![,])
                                && list.fields().next().is_none_or(is_simple_field)
                    );
                    (None, it.name().filter(|_| is_simple_variant).map(NameOrNameRef::Name))
                },
                ast::Stmt(_) => (None, None),
                ast::Item(_) => (None, None),
                _ => {
                    match node.parent() {
                        Some(n) => {
                            node = n;
                            continue;
                        },
                        None => (None, None),
                    }
                },
            }
        };
    };
    (ty.map(strip_refs), name)
}
871
872fn classify_lifetime(
873 sema: &Semantics<'_, RootDatabase>,
874 original_file: &SyntaxNode,
875 lifetime: ast::Lifetime,
876) -> Option<LifetimeContext> {
877 let parent = lifetime.syntax().parent()?;
878 if parent.kind() == SyntaxKind::ERROR {
879 return None;
880 }
881
882 let lifetime =
883 find_node_at_offset::<ast::Lifetime>(original_file, lifetime.syntax().text_range().start());
884 let kind = match_ast! {
885 match parent {
886 ast::LifetimeParam(_) => LifetimeKind::LifetimeParam,
887 ast::BreakExpr(_) => LifetimeKind::LabelRef,
888 ast::ContinueExpr(_) => LifetimeKind::LabelRef,
889 ast::Label(_) => LifetimeKind::LabelDef,
890 _ => {
891 let def = lifetime.as_ref().and_then(|lt| sema.scope(lt.syntax())?.generic_def());
892 LifetimeKind::Lifetime { in_lifetime_param_bound: ast::TypeBound::can_cast(parent.kind()), def }
893 },
894 }
895 };
896
897 Some(LifetimeContext { kind })
898}
899
/// Classifies a completion position sitting on a name (definition) token by
/// mapping the name's parent node to the corresponding `NameKind`.
fn classify_name(
    sema: &Semantics<'_, RootDatabase>,
    original_file: &SyntaxNode,
    name: ast::Name,
) -> Option<NameContext> {
    let parent = name.syntax().parent()?;
    let kind = match_ast! {
        match parent {
            ast::Const(_) => NameKind::Const,
            ast::ConstParam(_) => NameKind::ConstParam,
            ast::Enum(_) => NameKind::Enum,
            ast::Fn(_) => NameKind::Function,
            ast::IdentPat(bind_pat) => {
                // Binding patterns carry a full pattern context; when the name
                // is a record-pat field shorthand, attach the enclosing record
                // pattern from the original file as well.
                let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
                if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
                    pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
                }

                NameKind::IdentPat(pat_ctx)
            },
            ast::MacroDef(_) => NameKind::MacroDef,
            ast::MacroRules(_) => NameKind::MacroRules,
            ast::Module(module) => NameKind::Module(module),
            ast::RecordField(_) => NameKind::RecordField,
            ast::Rename(_) => NameKind::Rename,
            ast::SelfParam(_) => NameKind::SelfParam,
            ast::Static(_) => NameKind::Static,
            ast::Struct(_) => NameKind::Struct,
            ast::Trait(_) => NameKind::Trait,
            ast::TypeAlias(_) => NameKind::TypeAlias,
            ast::TypeParam(_) => NameKind::TypeParam,
            ast::Union(_) => NameKind::Union,
            ast::Variant(_) => NameKind::Variant,
            _ => return None,
        }
    };
    // Map the speculative name back to the matching node in the original file.
    let name = find_node_at_offset(original_file, name.syntax().text_range().start());
    Some(NameContext { name, kind })
}
939
940fn classify_name_ref<'db>(
941 sema: &Semantics<'db, RootDatabase>,
942 original_file: &SyntaxNode,
943 name_ref: ast::NameRef,
944 original_offset: TextSize,
945 parent: SyntaxNode,
946) -> Option<(NameRefContext<'db>, QualifierCtx)> {
947 let nameref = find_node_at_offset(original_file, original_offset);
948
949 let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
950
951 if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
952 let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
953 .is_some_and(|it| T![.] == it.kind());
954
955 return find_node_in_file_compensated(
956 sema,
957 original_file,
958 &record_field.parent_record_lit(),
959 )
960 .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix })
961 .map(make_res);
962 }
963 if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
964 let kind = NameRefKind::Pattern(PatternContext {
965 param_ctx: None,
966 has_type_ascription: false,
967 ref_token: None,
968 mut_token: None,
969 record_pat: find_node_in_file_compensated(
970 sema,
971 original_file,
972 &record_field.parent_record_pat(),
973 ),
974 ..pattern_context_for(sema, original_file, record_field.parent_record_pat().into())
975 });
976 return Some(make_res(kind));
977 }
978
979 let field_expr_handle = |receiver, node| {
980 let receiver = find_opt_node_in_file(original_file, receiver);
981 let receiver_is_ambiguous_float_literal = match &receiver {
982 Some(ast::Expr::Literal(l)) => {
983 matches!(l.kind(), ast::LiteralKind::FloatNumber { .. })
984 && l.syntax().last_token().is_some_and(|it| it.text().ends_with('.'))
985 }
986 _ => false,
987 };
988
989 let receiver_is_part_of_indivisible_expression = match &receiver {
990 Some(ast::Expr::IfExpr(_)) => {
991 let next_token_kind =
992 next_non_trivia_token(name_ref.syntax().clone()).map(|t| t.kind());
993 next_token_kind == Some(SyntaxKind::ELSE_KW)
994 }
995 _ => false,
996 };
997 if receiver_is_part_of_indivisible_expression {
998 return None;
999 }
1000
1001 let mut receiver_ty = receiver.as_ref().and_then(|it| sema.type_of_expr(it));
1002 if receiver_is_ambiguous_float_literal {
1003 always!(receiver_ty.as_ref().is_none_or(|receiver_ty| receiver_ty.original.is_float()));
1005 receiver_ty =
1006 Some(TypeInfo { original: hir::BuiltinType::i32().ty(sema.db), adjusted: None });
1007 }
1008
1009 let kind = NameRefKind::DotAccess(DotAccess {
1010 receiver_ty,
1011 kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
1012 receiver,
1013 ctx: DotAccessExprCtx {
1014 in_block_expr: is_in_block(node),
1015 in_breakable: is_in_breakable(node).unzip().0,
1016 },
1017 });
1018 Some(make_res(kind))
1019 };
1020
1021 let segment = match_ast! {
1022 match parent {
1023 ast::PathSegment(segment) => segment,
1024 ast::FieldExpr(field) => {
1025 return field_expr_handle(field.expr(), field.syntax());
1026 },
1027 ast::ExternCrate(_) => {
1028 let kind = NameRefKind::ExternCrate;
1029 return Some(make_res(kind));
1030 },
1031 ast::MethodCallExpr(method) => {
1032 let receiver = find_opt_node_in_file(original_file, method.receiver());
1033 let has_parens = has_parens(&method);
1034 if !has_parens && let Some(res) = field_expr_handle(method.receiver(), method.syntax()) {
1035 return Some(res)
1036 }
1037 let kind = NameRefKind::DotAccess(DotAccess {
1038 receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
1039 kind: DotAccessKind::Method,
1040 receiver,
1041 ctx: DotAccessExprCtx { in_block_expr: is_in_block(method.syntax()), in_breakable: is_in_breakable(method.syntax()).unzip().0 }
1042 });
1043 return Some(make_res(kind));
1044 },
1045 _ => return None,
1046 }
1047 };
1048
1049 let path = segment.parent_path();
1050 let original_path = find_node_in_file_compensated(sema, original_file, &path);
1051
1052 let mut path_ctx = PathCompletionCtx {
1053 has_call_parens: false,
1054 has_macro_bang: false,
1055 qualified: Qualified::No,
1056 parent: None,
1057 path: path.clone(),
1058 original_path,
1059 kind: PathKind::Item { kind: ItemListKind::SourceFile },
1060 has_type_args: false,
1061 use_tree_parent: false,
1062 };
1063
1064 let func_update_record = |syn: &SyntaxNode| {
1065 if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
1066 find_node_in_file_compensated(sema, original_file, &record_expr)
1067 } else {
1068 None
1069 }
1070 };
1071 let prev_expr = |node: SyntaxNode| {
1072 let node = match node.parent().and_then(ast::ExprStmt::cast) {
1073 Some(stmt) => stmt.syntax().clone(),
1074 None => node,
1075 };
1076 let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;
1077
1078 match_ast! {
1079 match prev_sibling {
1080 ast::ExprStmt(stmt) => stmt.expr().filter(|_| stmt.semicolon_token().is_none()),
1081 ast::LetStmt(stmt) => stmt.initializer().filter(|_| stmt.semicolon_token().is_none()),
1082 ast::Expr(expr) => Some(expr),
1083 _ => None,
1084 }
1085 }
1086 };
1087 let after_incomplete_let = |node: SyntaxNode| {
1088 prev_expr(node).and_then(|it| it.syntax().parent()).and_then(ast::LetStmt::cast)
1089 };
1090 let before_else_kw = |node: &SyntaxNode| {
1091 node.parent()
1092 .and_then(ast::ExprStmt::cast)
1093 .filter(|stmt| stmt.semicolon_token().is_none())
1094 .and_then(|stmt| non_trivia_sibling(stmt.syntax().clone().into(), Direction::Next))
1095 .and_then(NodeOrToken::into_node)
1096 .filter(|next| next.kind() == SyntaxKind::ERROR)
1097 .and_then(|next| next.first_token())
1098 .is_some_and(|token| token.kind() == SyntaxKind::ELSE_KW)
1099 };
1100 let is_in_value = |it: &SyntaxNode| {
1101 let Some(node) = it.parent() else { return false };
1102 let kind = node.kind();
1103 ast::LetStmt::can_cast(kind)
1104 || ast::ArgList::can_cast(kind)
1105 || ast::ArrayExpr::can_cast(kind)
1106 || ast::ParenExpr::can_cast(kind)
1107 || ast::BreakExpr::can_cast(kind)
1108 || ast::ReturnExpr::can_cast(kind)
1109 || ast::PrefixExpr::can_cast(kind)
1110 || ast::FormatArgsArg::can_cast(kind)
1111 || ast::RecordExprField::can_cast(kind)
1112 || ast::BinExpr::cast(node.clone())
1113 .and_then(|expr| expr.rhs())
1114 .is_some_and(|expr| expr.syntax() == it)
1115 || ast::IndexExpr::cast(node)
1116 .and_then(|expr| expr.index())
1117 .is_some_and(|expr| expr.syntax() == it)
1118 };
1119
1120 let inbetween_body_and_decl_check = |node: SyntaxNode| {
1128 if let Some(NodeOrToken::Node(n)) =
1129 syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev)
1130 && let Some(item) = ast::Item::cast(n)
1131 {
1132 let is_inbetween = match &item {
1133 ast::Item::Const(it) => it.body().is_none() && it.semicolon_token().is_none(),
1134 ast::Item::Enum(it) => it.variant_list().is_none(),
1135 ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
1136 ast::Item::Fn(it) => it.body().is_none() && it.semicolon_token().is_none(),
1137 ast::Item::Impl(it) => it.assoc_item_list().is_none(),
1138 ast::Item::Module(it) => it.item_list().is_none() && it.semicolon_token().is_none(),
1139 ast::Item::Static(it) => it.body().is_none(),
1140 ast::Item::Struct(it) => {
1141 it.field_list().is_none() && it.semicolon_token().is_none()
1142 }
1143 ast::Item::Trait(it) => it.assoc_item_list().is_none(),
1144 ast::Item::TypeAlias(it) => it.ty().is_none() && it.semicolon_token().is_none(),
1145 ast::Item::Union(it) => it.record_field_list().is_none(),
1146 _ => false,
1147 };
1148 if is_inbetween {
1149 return Some(item);
1150 }
1151 }
1152 None
1153 };
1154
1155 let generic_arg_location = |arg: ast::GenericArg| {
1156 let mut override_location = None;
1157 let location = find_opt_node_in_file_compensated(
1158 sema,
1159 original_file,
1160 arg.syntax().parent().and_then(ast::GenericArgList::cast),
1161 )
1162 .map(|args| {
1163 let mut in_trait = None;
1164 let param = (|| {
1165 let parent = args.syntax().parent()?;
1166 let params = match_ast! {
1167 match parent {
1168 ast::PathSegment(segment) => {
1169 match sema.resolve_path(&segment.parent_path().top_path())? {
1170 hir::PathResolution::Def(def) => match def {
1171 hir::ModuleDef::Function(func) => {
1172 sema.source(func)?.value.generic_param_list()
1173 }
1174 hir::ModuleDef::Adt(adt) => {
1175 sema.source(adt)?.value.generic_param_list()
1176 }
1177 hir::ModuleDef::EnumVariant(variant) => {
1178 sema.source(variant.parent_enum(sema.db))?.value.generic_param_list()
1179 }
1180 hir::ModuleDef::Trait(trait_) => {
1181 if let ast::GenericArg::AssocTypeArg(arg) = &arg {
1182 let arg_name = arg.name_ref()?;
1183 let arg_name = arg_name.text();
1184 for item in trait_.items_with_supertraits(sema.db) {
1185 match item {
1186 hir::AssocItem::TypeAlias(assoc_ty) => {
1187 if assoc_ty.name(sema.db).as_str() == arg_name {
1188 override_location = Some(TypeLocation::AssocTypeEq);
1189 return None;
1190 }
1191 },
1192 hir::AssocItem::Const(const_) => {
1193 if const_.name(sema.db)?.as_str() == arg_name {
1194 override_location = Some(TypeLocation::AssocConstEq);
1195 return None;
1196 }
1197 },
1198 _ => (),
1199 }
1200 }
1201 return None;
1202 } else {
1203 in_trait = Some(trait_);
1204 sema.source(trait_)?.value.generic_param_list()
1205 }
1206 }
1207 hir::ModuleDef::TypeAlias(ty_) => {
1208 sema.source(ty_)?.value.generic_param_list()
1209 }
1210 _ => None,
1211 },
1212 _ => None,
1213 }
1214 },
1215 ast::MethodCallExpr(call) => {
1216 let func = sema.resolve_method_call(&call)?;
1217 sema.source(func)?.value.generic_param_list()
1218 },
1219 ast::AssocTypeArg(arg) => {
1220 let trait_ = ast::PathSegment::cast(arg.syntax().parent()?.parent()?)?;
1221 match sema.resolve_path(&trait_.parent_path().top_path())? {
1222 hir::PathResolution::Def(hir::ModuleDef::Trait(trait_)) => {
1223 let arg_name = arg.name_ref()?;
1224 let arg_name = arg_name.text();
1225 let trait_items = trait_.items_with_supertraits(sema.db);
1226 let assoc_ty = trait_items.iter().find_map(|item| match item {
1227 hir::AssocItem::TypeAlias(assoc_ty) => {
1228 (assoc_ty.name(sema.db).as_str() == arg_name)
1229 .then_some(assoc_ty)
1230 },
1231 _ => None,
1232 })?;
1233 sema.source(*assoc_ty)?.value.generic_param_list()
1234 }
1235 _ => None,
1236 }
1237 },
1238 _ => None,
1239 }
1240 }?;
1241 let mut explicit_lifetime_arg = false;
1250 let arg_idx = arg
1251 .syntax()
1252 .siblings(Direction::Prev)
1253 .skip(1)
1255 .map(|arg| if ast::LifetimeArg::can_cast(arg.kind()) { explicit_lifetime_arg = true })
1256 .count();
1257 let param_idx = if explicit_lifetime_arg {
1258 arg_idx
1259 } else {
1260 arg_idx + params.lifetime_params().count()
1263 };
1264 params.generic_params().nth(param_idx)
1265 })();
1266 (args, in_trait, param)
1267 });
1268 let (arg_list, of_trait, corresponding_param) = match location {
1269 Some((arg_list, of_trait, param)) => (Some(arg_list), of_trait, param),
1270 _ => (None, None, None),
1271 };
1272 override_location.unwrap_or(TypeLocation::GenericArg {
1273 args: arg_list,
1274 of_trait,
1275 corresponding_param,
1276 })
1277 };
1278
1279 let type_location = |node: &SyntaxNode| {
1280 let parent = node.parent()?;
1281 let res = match_ast! {
1282 match parent {
1283 ast::Const(it) => {
1284 let name = find_opt_node_in_file(original_file, it.name())?;
1285 let original = ast::Const::cast(name.syntax().parent()?)?;
1286 TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
1287 },
1288 ast::Static(it) => {
1289 let name = find_opt_node_in_file(original_file, it.name())?;
1290 let original = ast::Static::cast(name.syntax().parent()?)?;
1291 TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
1292 },
1293 ast::RetType(_) => {
1294 let parent = match ast::Fn::cast(parent.parent()?) {
1295 Some(it) => it.param_list(),
1296 None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
1297 };
1298
1299 let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?;
1300 let body = match_ast! {
1301 match parent {
1302 ast::ClosureExpr(it) => {
1303 it.body()
1304 },
1305 ast::Fn(it) => {
1306 it.body().map(ast::Expr::BlockExpr)
1307 },
1308 _ => return None,
1309 }
1310 };
1311 let item = ast::Fn::cast(parent);
1312 TypeLocation::TypeAscription(TypeAscriptionTarget::RetType { body, item })
1313 },
1314 ast::Param(it) => {
1315 it.colon_token()?;
1316 TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat())))
1317 },
1318 ast::LetStmt(it) => {
1319 it.colon_token()?;
1320 TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat())))
1321 },
1322 ast::Impl(it) => {
1323 match it.trait_() {
1324 Some(t) if t.syntax() == node => TypeLocation::ImplTrait,
1325 _ => match it.self_ty() {
1326 Some(t) if t.syntax() == node => TypeLocation::ImplTarget,
1327 _ => return None,
1328 },
1329 }
1330 },
1331 ast::TypeBound(_) => TypeLocation::TypeBound,
1332 ast::TypeBoundList(_) => TypeLocation::TypeBound,
1334 ast::GenericArg(it) => generic_arg_location(it),
1335 ast::GenericArgList(it) => {
1337 let args = find_opt_node_in_file_compensated(sema, original_file, Some(it));
1338 TypeLocation::GenericArg { args, of_trait: None, corresponding_param: None }
1339 },
1340 ast::TupleField(_) => TypeLocation::TupleField,
1341 _ => return None,
1342 }
1343 };
1344 Some(res)
1345 };
1346
1347 let make_path_kind_expr = |expr: ast::Expr| {
1348 let it = expr.syntax();
1349 let prev_token = iter::successors(it.first_token(), |it| it.prev_token())
1350 .skip(1)
1351 .find(|it| !it.kind().is_trivia());
1352 let in_block_expr = is_in_block(it);
1353 let (in_loop_body, innermost_breakable) = is_in_breakable(it).unzip();
1354 let after_if_expr = is_after_if_expr(it.clone());
1355 let after_amp = prev_token.as_ref().is_some_and(|it| it.kind() == SyntaxKind::AMP);
1356 let ref_expr_parent = prev_token.and_then(|it| it.parent()).and_then(ast::RefExpr::cast);
1357 let (innermost_ret_ty, self_param) = {
1358 let find_ret_ty = |it: SyntaxNode| {
1359 if let Some(item) = ast::Item::cast(it.clone()) {
1360 match item {
1361 ast::Item::Fn(f) => Some(sema.to_def(&f).map(|it| it.ret_type(sema.db))),
1362 ast::Item::MacroCall(_) => None,
1363 _ => Some(None),
1364 }
1365 } else {
1366 let expr = ast::Expr::cast(it)?;
1367 let callable = match expr {
1368 ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
1371 _ => return None,
1372 };
1373 Some(
1374 callable
1375 .and_then(|c| c.adjusted().as_callable(sema.db))
1376 .map(|it| it.return_type()),
1377 )
1378 }
1379 };
1380 let fn_self_param =
1381 |fn_: ast::Fn| sema.to_def(&fn_).and_then(|it| it.self_param(sema.db));
1382 let closure_this_param = |closure: ast::ClosureExpr| {
1383 if closure.param_list()?.params().next()?.pat()?.syntax().text() != "this" {
1384 return None;
1385 }
1386 sema.type_of_expr(&closure.into())
1387 .and_then(|it| it.original.as_callable(sema.db))
1388 .and_then(|it| it.params().into_iter().next())
1389 };
1390 let find_fn_self_param = |it: SyntaxNode| {
1391 match_ast! {
1392 match it {
1393 ast::Fn(fn_) => Some(fn_self_param(fn_).map(Either::Left)),
1394 ast::ClosureExpr(f) => closure_this_param(f).map(Either::Right).map(Some),
1395 ast::MacroCall(_) => None,
1396 ast::Item(_) => Some(None),
1397 _ => None,
1398 }
1399 }
1400 };
1401
1402 match find_node_in_file_compensated(sema, original_file, &expr) {
1403 Some(it) => {
1404 let innermost_ret_ty = sema
1406 .ancestors_with_macros(it.syntax().clone())
1407 .find_map(find_ret_ty)
1408 .flatten();
1409
1410 let self_param = sema
1411 .ancestors_with_macros(it.syntax().clone())
1412 .find_map(find_fn_self_param)
1413 .flatten();
1414 (innermost_ret_ty, self_param)
1415 }
1416 None => (None, None),
1417 }
1418 };
1419 let innermost_breakable_ty = innermost_breakable
1420 .and_then(ast::Expr::cast)
1421 .and_then(|expr| find_node_in_file_compensated(sema, original_file, &expr))
1422 .and_then(|expr| sema.type_of_expr(&expr))
1423 .map(|ty| if ty.original.is_never() { ty.adjusted() } else { ty.original() });
1424 let is_func_update = func_update_record(it);
1425 let in_condition = is_in_condition(&expr);
1426 let after_incomplete_let = after_incomplete_let(it.clone()).is_some();
1427 let incomplete_expr_stmt =
1428 it.parent().and_then(ast::ExprStmt::cast).map(|it| it.semicolon_token().is_none());
1429 let before_else_kw = before_else_kw(it);
1430 let incomplete_let = left_ancestors(it.parent())
1431 .find_map(ast::LetStmt::cast)
1432 .is_some_and(|it| it.semicolon_token().is_none())
1433 || after_incomplete_let && incomplete_expr_stmt.unwrap_or(true) && !before_else_kw;
1434 let in_value = is_in_value(it);
1435 let impl_ = fetch_immediate_impl_or_trait(sema, original_file, expr.syntax())
1436 .and_then(Either::left);
1437
1438 let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) {
1439 Some(arm) => arm
1440 .fat_arrow_token()
1441 .is_none_or(|arrow| it.text_range().start() < arrow.text_range().start()),
1442 None => false,
1443 };
1444
1445 PathKind::Expr {
1446 expr_ctx: PathExprCtx {
1447 in_block_expr,
1448 in_breakable: in_loop_body,
1449 after_if_expr,
1450 before_else_kw,
1451 in_condition,
1452 ref_expr_parent,
1453 after_amp,
1454 is_func_update,
1455 innermost_ret_ty,
1456 innermost_breakable_ty,
1457 self_param,
1458 in_value,
1459 incomplete_let,
1460 after_incomplete_let,
1461 impl_,
1462 in_match_guard,
1463 },
1464 }
1465 };
1466 let make_path_kind_type = |ty: ast::Type| {
1467 let location = type_location(ty.syntax());
1468 PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
1469 };
1470
1471 let kind_item = |it: &SyntaxNode| {
1472 let parent = it.parent()?;
1473 let kind = match_ast! {
1474 match parent {
1475 ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
1476 ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
1477 Some(it) => match_ast! {
1478 match it {
1479 ast::Trait(_) => ItemListKind::Trait,
1480 ast::Impl(it) => if it.trait_().is_some() {
1481 ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
1482 } else {
1483 ItemListKind::Impl
1484 },
1485 _ => return None
1486 }
1487 },
1488 None => return None,
1489 } },
1490 ast::ExternItemList(it) => {
1491 let exn_blk = it.syntax().parent().and_then(ast::ExternBlock::cast);
1492 PathKind::Item {
1493 kind: ItemListKind::ExternBlock {
1494 is_unsafe: exn_blk.and_then(|it| it.unsafe_token()).is_some(),
1495 }
1496 }
1497 },
1498 ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
1499 _ => return None,
1500 }
1501 };
1502 Some(kind)
1503 };
1504
1505 let mut kind_macro_call = |it: ast::MacroCall| {
1506 path_ctx.has_macro_bang = it.excl_token().is_some();
1507 let parent = it.syntax().parent()?;
1508 if let Some(kind) = kind_item(it.syntax()) {
1509 return Some(kind);
1510 }
1511 let kind = match_ast! {
1512 match parent {
1513 ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
1514 ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
1515 ast::MacroType(ty) => make_path_kind_type(ty.into()),
1516 _ => return None,
1517 }
1518 };
1519 Some(kind)
1520 };
1521 let make_path_kind_attr = |meta: ast::Meta| {
1522 let attr = meta.parent_attr()?;
1523 let kind = attr.kind();
1524 let attached = attr.syntax().parent()?;
1525 let is_trailing_outer_attr = kind != AttrKind::Inner
1526 && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none();
1527 let annotated_item_kind = if is_trailing_outer_attr { None } else { Some(attached.kind()) };
1528 let derive_helpers = annotated_item_kind
1529 .filter(|kind| {
1530 matches!(
1531 kind,
1532 SyntaxKind::STRUCT
1533 | SyntaxKind::ENUM
1534 | SyntaxKind::UNION
1535 | SyntaxKind::VARIANT
1536 | SyntaxKind::TUPLE_FIELD
1537 | SyntaxKind::RECORD_FIELD
1538 )
1539 })
1540 .and_then(|_| find_node_at_offset::<ast::Adt>(original_file, original_offset))
1541 .and_then(|adt| sema.derive_helpers_in_scope(&adt))
1542 .unwrap_or_default();
1543 Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind, derive_helpers } })
1544 };
1545
1546 let parent = path.syntax().parent()?;
1548 let kind = 'find_kind: {
1549 if parent.kind() == SyntaxKind::ERROR {
1550 if let Some(kind) = inbetween_body_and_decl_check(parent.clone()) {
1551 return Some(make_res(NameRefKind::Keyword(kind)));
1552 }
1553
1554 break 'find_kind kind_item(&parent)?;
1555 }
1556 match_ast! {
1557 match parent {
1558 ast::PathType(it) => make_path_kind_type(it.into()),
1559 ast::PathExpr(it) => {
1560 if let Some(p) = it.syntax().parent() {
1561 let p_kind = p.kind();
1562 let probe = if ast::ExprStmt::can_cast(p_kind) {
1565 Some(p)
1566 } else if ast::StmtList::can_cast(p_kind) {
1567 Some(it.syntax().clone())
1568 } else {
1569 None
1570 };
1571 if let Some(kind) = probe.and_then(inbetween_body_and_decl_check) {
1572 return Some(make_res(NameRefKind::Keyword(kind)));
1573 }
1574 }
1575
1576 path_ctx.has_call_parens = it.syntax().parent().is_some_and(|it| ast::CallExpr::cast(it).is_some_and(|it| has_parens(&it)));
1577
1578 make_path_kind_expr(it.into())
1579 },
1580 ast::TupleStructPat(it) => {
1581 path_ctx.has_call_parens = true;
1582 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
1583 },
1584 ast::RecordPat(it) => {
1585 path_ctx.has_call_parens = true;
1586 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
1587 },
1588 ast::PathPat(it) => {
1589 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
1590 },
1591 ast::MacroCall(it) => {
1592 kind_macro_call(it)?
1593 },
1594 ast::Meta(meta) => make_path_kind_attr(meta)?,
1595 ast::VisibilityInner(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
1596 ast::UseTree(_) => PathKind::Use,
1597 ast::Path(parent) => {
1599 path_ctx.parent = Some(parent.clone());
1600 let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
1601 match_ast! {
1602 match parent {
1603 ast::PathType(it) => make_path_kind_type(it.into()),
1604 ast::PathExpr(it) => {
1605 path_ctx.has_call_parens = it.syntax().parent().is_some_and(|it| ast::CallExpr::cast(it).is_some_and(|it| has_parens(&it)));
1606
1607 make_path_kind_expr(it.into())
1608 },
1609 ast::TupleStructPat(it) => {
1610 path_ctx.has_call_parens = true;
1611 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
1612 },
1613 ast::RecordPat(it) => {
1614 path_ctx.has_call_parens = true;
1615 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
1616 },
1617 ast::PathPat(it) => {
1618 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
1619 },
1620 ast::MacroCall(it) => {
1621 kind_macro_call(it)?
1622 },
1623 ast::Meta(meta) => make_path_kind_attr(meta)?,
1624 ast::VisibilityInner(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
1625 ast::UseTree(_) => PathKind::Use,
1626 ast::RecordExpr(it) => make_path_kind_expr(it.into()),
1627 _ => return None,
1628 }
1629 }
1630 },
1631 ast::RecordExpr(it) => {
1632 if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
1634 return Some(make_res(NameRefKind::Keyword(kind)));
1635 }
1636 make_path_kind_expr(it.into())
1637 },
1638 _ => return None,
1639 }
1640 }
1641 };
1642
1643 path_ctx.kind = kind;
1644 path_ctx.has_type_args = segment.generic_arg_list().is_some();
1645
1646 if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
1648 path_ctx.use_tree_parent = use_tree_parent;
1649 if !use_tree_parent && segment.coloncolon_token().is_some() {
1650 path_ctx.qualified = Qualified::Absolute;
1651 } else {
1652 let qualifier = qualifier
1653 .segment()
1654 .and_then(|it| find_node_in_file(original_file, &it))
1655 .map(|it| it.parent_path());
1656 if let Some(qualifier) = qualifier {
1657 let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
1658 Some(ast::PathSegmentKind::Type { type_ref: Some(type_ref), trait_ref })
1659 if qualifier.qualifier().is_none() =>
1660 {
1661 Some((type_ref, trait_ref))
1662 }
1663 _ => None,
1664 };
1665
1666 path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor {
1667 let ty = match ty {
1668 ast::Type::InferType(_) => None,
1669 ty => sema.resolve_type(&ty),
1670 };
1671 let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?));
1672 Qualified::TypeAnchor { ty, trait_ }
1673 } else {
1674 let res = sema.resolve_path(&qualifier);
1675
1676 let mut segment_count = 0;
1679 let super_count = iter::successors(Some(qualifier.clone()), |p| p.qualifier())
1680 .take_while(|p| {
1681 p.segment()
1682 .and_then(|s| {
1683 segment_count += 1;
1684 s.super_token()
1685 })
1686 .is_some()
1687 })
1688 .count();
1689
1690 let super_chain_len =
1691 if segment_count > super_count { None } else { Some(super_count) };
1692
1693 Qualified::With { path: qualifier, resolution: res, super_chain_len }
1694 }
1695 };
1696 }
1697 } else if let Some(segment) = path.segment()
1698 && segment.coloncolon_token().is_some()
1699 {
1700 path_ctx.qualified = Qualified::Absolute;
1701 }
1702
1703 let mut qualifier_ctx = QualifierCtx::default();
1704 if path_ctx.is_trivial_path() {
1705 let top_node = match path_ctx.kind {
1707 PathKind::Expr { expr_ctx: PathExprCtx { in_block_expr: true, .. } } => {
1708 parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| {
1709 let parent = p.parent()?;
1710 if ast::StmtList::can_cast(parent.kind()) {
1711 Some(p)
1712 } else if ast::ExprStmt::can_cast(parent.kind()) {
1713 Some(parent)
1714 } else {
1715 None
1716 }
1717 })
1718 }
1719 PathKind::Item { .. } => parent.ancestors().find(|it| it.kind() == SyntaxKind::ERROR),
1720 _ => None,
1721 };
1722 if let Some(top) = top_node {
1723 if let Some(NodeOrToken::Node(error_node)) =
1724 syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev)
1725 && error_node.kind() == SyntaxKind::ERROR
1726 {
1727 for token in error_node.children_with_tokens().filter_map(NodeOrToken::into_token) {
1728 match token.kind() {
1729 SyntaxKind::UNSAFE_KW => qualifier_ctx.unsafe_tok = Some(token),
1730 SyntaxKind::ASYNC_KW => qualifier_ctx.async_tok = Some(token),
1731 SyntaxKind::SAFE_KW => qualifier_ctx.safe_tok = Some(token),
1732 _ => {}
1733 }
1734 }
1735 qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast);
1736 qualifier_ctx.abi_node = error_node.children().find_map(ast::Abi::cast);
1737 }
1738
1739 if let PathKind::Item { .. } = path_ctx.kind
1740 && qualifier_ctx.none()
1741 && let Some(t) = top.first_token()
1742 && let Some(prev) =
1743 t.prev_token().and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
1744 && ![T![;], T!['}'], T!['{'], T![']']].contains(&prev.kind())
1745 {
1746 return None;
1750 }
1751 }
1752 }
1753 Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
1754}
1755
1756fn has_parens(node: &dyn HasArgList) -> bool {
1765 let Some(arg_list) = node.arg_list() else { return false };
1766 if arg_list.l_paren_token().is_none() {
1767 return false;
1768 }
1769 let prev_siblings = iter::successors(arg_list.syntax().prev_sibling_or_token(), |it| {
1770 it.prev_sibling_or_token()
1771 });
1772 prev_siblings
1773 .take_while(|syntax| syntax.kind().is_trivia())
1774 .filter_map(|syntax| {
1775 syntax.into_token().filter(|token| token.kind() == SyntaxKind::WHITESPACE)
1776 })
1777 .all(|whitespace| !whitespace.text().contains('\n'))
1778}
1779
/// Builds the [`PatternContext`] for a completion inside pattern position.
///
/// Walks up from `pat` past pattern-like nodes to the first "owner" node
/// (let statement, parameter, match arm, …) to determine refutability, any
/// type ascription, and — for match arms — the enum variants not yet covered
/// by the surrounding match.
fn pattern_context_for(
    sema: &Semantics<'_, RootDatabase>,
    original_file: &SyntaxNode,
    pat: ast::Pat,
) -> PatternContext {
    // Filled in only when the pattern turns out to be a function/closure parameter.
    let mut param_ctx = None;

    // Enum variants absent from the enclosing match, if the pattern is a match arm.
    let mut missing_variants = vec![];
    let is_pat_like = |kind| {
        ast::Pat::can_cast(kind)
            || ast::RecordPatField::can_cast(kind)
            || ast::RecordPatFieldList::can_cast(kind)
    };

    let (refutability, has_type_ascription) = pat
        .syntax()
        .ancestors()
        // First ancestor that is not itself part of the pattern: the pattern's owner.
        .find(|it| !is_pat_like(it.kind()))
        .map_or((PatternRefutability::Irrefutable, false), |node| {
            let refutability = match_ast! {
                match node {
                    // `let` patterns may be refutable (let-else); ascription is the `: Ty`.
                    ast::LetStmt(let_) => return (PatternRefutability::Refutable, let_.ty().is_some()),
                    ast::Param(param) => {
                        let has_type_ascription = param.ty().is_some();
                        // Map the parameter back into the original (non-speculative) file
                        // to record which fn/closure owns it.
                        param_ctx = (|| {
                            let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
                            let param_list = find_node_in_file_compensated(sema, original_file, &fake_param_list)?;
                            let param_list_owner = param_list.syntax().parent()?;
                            let kind = match_ast! {
                                match param_list_owner {
                                    ast::ClosureExpr(closure) => ParamKind::Closure(closure),
                                    ast::Fn(fn_) => ParamKind::Function(fn_),
                                    _ => return None,
                                }
                            };
                            Some(ParamContext {
                                param_list, param, kind
                            })
                        })();
                        return (PatternRefutability::Irrefutable, has_type_ascription)
                    },
                    ast::MatchArm(match_arm) => {
                        // Resolve the scrutinee's enum (through autoderef) and collect
                        // variants that no existing arm mentions yet.
                        let missing_variants_opt = match_arm
                            .syntax()
                            .parent()
                            .and_then(ast::MatchArmList::cast)
                            .and_then(|match_arm_list| {
                                match_arm_list
                                    .syntax()
                                    .parent()
                                    .and_then(ast::MatchExpr::cast)
                                    .and_then(|match_expr| {
                                        let expr_opt = find_opt_node_in_file(original_file, match_expr.expr());

                                        expr_opt.and_then(|expr| {
                                            sema.type_of_expr(&expr)?
                                                .adjusted()
                                                .autoderef(sema.db)
                                                .find_map(|ty| match ty.as_adt() {
                                                    Some(hir::Adt::Enum(e)) => Some(e),
                                                    _ => None,
                                                }).map(|enum_| enum_.variants(sema.db))
                                        })
                                    }).map(|variants| variants.iter().filter_map(|variant| {
                                        let variant_name = variant.name(sema.db);

                                        // NOTE: textual heuristic — an arm "covers" a variant if
                                        // its pattern text merely contains the variant name.
                                        let variant_already_present = match_arm_list.arms().any(|arm| {
                                            arm.pat().and_then(|pat| {
                                                let pat_already_present = pat.syntax().to_string().contains(variant_name.as_str());
                                                pat_already_present.then_some(pat_already_present)
                                            }).is_some()
                                        });

                                        (!variant_already_present).then_some(*variant)
                                    }).collect::<Vec<EnumVariant>>())
                            });

                        if let Some(missing_variants_) = missing_variants_opt {
                            missing_variants = missing_variants_;
                        };

                        PatternRefutability::Refutable
                    },
                    ast::LetExpr(_) => PatternRefutability::Refutable,
                    ast::ForExpr(_) => PatternRefutability::Irrefutable,
                    _ => PatternRefutability::Irrefutable,
                }
            };
            // Arms that fall through here have no type ascription to report.
            (refutability, false)
        });
    let (ref_token, mut_token) = match &pat {
        ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()),
        _ => (None, None),
    };

    // Suggest binding names only for plain identifier patterns directly under
    // a `let` or a parameter.
    let should_suggest_name = matches!(
        &pat,
        ast::Pat::IdentPat(it)
            if it.syntax()
            .parent().is_some_and(|node| {
                let kind = node.kind();
                ast::LetStmt::can_cast(kind) || ast::Param::can_cast(kind)
            })
    );

    PatternContext {
        refutability,
        param_ctx,
        has_type_ascription,
        should_suggest_name,
        after_if_expr: is_after_if_expr(pat.syntax().clone()),
        parent_pat: pat.syntax().parent().and_then(ast::Pat::cast),
        mut_token,
        ref_token,
        record_pat: None,
        impl_or_trait: fetch_immediate_impl_or_trait(sema, original_file, pat.syntax()),
        missing_variants,
    }
}
1900
1901fn fetch_immediate_impl_or_trait(
1902 sema: &Semantics<'_, RootDatabase>,
1903 original_file: &SyntaxNode,
1904 node: &SyntaxNode,
1905) -> Option<Either<ast::Impl, ast::Trait>> {
1906 let mut ancestors = ancestors_in_file_compensated(sema, original_file, node)?
1907 .filter_map(ast::Item::cast)
1908 .filter(|it| !matches!(it, ast::Item::MacroCall(_)));
1909
1910 match ancestors.next()? {
1911 ast::Item::Const(_) | ast::Item::Fn(_) | ast::Item::TypeAlias(_) => (),
1912 ast::Item::Impl(it) => return Some(Either::Left(it)),
1913 ast::Item::Trait(it) => return Some(Either::Right(it)),
1914 _ => return None,
1915 }
1916 match ancestors.next()? {
1917 ast::Item::Impl(it) => Some(Either::Left(it)),
1918 ast::Item::Trait(it) => Some(Either::Right(it)),
1919 _ => None,
1920 }
1921}
1922
1923fn find_opt_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: Option<N>) -> Option<N> {
1926 find_node_in_file(syntax, &node?)
1927}
1928
1929fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
1932 let syntax_range = syntax.text_range();
1933 let range = node.syntax().text_range();
1934 let intersection = range.intersect(syntax_range)?;
1935 syntax.covering_element(intersection).ancestors().find_map(N::cast)
1936}
1937
1938fn find_node_in_file_compensated<N: AstNode>(
1942 sema: &Semantics<'_, RootDatabase>,
1943 in_file: &SyntaxNode,
1944 node: &N,
1945) -> Option<N> {
1946 ancestors_in_file_compensated(sema, in_file, node.syntax())?.find_map(N::cast)
1947}
1948
1949fn ancestors_in_file_compensated<'sema>(
1950 sema: &'sema Semantics<'_, RootDatabase>,
1951 in_file: &SyntaxNode,
1952 node: &SyntaxNode,
1953) -> Option<impl Iterator<Item = SyntaxNode> + 'sema> {
1954 let syntax_range = in_file.text_range();
1955 let range = node.text_range();
1956 let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
1957 if end < range.start() {
1958 return None;
1959 }
1960 let range = TextRange::new(range.start(), end);
1961 let intersection = range.intersect(syntax_range)?;
1963 let node = match in_file.covering_element(intersection) {
1964 NodeOrToken::Node(node) => node,
1965 NodeOrToken::Token(tok) => tok.parent()?,
1966 };
1967 Some(sema.ancestors_with_macros(node))
1968}
1969
1970fn find_opt_node_in_file_compensated<N: AstNode>(
1974 sema: &Semantics<'_, RootDatabase>,
1975 syntax: &SyntaxNode,
1976 node: Option<N>,
1977) -> Option<N> {
1978 find_node_in_file_compensated(sema, syntax, &node?)
1979}
1980
1981fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
1982 if let Some(qual) = path.qualifier() {
1983 return Some((qual, false));
1984 }
1985 let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
1986 let use_tree = use_tree_list.syntax().parent().and_then(ast::UseTree::cast)?;
1987 Some((use_tree.path()?, true))
1988}
1989
1990fn left_ancestors(node: Option<SyntaxNode>) -> impl Iterator<Item = SyntaxNode> {
1991 node.into_iter().flat_map(|node| {
1992 let end = node.text_range().end();
1993 node.ancestors().take_while(move |it| it.text_range().end() == end)
1994 })
1995}
1996
1997fn is_in_token_of_for_loop(path: &ast::Path) -> bool {
1998 (|| {
2000 let expr = path.syntax().parent().and_then(ast::PathExpr::cast)?;
2001 let for_expr = expr.syntax().parent().and_then(ast::ForExpr::cast)?;
2002 if for_expr.in_token().is_some() {
2003 return Some(false);
2004 }
2005 let pat = for_expr.pat()?;
2006 let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?;
2007 Some(match next_sibl {
2008 syntax::NodeOrToken::Node(n) => {
2009 n.text_range().start() == path.syntax().text_range().start()
2010 }
2011 syntax::NodeOrToken::Token(t) => {
2012 t.text_range().start() == path.syntax().text_range().start()
2013 }
2014 })
2015 })()
2016 .unwrap_or(false)
2017}
2018
2019fn is_in_breakable(node: &SyntaxNode) -> Option<(BreakableKind, SyntaxNode)> {
2020 node.ancestors()
2021 .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR)
2022 .find_map(|it| {
2023 let (breakable, loop_body) = match_ast! {
2024 match it {
2025 ast::ForExpr(it) => (BreakableKind::For, it.loop_body()?),
2026 ast::WhileExpr(it) => (BreakableKind::While, it.loop_body()?),
2027 ast::LoopExpr(it) => (BreakableKind::Loop, it.loop_body()?),
2028 ast::BlockExpr(it) => return it.label().map(|_| (BreakableKind::Block, it.syntax().clone())),
2029 _ => return None,
2030 }
2031 };
2032 loop_body.syntax().text_range().contains_range(node.text_range())
2033 .then_some((breakable, it))
2034 })
2035}
2036
2037fn is_in_block(node: &SyntaxNode) -> bool {
2038 if has_in_newline_expr_first(node) {
2039 return true;
2040 };
2041 node.parent()
2042 .map(|node| ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind()))
2043 .unwrap_or(false)
2044}
2045
/// Detects a path expression that is followed by a line break and starts a
/// statement directly inside a block's statement list, i.e. the completion
/// position looks like the beginning of a new statement on its own line.
fn has_in_newline_expr_first(node: &SyntaxNode) -> bool {
    if ast::PathExpr::can_cast(node.kind())
        // The very next sibling token must be whitespace spanning a newline.
        && let Some(NodeOrToken::Token(next)) = node.next_sibling_or_token()
        && next.kind() == SyntaxKind::WHITESPACE
        && next.text().contains('\n')
        // Climb ancestors that start at the same offset (i.e. `node` is their
        // leftmost part) and take the outermost statement-or-expression.
        && let Some(stmt_like) = node
            .ancestors()
            .take_while(|it| it.text_range().start() == node.text_range().start())
            .filter_map(Either::<ast::ExprStmt, ast::Expr>::cast)
            .last()
    {
        // It only counts if that outermost statement lives in a block.
        stmt_like.syntax().parent().and_then(ast::StmtList::cast).is_some()
    } else {
        false
    }
}
2068
2069fn is_after_if_expr(node: SyntaxNode) -> bool {
2070 let node = match node.parent().and_then(Either::<ast::ExprStmt, ast::MatchArm>::cast) {
2071 Some(stmt) => stmt.syntax().clone(),
2072 None => node,
2073 };
2074 let Some(prev_token) = previous_non_trivia_token(node) else { return false };
2075 prev_token
2076 .parent_ancestors()
2077 .take_while(|it| it.text_range().end() == prev_token.text_range().end())
2078 .find_map(ast::IfExpr::cast)
2079 .is_some()
2080}
2081
2082fn next_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
2083 let mut token = match e.into() {
2084 SyntaxElement::Node(n) => n.last_token()?,
2085 SyntaxElement::Token(t) => t,
2086 }
2087 .next_token();
2088 while let Some(inner) = token {
2089 if !inner.kind().is_trivia() {
2090 return Some(inner);
2091 } else {
2092 token = inner.next_token();
2093 }
2094 }
2095 None
2096}
2097
2098fn next_non_trivia_sibling(ele: SyntaxElement) -> Option<SyntaxElement> {
2099 let mut e = ele.next_sibling_or_token();
2100 while let Some(inner) = e {
2101 if !inner.kind().is_trivia() {
2102 return Some(inner);
2103 } else {
2104 e = inner.next_sibling_or_token();
2105 }
2106 }
2107 None
2108}
2109
2110fn prev_special_biased_token_at_trivia(mut token: SyntaxToken) -> SyntaxToken {
2111 while token.kind().is_trivia()
2112 && let Some(prev) = token.prev_token()
2113 && let T![=]
2114 | T![+=]
2115 | T![/=]
2116 | T![*=]
2117 | T![%=]
2118 | T![>>=]
2119 | T![<<=]
2120 | T![-=]
2121 | T![|=]
2122 | T![&=]
2123 | T![^=]
2124 | T![|]
2125 | T![return]
2126 | T![break]
2127 | T![continue]
2128 | T![lifetime_ident] = prev.kind()
2129 {
2130 token = prev
2131 }
2132 token
2133}