1use std::{
5 env, fmt,
6 ops::AddAssign,
7 panic::{AssertUnwindSafe, catch_unwind},
8 time::{SystemTime, UNIX_EPOCH},
9};
10
11use cfg::{CfgAtom, CfgDiff};
12use hir::{
13 Adt, AssocItem, Crate, DefWithBody, HasCrate, HasSource, HirDisplay, ImportPathConfig,
14 ModuleDef, Name,
15 db::{DefDatabase, ExpandDatabase, HirDatabase},
16 next_solver::{DbInterner, GenericArgs},
17};
18use hir_def::{
19 SyntheticSyntax,
20 expr_store::BodySourceMap,
21 hir::{ExprId, PatId},
22};
23use hir_ty::{Interner, TyExt, TypeFlags};
24use ide::{
25 Analysis, AnalysisHost, AnnotationConfig, DiagnosticsConfig, Edition, InlayFieldsToResolve,
26 InlayHintsConfig, LineCol, RootDatabase,
27};
28use ide_db::{
29 EditionedFileId, LineIndexDatabase, SnippetCap,
30 base_db::{SourceDatabase, salsa::Database},
31};
32use itertools::Itertools;
33use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace};
34use oorandom::Rand32;
35use profile::StopWatch;
36use project_model::{CargoConfig, CfgOverrides, ProjectManifest, ProjectWorkspace, RustLibSource};
37use rayon::prelude::*;
38use rustc_hash::{FxHashMap, FxHashSet};
39use syntax::AstNode;
40use vfs::{AbsPathBuf, Vfs, VfsPath};
41
42use crate::cli::{
43 Verbosity,
44 flags::{self, OutputFormat},
45 full_name_of_item, print_memory_usage,
46 progress_report::ProgressReport,
47 report_metric,
48};
49
50impl flags::AnalysisStats {
    /// Runs the full `analysis-stats` pipeline over the project at `self.path`.
    ///
    /// Loads the cargo workspace (optionally running build scripts and a proc-macro
    /// server), collects item-tree and def-map statistics for all crates (workspace
    /// only, unless `--with-deps`), then drives the individual analysis phases —
    /// body lowering, inference, MIR lowering, data layouts, const eval, and
    /// optionally the IDE features and term search — printing timing metrics for
    /// each phase and a grand total at the end.
    pub fn run(self, verbosity: Verbosity) -> anyhow::Result<()> {
        // RNG seeded from wall-clock time; only used when `--randomize` shuffles
        // the crate/module/body processing order.
        let mut rng = {
            let seed = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64;
            Rand32::new(seed)
        };

        let cargo_config = CargoConfig {
            sysroot: match self.no_sysroot {
                true => None,
                false => Some(RustLibSource::Discover),
            },
            all_targets: true,
            set_test: !self.no_test,
            // Enable `cfg(miri)` globally so miri-gated code is analyzed as well.
            cfg_overrides: CfgOverrides {
                global: CfgDiff::new(vec![CfgAtom::Flag(hir::sym::miri)], vec![]),
                selective: Default::default(),
            },
            ..Default::default()
        };
        let no_progress = &|_| ();

        let mut db_load_sw = self.stop_watch();

        let path = AbsPathBuf::assert_utf8(env::current_dir()?.join(&self.path));
        let manifest = ProjectManifest::discover_single(&path)?;

        let mut workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
        let metadata_time = db_load_sw.elapsed();
        let load_cargo_config = LoadCargoConfig {
            load_out_dirs_from_check: !self.disable_build_scripts,
            with_proc_macro_server: if self.disable_proc_macros {
                ProcMacroServerChoice::None
            } else {
                match self.proc_macro_srv {
                    Some(ref path) => {
                        let path = vfs::AbsPathBuf::assert_utf8(path.to_owned());
                        ProcMacroServerChoice::Explicit(path)
                    }
                    None => ProcMacroServerChoice::Sysroot,
                }
            },
            prefill_caches: false,
        };

        // Build scripts are timed separately so the breakdown below can show
        // metadata vs. build-script cost.
        let build_scripts_time = if self.disable_build_scripts {
            None
        } else {
            let mut build_scripts_sw = self.stop_watch();
            let bs = workspace.run_build_scripts(&cargo_config, no_progress)?;
            workspace.set_build_scripts(bs);
            Some(build_scripts_sw.elapsed())
        };

        let (db, vfs, _proc_macro) =
            load_workspace(workspace.clone(), &cargo_config.extra_env, &load_cargo_config)?;
        eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
        eprint!(" (metadata {metadata_time}");
        if let Some(build_scripts_time) = build_scripts_time {
            eprint!("; build {build_scripts_time}");
        }
        eprintln!(")");

        let mut host = AnalysisHost::with_database(db);
        let db = host.raw_database();

        let mut analysis_sw = self.stop_watch();

        let mut krates = Crate::all(db);
        if self.randomize {
            shuffle(&mut rng, &mut krates);
        }

        // Phase: item trees. Walk every `.rs` file of every (deduplicated) source
        // root, force item-tree computation, and bucket line counts / item stats
        // into workspace vs. dependency totals.
        let mut item_tree_sw = self.stop_watch();
        let source_roots = krates
            .iter()
            .cloned()
            .map(|krate| db.file_source_root(krate.root_file(db)).source_root_id(db))
            .unique();

        let mut dep_loc = 0;
        let mut workspace_loc = 0;
        let mut dep_item_trees = 0;
        let mut workspace_item_trees = 0;

        let mut workspace_item_stats = PrettyItemStats::default();
        let mut dep_item_stats = PrettyItemStats::default();

        for source_root_id in source_roots {
            let source_root = db.source_root(source_root_id).source_root(db);
            for file_id in source_root.iter() {
                if let Some(p) = source_root.path_for_file(&file_id)
                    && let Some((_, Some("rs"))) = p.name_and_extension()
                {
                    // With `--with-deps`, library files are counted in the
                    // workspace bucket instead of the dependency bucket.
                    if !source_root.is_library || self.with_deps {
                        let length = db.file_text(file_id).text(db).lines().count();
                        let item_stats = db
                            .file_item_tree(EditionedFileId::current_edition(db, file_id).into())
                            .item_tree_stats()
                            .into();

                        workspace_loc += length;
                        workspace_item_trees += 1;
                        workspace_item_stats += item_stats;
                    } else {
                        let length = db.file_text(file_id).text(db).lines().count();
                        let item_stats = db
                            .file_item_tree(EditionedFileId::current_edition(db, file_id).into())
                            .item_tree_stats()
                            .into();

                        dep_loc += length;
                        dep_item_trees += 1;
                        dep_item_stats += item_stats;
                    }
                }
            }
        }
        eprintln!(" item trees: {workspace_item_trees}");
        let item_tree_time = item_tree_sw.elapsed();

        eprintln!(
            " dependency lines of code: {}, item trees: {}",
            UsizeWithUnderscore(dep_loc),
            UsizeWithUnderscore(dep_item_trees),
        );
        eprintln!(" dependency item stats: {dep_item_stats}");

        eprintln!("{:<20} {}", "Item Tree Collection:", item_tree_time);
        report_metric("item tree time", item_tree_time.time.as_millis() as u64, "ms");
        eprintln!(" Total Statistics:");

        // Phase: crate def maps. BFS over the module tree of every workspace
        // crate, collecting bodies (fns, consts, statics, enum variants) and
        // ADTs for the later phases.
        let mut crate_def_map_sw = self.stop_watch();
        let mut num_crates = 0;
        let mut visited_modules = FxHashSet::default();
        let mut visit_queue = Vec::new();
        for krate in krates {
            let module = krate.root_module();
            let file_id = module.definition_source_file_id(db);
            let file_id = file_id.original_file(db);

            let source_root = db.file_source_root(file_id.file_id(db)).source_root_id(db);
            let source_root = db.source_root(source_root).source_root(db);
            if !source_root.is_library || self.with_deps {
                num_crates += 1;
                visit_queue.push(module);
            }
        }

        if self.randomize {
            shuffle(&mut rng, &mut visit_queue);
        }

        eprint!(" crates: {num_crates}");
        let mut num_decls = 0;
        let mut bodies = Vec::new();
        let mut adts = Vec::new();
        let mut file_ids = Vec::new();

        let mut num_traits = 0;
        let mut num_macro_rules_macros = 0;
        let mut num_proc_macros = 0;

        while let Some(module) = visit_queue.pop() {
            if visited_modules.insert(module) {
                file_ids.extend(module.as_source_file_id(db));
                visit_queue.extend(module.children(db));

                for decl in module.declarations(db) {
                    num_decls += 1;
                    match decl {
                        ModuleDef::Function(f) => bodies.push(DefWithBody::from(f)),
                        ModuleDef::Adt(a) => {
                            // Enum variants have bodies too (discriminant exprs).
                            if let Adt::Enum(e) = a {
                                for v in e.variants(db) {
                                    bodies.push(DefWithBody::from(v));
                                }
                            }
                            adts.push(a)
                        }
                        ModuleDef::Const(c) => {
                            bodies.push(DefWithBody::from(c));
                        }
                        ModuleDef::Static(s) => bodies.push(DefWithBody::from(s)),
                        ModuleDef::Trait(_) => num_traits += 1,
                        ModuleDef::Macro(m) => match m.kind(db) {
                            hir::MacroKind::Declarative => num_macro_rules_macros += 1,
                            hir::MacroKind::Derive
                            | hir::MacroKind::Attr
                            | hir::MacroKind::ProcMacro => num_proc_macros += 1,
                            _ => (),
                        },
                        _ => (),
                    };
                }

                // Assoc items in impl blocks also carry bodies.
                for impl_def in module.impl_defs(db) {
                    for item in impl_def.items(db) {
                        num_decls += 1;
                        match item {
                            AssocItem::Function(f) => bodies.push(DefWithBody::from(f)),
                            AssocItem::Const(c) => {
                                bodies.push(DefWithBody::from(c));
                            }
                            _ => (),
                        }
                    }
                }
            }
        }
        eprintln!(
            ", mods: {}, decls: {num_decls}, bodies: {}, adts: {}, consts: {}",
            visited_modules.len(),
            bodies.len(),
            adts.len(),
            bodies
                .iter()
                .filter(|it| matches!(it, DefWithBody::Const(_) | DefWithBody::Static(_)))
                .count(),
        );

        eprintln!(" Workspace:");
        eprintln!(
            " traits: {num_traits}, macro_rules macros: {num_macro_rules_macros}, proc_macros: {num_proc_macros}"
        );
        eprintln!(
            " lines of code: {}, item trees: {}",
            UsizeWithUnderscore(workspace_loc),
            UsizeWithUnderscore(workspace_item_trees),
        );
        eprintln!(" usages: {workspace_item_stats}");

        eprintln!(" Dependencies:");
        eprintln!(
            " lines of code: {}, item trees: {}",
            UsizeWithUnderscore(dep_loc),
            UsizeWithUnderscore(dep_item_trees),
        );
        eprintln!(" declarations: {dep_item_stats}");

        let crate_def_map_time = crate_def_map_sw.elapsed();
        eprintln!("{:<20} {}", "Item Collection:", crate_def_map_time);
        report_metric("crate def map time", crate_def_map_time.time.as_millis() as u64, "ms");

        if self.randomize {
            shuffle(&mut rng, &mut bodies);
        }

        // Remaining phases, each individually skippable via CLI flags.
        if !self.skip_lowering {
            self.run_body_lowering(db, &vfs, &bodies, verbosity);
        }

        if !self.skip_inference {
            self.run_inference(db, &vfs, &bodies, verbosity);
        }

        if !self.skip_mir_stats {
            self.run_mir_lowering(db, &bodies, verbosity);
        }

        if !self.skip_data_layout {
            self.run_data_layout(db, &adts, verbosity);
        }

        if !self.skip_const_eval {
            self.run_const_eval(db, &bodies, verbosity);
        }

        if self.run_all_ide_things {
            self.run_ide_things(host.analysis(), file_ids.clone(), db, &vfs, verbosity);
        }

        if self.run_term_search {
            self.run_term_search(&workspace, db, &vfs, file_ids, verbosity);
        }

        // Force LRU eviction so the final memory numbers reflect steady state.
        let db = host.raw_database_mut();
        db.trigger_lru_eviction();

        let total_span = analysis_sw.elapsed();
        eprintln!("{:<20} {total_span}", "Total:");
        report_metric("total time", total_span.time.as_millis() as u64, "ms");
        if let Some(instructions) = total_span.instructions {
            report_metric("total instructions", instructions, "#instr");
        }
        report_metric("total memory", total_span.memory.allocated.megabytes() as u64, "MB");

        if verbosity.is_verbose() {
            print_memory_usage(host, vfs);
        }

        Ok(())
    }
360
361 fn run_data_layout(&self, db: &RootDatabase, adts: &[hir::Adt], verbosity: Verbosity) {
362 let mut sw = self.stop_watch();
363 let mut all = 0;
364 let mut fail = 0;
365 for &a in adts {
366 let interner = DbInterner::new_with(db, Some(a.krate(db).base()), None);
367 let generic_params = db.generic_params(a.into());
368 if generic_params.iter_type_or_consts().next().is_some()
369 || generic_params.iter_lt().next().is_some()
370 {
371 continue;
373 }
374 all += 1;
375 let Err(e) = db.layout_of_adt(
376 hir_def::AdtId::from(a),
377 GenericArgs::new_from_iter(interner, []),
378 db.trait_environment(a.into()),
379 ) else {
380 continue;
381 };
382 if verbosity.is_spammy() {
383 let full_name = full_name_of_item(db, a.module(db), a.name(db));
384 println!("Data layout for {full_name} failed due {e:?}");
385 }
386 fail += 1;
387 }
388 let data_layout_time = sw.elapsed();
389 eprintln!("{:<20} {}", "Data layouts:", data_layout_time);
390 eprintln!("Failed data layouts: {fail} ({}%)", percentage(fail, all));
391 report_metric("failed data layouts", fail, "#");
392 report_metric("data layout time", data_layout_time.time.as_millis() as u64, "ms");
393 }
394
395 fn run_const_eval(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) {
396 let len = bodies
397 .iter()
398 .filter(|body| matches!(body, DefWithBody::Const(_) | DefWithBody::Static(_)))
399 .count();
400 let mut bar = match verbosity {
401 Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
402 _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
403 _ => ProgressReport::new(len),
404 };
405
406 let mut sw = self.stop_watch();
407 let mut all = 0;
408 let mut fail = 0;
409 for &b in bodies {
410 bar.set_message(move || format!("const eval: {}", full_name(db, b, b.module(db))));
411 let res = match b {
412 DefWithBody::Const(c) => c.eval(db),
413 DefWithBody::Static(s) => s.eval(db),
414 _ => continue,
415 };
416 bar.inc(1);
417 all += 1;
418 let Err(error) = res else {
419 continue;
420 };
421 if verbosity.is_spammy() {
422 let full_name =
423 full_name_of_item(db, b.module(db), b.name(db).unwrap_or(Name::missing()));
424 bar.println(format!("Const eval for {full_name} failed due {error:?}"));
425 }
426 fail += 1;
427 }
428 bar.finish_and_clear();
429 let const_eval_time = sw.elapsed();
430 eprintln!("{:<20} {}", "Const evaluation:", const_eval_time);
431 eprintln!("Failed const evals: {fail} ({}%)", percentage(fail, all));
432 report_metric("failed const evals", fail, "#");
433 report_metric("const eval time", const_eval_time.time.as_millis() as u64, "ms");
434 }
435
    /// Benchmarks term search: for every block tail expression in `file_ids`,
    /// runs term search against the expression's expected type and checks
    /// whether any generated term is syntactically identical to the original.
    ///
    /// With `--validate-term-search`, each generated term is additionally
    /// written into the source file on disk and checked by running build
    /// scripts (i.e. a real compile), restoring the original file afterwards.
    /// NOTE(review): this mutates files in the user's working tree while running.
    fn run_term_search(
        &self,
        ws: &ProjectWorkspace,
        db: &RootDatabase,
        vfs: &Vfs,
        mut file_ids: Vec<EditionedFileId>,
        verbosity: Verbosity,
    ) {
        // Separate cargo config used only for the validation builds.
        let cargo_config = CargoConfig {
            sysroot: match self.no_sysroot {
                true => None,
                false => Some(RustLibSource::Discover),
            },
            all_targets: true,
            ..Default::default()
        };

        let mut bar = match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
            _ => ProgressReport::new(file_ids.len()),
        };

        file_ids.sort();
        file_ids.dedup();

        // Per-run accumulator for hit/miss counts and validation error stats.
        #[derive(Debug, Default)]
        struct Acc {
            tail_expr_syntax_hits: u64,
            tail_expr_no_term: u64,
            total_tail_exprs: u64,
            error_codes: FxHashMap<String, u32>,
            syntax_errors: u32,
        }

        let mut acc: Acc = Default::default();
        bar.tick();
        let mut sw = self.stop_watch();

        for &file_id in &file_ids {
            let file_id = file_id.editioned_file_id(db);
            let sema = hir::Semantics::new(db);
            let display_target = match sema.first_crate(file_id.file_id()) {
                Some(krate) => krate.to_display_target(sema.db),
                None => continue,
            };

            let parse = sema.parse_guess_edition(file_id.into());
            let file_txt = db.file_text(file_id.into());
            let path = vfs.file_path(file_id.into()).as_path().unwrap();

            for node in parse.syntax().descendants() {
                // Only block expressions with a non-block tail expression are
                // interesting targets.
                let expr = match syntax::ast::Expr::cast(node.clone()) {
                    Some(it) => it,
                    None => continue,
                };
                let block = match syntax::ast::BlockExpr::cast(expr.syntax().clone()) {
                    Some(it) => it,
                    None => continue,
                };
                let target_ty = match sema.type_of_expr(&expr) {
                    Some(it) => it.adjusted(),
                    None => continue,
                };

                let expected_tail = match block.tail_expr() {
                    Some(it) => it,
                    None => continue,
                };

                if expected_tail.is_block_like() {
                    continue;
                }

                // Snapshot the original tail-expression text for the syntactic
                // comparison (and for the progress message below).
                let range = sema.original_range(expected_tail.syntax()).range;
                let original_text: String = db
                    .file_text(file_id.into())
                    .text(db)
                    .chars()
                    .skip(usize::from(range.start()))
                    .take(usize::from(range.end()) - usize::from(range.start()))
                    .collect();

                let scope = match sema.scope(expected_tail.syntax()) {
                    Some(it) => it,
                    None => continue,
                };

                let ctx = hir::term_search::TermSearchCtx {
                    sema: &sema,
                    scope: &scope,
                    goal: target_ty,
                    config: hir::term_search::TermSearchConfig {
                        enable_borrowcheck: true,
                        ..Default::default()
                    },
                };
                let found_terms = hir::term_search::term_search(&ctx);

                if found_terms.is_empty() {
                    acc.tail_expr_no_term += 1;
                    acc.total_tail_exprs += 1;
                    continue;
                };

                // Whitespace-insensitive comparison helper.
                fn trim(s: &str) -> String {
                    s.chars().filter(|c| !c.is_whitespace()).collect()
                }

                // Unresolved sub-terms are rendered as `todo!()`.
                let todo = syntax::ast::make::ext::expr_todo().to_string();
                let mut formatter = |_: &hir::Type<'_>| todo.clone();
                let mut syntax_hit_found = false;
                for term in found_terms {
                    let generated = term
                        .gen_source_code(
                            &scope,
                            &mut formatter,
                            ImportPathConfig {
                                prefer_no_std: false,
                                prefer_prelude: true,
                                prefer_absolute: false,
                                allow_unstable: true,
                            },
                            display_target,
                        )
                        .unwrap();
                    syntax_hit_found |= trim(&original_text) == trim(&generated);

                    // Splice the generated term into a copy of the file text.
                    let mut txt = file_txt.text(db).to_string();

                    let edit = ide::TextEdit::replace(range, generated.clone());
                    edit.apply(&mut txt);

                    if self.validate_term_search {
                        // Write the edited file to disk and compile it for real.
                        std::fs::write(path, txt).unwrap();

                        let res = ws.run_build_scripts(&cargo_config, &|_| ()).unwrap();
                        if let Some(err) = res.error()
                            && err.contains("error: could not compile")
                        {
                            if let Some(mut err_idx) = err.find("error[E") {
                                // Skip past "error[E" to the 4-digit code.
                                err_idx += 7;
                                let err_code = &err[err_idx..err_idx + 4];
                                // Ignore expected failure classes: type-annotation
                                // ambiguity, todo!-induced mismatches, and
                                // out-of-scope trait methods.
                                match err_code {
                                    "0282" | "0283" => continue,
                                    "0277" | "0308" if generated.contains(&todo) => continue,
                                    "0599"
                                        if err.contains(
                                            "the following trait is implemented but not in scope",
                                        ) =>
                                    {
                                        continue;
                                    }
                                    _ => (),
                                }
                                bar.println(err);
                                bar.println(generated);
                                acc.error_codes
                                    .entry(err_code.to_owned())
                                    .and_modify(|n| *n += 1)
                                    .or_insert(1);
                            } else {
                                acc.syntax_errors += 1;
                                bar.println(format!("Syntax error: \n{err}"));
                            }
                        }
                    }
                }

                if syntax_hit_found {
                    acc.tail_expr_syntax_hits += 1;
                }
                acc.total_tail_exprs += 1;

                let msg = move || {
                    format!(
                        "processing: {:<50}",
                        trim(&original_text).chars().take(50).collect::<String>()
                    )
                };
                if verbosity.is_spammy() {
                    bar.println(msg());
                }
                bar.set_message(msg);
            }
            // Restore the original file contents after validation edits.
            if self.validate_term_search {
                std::fs::write(path, file_txt.text(db).to_string()).unwrap();
            }

            bar.inc(1);
        }
        let term_search_time = sw.elapsed();

        bar.println(format!(
            "Tail Expr syntactic hits: {}/{} ({}%)",
            acc.tail_expr_syntax_hits,
            acc.total_tail_exprs,
            percentage(acc.tail_expr_syntax_hits, acc.total_tail_exprs)
        ));
        bar.println(format!(
            "Tail Exprs found: {}/{} ({}%)",
            acc.total_tail_exprs - acc.tail_expr_no_term,
            acc.total_tail_exprs,
            percentage(acc.total_tail_exprs - acc.tail_expr_no_term, acc.total_tail_exprs)
        ));
        if self.validate_term_search {
            bar.println(format!(
                "Tail Exprs total errors: {}, syntax errors: {}, error codes:",
                acc.error_codes.values().sum::<u32>() + acc.syntax_errors,
                acc.syntax_errors,
            ));
            for (err, count) in acc.error_codes {
                bar.println(format!(
                    " E{err}: {count:>5} (https://doc.rust-lang.org/error_codes/E{err}.html)"
                ));
            }
        }
        // NOTE(review): integer division by `total_tail_exprs` — panics if no
        // tail expressions were processed at all; consider guarding for zero.
        bar.println(format!(
            "Term search avg time: {}ms",
            term_search_time.time.as_millis() as u64 / acc.total_tail_exprs
        ));
        bar.println(format!("{:<20} {}", "Term search:", term_search_time));
        report_metric("term search time", term_search_time.time.as_millis() as u64, "ms");

        bar.finish_and_clear();
    }
667
668 fn run_mir_lowering(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) {
669 let mut bar = match verbosity {
670 Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
671 _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
672 _ => ProgressReport::new(bodies.len()),
673 };
674 let mut sw = self.stop_watch();
675 let mut all = 0;
676 let mut fail = 0;
677 for &body_id in bodies {
678 bar.set_message(move || {
679 format!("mir lowering: {}", full_name(db, body_id, body_id.module(db)))
680 });
681 bar.inc(1);
682 if matches!(body_id, DefWithBody::Variant(_)) {
683 continue;
684 }
685 let module = body_id.module(db);
686 if !self.should_process(db, body_id, module) {
687 continue;
688 }
689
690 all += 1;
691 let Err(e) = db.mir_body(body_id.into()) else {
692 continue;
693 };
694 if verbosity.is_spammy() {
695 let full_name = module
696 .path_to_root(db)
697 .into_iter()
698 .rev()
699 .filter_map(|it| it.name(db))
700 .chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
701 .map(|it| it.display(db, Edition::LATEST).to_string())
702 .join("::");
703 bar.println(format!("Mir body for {full_name} failed due {e:?}"));
704 }
705 fail += 1;
706 bar.tick();
707 }
708 let mir_lowering_time = sw.elapsed();
709 bar.finish_and_clear();
710 eprintln!("{:<20} {}", "MIR lowering:", mir_lowering_time);
711 eprintln!("Mir failed bodies: {fail} ({}%)", percentage(fail, all));
712 report_metric("mir failed bodies", fail, "#");
713 report_metric("mir lowering time", mir_lowering_time.time.as_millis() as u64, "ms");
714 }
715
    /// Type-checks every body, counting expressions and patterns whose inferred
    /// type is unknown or partially unknown, plus type mismatches, and prints
    /// per-body (spammy) and aggregate statistics.
    ///
    /// Panics during inference are caught via `catch_unwind` and counted rather
    /// than aborting the whole run. With `--output csv`, unknown types and
    /// mismatches are emitted as CSV rows on stdout.
    fn run_inference(
        &self,
        db: &RootDatabase,
        vfs: &Vfs,
        bodies: &[DefWithBody],
        verbosity: Verbosity,
    ) {
        let mut bar = match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
            _ => ProgressReport::new(bodies.len()),
        };

        if self.parallel {
            // Warm the query caches in parallel first; the sequential pass
            // below then mostly observes cached results.
            let mut inference_sw = self.stop_watch();
            bodies
                .par_iter()
                .map_with(db.clone(), |snap, &body| {
                    snap.body(body.into());
                    snap.infer(body.into());
                })
                .count();
            eprintln!("{:<20} {}", "Parallel Inference:", inference_sw.elapsed());
        }

        let mut inference_sw = self.stop_watch();
        bar.tick();
        let mut num_exprs = 0;
        let mut num_exprs_unknown = 0;
        let mut num_exprs_partially_unknown = 0;
        let mut num_expr_type_mismatches = 0;
        let mut num_pats = 0;
        let mut num_pats_unknown = 0;
        let mut num_pats_partially_unknown = 0;
        let mut num_pat_type_mismatches = 0;
        let mut panics = 0;
        for &body_id in bodies {
            let name = body_id.name(db).unwrap_or_else(Name::missing);
            let module = body_id.module(db);
            let display_target = module.krate().to_display_target(db);
            // `--only` filter: accept either the bare name or the full path.
            if let Some(only_name) = self.only.as_deref()
                && name.display(db, Edition::LATEST).to_string() != only_name
                && full_name(db, body_id, module) != only_name
            {
                continue;
            }
            // Progress message; verbose mode also includes file path and range.
            let msg = move || {
                if verbosity.is_verbose() {
                    let source = match body_id {
                        DefWithBody::Function(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Static(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Const(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Variant(it) => it.source(db).map(|it| it.syntax().cloned()),
                    };
                    if let Some(src) = source {
                        let original_file = src.file_id.original_file(db);
                        let path = vfs.file_path(original_file.file_id(db));
                        let syntax_range = src.text_range();
                        format!(
                            "processing: {} ({} {:?})",
                            full_name(db, body_id, module),
                            path,
                            syntax_range
                        )
                    } else {
                        format!("processing: {}", full_name(db, body_id, module))
                    }
                } else {
                    format!("processing: {}", full_name(db, body_id, module))
                }
            };
            if verbosity.is_spammy() {
                bar.println(msg());
            }
            bar.set_message(msg);
            let body = db.body(body_id.into());
            // Inference may panic on broken code; catch it and keep going.
            let inference_result = catch_unwind(AssertUnwindSafe(|| db.infer(body_id.into())));
            let inference_result = match inference_result {
                Ok(inference_result) => inference_result,
                Err(p) => {
                    if let Some(s) = p.downcast_ref::<&str>() {
                        eprintln!("infer panicked for {}: {}", full_name(db, body_id, module), s);
                    } else if let Some(s) = p.downcast_ref::<String>() {
                        eprintln!("infer panicked for {}: {}", full_name(db, body_id, module), s);
                    } else {
                        eprintln!("infer panicked for {}", full_name(db, body_id, module));
                    }
                    panics += 1;
                    bar.inc(1);
                    continue;
                }
            };
            // Source map is only needed for locations; fetch it lazily.
            let sm = || db.body_with_source_map(body_id.into()).1;

            // Remember the running totals so per-body deltas can be printed.
            let (previous_exprs, previous_unknown, previous_partially_unknown) =
                (num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
            for (expr_id, _) in body.exprs() {
                let ty = &inference_result[expr_id];
                num_exprs += 1;
                let unknown_or_partial = if ty.is_unknown() {
                    num_exprs_unknown += 1;
                    if verbosity.is_spammy() {
                        if let Some((path, start, end)) = expr_syntax_range(db, vfs, &sm(), expr_id)
                        {
                            bar.println(format!(
                                "{} {}:{}-{}:{}: Unknown type",
                                path,
                                start.line + 1,
                                start.col,
                                end.line + 1,
                                end.col,
                            ));
                        } else {
                            bar.println(format!(
                                "{}: Unknown type",
                                name.display(db, Edition::LATEST)
                            ));
                        }
                    }
                    true
                } else {
                    let is_partially_unknown =
                        ty.data(Interner).flags.contains(TypeFlags::HAS_ERROR);
                    if is_partially_unknown {
                        num_exprs_partially_unknown += 1;
                    }
                    is_partially_unknown
                };
                if self.only.is_some() && verbosity.is_spammy() {
                    if let Some((_, start, end)) = expr_syntax_range(db, vfs, &sm(), expr_id) {
                        bar.println(format!(
                            "{}:{}-{}:{}: {}",
                            start.line + 1,
                            start.col,
                            end.line + 1,
                            end.col,
                            ty.display(db, display_target)
                        ));
                    } else {
                        bar.println(format!(
                            "unknown location: {}",
                            ty.display(db, display_target)
                        ));
                    }
                }
                if unknown_or_partial && self.output == Some(OutputFormat::Csv) {
                    println!(
                        r#"{},type,"{}""#,
                        location_csv_expr(db, vfs, &sm(), expr_id),
                        ty.display(db, display_target)
                    );
                }
                if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
                    num_expr_type_mismatches += 1;
                    if verbosity.is_verbose() {
                        if let Some((path, start, end)) = expr_syntax_range(db, vfs, &sm(), expr_id)
                        {
                            bar.println(format!(
                                "{} {}:{}-{}:{}: Expected {}, got {}",
                                path,
                                start.line + 1,
                                start.col,
                                end.line + 1,
                                end.col,
                                mismatch.expected.display(db, display_target),
                                mismatch.actual.display(db, display_target)
                            ));
                        } else {
                            bar.println(format!(
                                "{}: Expected {}, got {}",
                                name.display(db, Edition::LATEST),
                                mismatch.expected.display(db, display_target),
                                mismatch.actual.display(db, display_target)
                            ));
                        }
                    }
                    if self.output == Some(OutputFormat::Csv) {
                        println!(
                            r#"{},mismatch,"{}","{}""#,
                            location_csv_expr(db, vfs, &sm(), expr_id),
                            mismatch.expected.display(db, display_target),
                            mismatch.actual.display(db, display_target)
                        );
                    }
                }
            }
            if verbosity.is_spammy() {
                bar.println(format!(
                    "In {}: {} exprs, {} unknown, {} partial",
                    full_name(db, body_id, module),
                    num_exprs - previous_exprs,
                    num_exprs_unknown - previous_unknown,
                    num_exprs_partially_unknown - previous_partially_unknown
                ));
            }
            // Same bookkeeping as above, but for patterns.
            let (previous_pats, previous_unknown, previous_partially_unknown) =
                (num_pats, num_pats_unknown, num_pats_partially_unknown);
            for (pat_id, _) in body.pats() {
                let ty = &inference_result[pat_id];
                num_pats += 1;
                let unknown_or_partial = if ty.is_unknown() {
                    num_pats_unknown += 1;
                    if verbosity.is_spammy() {
                        if let Some((path, start, end)) = pat_syntax_range(db, vfs, &sm(), pat_id) {
                            bar.println(format!(
                                "{} {}:{}-{}:{}: Unknown type",
                                path,
                                start.line + 1,
                                start.col,
                                end.line + 1,
                                end.col,
                            ));
                        } else {
                            bar.println(format!(
                                "{}: Unknown type",
                                name.display(db, Edition::LATEST)
                            ));
                        }
                    }
                    true
                } else {
                    let is_partially_unknown =
                        ty.data(Interner).flags.contains(TypeFlags::HAS_ERROR);
                    if is_partially_unknown {
                        num_pats_partially_unknown += 1;
                    }
                    is_partially_unknown
                };
                if self.only.is_some() && verbosity.is_spammy() {
                    if let Some((_, start, end)) = pat_syntax_range(db, vfs, &sm(), pat_id) {
                        bar.println(format!(
                            "{}:{}-{}:{}: {}",
                            start.line + 1,
                            start.col,
                            end.line + 1,
                            end.col,
                            ty.display(db, display_target)
                        ));
                    } else {
                        bar.println(format!(
                            "unknown location: {}",
                            ty.display(db, display_target)
                        ));
                    }
                }
                if unknown_or_partial && self.output == Some(OutputFormat::Csv) {
                    println!(
                        r#"{},type,"{}""#,
                        location_csv_pat(db, vfs, &sm(), pat_id),
                        ty.display(db, display_target)
                    );
                }
                if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat_id) {
                    num_pat_type_mismatches += 1;
                    if verbosity.is_verbose() {
                        if let Some((path, start, end)) = pat_syntax_range(db, vfs, &sm(), pat_id) {
                            bar.println(format!(
                                "{} {}:{}-{}:{}: Expected {}, got {}",
                                path,
                                start.line + 1,
                                start.col,
                                end.line + 1,
                                end.col,
                                mismatch.expected.display(db, display_target),
                                mismatch.actual.display(db, display_target)
                            ));
                        } else {
                            bar.println(format!(
                                "{}: Expected {}, got {}",
                                name.display(db, Edition::LATEST),
                                mismatch.expected.display(db, display_target),
                                mismatch.actual.display(db, display_target)
                            ));
                        }
                    }
                    if self.output == Some(OutputFormat::Csv) {
                        println!(
                            r#"{},mismatch,"{}","{}""#,
                            location_csv_pat(db, vfs, &sm(), pat_id),
                            mismatch.expected.display(db, display_target),
                            mismatch.actual.display(db, display_target)
                        );
                    }
                }
            }
            if verbosity.is_spammy() {
                bar.println(format!(
                    "In {}: {} pats, {} unknown, {} partial",
                    full_name(db, body_id, module),
                    num_pats - previous_pats,
                    num_pats_unknown - previous_unknown,
                    num_pats_partially_unknown - previous_partially_unknown
                ));
            }
            bar.inc(1);
        }

        bar.finish_and_clear();
        let inference_time = inference_sw.elapsed();
        // Summary: ??ty = fully unknown, ?ty = partially unknown, !ty = mismatches.
        eprintln!(
            " exprs: {}, ??ty: {} ({}%), ?ty: {} ({}%), !ty: {}",
            num_exprs,
            num_exprs_unknown,
            percentage(num_exprs_unknown, num_exprs),
            num_exprs_partially_unknown,
            percentage(num_exprs_partially_unknown, num_exprs),
            num_expr_type_mismatches
        );
        eprintln!(
            " pats: {}, ??ty: {} ({}%), ?ty: {} ({}%), !ty: {}",
            num_pats,
            num_pats_unknown,
            percentage(num_pats_unknown, num_pats),
            num_pats_partially_unknown,
            percentage(num_pats_partially_unknown, num_pats),
            num_pat_type_mismatches
        );
        eprintln!(" panics: {panics}");
        eprintln!("{:<20} {}", "Inference:", inference_time);
        report_metric("unknown type", num_exprs_unknown, "#");
        report_metric("type mismatches", num_expr_type_mismatches, "#");
        report_metric("pattern unknown type", num_pats_unknown, "#");
        report_metric("pattern type mismatches", num_pat_type_mismatches, "#");
        report_metric("inference time", inference_time.time.as_millis() as u64, "ms");
    }
1049
1050 fn run_body_lowering(
1051 &self,
1052 db: &RootDatabase,
1053 vfs: &Vfs,
1054 bodies: &[DefWithBody],
1055 verbosity: Verbosity,
1056 ) {
1057 let mut bar = match verbosity {
1058 Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
1059 _ if self.output.is_some() => ProgressReport::hidden(),
1060 _ => ProgressReport::new(bodies.len()),
1061 };
1062
1063 let mut sw = self.stop_watch();
1064 bar.tick();
1065 for &body_id in bodies {
1066 let module = body_id.module(db);
1067 if !self.should_process(db, body_id, module) {
1068 continue;
1069 }
1070 let msg = move || {
1071 if verbosity.is_verbose() {
1072 let source = match body_id {
1073 DefWithBody::Function(it) => it.source(db).map(|it| it.syntax().cloned()),
1074 DefWithBody::Static(it) => it.source(db).map(|it| it.syntax().cloned()),
1075 DefWithBody::Const(it) => it.source(db).map(|it| it.syntax().cloned()),
1076 DefWithBody::Variant(it) => it.source(db).map(|it| it.syntax().cloned()),
1077 };
1078 if let Some(src) = source {
1079 let original_file = src.file_id.original_file(db);
1080 let path = vfs.file_path(original_file.file_id(db));
1081 let syntax_range = src.text_range();
1082 format!(
1083 "processing: {} ({} {:?})",
1084 full_name(db, body_id, module),
1085 path,
1086 syntax_range
1087 )
1088 } else {
1089 format!("processing: {}", full_name(db, body_id, module))
1090 }
1091 } else {
1092 format!("processing: {}", full_name(db, body_id, module))
1093 }
1094 };
1095 if verbosity.is_spammy() {
1096 bar.println(msg());
1097 }
1098 bar.set_message(msg);
1099 db.body(body_id.into());
1100 bar.inc(1);
1101 }
1102
1103 bar.finish_and_clear();
1104 let body_lowering_time = sw.elapsed();
1105 eprintln!("{:<20} {}", "Body lowering:", body_lowering_time);
1106 report_metric("body lowering time", body_lowering_time.time.as_millis() as u64, "ms");
1107 }
1108
    /// Runs three IDE entry points (full diagnostics, inlay hints,
    /// annotations) over every given file, timing the whole pass.
    /// Results are discarded; only the elapsed time is printed.
    fn run_ide_things(
        &self,
        analysis: Analysis,
        mut file_ids: Vec<EditionedFileId>,
        db: &RootDatabase,
        vfs: &Vfs,
        verbosity: Verbosity,
    ) {
        let len = file_ids.len();
        // Progress is hidden in quiet/spammy modes, when running in parallel,
        // or when output is being written to a file.
        let create_bar = || match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
            _ => ProgressReport::new(len),
        };

        file_ids.sort();
        file_ids.dedup();
        let mut sw = self.stop_watch();

        // Pass 1: full diagnostics per file, with proc macros, experimental
        // diagnostics, and term search all enabled.
        let mut bar = create_bar();
        for &file_id in &file_ids {
            let msg = format!("diagnostics: {}", vfs.file_path(file_id.file_id(db)));
            bar.set_message(move || msg.clone());
            _ = analysis.full_diagnostics(
                &DiagnosticsConfig {
                    enabled: true,
                    proc_macros_enabled: true,
                    proc_attr_macros_enabled: true,
                    disable_experimental: false,
                    disabled: Default::default(),
                    expr_fill_default: Default::default(),
                    snippet_cap: SnippetCap::new(true),
                    insert_use: ide_db::imports::insert_use::InsertUseConfig {
                        granularity: ide_db::imports::insert_use::ImportGranularity::Crate,
                        enforce_granularity: true,
                        prefix_kind: hir::PrefixKind::ByCrate,
                        group: true,
                        skip_glob_imports: true,
                    },
                    prefer_no_std: false,
                    prefer_prelude: true,
                    prefer_absolute: false,
                    style_lints: false,
                    term_search_fuel: 400,
                    term_search_borrowck: true,
                },
                ide::AssistResolveStrategy::All,
                analysis.editioned_file_id_to_vfs(file_id),
            );
            bar.inc(1);
        }
        bar.finish_and_clear();

        // Pass 2: inlay hints with every hint kind switched on and nothing
        // deferred to lazy resolution (`InlayFieldsToResolve::empty()`), to
        // exercise the maximal computation path.
        let mut bar = create_bar();
        for &file_id in &file_ids {
            let msg = format!("inlay hints: {}", vfs.file_path(file_id.file_id(db)));
            bar.set_message(move || msg.clone());
            _ = analysis.inlay_hints(
                &InlayHintsConfig {
                    render_colons: false,
                    type_hints: true,
                    sized_bound: false,
                    discriminant_hints: ide::DiscriminantHints::Always,
                    parameter_hints: true,
                    generic_parameter_hints: ide::GenericParameterHints {
                        type_hints: true,
                        lifetime_hints: true,
                        const_hints: true,
                    },
                    chaining_hints: true,
                    adjustment_hints: ide::AdjustmentHints::Always,
                    adjustment_hints_disable_reborrows: true,
                    adjustment_hints_mode: ide::AdjustmentHintsMode::Postfix,
                    adjustment_hints_hide_outside_unsafe: false,
                    closure_return_type_hints: ide::ClosureReturnTypeHints::Always,
                    closure_capture_hints: true,
                    binding_mode_hints: true,
                    implicit_drop_hints: true,
                    lifetime_elision_hints: ide::LifetimeElisionHints::Always,
                    param_names_for_lifetime_elision_hints: true,
                    hide_named_constructor_hints: false,
                    hide_closure_initialization_hints: false,
                    hide_closure_parameter_hints: false,
                    closure_style: hir::ClosureStyle::ImplFn,
                    max_length: Some(25),
                    closing_brace_hints_min_lines: Some(20),
                    fields_to_resolve: InlayFieldsToResolve::empty(),
                    range_exclusive_hints: true,
                },
                analysis.editioned_file_id_to_vfs(file_id),
                None,
            );
            bar.inc(1);
        }
        bar.finish_and_clear();

        // Pass 3: annotations above names, then resolve each one to force the
        // second (lazy) half of the computation as well.
        let mut bar = create_bar();
        for &file_id in &file_ids {
            let msg = format!("annotations: {}", vfs.file_path(file_id.file_id(db)));
            bar.set_message(move || msg.clone());
            analysis
                .annotations(
                    &AnnotationConfig {
                        binary_target: true,
                        annotate_runnables: true,
                        annotate_impls: true,
                        annotate_references: false,
                        annotate_method_references: false,
                        annotate_enum_variant_references: false,
                        location: ide::AnnotationLocation::AboveName,
                    },
                    analysis.editioned_file_id_to_vfs(file_id),
                )
                .unwrap()
                .into_iter()
                .for_each(|annotation| {
                    _ = analysis.resolve_annotation(annotation);
                });
            bar.inc(1);
        }
        bar.finish_and_clear();

        let ide_time = sw.elapsed();
        eprintln!("{:<20} {} ({} files)", "IDE:", ide_time, file_ids.len());
    }
1234
1235 fn should_process(&self, db: &RootDatabase, body_id: DefWithBody, module: hir::Module) -> bool {
1236 if let Some(only_name) = self.only.as_deref() {
1237 let name = body_id.name(db).unwrap_or_else(Name::missing);
1238
1239 if name.display(db, Edition::LATEST).to_string() != only_name
1240 && full_name(db, body_id, module) != only_name
1241 {
1242 return false;
1243 }
1244 }
1245 true
1246 }
1247
    /// Starts a fresh stopwatch for timing one phase of the run.
    fn stop_watch(&self) -> StopWatch {
        StopWatch::start()
    }
1251}
1252
1253fn full_name(db: &RootDatabase, body_id: DefWithBody, module: hir::Module) -> String {
1254 module
1255 .krate()
1256 .display_name(db)
1257 .map(|it| it.canonical_name().as_str().to_owned())
1258 .into_iter()
1259 .chain(
1260 module
1261 .path_to_root(db)
1262 .into_iter()
1263 .filter_map(|it| it.name(db))
1264 .rev()
1265 .chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
1266 .map(|it| it.display(db, Edition::LATEST).to_string()),
1267 )
1268 .join("::")
1269}
1270
1271fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id: ExprId) -> String {
1272 let src = match sm.expr_syntax(expr_id) {
1273 Ok(s) => s,
1274 Err(SyntheticSyntax) => return "synthetic,,".to_owned(),
1275 };
1276 let root = db.parse_or_expand(src.file_id);
1277 let node = src.map(|e| e.to_node(&root).syntax().clone());
1278 let original_range = node.as_ref().original_file_range_rooted(db);
1279 let path = vfs.file_path(original_range.file_id.file_id(db));
1280 let line_index = db.line_index(original_range.file_id.file_id(db));
1281 let text_range = original_range.range;
1282 let (start, end) =
1283 (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
1284 format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col)
1285}
1286
1287fn location_csv_pat(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, pat_id: PatId) -> String {
1288 let src = match sm.pat_syntax(pat_id) {
1289 Ok(s) => s,
1290 Err(SyntheticSyntax) => return "synthetic,,".to_owned(),
1291 };
1292 let root = db.parse_or_expand(src.file_id);
1293 let node = src.map(|e| e.to_node(&root).syntax().clone());
1294 let original_range = node.as_ref().original_file_range_rooted(db);
1295 let path = vfs.file_path(original_range.file_id.file_id(db));
1296 let line_index = db.line_index(original_range.file_id.file_id(db));
1297 let text_range = original_range.range;
1298 let (start, end) =
1299 (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
1300 format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col)
1301}
1302
1303fn expr_syntax_range<'a>(
1304 db: &RootDatabase,
1305 vfs: &'a Vfs,
1306 sm: &BodySourceMap,
1307 expr_id: ExprId,
1308) -> Option<(&'a VfsPath, LineCol, LineCol)> {
1309 let src = sm.expr_syntax(expr_id);
1310 if let Ok(src) = src {
1311 let root = db.parse_or_expand(src.file_id);
1312 let node = src.map(|e| e.to_node(&root).syntax().clone());
1313 let original_range = node.as_ref().original_file_range_rooted(db);
1314 let path = vfs.file_path(original_range.file_id.file_id(db));
1315 let line_index = db.line_index(original_range.file_id.file_id(db));
1316 let text_range = original_range.range;
1317 let (start, end) =
1318 (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
1319 Some((path, start, end))
1320 } else {
1321 None
1322 }
1323}
1324fn pat_syntax_range<'a>(
1325 db: &RootDatabase,
1326 vfs: &'a Vfs,
1327 sm: &BodySourceMap,
1328 pat_id: PatId,
1329) -> Option<(&'a VfsPath, LineCol, LineCol)> {
1330 let src = sm.pat_syntax(pat_id);
1331 if let Ok(src) = src {
1332 let root = db.parse_or_expand(src.file_id);
1333 let node = src.map(|e| e.to_node(&root).syntax().clone());
1334 let original_range = node.as_ref().original_file_range_rooted(db);
1335 let path = vfs.file_path(original_range.file_id.file_id(db));
1336 let line_index = db.line_index(original_range.file_id.file_id(db));
1337 let text_range = original_range.range;
1338 let (start, end) =
1339 (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
1340 Some((path, start, end))
1341 } else {
1342 None
1343 }
1344}
1345
1346fn shuffle<T>(rng: &mut Rand32, slice: &mut [T]) {
1347 for i in 0..slice.len() {
1348 randomize_first(rng, &mut slice[i..]);
1349 }
1350
1351 fn randomize_first<T>(rng: &mut Rand32, slice: &mut [T]) {
1352 assert!(!slice.is_empty());
1353 let idx = rng.rand_range(0..slice.len() as u32) as usize;
1354 slice.swap(0, idx);
1355 }
1356}
1357
/// Integer percentage of `n` out of `total`; reports 100 when `total`
/// is zero (matching the original `checked_div` fallback).
fn percentage(n: u64, total: u64) -> u64 {
    if total == 0 { 100 } else { n * 100 / total }
}
1361
/// A `usize` wrapper whose `Display` inserts `_` separators every three
/// digits (e.g. `1234567` renders as `1_234_567`).
#[derive(Default, Debug, Eq, PartialEq)]
struct UsizeWithUnderscore(usize);

impl fmt::Display for UsizeWithUnderscore {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let digits = self.0.to_string();

        // Numbers of up to three digits need no separators.
        if digits.len() <= 3 {
            return write!(f, "{digits}");
        }

        // Walk left-to-right, inserting `_` wherever a whole number of
        // three-digit groups remains to the right.
        let mut grouped = String::with_capacity(digits.len() + digits.len() / 3);
        for (idx, ch) in digits.char_indices() {
            if idx > 0 && (digits.len() - idx) % 3 == 0 {
                grouped.push('_');
            }
            grouped.push(ch);
        }
        write!(f, "{grouped}")
    }
}

impl std::ops::AddAssign for UsizeWithUnderscore {
    /// Adds the wrapped counters together.
    fn add_assign(&mut self, other: UsizeWithUnderscore) {
        self.0 += other.0;
    }
}
1392
/// Item-tree counts with underscore-separated display, populated from
/// `hir_def::item_tree::ItemTreeDataStats` via the `From` impl below.
#[derive(Default, Debug, Eq, PartialEq)]
struct PrettyItemStats {
    traits: UsizeWithUnderscore,
    impls: UsizeWithUnderscore,
    mods: UsizeWithUnderscore,
    macro_calls: UsizeWithUnderscore,
    macro_rules: UsizeWithUnderscore,
}
1401
/// Wraps each raw count from the item-tree stats in `UsizeWithUnderscore`
/// so it displays with digit separators.
impl From<hir_def::item_tree::ItemTreeDataStats> for PrettyItemStats {
    fn from(value: hir_def::item_tree::ItemTreeDataStats) -> Self {
        Self {
            traits: UsizeWithUnderscore(value.traits),
            impls: UsizeWithUnderscore(value.impls),
            mods: UsizeWithUnderscore(value.mods),
            macro_calls: UsizeWithUnderscore(value.macro_calls),
            macro_rules: UsizeWithUnderscore(value.macro_rules),
        }
    }
}
1413
1414impl AddAssign for PrettyItemStats {
1415 fn add_assign(&mut self, rhs: Self) {
1416 self.traits += rhs.traits;
1417 self.impls += rhs.impls;
1418 self.mods += rhs.mods;
1419 self.macro_calls += rhs.macro_calls;
1420 self.macro_rules += rhs.macro_rules;
1421 }
1422}
1423
1424impl fmt::Display for PrettyItemStats {
1425 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1426 write!(
1427 f,
1428 "traits: {}, impl: {}, mods: {}, macro calls: {}, macro rules: {}",
1429 self.traits, self.impls, self.mods, self.macro_calls, self.macro_rules
1430 )
1431 }
1432}
1433
1434