use std::{
    env, fmt,
    ops::AddAssign,
    panic::{AssertUnwindSafe, catch_unwind},
    time::{SystemTime, UNIX_EPOCH},
};

use cfg::{CfgAtom, CfgDiff};
use hir::{
    Adt, AssocItem, Crate, DefWithBody, FindPathConfig, HasCrate, HasSource, HirDisplay, ModuleDef,
    Name, crate_lang_items,
    db::{DefDatabase, ExpandDatabase, HirDatabase},
    next_solver::{DbInterner, GenericArgs},
};
use hir_def::{
    SyntheticSyntax,
    expr_store::BodySourceMap,
    hir::{ExprId, PatId},
};
use hir_ty::InferenceResult;
use ide::{
    Analysis, AnalysisHost, AnnotationConfig, DiagnosticsConfig, Edition, InlayFieldsToResolve,
    InlayHintsConfig, LineCol, RootDatabase,
};
use ide_db::{
    EditionedFileId, LineIndexDatabase, MiniCore, SnippetCap,
    base_db::{SourceDatabase, salsa::Database},
};
use itertools::Itertools;
use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace};
use oorandom::Rand32;
use profile::StopWatch;
use project_model::{CargoConfig, CfgOverrides, ProjectManifest, ProjectWorkspace, RustLibSource};
use rayon::prelude::*;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::inherent::Ty as _;
use syntax::AstNode;
use vfs::{AbsPathBuf, Vfs, VfsPath};

use crate::cli::{
    Verbosity,
    flags::{self, OutputFormat},
    full_name_of_item, print_memory_usage,
    progress_report::ProgressReport,
    report_metric,
};

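// This command batch-runs rust-analyzer's analysis phases over a workspace (item tree
// collection, crate def maps, body lowering, type inference, MIR lowering, const eval,
// data layouts and, optionally, term search and the IDE features) and reports per-phase
// timings, memory usage, and failure counts via `report_metric`. It is typically invoked
// roughly as `rust-analyzer analysis-stats <path>`; the available flags live in `flags`.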
impl flags::AnalysisStats {
    pub fn run(self, verbosity: Verbosity) -> anyhow::Result<()> {
        let mut rng = {
            let seed = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64;
            Rand32::new(seed)
        };

        let cargo_config = CargoConfig {
            sysroot: match self.no_sysroot {
                true => None,
                false => Some(RustLibSource::Discover),
            },
            all_targets: true,
            set_test: !self.no_test,
            cfg_overrides: CfgOverrides {
                global: CfgDiff::new(vec![CfgAtom::Flag(hir::sym::miri)], vec![]),
                selective: Default::default(),
            },
            ..Default::default()
        };
        let no_progress = &|_| ();

        let mut db_load_sw = self.stop_watch();

        let path = AbsPathBuf::assert_utf8(env::current_dir()?.join(&self.path));
        let manifest = ProjectManifest::discover_single(&path)?;

        let mut workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
        let metadata_time = db_load_sw.elapsed();
        let load_cargo_config = LoadCargoConfig {
            load_out_dirs_from_check: !self.disable_build_scripts,
            with_proc_macro_server: if self.disable_proc_macros {
                ProcMacroServerChoice::None
            } else {
                match self.proc_macro_srv {
                    Some(ref path) => {
                        let path = vfs::AbsPathBuf::assert_utf8(path.to_owned());
                        ProcMacroServerChoice::Explicit(path)
                    }
                    None => ProcMacroServerChoice::Sysroot,
                }
            },
            prefill_caches: false,
        };

        let build_scripts_time = if self.disable_build_scripts {
            None
        } else {
            let mut build_scripts_sw = self.stop_watch();
            let bs = workspace.run_build_scripts(&cargo_config, no_progress)?;
            workspace.set_build_scripts(bs);
            Some(build_scripts_sw.elapsed())
        };

        let (db, vfs, _proc_macro) =
            load_workspace(workspace.clone(), &cargo_config.extra_env, &load_cargo_config)?;
        eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
        eprint!(" (metadata {metadata_time}");
        if let Some(build_scripts_time) = build_scripts_time {
            eprint!("; build {build_scripts_time}");
        }
        eprintln!(")");

        let mut host = AnalysisHost::with_database(db);
        let db = host.raw_database();

        let mut analysis_sw = self.stop_watch();

        let mut krates = Crate::all(db);
        if self.randomize {
            shuffle(&mut rng, &mut krates);
        }

        let mut item_tree_sw = self.stop_watch();
        let source_roots = krates
            .iter()
            .cloned()
            .map(|krate| db.file_source_root(krate.root_file(db)).source_root_id(db))
            .unique();

        let mut dep_loc = 0;
        let mut workspace_loc = 0;
        let mut dep_item_trees = 0;
        let mut workspace_item_trees = 0;

        let mut workspace_item_stats = PrettyItemStats::default();
        let mut dep_item_stats = PrettyItemStats::default();

        for source_root_id in source_roots {
            let source_root = db.source_root(source_root_id).source_root(db);
            for file_id in source_root.iter() {
                if let Some(p) = source_root.path_for_file(&file_id)
                    && let Some((_, Some("rs"))) = p.name_and_extension()
                {
                    if !source_root.is_library || self.with_deps {
                        let length = db.file_text(file_id).text(db).lines().count();
                        let item_stats = db
                            .file_item_tree(
                                EditionedFileId::current_edition_guess_origin(db, file_id).into(),
                            )
                            .item_tree_stats()
                            .into();

                        workspace_loc += length;
                        workspace_item_trees += 1;
                        workspace_item_stats += item_stats;
                    } else {
                        let length = db.file_text(file_id).text(db).lines().count();
                        let item_stats = db
                            .file_item_tree(
                                EditionedFileId::current_edition_guess_origin(db, file_id).into(),
                            )
                            .item_tree_stats()
                            .into();

                        dep_loc += length;
                        dep_item_trees += 1;
                        dep_item_stats += item_stats;
                    }
                }
            }
        }
        eprintln!(" item trees: {workspace_item_trees}");
        let item_tree_time = item_tree_sw.elapsed();

        eprintln!(
            " dependency lines of code: {}, item trees: {}",
            UsizeWithUnderscore(dep_loc),
            UsizeWithUnderscore(dep_item_trees),
        );
        eprintln!(" dependency item stats: {dep_item_stats}");

        eprintln!("{:<20} {}", "Item Tree Collection:", item_tree_time);
        report_metric("item tree time", item_tree_time.time.as_millis() as u64, "ms");
        eprintln!(" Total Statistics:");

        let mut crate_def_map_sw = self.stop_watch();
        let mut num_crates = 0;
        let mut visited_modules = FxHashSet::default();
        let mut visit_queue = Vec::new();
        for &krate in &krates {
            let module = krate.root_module(db);
            let file_id = module.definition_source_file_id(db);
            let file_id = file_id.original_file(db);

            let source_root = db.file_source_root(file_id.file_id(db)).source_root_id(db);
            let source_root = db.source_root(source_root).source_root(db);
            if !source_root.is_library || self.with_deps {
                num_crates += 1;
                visit_queue.push(module);
            }
        }

        if self.randomize {
            shuffle(&mut rng, &mut visit_queue);
        }

        eprint!(" crates: {num_crates}");
        let mut num_decls = 0;
        let mut bodies = Vec::new();
        let mut adts = Vec::new();
        let mut file_ids = Vec::new();

        let mut num_traits = 0;
        let mut num_macro_rules_macros = 0;
        let mut num_proc_macros = 0;

        while let Some(module) = visit_queue.pop() {
            if visited_modules.insert(module) {
                file_ids.extend(module.as_source_file_id(db));
                visit_queue.extend(module.children(db));

                for decl in module.declarations(db) {
                    num_decls += 1;
                    match decl {
                        ModuleDef::Function(f) => bodies.push(DefWithBody::from(f)),
                        ModuleDef::Adt(a) => {
                            if let Adt::Enum(e) = a {
                                for v in e.variants(db) {
                                    bodies.push(DefWithBody::from(v));
                                }
                            }
                            adts.push(a)
                        }
                        ModuleDef::Const(c) => {
                            bodies.push(DefWithBody::from(c));
                        }
                        ModuleDef::Static(s) => bodies.push(DefWithBody::from(s)),
                        ModuleDef::Trait(_) => num_traits += 1,
                        ModuleDef::Macro(m) => match m.kind(db) {
                            hir::MacroKind::Declarative => num_macro_rules_macros += 1,
                            hir::MacroKind::Derive
                            | hir::MacroKind::Attr
                            | hir::MacroKind::ProcMacro => num_proc_macros += 1,
                            _ => (),
                        },
                        _ => (),
                    };
                }

                for impl_def in module.impl_defs(db) {
                    for item in impl_def.items(db) {
                        num_decls += 1;
                        match item {
                            AssocItem::Function(f) => bodies.push(DefWithBody::from(f)),
                            AssocItem::Const(c) => {
                                bodies.push(DefWithBody::from(c));
                            }
                            _ => (),
                        }
                    }
                }
            }
        }
        eprintln!(
            ", mods: {}, decls: {num_decls}, bodies: {}, adts: {}, consts: {}",
            visited_modules.len(),
            bodies.len(),
            adts.len(),
            bodies
                .iter()
                .filter(|it| matches!(it, DefWithBody::Const(_) | DefWithBody::Static(_)))
                .count(),
        );

        eprintln!(" Workspace:");
        eprintln!(
            " traits: {num_traits}, macro_rules macros: {num_macro_rules_macros}, proc_macros: {num_proc_macros}"
        );
        eprintln!(
            " lines of code: {}, item trees: {}",
            UsizeWithUnderscore(workspace_loc),
            UsizeWithUnderscore(workspace_item_trees),
        );
        eprintln!(" usages: {workspace_item_stats}");

        eprintln!(" Dependencies:");
        eprintln!(
            " lines of code: {}, item trees: {}",
            UsizeWithUnderscore(dep_loc),
            UsizeWithUnderscore(dep_item_trees),
        );
        eprintln!(" declarations: {dep_item_stats}");

        let crate_def_map_time = crate_def_map_sw.elapsed();
        eprintln!("{:<20} {}", "Item Collection:", crate_def_map_time);
        report_metric("crate def map time", crate_def_map_time.time.as_millis() as u64, "ms");

        if self.randomize {
            shuffle(&mut rng, &mut bodies);
        }

        hir::attach_db(db, || {
            if !self.skip_lang_items {
                self.run_lang_items(db, &krates, verbosity);
            }

            if !self.skip_lowering {
                self.run_body_lowering(db, &vfs, &bodies, verbosity);
            }

            if !self.skip_inference {
                self.run_inference(db, &vfs, &bodies, verbosity);
            }

            if !self.skip_mir_stats {
                self.run_mir_lowering(db, &bodies, verbosity);
            }

            if !self.skip_data_layout {
                self.run_data_layout(db, &adts, verbosity);
            }

            if !self.skip_const_eval {
                self.run_const_eval(db, &bodies, verbosity);
            }
        });

        file_ids.sort();
        file_ids.dedup();

        if self.run_all_ide_things {
            self.run_ide_things(host.analysis(), &file_ids, db, &vfs, verbosity);
        }

        if self.run_term_search {
            self.run_term_search(&workspace, db, &vfs, &file_ids, verbosity);
        }

        hir::clear_tls_solver_cache();

        let db = host.raw_database_mut();
        db.trigger_lru_eviction();

        let total_span = analysis_sw.elapsed();
        eprintln!("{:<20} {total_span}", "Total:");
        report_metric("total time", total_span.time.as_millis() as u64, "ms");
        if let Some(instructions) = total_span.instructions {
            report_metric("total instructions", instructions, "#instr");
        }
        report_metric("total memory", total_span.memory.allocated.megabytes() as u64, "MB");

        if verbosity.is_verbose() {
            print_memory_usage(host, vfs);
        }

        Ok(())
    }

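    // Computes the memory layout of every collected non-generic ADT and counts the failures;
    // ADTs with type, const, or lifetime parameters are skipped because a concrete layout
    // would require substituted generic arguments.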
    fn run_data_layout(&self, db: &RootDatabase, adts: &[hir::Adt], verbosity: Verbosity) {
        let mut sw = self.stop_watch();
        let mut all = 0;
        let mut fail = 0;
        for &a in adts {
            let interner = DbInterner::new_no_crate(db);
            let generic_params = db.generic_params(a.into());
            if generic_params.iter_type_or_consts().next().is_some()
                || generic_params.iter_lt().next().is_some()
            {
                continue;
            }
            all += 1;
            let Err(e) = db.layout_of_adt(
                hir_def::AdtId::from(a),
                GenericArgs::new_from_iter(interner, []),
                hir_ty::ParamEnvAndCrate {
                    param_env: db.trait_environment(a.into()),
                    krate: a.krate(db).into(),
                },
            ) else {
                continue;
            };
            if verbosity.is_spammy() {
                let full_name = full_name_of_item(db, a.module(db), a.name(db));
                println!("Data layout for {full_name} failed due to {e:?}");
            }
            fail += 1;
        }
        let data_layout_time = sw.elapsed();
        eprintln!("{:<20} {}", "Data layouts:", data_layout_time);
        eprintln!("Failed data layouts: {fail} ({}%)", percentage(fail, all));
        report_metric("failed data layouts", fail, "#");
        report_metric("data layout time", data_layout_time.time.as_millis() as u64, "ms");
    }

    fn run_const_eval(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) {
        let len = bodies
            .iter()
            .filter(|body| matches!(body, DefWithBody::Const(_) | DefWithBody::Static(_)))
            .count();
        let mut bar = match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
            _ => ProgressReport::new(len),
        };

        let mut sw = self.stop_watch();
        let mut all = 0;
        let mut fail = 0;
        for &b in bodies {
            bar.set_message(move || format!("const eval: {}", full_name(db, b, b.module(db))));
            let res = match b {
                DefWithBody::Const(c) => c.eval(db),
                DefWithBody::Static(s) => s.eval(db),
                _ => continue,
            };
            bar.inc(1);
            all += 1;
            let Err(error) = res else {
                continue;
            };
            if verbosity.is_spammy() {
                let full_name =
                    full_name_of_item(db, b.module(db), b.name(db).unwrap_or(Name::missing()));
                bar.println(format!("Const eval for {full_name} failed due to {error:?}"));
            }
            fail += 1;
        }
        bar.finish_and_clear();
        let const_eval_time = sw.elapsed();
        eprintln!("{:<20} {}", "Const evaluation:", const_eval_time);
        eprintln!("Failed const evals: {fail} ({}%)", percentage(fail, all));
        report_metric("failed const evals", fail, "#");
        report_metric("const eval time", const_eval_time.time.as_millis() as u64, "ms");
    }

    fn run_term_search(
        &self,
        ws: &ProjectWorkspace,
        db: &RootDatabase,
        vfs: &Vfs,
        file_ids: &[EditionedFileId],
        verbosity: Verbosity,
    ) {
        let cargo_config = CargoConfig {
            sysroot: match self.no_sysroot {
                true => None,
                false => Some(RustLibSource::Discover),
            },
            all_targets: true,
            ..Default::default()
        };

        let mut bar = match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
            _ => ProgressReport::new(file_ids.len()),
        };

        #[derive(Debug, Default)]
        struct Acc {
            tail_expr_syntax_hits: u64,
            tail_expr_no_term: u64,
            total_tail_exprs: u64,
            error_codes: FxHashMap<String, u32>,
            syntax_errors: u32,
        }

        let mut acc: Acc = Default::default();
        bar.tick();
        let mut sw = self.stop_watch();

        for &file_id in file_ids {
            let file_id = file_id.editioned_file_id(db);
            let sema = hir::Semantics::new(db);
            let display_target = match sema.first_crate(file_id.file_id()) {
                Some(krate) => krate.to_display_target(sema.db),
                None => continue,
            };

            let parse = sema.parse_guess_edition(file_id.into());
            let file_txt = db.file_text(file_id.into());
            let path = vfs.file_path(file_id.into()).as_path().unwrap();

            for node in parse.syntax().descendants() {
                let expr = match syntax::ast::Expr::cast(node.clone()) {
                    Some(it) => it,
                    None => continue,
                };
                let block = match syntax::ast::BlockExpr::cast(expr.syntax().clone()) {
                    Some(it) => it,
                    None => continue,
                };
                let target_ty = match sema.type_of_expr(&expr) {
                    Some(it) => it.adjusted(),
                    None => continue,
                };

                let expected_tail = match block.tail_expr() {
                    Some(it) => it,
                    None => continue,
                };

                if expected_tail.is_block_like() {
                    continue;
                }

                let range = sema.original_range(expected_tail.syntax()).range;
                let original_text: String = db
                    .file_text(file_id.into())
                    .text(db)
                    .chars()
                    .skip(usize::from(range.start()))
                    .take(usize::from(range.end()) - usize::from(range.start()))
                    .collect();

                let scope = match sema.scope(expected_tail.syntax()) {
                    Some(it) => it,
                    None => continue,
                };

                let ctx = hir::term_search::TermSearchCtx {
                    sema: &sema,
                    scope: &scope,
                    goal: target_ty,
                    config: hir::term_search::TermSearchConfig {
                        enable_borrowcheck: true,
                        ..Default::default()
                    },
                };
                let found_terms = hir::term_search::term_search(&ctx);

                if found_terms.is_empty() {
                    acc.tail_expr_no_term += 1;
                    acc.total_tail_exprs += 1;
                    continue;
                };

                fn trim(s: &str) -> String {
                    s.chars().filter(|c| !c.is_whitespace()).collect()
                }

                let todo = syntax::ast::make::ext::expr_todo().to_string();
                let mut formatter = |_: &hir::Type<'_>| todo.clone();
                let mut syntax_hit_found = false;
                for term in found_terms {
                    let generated = term
                        .gen_source_code(
                            &scope,
                            &mut formatter,
                            FindPathConfig {
                                prefer_no_std: false,
                                prefer_prelude: true,
                                prefer_absolute: false,
                                allow_unstable: true,
                            },
                            display_target,
                        )
                        .unwrap();
                    syntax_hit_found |= trim(&original_text) == trim(&generated);

                    let mut txt = file_txt.text(db).to_string();

                    let edit = ide::TextEdit::replace(range, generated.clone());
                    edit.apply(&mut txt);

                    if self.validate_term_search {
                        std::fs::write(path, txt).unwrap();

                        let res = ws.run_build_scripts(&cargo_config, &|_| ()).unwrap();
                        if let Some(err) = res.error()
                            && err.contains("error: could not compile")
                        {
                            if let Some(mut err_idx) = err.find("error[E") {
                                err_idx += 7;
                                let err_code = &err[err_idx..err_idx + 4];
                                match err_code {
                                    "0282" | "0283" => continue,
                                    "0277" | "0308" if generated.contains(&todo) => continue,
                                    "0599"
                                        if err.contains(
                                            "the following trait is implemented but not in scope",
                                        ) =>
                                    {
                                        continue;
                                    }
                                    _ => (),
                                }
                                bar.println(err);
                                bar.println(generated);
                                acc.error_codes
                                    .entry(err_code.to_owned())
                                    .and_modify(|n| *n += 1)
                                    .or_insert(1);
                            } else {
                                acc.syntax_errors += 1;
                                bar.println(format!("Syntax error: \n{err}"));
                            }
                        }
                    }
                }

                if syntax_hit_found {
                    acc.tail_expr_syntax_hits += 1;
                }
                acc.total_tail_exprs += 1;

                let msg = move || {
                    format!(
                        "processing: {:<50}",
                        trim(&original_text).chars().take(50).collect::<String>()
                    )
                };
                if verbosity.is_spammy() {
                    bar.println(msg());
                }
                bar.set_message(msg);
            }
            if self.validate_term_search {
                std::fs::write(path, file_txt.text(db).to_string()).unwrap();
            }

            bar.inc(1);
        }
        let term_search_time = sw.elapsed();

        bar.println(format!(
            "Tail Expr syntactic hits: {}/{} ({}%)",
            acc.tail_expr_syntax_hits,
            acc.total_tail_exprs,
            percentage(acc.tail_expr_syntax_hits, acc.total_tail_exprs)
        ));
        bar.println(format!(
            "Tail Exprs found: {}/{} ({}%)",
            acc.total_tail_exprs - acc.tail_expr_no_term,
            acc.total_tail_exprs,
            percentage(acc.total_tail_exprs - acc.tail_expr_no_term, acc.total_tail_exprs)
        ));
        if self.validate_term_search {
            bar.println(format!(
                "Tail Exprs total errors: {}, syntax errors: {}, error codes:",
                acc.error_codes.values().sum::<u32>() + acc.syntax_errors,
                acc.syntax_errors,
            ));
            for (err, count) in acc.error_codes {
                bar.println(format!(
                    " E{err}: {count:>5} (https://doc.rust-lang.org/error_codes/E{err}.html)"
                ));
            }
        }
        bar.println(format!(
            "Term search avg time: {}ms",
            term_search_time.time.as_millis() as u64 / acc.total_tail_exprs
        ));
        bar.println(format!("{:<20} {}", "Term search:", term_search_time));
        report_metric("term search time", term_search_time.time.as_millis() as u64, "ms");

        bar.finish_and_clear();
    }

    fn run_mir_lowering(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) {
        let mut bar = match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
            _ => ProgressReport::new(bodies.len()),
        };
        let mut sw = self.stop_watch();
        let mut all = 0;
        let mut fail = 0;
        for &body_id in bodies {
            bar.set_message(move || {
                format!("mir lowering: {}", full_name(db, body_id, body_id.module(db)))
            });
            bar.inc(1);
            if matches!(body_id, DefWithBody::Variant(_)) {
                continue;
            }
            let module = body_id.module(db);
            if !self.should_process(db, body_id, module) {
                continue;
            }

            all += 1;
            let Err(e) = db.mir_body(body_id.into()) else {
                continue;
            };
            if verbosity.is_spammy() {
                let full_name = module
                    .path_to_root(db)
                    .into_iter()
                    .rev()
                    .filter_map(|it| it.name(db))
                    .chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
                    .map(|it| it.display(db, Edition::LATEST).to_string())
                    .join("::");
                bar.println(format!("Mir body for {full_name} failed due to {e:?}"));
            }
            fail += 1;
            bar.tick();
        }
        let mir_lowering_time = sw.elapsed();
        bar.finish_and_clear();
        eprintln!("{:<20} {}", "MIR lowering:", mir_lowering_time);
        eprintln!("Mir failed bodies: {fail} ({}%)", percentage(fail, all));
        report_metric("mir failed bodies", fail, "#");
        report_metric("mir lowering time", mir_lowering_time.time.as_millis() as u64, "ms");
    }

    fn run_inference(
        &self,
        db: &RootDatabase,
        vfs: &Vfs,
        bodies: &[DefWithBody],
        verbosity: Verbosity,
    ) {
        let mut bar = match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
            _ => ProgressReport::new(bodies.len()),
        };

        if self.parallel {
            let mut inference_sw = self.stop_watch();
            bodies
                .par_iter()
                .map_with(db.clone(), |snap, &body| {
                    snap.body(body.into());
                    InferenceResult::for_body(snap, body.into());
                })
                .count();
            eprintln!("{:<20} {}", "Parallel Inference:", inference_sw.elapsed());
        }

        let mut inference_sw = self.stop_watch();
        bar.tick();
        let mut num_exprs = 0;
        let mut num_exprs_unknown = 0;
        let mut num_exprs_partially_unknown = 0;
        let mut num_expr_type_mismatches = 0;
        let mut num_pats = 0;
        let mut num_pats_unknown = 0;
        let mut num_pats_partially_unknown = 0;
        let mut num_pat_type_mismatches = 0;
        let mut panics = 0;
        for &body_id in bodies {
            let name = body_id.name(db).unwrap_or_else(Name::missing);
            let module = body_id.module(db);
            let display_target = module.krate(db).to_display_target(db);
            if let Some(only_name) = self.only.as_deref()
                && name.display(db, Edition::LATEST).to_string() != only_name
                && full_name(db, body_id, module) != only_name
            {
                continue;
            }
            let msg = move || {
                if verbosity.is_verbose() {
                    let source = match body_id {
                        DefWithBody::Function(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Static(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Const(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Variant(it) => it.source(db).map(|it| it.syntax().cloned()),
                    };
                    if let Some(src) = source {
                        let original_file = src.file_id.original_file(db);
                        let path = vfs.file_path(original_file.file_id(db));
                        let syntax_range = src.text_range();
                        format!(
                            "processing: {} ({} {:?})",
                            full_name(db, body_id, module),
                            path,
                            syntax_range
                        )
                    } else {
                        format!("processing: {}", full_name(db, body_id, module))
                    }
                } else {
                    format!("processing: {}", full_name(db, body_id, module))
                }
            };
            if verbosity.is_spammy() {
                bar.println(msg());
            }
            bar.set_message(msg);
            let body = db.body(body_id.into());
            let inference_result =
                catch_unwind(AssertUnwindSafe(|| InferenceResult::for_body(db, body_id.into())));
            let inference_result = match inference_result {
                Ok(inference_result) => inference_result,
                Err(p) => {
                    if let Some(s) = p.downcast_ref::<&str>() {
                        eprintln!("infer panicked for {}: {}", full_name(db, body_id, module), s);
                    } else if let Some(s) = p.downcast_ref::<String>() {
                        eprintln!("infer panicked for {}: {}", full_name(db, body_id, module), s);
                    } else {
                        eprintln!("infer panicked for {}", full_name(db, body_id, module));
                    }
                    panics += 1;
                    bar.inc(1);
                    continue;
                }
            };
            let sm = || db.body_with_source_map(body_id.into()).1;

            let (previous_exprs, previous_unknown, previous_partially_unknown) =
                (num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
            for (expr_id, _) in body.exprs() {
                let ty = &inference_result[expr_id];
                num_exprs += 1;
                let unknown_or_partial = if ty.is_ty_error() {
                    num_exprs_unknown += 1;
                    if verbosity.is_spammy() {
                        if let Some((path, start, end)) = expr_syntax_range(db, vfs, &sm(), expr_id)
                        {
                            bar.println(format!(
                                "{} {}:{}-{}:{}: Unknown type",
                                path,
                                start.line + 1,
                                start.col,
                                end.line + 1,
                                end.col,
                            ));
                        } else {
                            bar.println(format!(
                                "{}: Unknown type",
                                name.display(db, Edition::LATEST)
                            ));
                        }
                    }
                    true
                } else {
                    let is_partially_unknown = ty.references_non_lt_error();
                    if is_partially_unknown {
                        num_exprs_partially_unknown += 1;
                    }
                    is_partially_unknown
                };
                if self.only.is_some() && verbosity.is_spammy() {
                    if let Some((_, start, end)) = expr_syntax_range(db, vfs, &sm(), expr_id) {
                        bar.println(format!(
                            "{}:{}-{}:{}: {}",
                            start.line + 1,
                            start.col,
                            end.line + 1,
                            end.col,
                            ty.display(db, display_target)
                        ));
                    } else {
                        bar.println(format!(
                            "unknown location: {}",
                            ty.display(db, display_target)
                        ));
                    }
                }
                if unknown_or_partial && self.output == Some(OutputFormat::Csv) {
                    println!(
                        r#"{},type,"{}""#,
                        location_csv_expr(db, vfs, &sm(), expr_id),
                        ty.display(db, display_target)
                    );
                }
                if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
                    num_expr_type_mismatches += 1;
                    if verbosity.is_verbose() {
                        if let Some((path, start, end)) = expr_syntax_range(db, vfs, &sm(), expr_id)
                        {
                            bar.println(format!(
                                "{} {}:{}-{}:{}: Expected {}, got {}",
                                path,
                                start.line + 1,
                                start.col,
                                end.line + 1,
                                end.col,
                                mismatch.expected.display(db, display_target),
                                mismatch.actual.display(db, display_target)
                            ));
                        } else {
                            bar.println(format!(
                                "{}: Expected {}, got {}",
                                name.display(db, Edition::LATEST),
                                mismatch.expected.display(db, display_target),
                                mismatch.actual.display(db, display_target)
                            ));
                        }
                    }
                    if self.output == Some(OutputFormat::Csv) {
                        println!(
                            r#"{},mismatch,"{}","{}""#,
                            location_csv_expr(db, vfs, &sm(), expr_id),
                            mismatch.expected.display(db, display_target),
                            mismatch.actual.display(db, display_target)
                        );
                    }
                }
            }
            if verbosity.is_spammy() {
                bar.println(format!(
                    "In {}: {} exprs, {} unknown, {} partial",
                    full_name(db, body_id, module),
                    num_exprs - previous_exprs,
                    num_exprs_unknown - previous_unknown,
                    num_exprs_partially_unknown - previous_partially_unknown
                ));
            }
            let (previous_pats, previous_unknown, previous_partially_unknown) =
                (num_pats, num_pats_unknown, num_pats_partially_unknown);
            for (pat_id, _) in body.pats() {
                let ty = &inference_result[pat_id];
                num_pats += 1;
                let unknown_or_partial = if ty.is_ty_error() {
                    num_pats_unknown += 1;
                    if verbosity.is_spammy() {
                        if let Some((path, start, end)) = pat_syntax_range(db, vfs, &sm(), pat_id) {
                            bar.println(format!(
                                "{} {}:{}-{}:{}: Unknown type",
                                path,
                                start.line + 1,
                                start.col,
                                end.line + 1,
                                end.col,
                            ));
                        } else {
                            bar.println(format!(
                                "{}: Unknown type",
                                name.display(db, Edition::LATEST)
                            ));
                        }
                    }
                    true
                } else {
                    let is_partially_unknown = ty.references_non_lt_error();
                    if is_partially_unknown {
                        num_pats_partially_unknown += 1;
                    }
                    is_partially_unknown
                };
                if self.only.is_some() && verbosity.is_spammy() {
                    if let Some((_, start, end)) = pat_syntax_range(db, vfs, &sm(), pat_id) {
                        bar.println(format!(
                            "{}:{}-{}:{}: {}",
                            start.line + 1,
                            start.col,
                            end.line + 1,
                            end.col,
                            ty.display(db, display_target)
                        ));
                    } else {
                        bar.println(format!(
                            "unknown location: {}",
                            ty.display(db, display_target)
                        ));
                    }
                }
                if unknown_or_partial && self.output == Some(OutputFormat::Csv) {
                    println!(
                        r#"{},type,"{}""#,
                        location_csv_pat(db, vfs, &sm(), pat_id),
                        ty.display(db, display_target)
                    );
                }
                if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat_id) {
                    num_pat_type_mismatches += 1;
                    if verbosity.is_verbose() {
                        if let Some((path, start, end)) = pat_syntax_range(db, vfs, &sm(), pat_id) {
                            bar.println(format!(
                                "{} {}:{}-{}:{}: Expected {}, got {}",
                                path,
                                start.line + 1,
                                start.col,
                                end.line + 1,
                                end.col,
                                mismatch.expected.display(db, display_target),
                                mismatch.actual.display(db, display_target)
                            ));
                        } else {
                            bar.println(format!(
                                "{}: Expected {}, got {}",
                                name.display(db, Edition::LATEST),
                                mismatch.expected.display(db, display_target),
                                mismatch.actual.display(db, display_target)
                            ));
                        }
                    }
                    if self.output == Some(OutputFormat::Csv) {
                        println!(
                            r#"{},mismatch,"{}","{}""#,
                            location_csv_pat(db, vfs, &sm(), pat_id),
                            mismatch.expected.display(db, display_target),
                            mismatch.actual.display(db, display_target)
                        );
                    }
                }
            }
            if verbosity.is_spammy() {
                bar.println(format!(
                    "In {}: {} pats, {} unknown, {} partial",
                    full_name(db, body_id, module),
                    num_pats - previous_pats,
                    num_pats_unknown - previous_unknown,
                    num_pats_partially_unknown - previous_partially_unknown
                ));
            }
            bar.inc(1);
        }

        bar.finish_and_clear();
        let inference_time = inference_sw.elapsed();
        eprintln!(
            " exprs: {}, ??ty: {} ({}%), ?ty: {} ({}%), !ty: {}",
            num_exprs,
            num_exprs_unknown,
            percentage(num_exprs_unknown, num_exprs),
            num_exprs_partially_unknown,
            percentage(num_exprs_partially_unknown, num_exprs),
            num_expr_type_mismatches
        );
        eprintln!(
            " pats: {}, ??ty: {} ({}%), ?ty: {} ({}%), !ty: {}",
            num_pats,
            num_pats_unknown,
            percentage(num_pats_unknown, num_pats),
            num_pats_partially_unknown,
            percentage(num_pats_partially_unknown, num_pats),
            num_pat_type_mismatches
        );
        eprintln!(" panics: {panics}");
        eprintln!("{:<20} {}", "Inference:", inference_time);
        report_metric("unknown type", num_exprs_unknown, "#");
        report_metric("type mismatches", num_expr_type_mismatches, "#");
        report_metric("pattern unknown type", num_pats_unknown, "#");
        report_metric("pattern type mismatches", num_pat_type_mismatches, "#");
        report_metric("inference time", inference_time.time.as_millis() as u64, "ms");
    }

    fn run_body_lowering(
        &self,
        db: &RootDatabase,
        vfs: &Vfs,
        bodies: &[DefWithBody],
        verbosity: Verbosity,
    ) {
        let mut bar = match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.output.is_some() => ProgressReport::hidden(),
            _ => ProgressReport::new(bodies.len()),
        };

        let mut sw = self.stop_watch();
        bar.tick();
        for &body_id in bodies {
            let module = body_id.module(db);
            if !self.should_process(db, body_id, module) {
                continue;
            }
            let msg = move || {
                if verbosity.is_verbose() {
                    let source = match body_id {
                        DefWithBody::Function(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Static(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Const(it) => it.source(db).map(|it| it.syntax().cloned()),
                        DefWithBody::Variant(it) => it.source(db).map(|it| it.syntax().cloned()),
                    };
                    if let Some(src) = source {
                        let original_file = src.file_id.original_file(db);
                        let path = vfs.file_path(original_file.file_id(db));
                        let syntax_range = src.text_range();
                        format!(
                            "processing: {} ({} {:?})",
                            full_name(db, body_id, module),
                            path,
                            syntax_range
                        )
                    } else {
                        format!("processing: {}", full_name(db, body_id, module))
                    }
                } else {
                    format!("processing: {}", full_name(db, body_id, module))
                }
            };
            if verbosity.is_spammy() {
                bar.println(msg());
            }
            bar.set_message(msg);
            db.body(body_id.into());
            bar.inc(1);
        }

        bar.finish_and_clear();
        let body_lowering_time = sw.elapsed();
        eprintln!("{:<20} {}", "Body lowering:", body_lowering_time);
        report_metric("body lowering time", body_lowering_time.time.as_millis() as u64, "ms");
    }

    fn run_lang_items(&self, db: &RootDatabase, crates: &[Crate], verbosity: Verbosity) {
        let mut bar = match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.output.is_some() => ProgressReport::hidden(),
            _ => ProgressReport::new(crates.len()),
        };

        let mut sw = self.stop_watch();
        bar.tick();
        for &krate in crates {
            crate_lang_items(db, krate.into());
            bar.inc(1);
        }

        bar.finish_and_clear();
        let time = sw.elapsed();
        eprintln!("{:<20} {}", "Crate lang items:", time);
        report_metric("crate lang items time", time.time.as_millis() as u64, "ms");
    }

    fn run_ide_things(
        &self,
        analysis: Analysis,
        file_ids: &[EditionedFileId],
        db: &RootDatabase,
        vfs: &Vfs,
        verbosity: Verbosity,
    ) {
        let len = file_ids.len();
        let create_bar = || match verbosity {
            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
            _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
            _ => ProgressReport::new(len),
        };

        let mut sw = self.stop_watch();

        let mut bar = create_bar();
        for &file_id in file_ids {
            let msg = format!("diagnostics: {}", vfs.file_path(file_id.file_id(db)));
            bar.set_message(move || msg.clone());
            _ = analysis.full_diagnostics(
                &DiagnosticsConfig {
                    enabled: true,
                    proc_macros_enabled: true,
                    proc_attr_macros_enabled: true,
                    disable_experimental: false,
                    disabled: Default::default(),
                    expr_fill_default: Default::default(),
                    snippet_cap: SnippetCap::new(true),
                    insert_use: ide_db::imports::insert_use::InsertUseConfig {
                        granularity: ide_db::imports::insert_use::ImportGranularity::Crate,
                        enforce_granularity: true,
                        prefix_kind: hir::PrefixKind::ByCrate,
                        group: true,
                        skip_glob_imports: true,
                    },
                    prefer_no_std: false,
                    prefer_prelude: true,
                    prefer_absolute: false,
                    style_lints: false,
                    term_search_fuel: 400,
                    term_search_borrowck: true,
                },
                ide::AssistResolveStrategy::All,
                analysis.editioned_file_id_to_vfs(file_id),
            );
            bar.inc(1);
        }
        bar.finish_and_clear();

        let mut bar = create_bar();
        for &file_id in file_ids {
            let msg = format!("inlay hints: {}", vfs.file_path(file_id.file_id(db)));
            bar.set_message(move || msg.clone());
            _ = analysis.inlay_hints(
                &InlayHintsConfig {
                    render_colons: false,
                    type_hints: true,
                    sized_bound: false,
                    discriminant_hints: ide::DiscriminantHints::Always,
                    parameter_hints: true,
                    parameter_hints_for_missing_arguments: false,
                    generic_parameter_hints: ide::GenericParameterHints {
                        type_hints: true,
                        lifetime_hints: true,
                        const_hints: true,
                    },
                    chaining_hints: true,
                    adjustment_hints: ide::AdjustmentHints::Always,
                    adjustment_hints_disable_reborrows: true,
                    adjustment_hints_mode: ide::AdjustmentHintsMode::Postfix,
                    adjustment_hints_hide_outside_unsafe: false,
                    closure_return_type_hints: ide::ClosureReturnTypeHints::Always,
                    closure_capture_hints: true,
                    binding_mode_hints: true,
                    implicit_drop_hints: true,
                    implied_dyn_trait_hints: true,
                    lifetime_elision_hints: ide::LifetimeElisionHints::Always,
                    param_names_for_lifetime_elision_hints: true,
                    hide_inferred_type_hints: false,
                    hide_named_constructor_hints: false,
                    hide_closure_initialization_hints: false,
                    hide_closure_parameter_hints: false,
                    closure_style: hir::ClosureStyle::ImplFn,
                    max_length: Some(25),
                    closing_brace_hints_min_lines: Some(20),
                    fields_to_resolve: InlayFieldsToResolve::empty(),
                    range_exclusive_hints: true,
                    minicore: MiniCore::default(),
                },
                analysis.editioned_file_id_to_vfs(file_id),
                None,
            );
            bar.inc(1);
        }
        bar.finish_and_clear();

        let mut bar = create_bar();
        let annotation_config = AnnotationConfig {
            binary_target: true,
            annotate_runnables: true,
            annotate_impls: true,
            annotate_references: false,
            annotate_method_references: false,
            annotate_enum_variant_references: false,
            location: ide::AnnotationLocation::AboveName,
            filter_adjacent_derive_implementations: false,
            minicore: MiniCore::default(),
        };
        for &file_id in file_ids {
            let msg = format!("annotations: {}", vfs.file_path(file_id.file_id(db)));
            bar.set_message(move || msg.clone());
            analysis
                .annotations(&annotation_config, analysis.editioned_file_id_to_vfs(file_id))
                .unwrap()
                .into_iter()
                .for_each(|annotation| {
                    _ = analysis.resolve_annotation(&annotation_config, annotation);
                });
            bar.inc(1);
        }
        bar.finish_and_clear();

        let ide_time = sw.elapsed();
        eprintln!("{:<20} {} ({} files)", "IDE:", ide_time, file_ids.len());
    }

    fn should_process(&self, db: &RootDatabase, body_id: DefWithBody, module: hir::Module) -> bool {
        if let Some(only_name) = self.only.as_deref() {
            let name = body_id.name(db).unwrap_or_else(Name::missing);

            if name.display(db, Edition::LATEST).to_string() != only_name
                && full_name(db, body_id, module) != only_name
            {
                return false;
            }
        }
        true
    }

    fn stop_watch(&self) -> StopWatch {
        StopWatch::start()
    }
}

fn full_name(db: &RootDatabase, body_id: DefWithBody, module: hir::Module) -> String {
    module
        .krate(db)
        .display_name(db)
        .map(|it| it.canonical_name().as_str().to_owned())
        .into_iter()
        .chain(
            module
                .path_to_root(db)
                .into_iter()
                .filter_map(|it| it.name(db))
                .rev()
                .chain(Some(body_id.name(db).unwrap_or_else(Name::missing)))
                .map(|it| it.display(db, Edition::LATEST).to_string()),
        )
        .join("::")
}

fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id: ExprId) -> String {
    let src = match sm.expr_syntax(expr_id) {
        Ok(s) => s,
        Err(SyntheticSyntax) => return "synthetic,,".to_owned(),
    };
    let root = db.parse_or_expand(src.file_id);
    let node = src.map(|e| e.to_node(&root).syntax().clone());
    let original_range = node.as_ref().original_file_range_rooted(db);
    let path = vfs.file_path(original_range.file_id.file_id(db));
    let line_index = db.line_index(original_range.file_id.file_id(db));
    let text_range = original_range.range;
    let (start, end) =
        (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
    format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col)
}

fn location_csv_pat(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, pat_id: PatId) -> String {
    let src = match sm.pat_syntax(pat_id) {
        Ok(s) => s,
        Err(SyntheticSyntax) => return "synthetic,,".to_owned(),
    };
    let root = db.parse_or_expand(src.file_id);
    let node = src.map(|e| e.to_node(&root).syntax().clone());
    let original_range = node.as_ref().original_file_range_rooted(db);
    let path = vfs.file_path(original_range.file_id.file_id(db));
    let line_index = db.line_index(original_range.file_id.file_id(db));
    let text_range = original_range.range;
    let (start, end) =
        (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
    format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col)
}

fn expr_syntax_range<'a>(
    db: &RootDatabase,
    vfs: &'a Vfs,
    sm: &BodySourceMap,
    expr_id: ExprId,
) -> Option<(&'a VfsPath, LineCol, LineCol)> {
    let src = sm.expr_syntax(expr_id);
    if let Ok(src) = src {
        let root = db.parse_or_expand(src.file_id);
        let node = src.map(|e| e.to_node(&root).syntax().clone());
        let original_range = node.as_ref().original_file_range_rooted(db);
        let path = vfs.file_path(original_range.file_id.file_id(db));
        let line_index = db.line_index(original_range.file_id.file_id(db));
        let text_range = original_range.range;
        let (start, end) =
            (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
        Some((path, start, end))
    } else {
        None
    }
}

fn pat_syntax_range<'a>(
    db: &RootDatabase,
    vfs: &'a Vfs,
    sm: &BodySourceMap,
    pat_id: PatId,
) -> Option<(&'a VfsPath, LineCol, LineCol)> {
    let src = sm.pat_syntax(pat_id);
    if let Ok(src) = src {
        let root = db.parse_or_expand(src.file_id);
        let node = src.map(|e| e.to_node(&root).syntax().clone());
        let original_range = node.as_ref().original_file_range_rooted(db);
        let path = vfs.file_path(original_range.file_id.file_id(db));
        let line_index = db.line_index(original_range.file_id.file_id(db));
        let text_range = original_range.range;
        let (start, end) =
            (line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
        Some((path, start, end))
    } else {
        None
    }
}

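// In-place Fisher-Yates style shuffle driven by the deterministic `oorandom::Rand32` seeded
// from the current time above; used when the `randomize` option is set so that processing
// order does not bias the timings.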
fn shuffle<T>(rng: &mut Rand32, slice: &mut [T]) {
    for i in 0..slice.len() {
        randomize_first(rng, &mut slice[i..]);
    }

    fn randomize_first<T>(rng: &mut Rand32, slice: &mut [T]) {
        assert!(!slice.is_empty());
        let idx = rng.rand_range(0..slice.len() as u32) as usize;
        slice.swap(0, idx);
    }
}

fn percentage(n: u64, total: u64) -> u64 {
    (n * 100).checked_div(total).unwrap_or(100)
}

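// Counter newtype whose `Display` impl inserts `_` as a thousands separator
// (e.g. 1234567 renders as 1_234_567) for the human-readable statistics output.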
#[derive(Default, Debug, Eq, PartialEq)]
struct UsizeWithUnderscore(usize);

impl fmt::Display for UsizeWithUnderscore {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let num_str = self.0.to_string();

        if num_str.len() <= 3 {
            return write!(f, "{num_str}");
        }

        let mut result = String::new();

        for (count, ch) in num_str.chars().rev().enumerate() {
            if count > 0 && count % 3 == 0 {
                result.push('_');
            }
            result.push(ch);
        }

        let result = result.chars().rev().collect::<String>();
        write!(f, "{result}")
    }
}

impl std::ops::AddAssign for UsizeWithUnderscore {
    fn add_assign(&mut self, other: UsizeWithUnderscore) {
        self.0 += other.0;
    }
}

#[derive(Default, Debug, Eq, PartialEq)]
struct PrettyItemStats {
    traits: UsizeWithUnderscore,
    impls: UsizeWithUnderscore,
    mods: UsizeWithUnderscore,
    macro_calls: UsizeWithUnderscore,
    macro_rules: UsizeWithUnderscore,
}

impl From<hir_def::item_tree::ItemTreeDataStats> for PrettyItemStats {
    fn from(value: hir_def::item_tree::ItemTreeDataStats) -> Self {
        Self {
            traits: UsizeWithUnderscore(value.traits),
            impls: UsizeWithUnderscore(value.impls),
            mods: UsizeWithUnderscore(value.mods),
            macro_calls: UsizeWithUnderscore(value.macro_calls),
            macro_rules: UsizeWithUnderscore(value.macro_rules),
        }
    }
}

impl AddAssign for PrettyItemStats {
    fn add_assign(&mut self, rhs: Self) {
        self.traits += rhs.traits;
        self.impls += rhs.impls;
        self.mods += rhs.mods;
        self.macro_calls += rhs.macro_calls;
        self.macro_rules += rhs.macro_rules;
    }
}

impl fmt::Display for PrettyItemStats {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "traits: {}, impl: {}, mods: {}, macro calls: {}, macro rules: {}",
            self.traits, self.impls, self.mods, self.macro_calls, self.macro_rules
        )
    }
}