use std::{iter, mem};

use hir::{ChangeWithProcMacros, ProcMacrosBuilder, db::DefDatabase};
use ide_db::{
    FxHashMap,
    base_db::{CrateGraphBuilder, ProcMacroLoadingError, ProcMacroPaths, salsa::Durability},
};
use itertools::Itertools;
use load_cargo::{ProjectFolders, load_proc_macro};
use lsp_types::FileSystemWatcher;
use paths::Utf8Path;
use proc_macro_api::ProcMacroClient;
use project_model::{ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts};
use stdx::{format_to, thread::ThreadIntent};
use tracing::{debug, info};
use triomphe::Arc;
use vfs::{AbsPath, AbsPathBuf, ChangeKind};

use crate::{
    config::{Config, FilesWatcher, LinkedProject},
    flycheck::{FlycheckConfig, FlycheckHandle},
    global_state::{
        FetchBuildDataResponse, FetchWorkspaceRequest, FetchWorkspaceResponse, GlobalState,
    },
    lsp_ext,
    main_loop::{DiscoverProjectParam, Task},
    op_queue::Cause,
};

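/// Progress of fetching the project workspaces; `End` carries the load results and
/// whether a crate graph reload should be forced.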
#[derive(Debug)]
pub(crate) enum ProjectWorkspaceProgress {
    Begin,
    Report(String),
    End(Vec<anyhow::Result<ProjectWorkspace>>, bool),
}

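/// Progress of running build scripts; `End` carries the workspaces the scripts were run
/// for together with the per-workspace results.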
#[derive(Debug)]
pub(crate) enum BuildDataProgress {
    Begin,
    Report(String),
    End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
}

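/// Progress of loading proc-macros; `End` carries the pending `ChangeWithProcMacros`
/// with the loaded proc-macros filled in.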
#[derive(Debug)]
pub(crate) enum ProcMacroProgress {
    Begin,
    Report(String),
    End(ChangeWithProcMacros),
}

impl GlobalState {
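    /// Returns `true` when no background work (VFS loading, workspace fetching, build
    /// scripts, proc-macro loading, project discovery) is in flight.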
    pub(crate) fn is_quiescent(&self) -> bool {
        self.vfs_done
            && self.fetch_ws_receiver.is_none()
            && !self.fetch_workspaces_queue.op_in_progress()
            && !self.fetch_build_data_queue.op_in_progress()
            && !self.fetch_proc_macros_queue.op_in_progress()
            && self.discover_jobs_active == 0
            && self.vfs_progress_config_version >= self.vfs_config_version
    }

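    /// Like `is_quiescent`, but additionally requires cache priming to have finished.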
    fn is_fully_ready(&self) -> bool {
        self.is_quiescent() && !self.prime_caches_queue.op_in_progress()
    }

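    /// Applies a new `Config`, adjusting LRU capacities and queueing workspace refetches
    /// or flycheck restarts for the settings that changed.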
    pub(crate) fn update_configuration(&mut self, config: Config) {
        let _p = tracing::info_span!("GlobalState::update_configuration").entered();
        let old_config = mem::replace(&mut self.config, Arc::new(config));
        if self.config.lru_parse_query_capacity() != old_config.lru_parse_query_capacity() {
            self.analysis_host.update_lru_capacity(self.config.lru_parse_query_capacity());
        }
        if self.config.lru_query_capacities_config() != old_config.lru_query_capacities_config() {
            self.analysis_host.update_lru_capacities(
                &self.config.lru_query_capacities_config().cloned().unwrap_or_default(),
            );
        }

        if self.config.linked_or_discovered_projects() != old_config.linked_or_discovered_projects()
        {
            let req = FetchWorkspaceRequest { path: None, force_crate_graph_reload: false };
            self.fetch_workspaces_queue.request_op("discovered projects changed".to_owned(), req)
        } else if self.config.flycheck(None) != old_config.flycheck(None) {
            self.reload_flycheck();
        }

        if self.analysis_host.raw_database().expand_proc_attr_macros()
            != self.config.expand_proc_attr_macros()
        {
            self.analysis_host.raw_database_mut().set_expand_proc_attr_macros_with_durability(
                self.config.expand_proc_attr_macros(),
                Durability::HIGH,
            );
        }

        if self.config.cargo(None) != old_config.cargo(None) {
            let req = FetchWorkspaceRequest { path: None, force_crate_graph_reload: false };
            self.fetch_workspaces_queue.request_op("cargo config changed".to_owned(), req)
        }

        if self.config.cfg_set_test(None) != old_config.cfg_set_test(None) {
            let req = FetchWorkspaceRequest { path: None, force_crate_graph_reload: false };
            self.fetch_workspaces_queue.request_op("cfg_set_test config changed".to_owned(), req)
        }
    }

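    /// Builds the server status report (`lsp_ext::ServerStatusParams`), collecting warnings
    /// and errors from workspace loading, build scripts, sysroots and proc-macro servers.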
    pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
        let mut status = lsp_ext::ServerStatusParams {
            health: lsp_ext::Health::Ok,
            quiescent: self.is_fully_ready(),
            message: None,
        };
        let mut message = String::new();

        if !self.config.cargo_autoreload_config(None)
            && self.is_quiescent()
            && self.fetch_workspaces_queue.op_requested()
            && self.config.discover_workspace_config().is_none()
        {
            status.health |= lsp_ext::Health::Warning;
            message.push_str("Auto-reloading is disabled and the workspace has changed, a manual workspace reload is required.\n\n");
        }

        if self.build_deps_changed {
            status.health |= lsp_ext::Health::Warning;
            message.push_str(
                "Proc-macros and/or build scripts have changed and need to be rebuilt.\n\n",
            );
        }
        if self.fetch_build_data_error().is_err() {
            status.health |= lsp_ext::Health::Warning;
            message.push_str("Failed to run build scripts of some packages.\n\n");
            message.push_str(
                "Please refer to the language server logs for more details on the errors.",
            );
        }
        if let Some(err) = &self.config_errors {
            status.health |= lsp_ext::Health::Warning;
            format_to!(message, "{err}\n");
        }
        if let Some(err) = &self.last_flycheck_error {
            status.health |= lsp_ext::Health::Warning;
            message.push_str(err);
            message.push('\n');
        }

        if self.config.linked_or_discovered_projects().is_empty()
            && self.config.detached_files().is_empty()
        {
            status.health |= lsp_ext::Health::Warning;
            message.push_str("Failed to discover workspace.\n");
            message.push_str("Consider adding the `Cargo.toml` of the workspace to the [`linkedProjects`](https://rust-analyzer.github.io/book/configuration.html#linkedProjects) setting.\n\n");
        }
        if self.fetch_workspace_error().is_err() {
            status.health |= lsp_ext::Health::Error;
            message.push_str("Failed to load workspaces.");

            if self.config.has_linked_projects() {
                message.push_str(
                    "`rust-analyzer.linkedProjects` have been specified, which may be incorrect. Specified project paths:\n",
                );
                message
                    .push_str(&format!(" {}", self.config.linked_manifests().format("\n ")));
                if self.config.has_linked_project_jsons() {
                    message.push_str("\nAdditionally, one or more project jsons are specified")
                }
            }
            message.push_str("\n\n");
        }

        if !self.workspaces.is_empty() {
            self.check_workspaces_msrv().for_each(|e| {
                status.health |= lsp_ext::Health::Warning;
                format_to!(message, "{e}");
            });

            let proc_macro_clients = self.proc_macro_clients.iter().chain(iter::repeat(&None));

            for (ws, proc_macro_client) in self.workspaces.iter().zip(proc_macro_clients) {
                if let ProjectWorkspaceKind::Cargo { error: Some(error), .. }
                | ProjectWorkspaceKind::DetachedFile {
                    cargo: Some((_, _, Some(error))), ..
                } = &ws.kind
                {
                    status.health |= lsp_ext::Health::Warning;
                    format_to!(
                        message,
                        "Failed to read Cargo metadata with dependencies for `{}`: {:#}\n\n",
                        ws.manifest_or_root(),
                        error
                    );
                }
                if let Some(err) = ws.sysroot.error() {
                    status.health |= lsp_ext::Health::Warning;
                    format_to!(
                        message,
                        "Workspace `{}` has sysroot errors: ",
                        ws.manifest_or_root()
                    );
                    message.push_str(err);
                    message.push_str("\n\n");
                }
                if let Some(err) = ws.sysroot.metadata_error() {
                    status.health |= lsp_ext::Health::Warning;
                    format_to!(
                        message,
                        "Failed to read Cargo metadata with dependencies for sysroot of `{}`: ",
                        ws.manifest_or_root()
                    );
                    message.push_str(err);
                    message.push_str("\n\n");
                }
                if let ProjectWorkspaceKind::Cargo { rustc: Err(Some(err)), .. } = &ws.kind {
                    status.health |= lsp_ext::Health::Warning;
                    format_to!(
                        message,
                        "Failed loading rustc_private crates for workspace `{}`: ",
                        ws.manifest_or_root()
                    );
                    message.push_str(err);
                    message.push_str("\n\n");
                };
                match proc_macro_client {
                    Some(Err(err)) => {
                        status.health |= lsp_ext::Health::Warning;
                        format_to!(
                            message,
                            "Failed spawning proc-macro server for workspace `{}`: {err}",
                            ws.manifest_or_root()
                        );
                        message.push_str("\n\n");
                    }
                    Some(Ok(client)) => {
                        if let Some(err) = client.exited() {
                            status.health |= lsp_ext::Health::Warning;
                            format_to!(
                                message,
                                "proc-macro server for workspace `{}` exited: {err}",
                                ws.manifest_or_root()
                            );
                            message.push_str("\n\n");
                        }
                    }
                    None => {}
                }
            }
        }

        if !message.is_empty() {
            status.message = Some(message.trim_end().to_owned());
        }

        status
    }

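    /// Kicks off a background task that loads all linked or discovered projects (and
    /// detached files), deduplicates equivalent workspaces, and reports the result back to
    /// the main loop as `Task::FetchWorkspace` messages.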
    pub(crate) fn fetch_workspaces(
        &mut self,
        cause: Cause,
        path: Option<AbsPathBuf>,
        force_crate_graph_reload: bool,
    ) {
        info!(%cause, "will fetch workspaces");

        self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, {
            let linked_projects = self.config.linked_or_discovered_projects();
            let detached_files: Vec<_> = self
                .config
                .detached_files()
                .iter()
                .cloned()
                .map(ManifestPath::try_from)
                .filter_map(Result::ok)
                .collect();
            let cargo_config = self.config.cargo(None);
            let discover_command = self.config.discover_workspace_config().cloned();
            let is_quiescent = !(self.discover_jobs_active > 0
                || self.vfs_progress_config_version < self.vfs_config_version
                || !self.vfs_done);

            move |sender| {
                let progress = {
                    let sender = sender.clone();
                    move |msg| {
                        sender
                            .send(Task::FetchWorkspace(ProjectWorkspaceProgress::Report(msg)))
                            .unwrap()
                    }
                };

                sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap();

                if let (Some(_command), Some(path)) = (&discover_command, &path) {
                    let build = linked_projects.iter().find_map(|project| match project {
                        LinkedProject::InlineProjectJson(it) => it.crate_by_buildfile(path),
                        _ => None,
                    });

                    if let Some(build) = build
                        && is_quiescent
                    {
                        let path = AbsPathBuf::try_from(build.build_file)
                            .expect("Unable to convert to an AbsPath");
                        let arg = DiscoverProjectParam::Buildfile(path);
                        sender.send(Task::DiscoverLinkedProjects(arg)).unwrap();
                    }
                }

                let mut workspaces: Vec<_> = linked_projects
                    .iter()
                    .map(|project| match project {
                        LinkedProject::ProjectManifest(manifest) => {
                            debug!(path = %manifest, "loading project from manifest");

                            project_model::ProjectWorkspace::load(
                                manifest.clone(),
                                &cargo_config,
                                &progress,
                            )
                        }
                        LinkedProject::InlineProjectJson(it) => {
                            let workspace = project_model::ProjectWorkspace::load_inline(
                                it.clone(),
                                &cargo_config,
                                &progress,
                            );
                            Ok(workspace)
                        }
                    })
                    .collect();

                let mut i = 0;
                while i < workspaces.len() {
                    if let Ok(w) = &workspaces[i] {
                        let dupes: Vec<_> = workspaces[i + 1..]
                            .iter()
                            .positions(|it| it.as_ref().is_ok_and(|ws| ws.eq_ignore_build_data(w)))
                            .collect();
                        dupes.into_iter().rev().for_each(|d| {
                            _ = workspaces.remove(d + i + 1);
                        });
                    }
                    i += 1;
                }

                if !detached_files.is_empty() {
                    workspaces.extend(project_model::ProjectWorkspace::load_detached_files(
                        detached_files,
                        &cargo_config,
                    ));
                }

                info!(?workspaces, "did fetch workspaces");
                sender
                    .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(
                        workspaces,
                        force_crate_graph_reload,
                    )))
                    .unwrap();
            }
        });
    }

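    /// Runs all build scripts for the current workspaces on the task pool, reporting
    /// progress and results as `Task::FetchBuildData` messages.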
    pub(crate) fn fetch_build_data(&mut self, cause: Cause) {
        info!(%cause, "will fetch build data");
        let workspaces = Arc::clone(&self.workspaces);
        let config = self.config.cargo(None);
        let root_path = self.config.root_path().clone();

        self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| {
            sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();

            let progress = {
                let sender = sender.clone();
                move |msg| {
                    sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
                }
            };
            let res = ProjectWorkspace::run_all_build_scripts(
                &workspaces,
                &config,
                &progress,
                &root_path,
            );

            sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
        });
    }

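    /// Loads proc-macros for the given paths via the spawned proc-macro servers and sends
    /// the completed `change` back as a `Task::LoadProcMacros(ProcMacroProgress::End)` message.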
    pub(crate) fn fetch_proc_macros(
        &mut self,
        cause: Cause,
        mut change: ChangeWithProcMacros,
        paths: Vec<ProcMacroPaths>,
    ) {
        info!(%cause, "will load proc macros");
        let ignored_proc_macros = self.config.ignored_proc_macros(None).clone();
        let proc_macro_clients = self.proc_macro_clients.clone();

        self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| {
            sender.send(Task::LoadProcMacros(ProcMacroProgress::Begin)).unwrap();

            let ignored_proc_macros = &ignored_proc_macros;
            let progress = {
                let sender = sender.clone();
                &move |msg| {
                    sender.send(Task::LoadProcMacros(ProcMacroProgress::Report(msg))).unwrap()
                }
            };

            let mut builder = ProcMacrosBuilder::default();
            let proc_macro_clients = proc_macro_clients.iter().chain(iter::repeat(&None));
            for (client, paths) in proc_macro_clients.zip(paths) {
                for (crate_id, res) in paths.iter() {
                    let expansion_res = match client {
                        Some(Ok(client)) => match res {
                            Ok((crate_name, path)) => {
                                progress(format!("loading proc-macros: {path}"));
                                let ignored_proc_macros = ignored_proc_macros
                                    .iter()
                                    .find_map(|(name, macros)| {
                                        eq_ignore_underscore(name, crate_name).then_some(&**macros)
                                    })
                                    .unwrap_or_default();

                                load_proc_macro(client, path, ignored_proc_macros)
                            }
                            Err(e) => Err(e.clone()),
                        },
                        Some(Err(e)) => Err(ProcMacroLoadingError::ProcMacroSrvError(
                            e.to_string().into_boxed_str(),
                        )),
                        None => Err(ProcMacroLoadingError::ProcMacroSrvError(
                            "proc-macro-srv is not running".into(),
                        )),
                    };
                    builder.insert(*crate_id, expansion_res)
                }
            }

            change.set_proc_macros(builder);
            sender.send(Task::LoadProcMacros(ProcMacroProgress::End(change))).unwrap();
        });
    }

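    /// Applies the result of the latest workspace fetch: updates `self.workspaces`,
    /// (re)registers file watchers, spawns proc-macro servers where needed, and reconfigures
    /// the VFS before recreating the crate graph.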
    pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
        let _p = tracing::info_span!("GlobalState::switch_workspaces").entered();
        tracing::info!(%cause, "will switch workspaces");

        let Some(FetchWorkspaceResponse { workspaces, force_crate_graph_reload }) =
            self.fetch_workspaces_queue.last_op_result()
        else {
            return;
        };
        let switching_from_empty_workspace = self.workspaces.is_empty();

        info!(%cause, ?force_crate_graph_reload, %switching_from_empty_workspace);
        if self.fetch_workspace_error().is_err() && !switching_from_empty_workspace {
            if *force_crate_graph_reload {
                self.recreate_crate_graph(cause, false);
            }
            return;
        }

        let workspaces =
            workspaces.iter().filter_map(|res| res.as_ref().ok().cloned()).collect::<Vec<_>>();

        let same_workspaces = workspaces.len() == self.workspaces.len()
            && workspaces
                .iter()
                .zip(self.workspaces.iter())
                .all(|(l, r)| l.eq_ignore_build_data(r));

        if same_workspaces {
            if switching_from_empty_workspace {
                return;
            }
            if let Some(FetchBuildDataResponse { workspaces, build_scripts }) =
                self.fetch_build_data_queue.last_op_result()
            {
                if Arc::ptr_eq(workspaces, &self.workspaces) {
                    info!("set build scripts to workspaces");

                    let workspaces = workspaces
                        .iter()
                        .cloned()
                        .zip(build_scripts)
                        .map(|(mut ws, bs)| {
                            ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
                            ws
                        })
                        .collect::<Vec<_>>();
                    info!("same workspace, but new build data");
                    self.workspaces = Arc::new(workspaces);
                } else {
                    info!("build scripts do not match the version of the active workspace");
                    if *force_crate_graph_reload {
                        self.recreate_crate_graph(cause, switching_from_empty_workspace);
                    }

                    return;
                }
            } else {
                if *force_crate_graph_reload {
                    self.recreate_crate_graph(cause, switching_from_empty_workspace);
                }

                return;
            }
        } else {
            info!("abandon build scripts for workspaces");

            self.workspaces = Arc::new(workspaces);
            self.check_workspaces_msrv().for_each(|message| {
                self.send_notification::<lsp_types::notification::ShowMessage>(
                    lsp_types::ShowMessageParams { typ: lsp_types::MessageType::WARNING, message },
                );
            });

            if self.config.run_build_scripts(None) {
                self.build_deps_changed = false;
                self.fetch_build_data_queue.request_op("workspace updated".to_owned(), ());

                if !switching_from_empty_workspace {
                    return;
                }
            }
        }

        if let FilesWatcher::Client = self.config.files().watcher {
            let filter = self
                .workspaces
                .iter()
                .flat_map(|ws| ws.to_roots())
                .filter(|it| it.is_local)
                .map(|it| it.include);

            let mut watchers: Vec<FileSystemWatcher> =
                if self.config.did_change_watched_files_relative_pattern_support() {
                    filter
                        .flat_map(|include| {
                            include.into_iter().flat_map(|base| {
                                [
                                    (base.clone(), "**/*.rs"),
                                    (base.clone(), "**/Cargo.{lock,toml}"),
                                    (base, "**/rust-analyzer.toml"),
                                ]
                            })
                        })
                        .map(|(base, pat)| lsp_types::FileSystemWatcher {
                            glob_pattern: lsp_types::GlobPattern::Relative(
                                lsp_types::RelativePattern {
                                    base_uri: lsp_types::OneOf::Right(
                                        lsp_types::Url::from_file_path(base).unwrap(),
                                    ),
                                    pattern: pat.to_owned(),
                                },
                            ),
                            kind: None,
                        })
                        .collect()
                } else {
                    filter
                        .flat_map(|include| {
                            include.into_iter().flat_map(|base| {
                                [
                                    format!("{base}/**/*.rs"),
                                    format!("{base}/**/Cargo.{{toml,lock}}"),
                                    format!("{base}/**/rust-analyzer.toml"),
                                ]
                            })
                        })
                        .map(|glob_pattern| lsp_types::FileSystemWatcher {
                            glob_pattern: lsp_types::GlobPattern::String(glob_pattern),
                            kind: None,
                        })
                        .collect()
                };

            for ws in self.workspaces.iter() {
                if let ProjectWorkspaceKind::Json(project_json) = &ws.kind {
                    for (_, krate) in project_json.crates() {
                        let Some(build) = &krate.build else {
                            continue;
                        };
                        watchers.push(lsp_types::FileSystemWatcher {
                            glob_pattern: lsp_types::GlobPattern::String(
                                build.build_file.to_string(),
                            ),
                            kind: None,
                        });
                    }
                }
            }

            watchers.extend(
                iter::once(Config::user_config_dir_path().as_deref())
                    .chain(self.workspaces.iter().map(|ws| ws.manifest().map(ManifestPath::as_ref)))
                    .flatten()
                    .map(|glob_pattern| lsp_types::FileSystemWatcher {
                        glob_pattern: lsp_types::GlobPattern::String(glob_pattern.to_string()),
                        kind: None,
                    }),
            );

            let registration_options =
                lsp_types::DidChangeWatchedFilesRegistrationOptions { watchers };
            let registration = lsp_types::Registration {
                id: "workspace/didChangeWatchedFiles".to_owned(),
                method: "workspace/didChangeWatchedFiles".to_owned(),
                register_options: Some(serde_json::to_value(registration_options).unwrap()),
            };
            self.send_request::<lsp_types::request::RegisterCapability>(
                lsp_types::RegistrationParams { registrations: vec![registration] },
                |_, _| (),
            );
        }

        let files_config = self.config.files();
        let project_folders = ProjectFolders::new(
            &self.workspaces,
            &files_config.exclude,
            Config::user_config_dir_path().as_deref(),
        );

        if (self.proc_macro_clients.len() < self.workspaces.len() || !same_workspaces)
            && self.config.expand_proc_macros()
        {
            info!("Spawning proc-macro servers");

            self.proc_macro_clients = Arc::from_iter(self.workspaces.iter().map(|ws| {
                let path = match self.config.proc_macro_srv() {
                    Some(path) => path,
                    None => match ws.find_sysroot_proc_macro_srv()? {
                        Ok(path) => path,
                        Err(e) => return Some(Err(e)),
                    },
                };

                let env: FxHashMap<_, _> = match &ws.kind {
                    ProjectWorkspaceKind::Cargo { cargo, .. }
                    | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, ..)), .. } => cargo
                        .env()
                        .into_iter()
                        .map(|(k, v)| (k.clone(), Some(v.clone())))
                        .chain(
                            self.config.extra_env(None).iter().map(|(k, v)| (k.clone(), v.clone())),
                        )
                        .chain(
                            ws.sysroot
                                .root()
                                .filter(|_| {
                                    !self.config.extra_env(None).contains_key("RUSTUP_TOOLCHAIN")
                                        && std::env::var_os("RUSTUP_TOOLCHAIN").is_none()
                                })
                                .map(|it| ("RUSTUP_TOOLCHAIN".to_owned(), Some(it.to_string()))),
                        )
                        .collect(),

                    _ => Default::default(),
                };
                info!("Using proc-macro server at {path}");

                Some(ProcMacroClient::spawn(&path, &env, ws.toolchain.as_ref()).map_err(|err| {
                    tracing::error!(
                        "Failed to run proc-macro server from path {path}, error: {err:?}",
                    );
                    anyhow::format_err!(
                        "Failed to run proc-macro server from path {path}, error: {err:?}",
                    )
                }))
            }))
        }

        let watch = match files_config.watcher {
            FilesWatcher::Client => vec![],
            FilesWatcher::Server => project_folders.watch,
        };
        self.vfs_config_version += 1;
        self.loader.handle.set_config(vfs::loader::Config {
            load: project_folders.load,
            watch,
            version: self.vfs_config_version,
        });
        self.source_root_config = project_folders.source_root_config;
        self.local_roots_parent_map = Arc::new(self.source_root_config.source_root_parent_map());

        info!(?cause, "recreating the crate graph");
        self.recreate_crate_graph(cause, switching_from_empty_workspace);

        info!("did switch workspaces");
    }

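    /// Rebuilds the crate graph from the current workspaces. On the initial build (or when
    /// proc-macro expansion is disabled) the change is applied immediately; otherwise
    /// proc-macro loading is queued first.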
    fn recreate_crate_graph(&mut self, cause: String, initial_build: bool) {
        info!(?cause, "Building Crate Graph");
        self.report_progress(
            "Building CrateGraph",
            crate::lsp::utils::Progress::Begin,
            None,
            None,
            None,
        );

        self.crate_graph_file_dependencies.clear();
        self.detached_files = self
            .workspaces
            .iter()
            .filter_map(|ws| match &ws.kind {
                ProjectWorkspaceKind::DetachedFile { file, .. } => Some(file.clone()),
                _ => None,
            })
            .collect();

        self.incomplete_crate_graph = false;
        let (crate_graph, proc_macro_paths) = {
            let vfs = &self.vfs.read().0;
            let load = |path: &AbsPath| {
                let vfs_path = vfs::VfsPath::from(path.to_path_buf());
                self.crate_graph_file_dependencies.insert(vfs_path.clone());
                let file_id = vfs.file_id(&vfs_path);
                self.incomplete_crate_graph |= file_id.is_none();
                file_id.and_then(|(file_id, excluded)| {
                    (excluded == vfs::FileExcluded::No).then_some(file_id)
                })
            };

            ws_to_crate_graph(&self.workspaces, self.config.extra_env(None), load)
        };
        let mut change = ChangeWithProcMacros::default();
        if initial_build || !self.config.expand_proc_macros() {
            if self.config.expand_proc_macros() {
                change.set_proc_macros(
                    crate_graph
                        .iter()
                        .map(|id| (id, Err(ProcMacroLoadingError::NotYetBuilt)))
                        .collect(),
                );
            } else {
                change.set_proc_macros(
                    crate_graph
                        .iter()
                        .map(|id| (id, Err(ProcMacroLoadingError::Disabled)))
                        .collect(),
                );
            }

            change.set_crate_graph(crate_graph);
            self.analysis_host.apply_change(change);

            self.finish_loading_crate_graph();
        } else {
            change.set_crate_graph(crate_graph);
            self.fetch_proc_macros_queue.request_op(cause, (change, proc_macro_paths));
        }

        self.report_progress(
            "Building CrateGraph",
            crate::lsp::utils::Progress::End,
            None,
            None,
            None,
        );
    }

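    /// Called once the crate graph change has been applied; processes pending changes and
    /// restarts flycheck.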
    pub(crate) fn finish_loading_crate_graph(&mut self) {
        self.process_changes();
        self.reload_flycheck();
    }

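    /// Summarizes errors from the last workspace fetch, or `Ok(())` if there were none.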
    pub(super) fn fetch_workspace_error(&self) -> Result<(), String> {
        let mut buf = String::new();

        let Some(FetchWorkspaceResponse { workspaces, .. }) =
            self.fetch_workspaces_queue.last_op_result()
        else {
            return Ok(());
        };

        if workspaces.is_empty() && self.config.discover_workspace_config().is_none() {
            stdx::format_to!(buf, "rust-analyzer failed to fetch workspace");
        } else {
            for ws in workspaces {
                if let Err(err) = ws {
                    stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
                }
            }
        }

        if buf.is_empty() {
            return Ok(());
        }

        Err(buf)
    }

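    /// Summarizes errors from the last build-script run, or `Ok(())` if there were none.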
    pub(super) fn fetch_build_data_error(&self) -> Result<(), String> {
        let mut buf = String::new();

        let Some(FetchBuildDataResponse { build_scripts, .. }) =
            &self.fetch_build_data_queue.last_op_result()
        else {
            return Ok(());
        };

        for script in build_scripts {
            match script {
                Ok(data) => {
                    if let Some(stderr) = data.error() {
                        stdx::format_to!(buf, "{:#}\n", stderr)
                    }
                }
                Err(err) => stdx::format_to!(buf, "{:#}\n", err),
            }
        }

        if buf.is_empty() { Ok(()) } else { Err(buf) }
    }

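    /// Restarts flycheck with the current configuration, spawning either a single handle or
    /// one per workspace depending on the invocation strategy.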
    fn reload_flycheck(&mut self) {
        let _p = tracing::info_span!("GlobalState::reload_flycheck").entered();
        let config = self.config.flycheck(None);
        let sender = &self.flycheck_sender;
        let invocation_strategy = config.invocation_strategy();
        let next_gen =
            self.flycheck.iter().map(FlycheckHandle::generation).max().unwrap_or_default() + 1;

        self.flycheck = match invocation_strategy {
            crate::flycheck::InvocationStrategy::Once => {
                vec![FlycheckHandle::spawn(
                    0,
                    next_gen,
                    sender.clone(),
                    config,
                    None,
                    self.config.root_path().clone(),
                    None,
                    None,
                )]
            }
            crate::flycheck::InvocationStrategy::PerWorkspace => {
                self.workspaces
                    .iter()
                    .enumerate()
                    .filter_map(|(id, ws)| {
                        Some((
                            id,
                            match &ws.kind {
                                ProjectWorkspaceKind::Cargo { cargo, .. }
                                | ProjectWorkspaceKind::DetachedFile {
                                    cargo: Some((cargo, _, _)),
                                    ..
                                } => (
                                    cargo.workspace_root(),
                                    Some(cargo.manifest_path()),
                                    Some(cargo.target_directory()),
                                ),
                                ProjectWorkspaceKind::Json(project) => {
                                    match config {
                                        FlycheckConfig::CustomCommand { .. } => {
                                            (project.path(), None, None)
                                        }
                                        _ => return None,
                                    }
                                }
                                ProjectWorkspaceKind::DetachedFile { .. } => return None,
                            },
                            ws.sysroot.root().map(ToOwned::to_owned),
                        ))
                    })
                    .map(|(id, (root, manifest_path, target_dir), sysroot_root)| {
                        FlycheckHandle::spawn(
                            id,
                            next_gen,
                            sender.clone(),
                            config.clone(),
                            sysroot_root,
                            root.to_path_buf(),
                            manifest_path.map(|it| it.to_path_buf()),
                            target_dir.map(|it| AsRef::<Utf8Path>::as_ref(it).to_path_buf()),
                        )
                    })
                    .collect()
            }
        }
        .into();
    }
}

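/// Lowers the given workspaces into a single crate graph plus the per-workspace proc-macro
/// paths needed to load their proc-macros.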
pub fn ws_to_crate_graph(
    workspaces: &[ProjectWorkspace],
    extra_env: &FxHashMap<String, Option<String>>,
    mut load: impl FnMut(&AbsPath) -> Option<vfs::FileId>,
) -> (CrateGraphBuilder, Vec<ProcMacroPaths>) {
    let mut crate_graph = CrateGraphBuilder::default();
    let mut proc_macro_paths = Vec::default();
    for ws in workspaces {
        let (other, mut crate_proc_macros) = ws.to_crate_graph(&mut load, extra_env);

        crate_graph.extend(other, &mut crate_proc_macros);
        proc_macro_paths.push(crate_proc_macros);
    }

    crate_graph.shrink_to_fit();
    proc_macro_paths.shrink_to_fit();
    (crate_graph, proc_macro_paths)
}

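/// Heuristic for whether a file change may affect the project structure and thus warrants
/// refetching the workspace (e.g. `Cargo.toml`, `Cargo.lock`, cargo config files, or
/// added/removed implicit target files).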
pub(crate) fn should_refresh_for_change(
    path: &AbsPath,
    change_kind: ChangeKind,
    additional_paths: &[&str],
) -> bool {
    const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
    const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];

    let file_name = match path.file_name() {
        Some(it) => it,
        None => return false,
    };

    if let "Cargo.toml" | "Cargo.lock" = file_name {
        return true;
    }

    if additional_paths.contains(&file_name) {
        return true;
    }

    if change_kind == ChangeKind::Modify {
        return false;
    }

    if path.extension().unwrap_or_default() != "rs" {
        let is_cargo_config = matches!(file_name, "config.toml" | "config")
            && path.parent().map(|parent| parent.as_str().ends_with(".cargo")).unwrap_or(false);
        return is_cargo_config;
    }

    if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_str().ends_with(it)) {
        return true;
    }
    let parent = match path.parent() {
        Some(it) => it,
        None => return false,
    };
    if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_str().ends_with(it)) {
        return true;
    }
    if file_name == "main.rs" {
        let grand_parent = match parent.parent() {
            Some(it) => it,
            None => return false,
        };
        if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_str().ends_with(it)) {
            return true;
        }
    }
    false
}

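/// Compares two (crate) names for equality, treating `-` and `_` as interchangeable.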
fn eq_ignore_underscore(s1: &str, s2: &str) -> bool {
    if s1.len() != s2.len() {
        return false;
    }

    s1.as_bytes().iter().zip(s2.as_bytes()).all(|(c1, c2)| {
        let c1_underscore = c1 == &b'_' || c1 == &b'-';
        let c2_underscore = c2 == &b'_' || c2 == &b'-';

        c1 == c2 || (c1_underscore && c2_underscore)
    })
}