// base_db/lib.rs

1//! base_db defines basic database traits. The concrete DB is defined by ide.
2
3pub use salsa;
4pub use salsa_macros;
5
6// FIXME: Rename this crate, base db is non descriptive
7mod change;
8mod input;
9
10use std::{
11    cell::RefCell,
12    hash::BuildHasherDefault,
13    panic,
14    sync::{Once, atomic::AtomicUsize},
15};
16
17pub use crate::{
18    change::FileChange,
19    input::{
20        BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder,
21        CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap,
22        DependencyBuilder, Env, ExtraCrateData, LangCrateOrigin, ProcMacroLoadingError,
23        ProcMacroPaths, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
24        UniqueCrateData,
25    },
26};
27use dashmap::{DashMap, mapref::entry::Entry};
28pub use query_group::{self};
29use rustc_hash::{FxHashSet, FxHasher};
30use salsa::{Durability, Setter};
31pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
32use span::Edition;
33use syntax::{Parse, SyntaxError, ast};
34use triomphe::Arc;
35pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet};
36
37pub type FxIndexSet<T> = indexmap::IndexSet<T, rustc_hash::FxBuildHasher>;
38pub type FxIndexMap<K, V> =
39    indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
40
/// Defines a salsa-interned id type `$id` wrapping a location type `$loc`,
/// plus a compact `Debug` impl printing only the interning index.
#[macro_export]
macro_rules! impl_intern_key {
    ($id:ident, $loc:ident) => {
        #[salsa_macros::interned(no_lifetime, revisions = usize::MAX)]
        #[derive(PartialOrd, Ord)]
        pub struct $id {
            pub loc: $loc,
        }

        // If we derive this salsa prints the values recursively, and this causes us to blow
        // up the output (and potentially the stack), so print only the interning index.
        impl ::std::fmt::Debug for $id {
            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                f.debug_tuple(stringify!($id))
                    .field(&format_args!("{:04x}", self.0.index()))
                    .finish()
            }
        }
    };
}
60
/// Default LRU capacity for the file-text query.
pub const DEFAULT_FILE_TEXT_LRU_CAP: u16 = 16;
/// Default LRU capacity for the `parse` query (matches the `#[salsa::lru(128)]` below).
pub const DEFAULT_PARSE_LRU_CAP: u16 = 128;
// NOTE(review): presumably used by borrow-check queries defined in other crates —
// not referenced in this file.
pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024;
64
/// Side tables mapping VFS ids to their salsa input structs.
///
/// Ensures each `vfs::FileId` / `SourceRootId` maps to exactly one salsa input,
/// which is created on first set and mutated in place afterwards.
#[derive(Debug, Default)]
pub struct Files {
    // FileId -> its `FileText` salsa input.
    files: Arc<DashMap<vfs::FileId, FileText, BuildHasherDefault<FxHasher>>>,
    // SourceRootId -> its `SourceRootInput` salsa input.
    source_roots: Arc<DashMap<SourceRootId, SourceRootInput, BuildHasherDefault<FxHasher>>>,
    // FileId -> salsa input holding the id of the source root containing the file.
    file_source_roots: Arc<DashMap<vfs::FileId, FileSourceRootInput, BuildHasherDefault<FxHasher>>>,
}
71
72impl Files {
73    pub fn file_text(&self, file_id: vfs::FileId) -> FileText {
74        match self.files.get(&file_id) {
75            Some(text) => *text,
76            None => {
77                panic!("Unable to fetch file text for `vfs::FileId`: {file_id:?}; this is a bug")
78            }
79        }
80    }
81
82    pub fn set_file_text(&self, db: &mut dyn SourceDatabase, file_id: vfs::FileId, text: &str) {
83        match self.files.entry(file_id) {
84            Entry::Occupied(mut occupied) => {
85                occupied.get_mut().set_text(db).to(Arc::from(text));
86            }
87            Entry::Vacant(vacant) => {
88                let text = FileText::new(db, Arc::from(text), file_id);
89                vacant.insert(text);
90            }
91        };
92    }
93
94    pub fn set_file_text_with_durability(
95        &self,
96        db: &mut dyn SourceDatabase,
97        file_id: vfs::FileId,
98        text: &str,
99        durability: Durability,
100    ) {
101        match self.files.entry(file_id) {
102            Entry::Occupied(mut occupied) => {
103                occupied.get_mut().set_text(db).with_durability(durability).to(Arc::from(text));
104            }
105            Entry::Vacant(vacant) => {
106                let text =
107                    FileText::builder(Arc::from(text), file_id).durability(durability).new(db);
108                vacant.insert(text);
109            }
110        };
111    }
112
113    /// Source root of the file.
114    pub fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
115        let source_root = match self.source_roots.get(&source_root_id) {
116            Some(source_root) => source_root,
117            None => panic!(
118                "Unable to fetch `SourceRootInput` with `SourceRootId` ({source_root_id:?}); this is a bug"
119            ),
120        };
121
122        *source_root
123    }
124
125    pub fn set_source_root_with_durability(
126        &self,
127        db: &mut dyn SourceDatabase,
128        source_root_id: SourceRootId,
129        source_root: Arc<SourceRoot>,
130        durability: Durability,
131    ) {
132        match self.source_roots.entry(source_root_id) {
133            Entry::Occupied(mut occupied) => {
134                occupied.get_mut().set_source_root(db).with_durability(durability).to(source_root);
135            }
136            Entry::Vacant(vacant) => {
137                let source_root =
138                    SourceRootInput::builder(source_root).durability(durability).new(db);
139                vacant.insert(source_root);
140            }
141        };
142    }
143
144    pub fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput {
145        let file_source_root = match self.file_source_roots.get(&id) {
146            Some(file_source_root) => file_source_root,
147            None => panic!(
148                "Unable to get `FileSourceRootInput` with `vfs::FileId` ({id:?}); this is a bug",
149            ),
150        };
151        *file_source_root
152    }
153
154    pub fn set_file_source_root_with_durability(
155        &self,
156        db: &mut dyn SourceDatabase,
157        id: vfs::FileId,
158        source_root_id: SourceRootId,
159        durability: Durability,
160    ) {
161        match self.file_source_roots.entry(id) {
162            Entry::Occupied(mut occupied) => {
163                occupied
164                    .get_mut()
165                    .set_source_root_id(db)
166                    .with_durability(durability)
167                    .to(source_root_id);
168            }
169            Entry::Vacant(vacant) => {
170                let file_source_root =
171                    FileSourceRootInput::builder(source_root_id).durability(durability).new(db);
172                vacant.insert(file_source_root);
173            }
174        };
175    }
176}
177
/// A [`span::EditionedFileId`] interned into salsa: a file id paired with the
/// edition it should be parsed with.
#[salsa_macros::interned(no_lifetime, debug, constructor=from_span, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct EditionedFileId {
    pub editioned_file_id: span::EditionedFileId,
}
183
184impl EditionedFileId {
185    // Salsa already uses the name `new`...
186    #[inline]
187    pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition) -> Self {
188        EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition))
189    }
190
191    #[inline]
192    pub fn current_edition(db: &dyn salsa::Database, file_id: FileId) -> Self {
193        EditionedFileId::new(db, file_id, Edition::CURRENT)
194    }
195
196    #[inline]
197    pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
198        let id = self.editioned_file_id(db);
199        id.file_id()
200    }
201
202    #[inline]
203    pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
204        let id = self.editioned_file_id(db);
205        (id.file_id(), id.edition())
206    }
207
208    #[inline]
209    pub fn edition(self, db: &dyn SourceDatabase) -> Edition {
210        self.editioned_file_id(db).edition()
211    }
212}
213
/// Salsa input holding the text contents of a single file.
#[salsa_macros::input(debug)]
pub struct FileText {
    /// The file's text contents.
    #[returns(ref)]
    pub text: Arc<str>,
    /// The file this text belongs to.
    pub file_id: vfs::FileId,
}
220
/// Salsa input: the id of the source root a file belongs to.
#[salsa_macros::input(debug)]
pub struct FileSourceRootInput {
    pub source_root_id: SourceRootId,
}
225
/// Salsa input wrapping a [`SourceRoot`].
#[salsa_macros::input(debug)]
pub struct SourceRootInput {
    pub source_root: Arc<SourceRoot>,
}
230
/// Database which stores all significant input facts: source code and project
/// model. Everything else in rust-analyzer is derived from these queries.
#[query_group::query_group]
pub trait RootQueryDb: SourceDatabase + salsa::Database {
    /// Parses the file into the syntax tree.
    // NOTE(review): the LRU capacity duplicates `DEFAULT_PARSE_LRU_CAP` (128);
    // keep the two in sync.
    #[salsa::invoke(parse)]
    #[salsa::lru(128)]
    fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;

    /// Returns the set of errors obtained from parsing the file including validation errors.
    /// `None` means the file parsed without errors.
    #[salsa::transparent]
    fn parse_errors(&self, file_id: EditionedFileId) -> Option<&[SyntaxError]>;

    /// The release channel of the toolchain that builds `krate`, if known.
    #[salsa::transparent]
    fn toolchain_channel(&self, krate: Crate) -> Option<ReleaseChannel>;

    /// Crates whose root file is in `id`.
    #[salsa::invoke_interned(source_root_crates)]
    fn source_root_crates(&self, id: SourceRootId) -> Arc<[Crate]>;

    /// Crates that could contain `file_id`: all crates rooted in its source root.
    #[salsa::transparent]
    fn relevant_crates(&self, file_id: FileId) -> Arc<[Crate]>;

    /// Returns the crates in topological order.
    ///
    /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
    #[salsa::input]
    fn all_crates(&self) -> Arc<Box<[Crate]>>;

    /// Returns an iterator over all transitive dependencies of the given crate,
    /// including the crate itself.
    ///
    /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
    #[salsa::transparent]
    fn transitive_deps(&self, crate_id: Crate) -> FxHashSet<Crate>;

    /// Returns all transitive reverse dependencies of the given crate,
    /// including the crate itself.
    ///
    /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
    #[salsa::invoke(input::transitive_rev_deps)]
    #[salsa::transparent]
    fn transitive_rev_deps(&self, of: Crate) -> FxHashSet<Crate>;
}
275
276pub fn transitive_deps(db: &dyn SourceDatabase, crate_id: Crate) -> FxHashSet<Crate> {
277    // There is a bit of duplication here and in `CrateGraphBuilder` in the same method, but it's not terrible
278    // and removing that is a bit difficult.
279    let mut worklist = vec![crate_id];
280    let mut deps = FxHashSet::default();
281
282    while let Some(krate) = worklist.pop() {
283        if !deps.insert(krate) {
284            continue;
285        }
286
287        worklist.extend(krate.data(db).dependencies.iter().map(|dep| dep.crate_id));
288    }
289
290    deps
291}
292
#[salsa_macros::db]
pub trait SourceDatabase: salsa::Database {
    /// Text of the file.
    fn file_text(&self, file_id: vfs::FileId) -> FileText;

    /// Sets the text of `file_id`, creating the input on first use.
    fn set_file_text(&mut self, file_id: vfs::FileId, text: &str);

    /// Like [`SourceDatabase::set_file_text`], but with an explicit salsa durability.
    fn set_file_text_with_durability(
        &mut self,
        file_id: vfs::FileId,
        text: &str,
        durability: Durability,
    );

    /// Contents of the source root.
    fn source_root(&self, id: SourceRootId) -> SourceRootInput;

    /// The source root the file belongs to.
    fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput;

    /// Assigns file `id` to source root `source_root_id` with the given durability.
    fn set_file_source_root_with_durability(
        &mut self,
        id: vfs::FileId,
        source_root_id: SourceRootId,
        durability: Durability,
    );

    /// Source root of the file.
    fn set_source_root_with_durability(
        &mut self,
        source_root_id: SourceRootId,
        source_root: Arc<SourceRoot>,
        durability: Durability,
    );

    /// Resolves `path` relative to the source root of its anchor file.
    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
        // FIXME: this *somehow* should be platform agnostic...
        let source_root = self.file_source_root(path.anchor);
        let source_root = self.source_root(source_root.source_root_id(self));
        source_root.source_root(self).resolve_path(path)
    }

    #[doc(hidden)]
    fn crates_map(&self) -> Arc<CratesMap>;

    /// A process-unique [`Nonce`] for this database paired with the current
    /// salsa revision.
    // NOTE(review): presumably used to detect database replacement/mutation
    // between uses — confirm against callers.
    fn nonce_and_revision(&self) -> (Nonce, salsa::Revision);
}
339
/// Monotonically increasing counter backing [`Nonce`] allocation.
static NEXT_NONCE: AtomicUsize = AtomicUsize::new(0);

/// A process-unique token, handed out in allocation order.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Nonce(usize);

impl Default for Nonce {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl Nonce {
    /// Allocates a fresh nonce, distinct from every previously allocated one.
    #[inline]
    pub fn new() -> Nonce {
        let next = NEXT_NONCE.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
        Nonce(next)
    }
}
358
/// Crate related data shared by the whole workspace.
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct CrateWorkspaceData {
    // FIXME: Consider removing this, making HirDatabase::target_data_layout an input query
    /// Result of loading the target's data layout (type declared in `input`).
    pub data_layout: TargetLayoutLoadResult,
    /// Toolchain version used to compile the crate.
    pub toolchain: Option<Version>,
}
367
368impl CrateWorkspaceData {
369    pub fn is_atleast_187(&self) -> bool {
370        const VERSION_187: Version = Version {
371            major: 1,
372            minor: 87,
373            patch: 0,
374            pre: Prerelease::EMPTY,
375            build: BuildMetadata::EMPTY,
376        };
377        self.toolchain.as_ref().map_or(false, |v| *v >= VERSION_187)
378    }
379}
380
381fn toolchain_channel(db: &dyn RootQueryDb, krate: Crate) -> Option<ReleaseChannel> {
382    krate.workspace_data(db).toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
383}
384
385fn parse(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Parse<ast::SourceFile> {
386    let _p = tracing::info_span!("parse", ?file_id).entered();
387    let (file_id, edition) = file_id.unpack(db.as_dyn_database());
388    let text = db.file_text(file_id).text(db);
389    ast::SourceFile::parse(text, edition)
390}
391
/// Returns the parse (and validation) errors of `file_id`, or `None` if there are none.
fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<&[SyntaxError]> {
    // The inner tracked function memoizes the boxed error list so the outer
    // transparent query can hand out a reference tied to the database.
    #[salsa_macros::tracked(returns(ref))]
    fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<Box<[SyntaxError]>> {
        let errors = db.parse(file_id).errors();
        match &*errors {
            [] => None,
            [..] => Some(errors.into()),
        }
    }
    parse_errors(db, file_id).as_ref().map(|it| &**it)
}
403
404fn source_root_crates(db: &dyn RootQueryDb, id: SourceRootId) -> Arc<[Crate]> {
405    let crates = db.all_crates();
406    crates
407        .iter()
408        .copied()
409        .filter(|&krate| {
410            let root_file = krate.data(db).root_file_id;
411            db.file_source_root(root_file).source_root_id(db) == id
412        })
413        .collect()
414}
415
416fn relevant_crates(db: &dyn RootQueryDb, file_id: FileId) -> Arc<[Crate]> {
417    let _p = tracing::info_span!("relevant_crates").entered();
418
419    let source_root = db.file_source_root(file_id);
420    db.source_root_crates(source_root.source_root_id(db))
421}
422
/// RAII guard returned by [`DbPanicContext::enter`]; while alive, its frame
/// string is printed as extra context if a panic occurs on this thread.
#[must_use]
#[non_exhaustive]
pub struct DbPanicContext;
426
427impl Drop for DbPanicContext {
428    fn drop(&mut self) {
429        Self::with_ctx(|ctx| assert!(ctx.pop().is_some()));
430    }
431}
432
impl DbPanicContext {
    /// Pushes `frame` onto a thread-local stack of context strings that are
    /// printed (together with a salsa backtrace, if one can be captured) when
    /// a panic occurs. The frame is popped when the returned guard is dropped.
    pub fn enter(frame: String) -> DbPanicContext {
        #[expect(clippy::print_stderr, reason = "already panicking anyway")]
        fn set_hook() {
            // Chain onto the previous hook so the default panic output is kept.
            let default_hook = panic::take_hook();
            panic::set_hook(Box::new(move |panic_info| {
                default_hook(panic_info);
                if let Some(backtrace) = salsa::Backtrace::capture() {
                    eprintln!("{backtrace:#}");
                }
                DbPanicContext::with_ctx(|ctx| {
                    if !ctx.is_empty() {
                        eprintln!("additional context:");
                        for (idx, frame) in ctx.iter().enumerate() {
                            eprintln!("{idx:>4}: {frame}\n");
                        }
                    }
                });
            }));
        }

        // Install the hook at most once per process.
        static SET_HOOK: Once = Once::new();
        SET_HOOK.call_once(set_hook);

        Self::with_ctx(|ctx| ctx.push(frame));
        DbPanicContext
    }

    /// Runs `f` with mutable access to this thread's context-frame stack.
    fn with_ctx(f: impl FnOnce(&mut Vec<String>)) {
        thread_local! {
            static CTX: RefCell<Vec<String>> = const { RefCell::new(Vec::new()) };
        }
        CTX.with(|ctx| f(&mut ctx.borrow_mut()));
    }
}
467}