use std::env;
use std::time::Instant;

use ide::{
    Analysis, AnalysisHost, FileId, FileRange, MonikerKind, MonikerResult, PackageInformation,
    RootDatabase, StaticIndex, StaticIndexedFile, TokenId, TokenStaticData,
    VendoredLibrariesConfig,
};
use ide_db::{LineIndexDatabase, line_index::WideEncoding};
use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace};
use lsp_types::lsif;
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use rustc_hash::FxHashMap;
use stdx::format_to;
use vfs::{AbsPathBuf, Vfs};

use crate::{
    cli::flags,
    line_index::{LineEndings, LineIndex, PositionEncoding},
    lsp::to_proto,
    version::version,
};

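/// Emits LSIF elements to `out` as newline-delimited JSON, handing out sequential ids
/// and caching the ids already assigned to tokens, ranges, files, and packages.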
struct LsifManager<'a, 'w> {
    count: i32,
    token_map: FxHashMap<TokenId, Id>,
    range_map: FxHashMap<FileRange, Id>,
    file_map: FxHashMap<FileId, Id>,
    package_map: FxHashMap<PackageInformation, Id>,
    analysis: &'a Analysis,
    db: &'a RootDatabase,
    vfs: &'a Vfs,
    out: &'w mut dyn std::io::Write,
}

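/// Sequential id of an emitted LSIF element; converts into `lsp_types::NumberOrString`.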
#[derive(Clone, Copy)]
struct Id(i32);

impl From<Id> for lsp_types::NumberOrString {
    fn from(Id(it): Id) -> Self {
        lsp_types::NumberOrString::Number(it)
    }
}

impl LsifManager<'_, '_> {
    fn new<'a, 'w>(
        analysis: &'a Analysis,
        db: &'a RootDatabase,
        vfs: &'a Vfs,
        out: &'w mut dyn std::io::Write,
    ) -> LsifManager<'a, 'w> {
        LsifManager {
            count: 0,
            token_map: FxHashMap::default(),
            range_map: FxHashMap::default(),
            file_map: FxHashMap::default(),
            package_map: FxHashMap::default(),
            analysis,
            db,
            vfs,
            out,
        }
    }

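    /// Assigns the next id to `data`, writes it out as a single JSON line, and returns the id.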
    fn add(&mut self, data: lsif::Element) -> Id {
        let id = Id(self.count);
        self.emit(&serde_json::to_string(&lsif::Entry { id: id.into(), data }).unwrap());
        self.count += 1;
        id
    }

    fn add_vertex(&mut self, vertex: lsif::Vertex) -> Id {
        self.add(lsif::Element::Vertex(vertex))
    }

    fn add_edge(&mut self, edge: lsif::Edge) -> Id {
        self.add(lsif::Element::Edge(edge))
    }

    fn emit(&mut self, data: &str) {
        format_to!(self.out, "{data}\n");
    }

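    /// Returns the `ResultSet` vertex for a token, emitting and caching it on first use.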
    fn get_token_id(&mut self, id: TokenId) -> Id {
        if let Some(it) = self.token_map.get(&id) {
            return *it;
        }
        let result_set_id = self.add_vertex(lsif::Vertex::ResultSet(lsif::ResultSet { key: None }));
        self.token_map.insert(id, result_set_id);
        result_set_id
    }

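    /// Returns the `PackageInformation` vertex for a package, emitting and caching it on first use.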
    fn get_package_id(&mut self, package_information: PackageInformation) -> Id {
        if let Some(it) = self.package_map.get(&package_information) {
            return *it;
        }
        let pi = package_information.clone();
        let result_set_id =
            self.add_vertex(lsif::Vertex::PackageInformation(lsif::PackageInformation {
                name: pi.name,
                manager: "cargo".to_owned(),
                uri: None,
                content: None,
                repository: pi.repo.map(|url| lsif::Repository {
                    url,
                    r#type: "git".to_owned(),
                    commit_id: None,
                }),
                version: pi.version,
            }));
        self.package_map.insert(package_information, result_set_id);
        result_set_id
    }

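    /// Returns the `Range` vertex for a file range, emitting it together with the
    /// `contains` edge from its document and caching it on first use.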
    fn get_range_id(&mut self, id: FileRange) -> Id {
        if let Some(it) = self.range_map.get(&id) {
            return *it;
        }
        let file_id = id.file_id;
        let doc_id = self.get_file_id(file_id);
        let line_index = self.db.line_index(file_id);
        let line_index = LineIndex {
            index: line_index,
            encoding: PositionEncoding::Wide(WideEncoding::Utf16),
            endings: LineEndings::Unix,
        };
        let range_id = self.add_vertex(lsif::Vertex::Range {
            range: to_proto::range(&line_index, id.range),
            tag: None,
        });
        self.add_edge(lsif::Edge::Contains(lsif::EdgeDataMultiIn {
            in_vs: vec![range_id.into()],
            out_v: doc_id.into(),
        }));
        range_id
    }

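    /// Returns the `Document` vertex for a file, emitting and caching it on first use.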
    fn get_file_id(&mut self, id: FileId) -> Id {
        if let Some(it) = self.file_map.get(&id) {
            return *it;
        }
        let path = self.vfs.file_path(id);
        let path = path.as_path().unwrap();
        let doc_id = self.add_vertex(lsif::Vertex::Document(lsif::Document {
            language_id: "rust".to_owned(),
            uri: lsp_types::Url::from_file_path(path).unwrap(),
        }));
        self.file_map.insert(id, doc_id);
        doc_id
    }

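    /// Emits the data hanging off a token's `ResultSet`: hover result, moniker and
    /// package information, the definition, and reference items grouped by document
    /// and definition/reference kind.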
    fn add_token(&mut self, id: TokenId, token: TokenStaticData) {
        let result_set_id = self.get_token_id(id);
        if let Some(hover) = token.hover {
            let hover_id = self.add_vertex(lsif::Vertex::HoverResult {
                result: lsp_types::Hover {
                    contents: lsp_types::HoverContents::Markup(to_proto::markup_content(
                        hover.markup,
                        ide::HoverDocFormat::Markdown,
                    )),
                    range: None,
                },
            });
            self.add_edge(lsif::Edge::Hover(lsif::EdgeData {
                in_v: hover_id.into(),
                out_v: result_set_id.into(),
            }));
        }
        if let Some(MonikerResult::Moniker(moniker)) = token.moniker {
            let package_id = self.get_package_id(moniker.package_information);
            let moniker_id = self.add_vertex(lsif::Vertex::Moniker(lsp_types::Moniker {
                scheme: "rust-analyzer".to_owned(),
                identifier: moniker.identifier.to_string(),
                unique: lsp_types::UniquenessLevel::Scheme,
                kind: Some(match moniker.kind {
                    MonikerKind::Import => lsp_types::MonikerKind::Import,
                    MonikerKind::Export => lsp_types::MonikerKind::Export,
                }),
            }));
            self.add_edge(lsif::Edge::PackageInformation(lsif::EdgeData {
                in_v: package_id.into(),
                out_v: moniker_id.into(),
            }));
            self.add_edge(lsif::Edge::Moniker(lsif::EdgeData {
                in_v: moniker_id.into(),
                out_v: result_set_id.into(),
            }));
        }
        if let Some(def) = token.definition {
            let result_id = self.add_vertex(lsif::Vertex::DefinitionResult);
            let def_vertex = self.get_range_id(def);
            self.add_edge(lsif::Edge::Item(lsif::Item {
                document: (*self.file_map.get(&def.file_id).unwrap()).into(),
                property: None,
                edge_data: lsif::EdgeDataMultiIn {
                    in_vs: vec![def_vertex.into()],
                    out_v: result_id.into(),
                },
            }));
            self.add_edge(lsif::Edge::Definition(lsif::EdgeData {
                in_v: result_id.into(),
                out_v: result_set_id.into(),
            }));
        }
        if !token.references.is_empty() {
            let result_id = self.add_vertex(lsif::Vertex::ReferenceResult);
            self.add_edge(lsif::Edge::References(lsif::EdgeData {
                in_v: result_id.into(),
                out_v: result_set_id.into(),
            }));
            let mut edges = token.references.iter().fold(
                FxHashMap::<_, Vec<lsp_types::NumberOrString>>::default(),
                |mut edges, it| {
                    let entry = edges.entry((it.range.file_id, it.is_definition)).or_default();
                    entry.push((*self.range_map.get(&it.range).unwrap()).into());
                    edges
                },
            );
            for it in token.references {
                if let Some(vertices) = edges.remove(&(it.range.file_id, it.is_definition)) {
                    self.add_edge(lsif::Edge::Item(lsif::Item {
                        document: (*self.file_map.get(&it.range.file_id).unwrap()).into(),
                        property: Some(if it.is_definition {
                            lsif::ItemKind::Definitions
                        } else {
                            lsif::ItemKind::References
                        }),
                        edge_data: lsif::EdgeDataMultiIn {
                            in_vs: vertices,
                            out_v: result_id.into(),
                        },
                    }));
                }
            }
        }
    }

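    /// Emits one document's contents: its folding ranges, a `Range` vertex for every
    /// token occurrence wired to the token's `ResultSet` via a `next` edge, and the
    /// `contains` edge collecting those ranges.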
    fn add_file(&mut self, file: StaticIndexedFile) {
        let StaticIndexedFile { file_id, tokens, folds, .. } = file;
        let doc_id = self.get_file_id(file_id);
        let text = self.analysis.file_text(file_id).unwrap();
        let line_index = self.db.line_index(file_id);
        let line_index = LineIndex {
            index: line_index,
            encoding: PositionEncoding::Wide(WideEncoding::Utf16),
            endings: LineEndings::Unix,
        };
        let result = folds
            .into_iter()
            .map(|it| to_proto::folding_range(&text, &line_index, false, it))
            .collect();
        let folding_id = self.add_vertex(lsif::Vertex::FoldingRangeResult { result });
        self.add_edge(lsif::Edge::FoldingRange(lsif::EdgeData {
            in_v: folding_id.into(),
            out_v: doc_id.into(),
        }));
        let tokens_id = tokens
            .into_iter()
            .map(|(range, id)| {
                let range_id = self.add_vertex(lsif::Vertex::Range {
                    range: to_proto::range(&line_index, range),
                    tag: None,
                });
                self.range_map.insert(FileRange { file_id, range }, range_id);
                let result_set_id = self.get_token_id(id);
                self.add_edge(lsif::Edge::Next(lsif::EdgeData {
                    in_v: result_set_id.into(),
                    out_v: range_id.into(),
                }));
                range_id.into()
            })
            .collect();
        self.add_edge(lsif::Edge::Contains(lsif::EdgeDataMultiIn {
            in_vs: tokens_id,
            out_v: doc_id.into(),
        }));
    }
}

impl flags::Lsif {
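    /// Entry point of the `lsif` command: loads the Cargo workspace, computes the
    /// static index, and streams the LSIF dump (metadata, documents, then per-token
    /// data) to `out`.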
    pub fn run(
        self,
        out: &mut dyn std::io::Write,
        sysroot: Option<RustLibSource>,
    ) -> anyhow::Result<()> {
        let now = Instant::now();
        let cargo_config =
            &CargoConfig { sysroot, all_targets: true, set_test: true, ..Default::default() };
        let no_progress = &|_| ();
        let load_cargo_config = LoadCargoConfig {
            load_out_dirs_from_check: true,
            with_proc_macro_server: ProcMacroServerChoice::Sysroot,
            prefill_caches: false,
        };
        let path = AbsPathBuf::assert_utf8(env::current_dir()?.join(self.path));
        let root = ProjectManifest::discover_single(&path)?;
        eprintln!("Generating LSIF for project at {root}");
        let mut workspace = ProjectWorkspace::load(root, cargo_config, no_progress)?;

        let build_scripts = workspace.run_build_scripts(cargo_config, no_progress)?;
        workspace.set_build_scripts(build_scripts);

        let (db, vfs, _proc_macro) =
            load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
        let host = AnalysisHost::with_database(db);
        let db = host.raw_database();
        let analysis = host.analysis();

        let vendored_libs_config = if self.exclude_vendored_libraries {
            VendoredLibrariesConfig::Excluded
        } else {
            VendoredLibrariesConfig::Included { workspace_root: &path.clone().into() }
        };

        let si = StaticIndex::compute(&analysis, vendored_libs_config);

        let mut lsif = LsifManager::new(&analysis, db, &vfs, out);
        lsif.add_vertex(lsif::Vertex::MetaData(lsif::MetaData {
            version: String::from("0.5.0"),
            project_root: lsp_types::Url::from_file_path(path).unwrap(),
            position_encoding: lsif::Encoding::Utf16,
            tool_info: Some(lsp_types::lsif::ToolInfo {
                name: "rust-analyzer".to_owned(),
                args: vec![],
                version: Some(version().to_string()),
            }),
        }));
        for file in si.files {
            lsif.add_file(file);
        }
        for (id, token) in si.tokens.iter() {
            lsif.add_token(id, token);
        }
        eprintln!("Generating LSIF finished in {:?}", now.elapsed());
        Ok(())
    }
}