xtask/tidy.rs

#![allow(clippy::disallowed_types, clippy::print_stderr)]
use std::{
    collections::HashSet,
    path::{Path, PathBuf},
};

use itertools::Itertools;
use xshell::{Shell, cmd};

use crate::{flags::Tidy, project_root, util::list_files};

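// Runs all tidy checks; each check panics on failure instead of returning an
// error.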
impl Tidy {
    pub(crate) fn run(&self, sh: &Shell) -> anyhow::Result<()> {
        check_lsp_extensions_docs(sh);
        files_are_tidy(sh);
        check_licenses(sh);
        Ok(())
    }
}

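// Verifies that docs/book/src/contributing/lsp-extensions.md was updated
// whenever crates/rust-analyzer/src/lsp/ext.rs changed, by comparing the hash
// recorded in the document against a freshly computed one.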
fn check_lsp_extensions_docs(sh: &Shell) {
    let expected_hash = {
        let lsp_ext_rs =
            sh.read_file(project_root().join("crates/rust-analyzer/src/lsp/ext.rs")).unwrap();
        stable_hash(lsp_ext_rs.as_str())
    };

    let actual_hash = {
        let lsp_extensions_md = sh
            .read_file(project_root().join("docs/book/src/contributing/lsp-extensions.md"))
            .unwrap();
        let text = lsp_extensions_md
            .lines()
            .find_map(|line| line.strip_prefix("lsp/ext.rs hash:"))
            .unwrap()
            .trim();
        u64::from_str_radix(text, 16).unwrap()
    };

    if actual_hash != expected_hash {
        panic!(
            "
lsp/ext.rs was changed without touching lsp-extensions.md.

Expected hash: {expected_hash:x}
Actual hash:   {actual_hash:x}

Please adjust docs/book/src/contributing/lsp-extensions.md.
"
        )
    }
}

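// Walks every file under crates/ and runs the per-extension checks: Rust
// sources get the test-attr, trailing-whitespace, doc, and coverage-mark
// checks; TOML files get the Cargo.toml checks.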
fn files_are_tidy(sh: &Shell) {
    let files = list_files(&project_root().join("crates"));

    let mut tidy_docs = TidyDocs::default();
    let mut tidy_marks = TidyMarks::default();
    for path in files {
        let extension = path.extension().unwrap_or_default().to_str().unwrap_or_default();
        match extension {
            "rs" => {
                let text = sh.read_file(&path).unwrap();
                check_test_attrs(&path, &text);
                check_trailing_ws(&path, &text);
                tidy_docs.visit(&path, &text);
                tidy_marks.visit(&path, &text);
            }
            "toml" => {
                let text = sh.read_file(&path).unwrap();
                check_cargo_toml(&path, text);
            }
            _ => (),
        }
    }

    tidy_docs.finish();
    tidy_marks.finish();
}

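// Lints Cargo.toml files: `[section]` lines must not carry comments or
// trailing text, internal (path) dependencies must specify a `version`, and
// internal dev-dependencies must not.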
fn check_cargo_toml(path: &Path, text: String) {
    let mut section = None;
    for (line_no, text) in text.lines().enumerate() {
        let text = text.trim();
        if text.starts_with('[') {
            if !text.ends_with(']') {
                panic!(
                    "\nplease don't add comments or trailing whitespace in section lines.\n\
                        {}:{}\n",
                    path.display(),
                    line_no + 1
                )
            }
            section = Some(text);
            continue;
        }
        let text: String = text.split_whitespace().collect();
        if !text.contains("path=") {
            continue;
        }
        match section {
            Some(s) if s.contains("dev-dependencies") => {
                if text.contains("version") {
                    panic!(
                        "\ncargo internal dev-dependencies should not have a version.\n\
                        {}:{}\n",
                        path.display(),
                        line_no + 1
                    );
                }
            }
            Some(s) if s.contains("dependencies") => {
                if !text.contains("version") {
                    panic!(
                        "\ncargo internal dependencies should have a version.\n\
                        {}:{}\n",
                        path.display(),
                        line_no + 1
                    );
                }
            }
            _ => {}
        }
    }
}

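// Checks that the set of licenses reported by `cargo metadata` for the
// dependency graph exactly matches the allow-list below.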
fn check_licenses(sh: &Shell) {
    const EXPECTED: &[&str] = &[
        "(MIT OR Apache-2.0) AND Unicode-3.0",
        "0BSD OR MIT OR Apache-2.0",
        "Apache-2.0 / MIT",
        "Apache-2.0 OR BSL-1.0",
        "Apache-2.0 OR MIT",
        "Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT",
        "Apache-2.0 WITH LLVM-exception",
        "Apache-2.0",
        "Apache-2.0/MIT",
        "BSD-2-Clause OR Apache-2.0 OR MIT",
        "CC0-1.0",
        "ISC",
        "MIT / Apache-2.0",
        "MIT OR Apache-2.0 OR LGPL-2.1-or-later",
        "MIT OR Apache-2.0",
        "MIT OR Zlib OR Apache-2.0",
        "MIT",
        "MIT/Apache-2.0",
        "MPL-2.0",
        "Unicode-3.0",
        "Unlicense OR MIT",
        "Unlicense/MIT",
        "Zlib",
    ];

    let meta = cmd!(sh, "cargo metadata --format-version 1").read().unwrap();
    let mut licenses = meta
        .split([',', '{', '}'])
        .filter(|it| it.contains(r#""license""#))
        .map(|it| it.trim())
        .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
        .collect::<Vec<_>>();
    licenses.sort_unstable();
    licenses.dedup();
    let mut expected = EXPECTED.to_vec();
    expected.sort_unstable();
    if licenses != expected {
        let mut diff = String::new();

        diff.push_str("New Licenses:\n");
        for &l in licenses.iter() {
            if !expected.contains(&l) {
                diff += &format!("  {l}\n")
            }
        }

        diff.push_str("\nMissing Licenses:\n");
        for l in expected {
            if !licenses.contains(&l) {
                diff += &format!("  {l}\n")
            }
        }

        panic!("different set of licenses!\n{diff}");
    }
    assert_eq!(licenses, expected);
}

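// Forbids `#[should_panic]` tests (see the style guide link below), except in
// the files listed in `need_panic` or where the preceding line contains FIXME.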
fn check_test_attrs(path: &Path, text: &str) {
    let panic_rule = "https://github.com/rust-lang/rust-analyzer/blob/master/docs/book/src/contributing/style.md#should_panic";
    let need_panic: &[&str] = &[
        // This file.
        "slow-tests/tidy.rs",
        "test-utils/src/fixture.rs",
        // Generated code from lints contains doc tests in string literals.
        "ide-db/src/generated/lints.rs",
        "proc-macro-srv/src/tests/mod.rs",
    ];
    if need_panic.iter().any(|p| path.ends_with(p)) {
        return;
    }
    if let Some((line, _)) = text
        .lines()
        .tuple_windows()
        .enumerate()
        .find(|(_, (a, b))| b.contains("#[should_panic") && !a.contains("FIXME"))
    {
        panic!(
            "\ndon't add `#[should_panic]` tests, see:\n\n    {}\n\n   {}:{line}\n",
            panic_rule,
            path.display(),
        )
    }
}

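// Rejects trailing whitespace on any line, skipping files under a `test_data`
// directory.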
fn check_trailing_ws(path: &Path, text: &str) {
    if is_exclude_dir(path, &["test_data"]) {
        return;
    }
    for (line_number, line) in text.lines().enumerate() {
        if line.chars().last().is_some_and(char::is_whitespace) {
            panic!("Trailing whitespace in {} at line {}", path.display(), line_number + 1)
        }
    }
}

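// Collects modules that are missing a `//!` module-level doc comment, as well
// as modules whose doc comment still contains a FIXME.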
#[derive(Default)]
struct TidyDocs {
    missing_docs: Vec<String>,
    contains_fixme: Vec<PathBuf>,
}

impl TidyDocs {
    fn visit(&mut self, path: &Path, text: &str) {
        // Tests and diagnostic fixes don't need module level comments.
        if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar", "stdx"]) {
            return;
        }

        if is_exclude_file(path) {
            return;
        }

        if is_ported_from_rustc(path, &["crates/hir-ty/src/next_solver"]) {
            return;
        }

        let first_line = match text.lines().next() {
            Some(it) => it,
            None => return,
        };

        if first_line.starts_with("//!") {
            if first_line.contains("FIXME") {
                self.contains_fixme.push(path.to_path_buf());
            }
        } else {
            if text.contains("// Feature:")
                || text.contains("// Assist:")
                || text.contains("// Diagnostic:")
            {
                return;
            }
            self.missing_docs.push(path.display().to_string());
        }

        fn is_exclude_file(d: &Path) -> bool {
            let file_names = ["tests.rs", "famous_defs_fixture.rs", "frontmatter.rs"];

            d.file_name()
                .unwrap_or_default()
                .to_str()
                .map(|f_n| file_names.contains(&f_n))
                .unwrap_or(false)
        }
    }

    fn finish(self) {
        if !self.missing_docs.is_empty() {
            panic!(
                "\nMissing docs strings\n\n\
                 modules:\n{}\n\n",
                self.missing_docs.join("\n")
            )
        }

        if let Some(path) = self.contains_fixme.first() {
            panic!("FIXME doc in a fully-documented crate: {}", path.display())
        }
    }
}

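// Returns true if any directory component of `p` (relative to the project
// root, excluding the file name itself) matches one of `dirs_to_exclude`.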
fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
    p.strip_prefix(project_root())
        .unwrap()
        .components()
        .rev()
        .skip(1)
        .filter_map(|it| it.as_os_str().to_str())
        .any(|it| dirs_to_exclude.contains(&it))
}

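// Returns true if `p` lives under one of the given project-relative
// directories, which contain code ported from rustc and are exempt from the
// module-doc check.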
fn is_ported_from_rustc(p: &Path, dirs_to_exclude: &[&str]) -> bool {
    let p = p.strip_prefix(project_root()).unwrap();
    dirs_to_exclude.iter().any(|exclude| p.starts_with(exclude))
}

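// Collects coverage-mark names from `hit!(...)`, `check!(...)`, and
// `check_count!(...)` invocations and verifies that every hit has a matching
// check and vice versa.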
#[derive(Default)]
struct TidyMarks {
    hits: HashSet<String>,
    checks: HashSet<String>,
}

impl TidyMarks {
    fn visit(&mut self, _path: &Path, text: &str) {
        find_marks(&mut self.hits, text, "hit");
        find_marks(&mut self.checks, text, "check");
        find_marks(&mut self.checks, text, "check_count");
    }

    fn finish(self) {
        assert!(!self.hits.is_empty());

        let diff: Vec<_> =
            self.hits.symmetric_difference(&self.checks).map(|it| it.as_str()).collect();

        if !diff.is_empty() {
            panic!("unpaired marks: {diff:?}")
        }
    }
}

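// Deterministic fingerprint for lsp/ext.rs: carriage returns are stripped so
// CRLF and LF checkouts hash identically, and the deprecated `SipHasher` is
// presumably used because its output is stable, unlike `DefaultHasher`, whose
// algorithm may change between Rust releases.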
#[allow(deprecated)]
fn stable_hash(text: &str) -> u64 {
    use std::hash::{Hash, Hasher, SipHasher};

    let text = text.replace('\r', "");
    let mut hasher = SipHasher::default();
    text.hash(&mut hasher);
    hasher.finish()
}

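// Scans `text` for `<mark>!(name)` invocations and inserts each `name` into
// `set`, advancing past every match until the remaining text stops changing.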
fn find_marks(set: &mut HashSet<String>, text: &str, mark: &str) {
    let mut text = text;
    let mut prev_text = "";
    while text != prev_text {
        prev_text = text;
        if let Some(idx) = text.find(mark) {
            text = &text[idx + mark.len()..];
            if let Some(stripped_text) = text.strip_prefix("!(") {
                text = stripped_text.trim_start();
                if let Some(idx2) = text.find(|c: char| !(c.is_alphanumeric() || c == '_')) {
                    let mark_text = &text[..idx2];
                    set.insert(mark_text.to_owned());
                    text = &text[idx2..];
                }
            }
        }
    }
}

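// Exercised as a regular unit test of this package (e.g. `cargo test -p xtask`).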
#[test]
fn test() {
    Tidy {}.run(&Shell::new().unwrap()).unwrap();
}