mirror of https://github.com/denoland/deno.git
synced 2024-11-24 15:19:26 -05:00

perf(fmt/lint): incremental formatting and linting (#14314)

parent 803499886b
commit ae479b1036

11 changed files with 645 additions and 49 deletions
Cargo.lock (generated, 12 lines changed)
@@ -795,6 +795,7 @@ dependencies = [
  "tower-lsp",
  "trust-dns-client",
  "trust-dns-server",
+ "twox-hash",
  "typed-arena",
  "uuid",
  "walkdir",
@@ -4723,6 +4724,17 @@ dependencies = [
  "webpki",
 ]
 
+[[package]]
+name = "twox-hash"
+version = "1.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ee73e6e4924fe940354b8d4d98cad5231175d615cd855b758adc658c0aac6a0"
+dependencies = [
+ "cfg-if",
+ "rand",
+ "static_assertions",
+]
+
 [[package]]
 name = "typed-arena"
 version = "2.0.1"
cli/Cargo.toml

@@ -95,6 +95,7 @@ text_lines = "=0.4.1"
 tokio = { version = "=1.17", features = ["full"] }
 tokio-util = "=0.7.0"
 tower-lsp = "=0.16.0"
+twox-hash = "=1.6.2"
 typed-arena = "2.0.1"
 uuid = { version = "=0.8.2", features = ["v4", "serde"] }
 walkdir = "=2.3.2"
cli/config_file.rs

@@ -480,7 +480,7 @@ pub struct LintConfig {
   pub files: FilesConfig,
 }
 
-#[derive(Clone, Copy, Debug, Deserialize)]
+#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
 #[serde(deny_unknown_fields, rename_all = "camelCase")]
 pub enum ProseWrap {
   Always,
@@ -488,7 +488,7 @@ pub enum ProseWrap {
   Preserve,
 }
 
-#[derive(Clone, Debug, Default, Deserialize)]
+#[derive(Clone, Debug, Default, Serialize, Deserialize)]
 #[serde(default, deny_unknown_fields, rename_all = "camelCase")]
 pub struct FmtOptionsConfig {
   pub use_tabs: Option<bool>,
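These Serialize derives are what let the formatter options participate in the incremental cache's state hash: the options are serialized to JSON and the bytes are hashed, so any configuration change invalidates previously cached results. A minimal sketch of that mechanism, assuming a hypothetical trimmed-down FmtOptions struct (the real hashing lives in cli/tools/incremental_cache.rs, added further down):

use serde::Serialize;
use std::hash::Hasher;
use twox_hash::XxHash64;

// Hypothetical, trimmed-down options struct; the real FmtOptionsConfig has more fields.
#[derive(Serialize)]
struct FmtOptions {
  use_tabs: Option<bool>,
  line_width: Option<u32>,
}

// Mirrors `fast_insecure_hash` from cli/tools/incremental_cache.rs below.
fn fast_insecure_hash(bytes: &[u8]) -> u64 {
  let mut hasher = XxHash64::default();
  hasher.write(bytes);
  hasher.finish()
}

fn main() {
  let options = FmtOptions { use_tabs: Some(true), line_width: None };
  // Serialize the options to JSON and hash the bytes; if any option changes,
  // the state hash changes and every file gets reformatted once.
  let state_hash =
    fast_insecure_hash(serde_json::to_string(&options).unwrap().as_bytes());
  println!("state hash: {}", state_hash);
}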
cli/deno_dir.rs

@@ -44,6 +44,18 @@ impl DenoDir {
 
     Ok(deno_dir)
   }
 
+  /// Path for the incremental cache used for formatting.
+  pub fn fmt_incremental_cache_db_file_path(&self) -> PathBuf {
+    // bump this version name to invalidate the entire cache
+    self.root.join("fmt_incremental_cache_v1")
+  }
+
+  /// Path for the incremental cache used for linting.
+  pub fn lint_incremental_cache_db_file_path(&self) -> PathBuf {
+    // bump this version name to invalidate the entire cache
+    self.root.join("lint_incremental_cache_v1")
+  }
 }
 
 /// To avoid the poorly managed dirs crate
cli/lsp/language_server.rs

@@ -1154,7 +1154,7 @@ impl Inner {
       Some(Err(err)) => Err(anyhow!("{}", err)),
       None => {
         // it's not a js/ts file, so attempt to format its contents
-        format_file(&file_path, document.content().as_str(), fmt_options)
+        format_file(&file_path, document.content().as_str(), &fmt_options)
       }
     };
cli/main.rs

@@ -908,7 +908,8 @@ async fn format_command(
     return Ok(0);
   }
 
-  tools::fmt::format(ps.flags.as_ref(), fmt_flags, maybe_fmt_config).await?;
+  tools::fmt::format(ps.flags.as_ref(), fmt_flags, maybe_fmt_config, &ps.dir)
+    .await?;
   Ok(0)
 }
cli/tools/fmt.rs (253 lines changed)
@@ -11,6 +11,7 @@ use crate::colors;
 use crate::config_file::FmtConfig;
 use crate::config_file::FmtOptionsConfig;
 use crate::config_file::ProseWrap;
+use crate::deno_dir::DenoDir;
 use crate::diff::diff;
 use crate::file_watcher;
 use crate::file_watcher::ResolutionResult;
@@ -40,11 +41,14 @@ use std::sync::atomic::AtomicUsize;
 use std::sync::atomic::Ordering;
 use std::sync::Arc;
 
+use super::incremental_cache::IncrementalCache;
+
 /// Format JavaScript/TypeScript files.
 pub async fn format(
   flags: &Flags,
   fmt_flags: FmtFlags,
   maybe_fmt_config: Option<FmtConfig>,
+  deno_dir: &DenoDir,
 ) -> Result<(), AnyError> {
   let FmtFlags {
     files,
@@ -132,11 +136,18 @@ pub async fn format(
     }
   };
   let operation = |(paths, fmt_options): (Vec<PathBuf>, FmtOptionsConfig)| async move {
+    let incremental_cache = Arc::new(IncrementalCache::new(
+      &deno_dir.fmt_incremental_cache_db_file_path(),
+      &fmt_options,
+      &paths,
+    ));
     if check {
-      check_source_files(paths, fmt_options).await?;
+      check_source_files(paths, fmt_options, incremental_cache.clone()).await?;
     } else {
-      format_source_files(paths, fmt_options).await?;
+      format_source_files(paths, fmt_options, incremental_cache.clone())
+        .await?;
     }
+    incremental_cache.wait_completion().await;
     Ok(())
   };
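The closure above exercises the cache's whole lifecycle: build it from a db path, a state value, and the candidate paths; ask is_file_same before doing any work; record results with update_file; and call wait_completion so pending writes flush before the command returns. A minimal in-memory stand-in for that protocol (MemoryIncrementalCache is hypothetical; the real sqlite-backed IncrementalCache is the new file added below):

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
use std::sync::Mutex;

// In-memory stand-in for the sqlite-backed IncrementalCache.
struct MemoryIncrementalCache {
  hashes: Mutex<HashMap<PathBuf, u64>>,
}

fn hash_text(text: &str) -> u64 {
  let mut hasher = DefaultHasher::new();
  text.hash(&mut hasher);
  hasher.finish()
}

impl MemoryIncrementalCache {
  fn is_file_same(&self, path: &Path, text: &str) -> bool {
    self.hashes.lock().unwrap().get(path) == Some(&hash_text(text))
  }
  fn update_file(&self, path: &Path, text: &str) {
    self.hashes.lock().unwrap().insert(path.to_path_buf(), hash_text(text));
  }
}

fn main() {
  let cache = MemoryIncrementalCache { hashes: Mutex::new(HashMap::new()) };
  let path = PathBuf::from("mod.ts");

  // First run: cache miss, so the file would be formatted, then recorded.
  assert!(!cache.is_file_same(&path, "const a = 1;\n"));
  cache.update_file(&path, "const a = 1;\n");

  // Second run with unchanged contents: skip the formatter entirely.
  assert!(cache.is_file_same(&path, "const a = 1;\n"));
}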
@@ -234,18 +245,18 @@ pub fn format_json(
 pub fn format_file(
   file_path: &Path,
   file_text: &str,
-  fmt_options: FmtOptionsConfig,
+  fmt_options: &FmtOptionsConfig,
 ) -> Result<Option<String>, AnyError> {
   let ext = get_extension(file_path).unwrap_or_default();
   if matches!(
     ext.as_str(),
     "md" | "mkd" | "mkdn" | "mdwn" | "mdown" | "markdown"
   ) {
-    format_markdown(file_text, &fmt_options)
+    format_markdown(file_text, fmt_options)
   } else if matches!(ext.as_str(), "json" | "jsonc") {
-    format_json(file_text, &fmt_options)
+    format_json(file_text, fmt_options)
   } else {
-    let config = get_resolved_typescript_config(&fmt_options);
+    let config = get_resolved_typescript_config(fmt_options);
     dprint_plugin_typescript::format_text(file_path, file_text, &config)
   }
 }
@@ -263,6 +274,7 @@ pub fn format_parsed_source(
 async fn check_source_files(
   paths: Vec<PathBuf>,
   fmt_options: FmtOptionsConfig,
+  incremental_cache: Arc<IncrementalCache>,
 ) -> Result<(), AnyError> {
   let not_formatted_files_count = Arc::new(AtomicUsize::new(0));
   let checked_files_count = Arc::new(AtomicUsize::new(0));
@@ -277,7 +289,12 @@ async fn check_source_files(
       checked_files_count.fetch_add(1, Ordering::Relaxed);
       let file_text = read_file_contents(&file_path)?.text;
 
-      match format_file(&file_path, &file_text, fmt_options.clone()) {
+      // skip checking the file if we know it's formatted
+      if incremental_cache.is_file_same(&file_path, &file_text) {
+        return Ok(());
+      }
+
+      match format_file(&file_path, &file_text, &fmt_options) {
         Ok(Some(formatted_text)) => {
           not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
           let _g = output_lock.lock();
@@ -286,7 +303,14 @@ async fn check_source_files(
           info!("{} {}:", colors::bold("from"), file_path.display());
           info!("{}", diff);
         }
-        Ok(None) => {}
+        Ok(None) => {
+          // When checking formatting, only update the incremental cache when
+          // the file is the same since we don't bother checking for stable
+          // formatting here. Additionally, ensure this is done during check
+          // so that CIs that cache the DENO_DIR will get the benefit of
+          // incremental formatting
+          incremental_cache.update_file(&file_path, &file_text);
+        }
         Err(e) => {
           let _g = output_lock.lock();
           eprintln!("Error checking: {}", file_path.to_string_lossy());
@@ -318,6 +342,7 @@ async fn check_source_files(
 async fn format_source_files(
   paths: Vec<PathBuf>,
   fmt_options: FmtOptionsConfig,
+  incremental_cache: Arc<IncrementalCache>,
 ) -> Result<(), AnyError> {
   let formatted_files_count = Arc::new(AtomicUsize::new(0));
   let checked_files_count = Arc::new(AtomicUsize::new(0));
@@ -330,8 +355,19 @@ async fn format_source_files(
       checked_files_count.fetch_add(1, Ordering::Relaxed);
       let file_contents = read_file_contents(&file_path)?;
 
-      match format_file(&file_path, &file_contents.text, fmt_options.clone()) {
+      // skip formatting the file if we know it's formatted
+      if incremental_cache.is_file_same(&file_path, &file_contents.text) {
+        return Ok(());
+      }
+
+      match format_ensure_stable(
+        &file_path,
+        &file_contents.text,
+        &fmt_options,
+        format_file,
+      ) {
         Ok(Some(formatted_text)) => {
+          incremental_cache.update_file(&file_path, &formatted_text);
           write_file_contents(
             &file_path,
             FileContents {
@@ -343,7 +379,9 @@ async fn format_source_files(
           let _g = output_lock.lock();
           info!("{}", file_path.to_string_lossy());
         }
-        Ok(None) => {}
+        Ok(None) => {
+          incremental_cache.update_file(&file_path, &file_contents.text);
+        }
         Err(e) => {
           let _g = output_lock.lock();
           eprintln!("Error formatting: {}", file_path.to_string_lossy());
@@ -372,6 +410,66 @@ async fn format_source_files(
   Ok(())
 }
 
+/// When storing any formatted text in the incremental cache, we want
+/// to ensure that anything stored when formatted will have itself as
+/// the output as well. This is to prevent "double format" issues where
+/// a user formats their code locally and it fails on the CI afterwards.
+fn format_ensure_stable(
+  file_path: &Path,
+  file_text: &str,
+  fmt_options: &FmtOptionsConfig,
+  fmt_func: impl Fn(
+    &Path,
+    &str,
+    &FmtOptionsConfig,
+  ) -> Result<Option<String>, AnyError>,
+) -> Result<Option<String>, AnyError> {
+  let formatted_text = fmt_func(file_path, file_text, fmt_options)?;
+
+  match formatted_text {
+    Some(mut current_text) => {
+      let mut count = 0;
+      loop {
+        match fmt_func(file_path, &current_text, fmt_options) {
+          Ok(Some(next_pass_text)) => {
+            // just in case
+            if next_pass_text == current_text {
+              return Ok(Some(next_pass_text));
+            }
+            current_text = next_pass_text;
+          }
+          Ok(None) => {
+            return Ok(Some(current_text));
+          }
+          Err(err) => {
+            panic!(
+              concat!(
+                "Formatting succeeded initially, but failed when ensuring a ",
+                "stable format. This indicates a bug in the formatter where ",
+                "the text it produces is not syntactically correct. As a temporary ",
+                "workaround you can ignore this file.\n\n{:#}"
+              ),
+              err,
+            )
+          }
+        }
+        count += 1;
+        if count == 5 {
+          panic!(
+            concat!(
+              "Formatting not stable. Bailed after {} tries. This indicates a bug ",
+              "in the formatter where it formats the file differently each time. As a ",
+              "temporary workaround you can ignore this file."
+            ),
+            count
+          )
+        }
+      }
+    }
+    None => Ok(None),
+  }
+}
+
 /// Format stdin and write result to stdout.
 /// Treats input as TypeScript or as set by `--ext` flag.
 /// Compatible with `--check` flag.
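format_ensure_stable treats formatting as a fixed-point computation: feed the formatter its own output until two passes agree, and bail after five attempts rather than loop forever on an unstable formatter. A standalone toy showing the same loop with a deliberately simple whitespace-collapsing "formatter" (all names here are illustrative, not from this PR):

// Toy "formatter": collapse runs of spaces; returns None when already formatted.
fn collapse_spaces(text: &str) -> Option<String> {
  let formatted = text.split_whitespace().collect::<Vec<_>>().join(" ");
  if formatted == text { None } else { Some(formatted) }
}

// Re-format until the output reaches a fixed point, bailing out after 5 tries,
// mirroring the loop in format_ensure_stable above.
fn ensure_stable(text: &str) -> String {
  let mut current = text.to_string();
  for _ in 0..5 {
    match collapse_spaces(&current) {
      Some(next) if next == current => return next,
      Some(next) => current = next,
      None => return current,
    }
  }
  panic!("formatter is not stable");
}

fn main() {
  // One pass changes the text, the second confirms it is stable.
  assert_eq!(ensure_stable("let  a =  1;"), "let a = 1;");
}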
@@ -386,7 +484,7 @@ pub fn format_stdin(
   let file_path = PathBuf::from(format!("_stdin.{}", fmt_flags.ext));
   let fmt_options = resolve_fmt_options(&fmt_flags, fmt_options);
 
-  let formatted_text = format_file(&file_path, &source, fmt_options)?;
+  let formatted_text = format_file(&file_path, &source, &fmt_options)?;
   if fmt_flags.check {
     if formatted_text.is_some() {
       println!("Not formatted stdin");
@@ -628,37 +726,106 @@ fn is_contain_git(path: &Path) -> bool {
   path.components().any(|c| c.as_os_str() == ".git")
 }
 
-#[test]
-fn test_is_supported_ext_fmt() {
-  assert!(!is_supported_ext_fmt(Path::new("tests/subdir/redirects")));
-  assert!(is_supported_ext_fmt(Path::new("README.md")));
-  assert!(is_supported_ext_fmt(Path::new("readme.MD")));
-  assert!(is_supported_ext_fmt(Path::new("readme.mkd")));
-  assert!(is_supported_ext_fmt(Path::new("readme.mkdn")));
-  assert!(is_supported_ext_fmt(Path::new("readme.mdwn")));
-  assert!(is_supported_ext_fmt(Path::new("readme.mdown")));
-  assert!(is_supported_ext_fmt(Path::new("readme.markdown")));
-  assert!(is_supported_ext_fmt(Path::new("lib/typescript.d.ts")));
-  assert!(is_supported_ext_fmt(Path::new("testdata/001_hello.js")));
-  assert!(is_supported_ext_fmt(Path::new("testdata/002_hello.ts")));
-  assert!(is_supported_ext_fmt(Path::new("foo.jsx")));
-  assert!(is_supported_ext_fmt(Path::new("foo.tsx")));
-  assert!(is_supported_ext_fmt(Path::new("foo.TS")));
-  assert!(is_supported_ext_fmt(Path::new("foo.TSX")));
-  assert!(is_supported_ext_fmt(Path::new("foo.JS")));
-  assert!(is_supported_ext_fmt(Path::new("foo.JSX")));
-  assert!(is_supported_ext_fmt(Path::new("foo.mjs")));
-  assert!(!is_supported_ext_fmt(Path::new("foo.mjsx")));
-  assert!(is_supported_ext_fmt(Path::new("foo.jsonc")));
-  assert!(is_supported_ext_fmt(Path::new("foo.JSONC")));
-  assert!(is_supported_ext_fmt(Path::new("foo.json")));
-  assert!(is_supported_ext_fmt(Path::new("foo.JsON")));
-}
-
-#[test]
-fn test_is_located_in_git() {
-  assert!(is_contain_git(Path::new("test/.git")));
-  assert!(is_contain_git(Path::new(".git/bad.json")));
-  assert!(is_contain_git(Path::new("test/.git/bad.json")));
-  assert!(!is_contain_git(Path::new("test/bad.git/bad.json")));
+#[cfg(test)]
+mod test {
+  use super::*;
+
+  #[test]
+  fn test_is_supported_ext_fmt() {
+    assert!(!is_supported_ext_fmt(Path::new("tests/subdir/redirects")));
+    assert!(is_supported_ext_fmt(Path::new("README.md")));
+    assert!(is_supported_ext_fmt(Path::new("readme.MD")));
+    assert!(is_supported_ext_fmt(Path::new("readme.mkd")));
+    assert!(is_supported_ext_fmt(Path::new("readme.mkdn")));
+    assert!(is_supported_ext_fmt(Path::new("readme.mdwn")));
+    assert!(is_supported_ext_fmt(Path::new("readme.mdown")));
+    assert!(is_supported_ext_fmt(Path::new("readme.markdown")));
+    assert!(is_supported_ext_fmt(Path::new("lib/typescript.d.ts")));
+    assert!(is_supported_ext_fmt(Path::new("testdata/001_hello.js")));
+    assert!(is_supported_ext_fmt(Path::new("testdata/002_hello.ts")));
+    assert!(is_supported_ext_fmt(Path::new("foo.jsx")));
+    assert!(is_supported_ext_fmt(Path::new("foo.tsx")));
+    assert!(is_supported_ext_fmt(Path::new("foo.TS")));
+    assert!(is_supported_ext_fmt(Path::new("foo.TSX")));
+    assert!(is_supported_ext_fmt(Path::new("foo.JS")));
+    assert!(is_supported_ext_fmt(Path::new("foo.JSX")));
+    assert!(is_supported_ext_fmt(Path::new("foo.mjs")));
+    assert!(!is_supported_ext_fmt(Path::new("foo.mjsx")));
+    assert!(is_supported_ext_fmt(Path::new("foo.jsonc")));
+    assert!(is_supported_ext_fmt(Path::new("foo.JSONC")));
+    assert!(is_supported_ext_fmt(Path::new("foo.json")));
+    assert!(is_supported_ext_fmt(Path::new("foo.JsON")));
+  }
+
+  #[test]
+  fn test_is_located_in_git() {
+    assert!(is_contain_git(Path::new("test/.git")));
+    assert!(is_contain_git(Path::new(".git/bad.json")));
+    assert!(is_contain_git(Path::new("test/.git/bad.json")));
+    assert!(!is_contain_git(Path::new("test/bad.git/bad.json")));
+  }
+
+  #[test]
+  #[should_panic(expected = "Formatting not stable. Bailed after 5 tries.")]
+  fn test_format_ensure_stable_unstable_format() {
+    format_ensure_stable(
+      &PathBuf::from("mod.ts"),
+      "1",
+      &Default::default(),
+      |_, file_text, _| Ok(Some(format!("1{}", file_text))),
+    )
+    .unwrap();
+  }
+
+  #[test]
+  fn test_format_ensure_stable_error_first() {
+    let err = format_ensure_stable(
+      &PathBuf::from("mod.ts"),
+      "1",
+      &Default::default(),
+      |_, _, _| bail!("Error formatting."),
+    )
+    .unwrap_err();
+
+    assert_eq!(err.to_string(), "Error formatting.");
+  }
+
+  #[test]
+  #[should_panic(expected = "Formatting succeeded initially, but failed when")]
+  fn test_format_ensure_stable_error_second() {
+    format_ensure_stable(
+      &PathBuf::from("mod.ts"),
+      "1",
+      &Default::default(),
+      |_, file_text, _| {
+        if file_text == "1" {
+          Ok(Some("11".to_string()))
+        } else {
+          bail!("Error formatting.")
+        }
+      },
+    )
+    .unwrap();
+  }
+
+  #[test]
+  fn test_format_stable_after_two() {
+    let result = format_ensure_stable(
+      &PathBuf::from("mod.ts"),
+      "1",
+      &Default::default(),
+      |_, file_text, _| {
+        if file_text == "1" {
+          Ok(Some("11".to_string()))
+        } else if file_text == "11" {
+          Ok(None)
+        } else {
+          unreachable!();
+        }
+      },
+    )
+    .unwrap();
+
+    assert_eq!(result, Some("11".to_string()));
+  }
 }
cli/tools/incremental_cache.rs (new file, 371 lines)
@@ -0,0 +1,371 @@
+use std::collections::HashMap;
+use std::path::Path;
+use std::path::PathBuf;
+
+use deno_core::error::AnyError;
+use deno_core::parking_lot::Mutex;
+use deno_core::serde_json;
+use deno_runtime::deno_webstorage::rusqlite::params;
+use deno_runtime::deno_webstorage::rusqlite::Connection;
+use serde::Serialize;
+use tokio::task::JoinHandle;
+
+/// Cache used to skip formatting/linting a file again when we
+/// know it is already formatted or has no lint diagnostics.
+pub struct IncrementalCache(Option<IncrementalCacheInner>);
+
+impl IncrementalCache {
+  pub fn new<TState: Serialize>(
+    db_file_path: &Path,
+    state: &TState,
+    initial_file_paths: &[PathBuf],
+  ) -> Self {
+    // if creating the incremental cache fails, then we
+    // treat it as not having a cache
+    let result =
+      IncrementalCacheInner::new(db_file_path, state, initial_file_paths);
+    IncrementalCache(match result {
+      Ok(inner) => Some(inner),
+      Err(err) => {
+        log::debug!("Creating the incremental cache failed.\n{:#}", err);
+        // Maybe the cache file is corrupt. Attempt to remove
+        // the cache file for next time
+        let _ = std::fs::remove_file(db_file_path);
+        None
+      }
+    })
+  }
+
+  pub fn is_file_same(&self, file_path: &Path, file_text: &str) -> bool {
+    if let Some(inner) = &self.0 {
+      inner.is_file_same(file_path, file_text)
+    } else {
+      false
+    }
+  }
+
+  pub fn update_file(&self, file_path: &Path, file_text: &str) {
+    if let Some(inner) = &self.0 {
+      inner.update_file(file_path, file_text)
+    }
+  }
+
+  pub async fn wait_completion(&self) {
+    if let Some(inner) = &self.0 {
+      inner.wait_completion().await;
+    }
+  }
+}
+
+enum ReceiverMessage {
+  Update(PathBuf, u64),
+  Exit,
+}
+
+struct IncrementalCacheInner {
+  previous_hashes: HashMap<PathBuf, u64>,
+  sender: tokio::sync::mpsc::UnboundedSender<ReceiverMessage>,
+  handle: Mutex<Option<JoinHandle<()>>>,
+}
+
+impl IncrementalCacheInner {
+  pub fn new<TState: Serialize>(
+    db_file_path: &Path,
+    state: &TState,
+    initial_file_paths: &[PathBuf],
+  ) -> Result<Self, AnyError> {
+    let state_hash =
+      fast_insecure_hash(serde_json::to_string(state).unwrap().as_bytes());
+    let sql_cache = SqlIncrementalCache::new(db_file_path, state_hash)?;
+    Ok(Self::from_sql_incremental_cache(
+      sql_cache,
+      initial_file_paths,
+    ))
+  }
+
+  fn from_sql_incremental_cache(
+    cache: SqlIncrementalCache,
+    initial_file_paths: &[PathBuf],
+  ) -> Self {
+    let mut previous_hashes = HashMap::new();
+    for path in initial_file_paths {
+      if let Some(hash) = cache.get_source_hash(path) {
+        previous_hashes.insert(path.to_path_buf(), hash);
+      }
+    }
+
+    let (sender, mut receiver) =
+      tokio::sync::mpsc::unbounded_channel::<ReceiverMessage>();
+
+    // sqlite isn't `Sync`, so we do all the updating on a dedicated task
+    let handle = tokio::task::spawn(async move {
+      while let Some(message) = receiver.recv().await {
+        match message {
+          ReceiverMessage::Update(path, hash) => {
+            let _ = cache.set_source_hash(&path, hash);
+          }
+          ReceiverMessage::Exit => break,
+        }
+      }
+    });
+
+    IncrementalCacheInner {
+      previous_hashes,
+      sender,
+      handle: Mutex::new(Some(handle)),
+    }
+  }
+
+  pub fn is_file_same(&self, file_path: &Path, file_text: &str) -> bool {
+    match self.previous_hashes.get(file_path) {
+      Some(hash) => *hash == fast_insecure_hash(file_text.as_bytes()),
+      None => false,
+    }
+  }
+
+  pub fn update_file(&self, file_path: &Path, file_text: &str) {
+    let hash = fast_insecure_hash(file_text.as_bytes());
+    if let Some(previous_hash) = self.previous_hashes.get(file_path) {
+      if *previous_hash == hash {
+        return; // do not bother updating the db file because nothing has changed
+      }
+    }
+    let _ = self
+      .sender
+      .send(ReceiverMessage::Update(file_path.to_path_buf(), hash));
+  }
+
+  pub async fn wait_completion(&self) {
+    if self.sender.send(ReceiverMessage::Exit).is_err() {
+      return;
+    }
+    let handle = self.handle.lock().take();
+    if let Some(handle) = handle {
+      handle.await.unwrap();
+    }
+  }
+}
+
+struct SqlIncrementalCache {
+  conn: Connection,
+  /// A hash of the state used to produce the formatting/linting other than
+  /// the CLI version. This state is a hash of the configuration and ensures
+  /// we format/lint a file when the configuration changes.
+  state_hash: u64,
+}
+
+impl SqlIncrementalCache {
+  pub fn new(db_file_path: &Path, state_hash: u64) -> Result<Self, AnyError> {
+    let conn = Connection::open(db_file_path)?;
+    Self::from_connection(conn, state_hash, crate::version::deno())
+  }
+
+  fn from_connection(
+    conn: Connection,
+    state_hash: u64,
+    cli_version: String,
+  ) -> Result<Self, AnyError> {
+    run_pragma(&conn)?;
+    create_tables(&conn, cli_version)?;
+
+    Ok(Self { conn, state_hash })
+  }
+
+  pub fn get_source_hash(&self, path: &Path) -> Option<u64> {
+    match self.get_source_hash_result(path) {
+      Ok(option) => option,
+      Err(err) => {
+        if cfg!(debug_assertions) {
+          panic!("Error retrieving hash: {}", err);
+        } else {
+          // fail silently when not debugging
+          None
+        }
+      }
+    }
+  }
+
+  fn get_source_hash_result(
+    &self,
+    path: &Path,
+  ) -> Result<Option<u64>, AnyError> {
+    let query = "
+      SELECT
+        source_hash
+      FROM
+        incrementalcache
+      WHERE
+        file_path=?1
+        AND state_hash=?2
+      LIMIT 1";
+    let mut stmt = self.conn.prepare_cached(query)?;
+    let mut rows = stmt
+      .query(params![path.to_string_lossy(), self.state_hash.to_string()])?;
+    if let Some(row) = rows.next()? {
+      let hash: String = row.get(0)?;
+      Ok(Some(hash.parse::<u64>()?))
+    } else {
+      Ok(None)
+    }
+  }
+
+  pub fn set_source_hash(
+    &self,
+    path: &Path,
+    source_hash: u64,
+  ) -> Result<(), AnyError> {
+    let sql = "
+      INSERT OR REPLACE INTO
+        incrementalcache (file_path, state_hash, source_hash)
+      VALUES
+        (?1, ?2, ?3)";
+    let mut stmt = self.conn.prepare_cached(sql)?;
+    stmt.execute(params![
+      path.to_string_lossy(),
+      &self.state_hash.to_string(),
+      &source_hash.to_string(),
+    ])?;
+    Ok(())
+  }
+}
+
+fn run_pragma(conn: &Connection) -> Result<(), AnyError> {
+  // Enable write-ahead-logging and tweak some other stuff
+  let initial_pragmas = "
+    -- enable write-ahead-logging mode
+    PRAGMA journal_mode=WAL;
+    PRAGMA synchronous=NORMAL;
+    PRAGMA temp_store=memory;
+    PRAGMA page_size=4096;
+    PRAGMA mmap_size=6000000;
+    PRAGMA optimize;
+  ";
+
+  conn.execute_batch(initial_pragmas)?;
+  Ok(())
+}
+
+fn create_tables(
+  conn: &Connection,
+  cli_version: String,
+) -> Result<(), AnyError> {
+  // INT doesn't store up to u64, so use TEXT
+  conn.execute(
+    "CREATE TABLE IF NOT EXISTS incrementalcache (
+      file_path TEXT PRIMARY KEY,
+      state_hash TEXT NOT NULL,
+      source_hash TEXT NOT NULL
+    )",
+    [],
+  )?;
+  conn.execute(
+    "CREATE TABLE IF NOT EXISTS info (
+      key TEXT PRIMARY KEY,
+      value TEXT NOT NULL
+    )",
+    [],
+  )?;
+
+  // delete the cache when the CLI version changes
+  let data_cli_version: Option<String> = conn
+    .query_row(
+      "SELECT value FROM info WHERE key='CLI_VERSION' LIMIT 1",
+      [],
+      |row| row.get(0),
+    )
+    .ok();
+  if data_cli_version != Some(cli_version.to_string()) {
+    conn.execute("DELETE FROM incrementalcache", params![])?;
+    let mut stmt = conn
+      .prepare("INSERT OR REPLACE INTO info (key, value) VALUES (?1, ?2)")?;
+    stmt.execute(params!["CLI_VERSION", &cli_version])?;
+  }
+
+  Ok(())
+}
+
+/// Very fast non-cryptographically secure hash.
+fn fast_insecure_hash(bytes: &[u8]) -> u64 {
+  use std::hash::Hasher;
+  use twox_hash::XxHash64;
+
+  let mut hasher = XxHash64::default();
+  hasher.write(bytes);
+  hasher.finish()
+}
+
+#[cfg(test)]
+mod test {
+  use std::path::PathBuf;
+
+  use super::*;
+
+  #[test]
+  pub fn sql_cache_general_use() {
+    let conn = Connection::open_in_memory().unwrap();
+    let cache =
+      SqlIncrementalCache::from_connection(conn, 1, "1.0.0".to_string())
+        .unwrap();
+    let path = PathBuf::from("/mod.ts");
+
+    assert_eq!(cache.get_source_hash(&path), None);
+    cache.set_source_hash(&path, 2).unwrap();
+    assert_eq!(cache.get_source_hash(&path), Some(2));
+
+    // try changing the cli version (should clear)
+    let conn = cache.conn;
+    let mut cache =
+      SqlIncrementalCache::from_connection(conn, 1, "2.0.0".to_string())
+        .unwrap();
+    assert_eq!(cache.get_source_hash(&path), None);
+
+    // add back the file to the cache
+    cache.set_source_hash(&path, 2).unwrap();
+    assert_eq!(cache.get_source_hash(&path), Some(2));
+
+    // try changing the state hash
+    cache.state_hash = 2;
+    assert_eq!(cache.get_source_hash(&path), None);
+    cache.state_hash = 1;
+
+    // should return now that everything is back
+    assert_eq!(cache.get_source_hash(&path), Some(2));
+
+    // recreating the cache should not remove the data because the CLI version and state hash is the same
+    let conn = cache.conn;
+    let cache =
+      SqlIncrementalCache::from_connection(conn, 1, "2.0.0".to_string())
+        .unwrap();
+    assert_eq!(cache.get_source_hash(&path), Some(2));
+
+    // now try replacing and using another path
+    cache.set_source_hash(&path, 3).unwrap();
+    cache.set_source_hash(&path, 4).unwrap();
+    let path2 = PathBuf::from("/mod2.ts");
+    cache.set_source_hash(&path2, 5).unwrap();
+    assert_eq!(cache.get_source_hash(&path), Some(4));
+    assert_eq!(cache.get_source_hash(&path2), Some(5));
+  }
+
+  #[tokio::test]
+  pub async fn incremental_cache_general_use() {
+    let conn = Connection::open_in_memory().unwrap();
+    let sql_cache =
+      SqlIncrementalCache::from_connection(conn, 1, "1.0.0".to_string())
+        .unwrap();
+    let file_path = PathBuf::from("/mod.ts");
+    let file_text = "test";
+    let file_hash = fast_insecure_hash(file_text.as_bytes());
+    sql_cache.set_source_hash(&file_path, file_hash).unwrap();
+    let cache = IncrementalCacheInner::from_sql_incremental_cache(
+      sql_cache,
+      &[file_path.clone()],
+    );
+
+    assert!(cache.is_file_same(&file_path, "test"));
+    assert!(!cache.is_file_same(&file_path, "other"));
+
+    // just ensure this doesn't panic
+    cache.update_file(&file_path, "other");
+  }
+}
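A design note on the module above: rusqlite's Connection is not Sync, so IncrementalCacheInner funnels every write through an unbounded channel to a single dedicated tokio task; update_file never blocks the parallel formatting or linting work, and wait_completion sends Exit and joins the task. A condensed, self-contained sketch of that single-writer pattern, with a plain Vec standing in for the sqlite connection:

use tokio::sync::mpsc;
use tokio::task::JoinHandle;

enum Message {
  Update(String, u64),
  Exit,
}

#[tokio::main]
async fn main() {
  let (sender, mut receiver) = mpsc::unbounded_channel::<Message>();

  // Single-writer task: the only place the (non-Sync) store is touched,
  // mirroring how the spawned task in IncrementalCacheInner owns the
  // sqlite connection.
  let handle: JoinHandle<Vec<(String, u64)>> = tokio::task::spawn(async move {
    let mut store = Vec::new();
    while let Some(message) = receiver.recv().await {
      match message {
        Message::Update(path, hash) => store.push((path, hash)),
        Message::Exit => break,
      }
    }
    store
  });

  // Hot path: sends never block the caller.
  sender.send(Message::Update("mod.ts".to_string(), 123)).unwrap();
  sender.send(Message::Exit).unwrap();

  // Equivalent of wait_completion: join the writer before exiting.
  let store = handle.await.unwrap();
  assert_eq!(store.len(), 1);
}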
cli/tools/lint.rs

@@ -34,6 +34,8 @@ use std::path::PathBuf;
 use std::sync::atomic::{AtomicBool, Ordering};
 use std::sync::{Arc, Mutex};
 
+use super::incremental_cache::IncrementalCache;
+
 static STDIN_FILE_NAME: &str = "_stdin.ts";
 
 #[derive(Clone, Debug)]
@@ -147,6 +149,17 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
   };
 
   let operation = |paths: Vec<PathBuf>| async {
+    let incremental_cache = Arc::new(IncrementalCache::new(
+      &ps.dir.lint_incremental_cache_db_file_path(),
+      // use a hash of the rule names in order to bust the cache
+      &{
+        // ensure this is stable by sorting it
+        let mut names = lint_rules.iter().map(|r| r.code()).collect::<Vec<_>>();
+        names.sort_unstable();
+        names
+      },
+      &paths,
+    ));
     let target_files_len = paths.len();
     let reporter_kind = reporter_kind.clone();
     let reporter_lock = Arc::new(Mutex::new(create_reporter(reporter_kind)));
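Note how the lint cache's state is built: the enabled rule codes are collected and sorted before hashing, because the order in which rules were gathered can vary with configuration, and an unsorted list could yield a different state hash for the same rule set, needlessly busting the cache. A small illustration (the rule codes are sample data):

fn main() {
  // Two runs that enable the same rules but collect them in different orders.
  let mut run_a = vec!["no-unused-vars", "eqeqeq", "no-var"];
  let mut run_b = vec!["no-var", "no-unused-vars", "eqeqeq"];

  // Unsorted, the serialized state differs even though the rule set is equal.
  assert_ne!(format!("{:?}", run_a), format!("{:?}", run_b));

  // Sorting first makes the serialized state, and therefore its hash, stable.
  run_a.sort_unstable();
  run_b.sort_unstable();
  assert_eq!(format!("{:?}", run_a), format!("{:?}", run_b));
}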
@@ -154,8 +167,23 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
       let has_error = has_error.clone();
       let lint_rules = lint_rules.clone();
       let reporter_lock = reporter_lock.clone();
+      let incremental_cache = incremental_cache.clone();
       move |file_path| {
-        let r = lint_file(file_path.clone(), lint_rules.clone());
+        let file_text = fs::read_to_string(&file_path)?;
+
+        // don't bother rechecking this file if it didn't have any diagnostics before
+        if incremental_cache.is_file_same(&file_path, &file_text) {
+          return Ok(());
+        }
+
+        let r = lint_file(file_path.clone(), file_text, lint_rules.clone());
+        if let Ok((file_diagnostics, file_text)) = &r {
+          if file_diagnostics.is_empty() {
+            // update the incremental cache if there were no diagnostics
+            incremental_cache.update_file(&file_path, file_text)
+          }
+        }
+
         handle_lint_result(
           &file_path.to_string_lossy(),
           r,
@@ -167,6 +195,7 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
       }
     })
     .await?;
+    incremental_cache.wait_completion().await;
     reporter_lock.lock().unwrap().close(target_files_len);
 
     Ok(())
@@ -262,10 +291,10 @@ pub fn create_linter(
 
 fn lint_file(
   file_path: PathBuf,
+  source_code: String,
   lint_rules: Vec<Arc<dyn LintRule>>,
 ) -> Result<(Vec<LintDiagnostic>, String), AnyError> {
   let file_name = file_path.to_string_lossy().to_string();
-  let source_code = fs::read_to_string(&file_path)?;
   let media_type = MediaType::from(&file_path);
 
   let linter = create_linter(media_type, lint_rules);
cli/tools/mod.rs

@@ -4,6 +4,7 @@ pub mod bench;
 pub mod coverage;
 pub mod doc;
 pub mod fmt;
+pub mod incremental_cache;
 pub mod installer;
 pub mod lint;
 pub mod repl;
ext/webstorage/lib.rs

@@ -13,6 +13,8 @@ use rusqlite::OptionalExtension;
 use std::fmt;
 use std::path::PathBuf;
 
+pub use rusqlite;
+
 #[derive(Clone)]
 struct OriginStorageDir(PathBuf);