1
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2024-11-22 15:06:54 -05:00

fix(fmt, lint): Make sure that target paths are not directories (#8375)

This commit merges implementations of "collect_files" and "files_in_subtree",
leaving only the former. Additionally it was ensured that directories are not yielded
from this function.
This commit is contained in:
Yusuke Tanaka 2020-11-14 21:05:26 +09:00 committed by GitHub
parent a59f5eadd8
commit 3a0ebff641
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 141 additions and 89 deletions

View file

@ -9,6 +9,7 @@
use crate::colors; use crate::colors;
use crate::diff::diff; use crate::diff::diff;
use crate::fs::{collect_files, is_supported_ext};
use crate::text_encoding; use crate::text_encoding;
use deno_core::error::generic_error; use deno_core::error::generic_error;
use deno_core::error::AnyError; use deno_core::error::AnyError;
@ -23,7 +24,6 @@ use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use walkdir::WalkDir;
const BOM_CHAR: char = '\u{FEFF}'; const BOM_CHAR: char = '\u{FEFF}';
@ -40,7 +40,7 @@ pub async fn format(
return format_stdin(check); return format_stdin(check);
} }
// collect the files that are to be formatted // collect the files that are to be formatted
let target_files = collect_files(args, exclude)?; let target_files = collect_files(args, exclude, is_supported_ext)?;
let config = get_config(); let config = get_config();
if check { if check {
check_source_files(config, target_files).await check_source_files(config, target_files).await
@ -199,61 +199,6 @@ fn files_str(len: usize) -> &'static str {
} }
} }
/// Returns `true` when the file at `path` carries an extension the
/// formatter handles: ts, tsx, js, jsx or mjs (case-insensitive).
fn is_supported(path: &Path) -> bool {
  match path.extension().and_then(|e| e.to_str()) {
    Some(ext) => matches!(
      ext.to_lowercase().as_str(),
      "ts" | "tsx" | "js" | "jsx" | "mjs"
    ),
    // No extension (or non-UTF-8 extension) is never formatted.
    None => false,
  }
}
/// Recursively collects the formattable files under `files`, or under the
/// current working directory when `files` is empty.
///
/// Subtrees rooted at any path in `ignore` are pruned from the walk.
/// Only regular files whose extension passes `is_supported` are returned —
/// directories are never yielded. Returned paths are canonicalized.
///
/// # Errors
/// Propagates I/O errors from resolving the current directory, walking the
/// tree, or canonicalizing a matched file.
pub fn collect_files(
  files: Vec<PathBuf>,
  ignore: Vec<PathBuf>,
) -> Result<Vec<PathBuf>, std::io::Error> {
  let mut target_files: Vec<PathBuf> = vec![];

  // Canonicalize the ignore list once up front. Paths that do not exist (or
  // otherwise fail to canonicalize) simply drop out of the ignore set; this
  // also removes the panic the old per-entry `.unwrap()` could hit.
  let canonical_ignore: Vec<PathBuf> = ignore
    .iter()
    .filter_map(|i| i.canonicalize().ok())
    .collect();

  // An empty target list means "format everything under the cwd".
  let roots = if files.is_empty() {
    vec![std::env::current_dir()?]
  } else {
    files
  };

  for root in roots {
    for entry in WalkDir::new(root).into_iter().filter_entry(|e| {
      // Prune any subtree rooted at an ignored path; entries that cannot be
      // canonicalized are skipped rather than panicking.
      e.path().canonicalize().map_or(false, |p| {
        !canonical_ignore.iter().any(|i| p.starts_with(i))
      })
    }) {
      let entry = entry?;
      // Yield regular files only — never directories.
      if !entry.file_type().is_dir() && is_supported(entry.path()) {
        target_files.push(entry.into_path().canonicalize()?);
      }
    }
  }

  Ok(target_files)
}
fn get_config() -> dprint::configuration::Configuration { fn get_config() -> dprint::configuration::Configuration {
use dprint::configuration::*; use dprint::configuration::*;
ConfigurationBuilder::new().deno().build() ConfigurationBuilder::new().deno().build()
@ -336,20 +281,3 @@ where
Ok(()) Ok(())
} }
} }
#[test]
fn test_is_supported() {
  // Extensions the formatter accepts, in assorted letter cases.
  let supported = [
    "lib/typescript.d.ts",
    "cli/tests/001_hello.js",
    "cli/tests/002_hello.ts",
    "foo.jsx",
    "foo.tsx",
    "foo.TS",
    "foo.TSX",
    "foo.JS",
    "foo.JSX",
    "foo.mjs",
  ];
  for path in &supported {
    assert!(is_supported(Path::new(path)));
  }

  // No extension, or an extension the formatter does not handle.
  let unsupported = ["tests/subdir/redirects", "README.md", "foo.mjsx"];
  for path in &unsupported {
    assert!(!is_supported(Path::new(path)));
  }
}

147
cli/fs.rs
View file

@ -72,9 +72,67 @@ pub fn resolve_from_cwd(path: &Path) -> Result<PathBuf, AnyError> {
Ok(normalize_path(&resolved_path)) Ok(normalize_path(&resolved_path))
} }
/// Checks if the path has extension Deno supports.
///
/// The check is case-insensitive; a path without an extension (or with a
/// non-UTF-8 one) is not supported.
pub fn is_supported_ext(path: &Path) -> bool {
  path
    .extension()
    .and_then(|e| e.to_str())
    .map(|e| e.to_lowercase())
    .map_or(false, |ext| {
      matches!(ext.as_str(), "ts" | "tsx" | "js" | "jsx" | "mjs")
    })
}
/// Collects file paths that satisfy the given predicate, by recursively
/// walking `files`. If the walker visits a path that is listed in `ignore`,
/// it skips descending into that directory.
///
/// Directories are never yielded; only regular files that pass `predicate`
/// are returned, each canonicalized.
pub fn collect_files<P>(
  files: Vec<PathBuf>,
  ignore: Vec<PathBuf>,
  predicate: P,
) -> Result<Vec<PathBuf>, AnyError>
where
  P: Fn(&Path) -> bool,
{
  // Canonicalize ignore paths once; entries that fail to canonicalize
  // (e.g. nonexistent paths) drop out of the ignore set.
  let ignore: Vec<PathBuf> = ignore
    .into_iter()
    .filter_map(|i| i.canonicalize().ok())
    .collect();

  // An empty target list means "walk the current working directory".
  let roots = if files.is_empty() {
    vec![std::env::current_dir()?]
  } else {
    files
  };

  let mut collected = Vec::new();
  for root in roots {
    let walker = WalkDir::new(root).into_iter().filter_entry(|e| {
      // Prune subtrees rooted at ignored paths; entries that cannot be
      // canonicalized are skipped as well.
      match e.path().canonicalize() {
        Ok(c) => !ignore.iter().any(|i| c.starts_with(i)),
        Err(_) => false,
      }
    });
    for entry in walker {
      // Walk errors are deliberately swallowed, matching the previous
      // behavior; only successfully visited, matching files are kept.
      if let Ok(entry) = entry {
        if !entry.file_type().is_dir() && predicate(entry.path()) {
          collected.push(entry.into_path().canonicalize()?);
        }
      }
    }
  }
  Ok(collected)
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use tempfile::TempDir;
#[test] #[test]
fn resolve_from_cwd_child() { fn resolve_from_cwd_child() {
@ -118,18 +176,83 @@ mod tests {
let expected = Path::new("/a"); let expected = Path::new("/a");
assert_eq!(resolve_from_cwd(expected).unwrap(), expected); assert_eq!(resolve_from_cwd(expected).unwrap(), expected);
} }
}
pub fn files_in_subtree<F>(root: PathBuf, filter: F) -> Vec<PathBuf> #[test]
where fn test_is_supported_ext() {
F: Fn(&Path) -> bool, assert!(!is_supported_ext(Path::new("tests/subdir/redirects")));
{ assert!(!is_supported_ext(Path::new("README.md")));
assert!(root.is_dir()); assert!(is_supported_ext(Path::new("lib/typescript.d.ts")));
assert!(is_supported_ext(Path::new("cli/tests/001_hello.js")));
assert!(is_supported_ext(Path::new("cli/tests/002_hello.ts")));
assert!(is_supported_ext(Path::new("foo.jsx")));
assert!(is_supported_ext(Path::new("foo.tsx")));
assert!(is_supported_ext(Path::new("foo.TS")));
assert!(is_supported_ext(Path::new("foo.TSX")));
assert!(is_supported_ext(Path::new("foo.JS")));
assert!(is_supported_ext(Path::new("foo.JSX")));
assert!(is_supported_ext(Path::new("foo.mjs")));
assert!(!is_supported_ext(Path::new("foo.mjsx")));
}
WalkDir::new(root) #[test]
.into_iter() fn test_collect_files() {
.filter_map(|e| e.ok()) fn create_files(dir_path: &PathBuf, files: &[&str]) {
.map(|e| e.path().to_owned()) std::fs::create_dir(dir_path).expect("Failed to create directory");
.filter(|p| !p.is_dir() && filter(&p)) for f in files {
.collect() let path = dir_path.join(f);
std::fs::write(path, "").expect("Failed to create file");
}
}
// dir.ts
// ├── a.ts
// ├── b.js
// ├── child
// │ ├── e.mjs
// │ ├── f.mjsx
// │ ├── .foo.TS
// │ └── README.md
// ├── c.tsx
// ├── d.jsx
// └── ignore
// ├── g.d.ts
// └── .gitignore
let t = TempDir::new().expect("tempdir fail");
let root_dir_path = t.path().join("dir.ts");
let root_dir_files = ["a.ts", "b.js", "c.tsx", "d.jsx"];
create_files(&root_dir_path, &root_dir_files);
let child_dir_path = root_dir_path.join("child");
let child_dir_files = ["e.mjs", "f.mjsx", ".foo.TS", "README.md"];
create_files(&child_dir_path, &child_dir_files);
let ignore_dir_path = root_dir_path.join("ignore");
let ignore_dir_files = ["g.d.ts", ".gitignore"];
create_files(&ignore_dir_path, &ignore_dir_files);
let result =
collect_files(vec![root_dir_path], vec![ignore_dir_path], |path| {
// exclude dotfiles
path
.file_name()
.and_then(|f| f.to_str())
.map_or(false, |f| !f.starts_with('.'))
})
.unwrap();
let expected = [
"a.ts",
"b.js",
"e.mjs",
"f.mjsx",
"README.md",
"c.tsx",
"d.jsx",
];
for e in expected.iter() {
assert!(result.iter().any(|r| r.ends_with(e)));
}
assert_eq!(result.len(), expected.len());
}
} }

View file

@ -8,9 +8,9 @@
//! the same functions as ops available in JS runtime. //! the same functions as ops available in JS runtime.
use crate::ast; use crate::ast;
use crate::colors; use crate::colors;
use crate::fmt::collect_files;
use crate::fmt::run_parallelized; use crate::fmt::run_parallelized;
use crate::fmt_errors; use crate::fmt_errors;
use crate::fs::{collect_files, is_supported_ext};
use crate::media_type::MediaType; use crate::media_type::MediaType;
use deno_core::error::{generic_error, AnyError, JsStackFrame}; use deno_core::error::{generic_error, AnyError, JsStackFrame};
use deno_core::serde_json; use deno_core::serde_json;
@ -47,7 +47,7 @@ pub async fn lint_files(
if args.len() == 1 && args[0].to_string_lossy() == "-" { if args.len() == 1 && args[0].to_string_lossy() == "-" {
return lint_stdin(json); return lint_stdin(json);
} }
let target_files = collect_files(args, ignore)?; let target_files = collect_files(args, ignore, is_supported_ext)?;
debug!("Found {} files", target_files.len()); debug!("Found {} files", target_files.len());
let target_files_len = target_files.len(); let target_files_len = target_files.len();

View file

@ -44,7 +44,8 @@ pub fn prepare_test_modules_urls(
for path in include_paths { for path in include_paths {
let p = deno_fs::normalize_path(&root_path.join(path)); let p = deno_fs::normalize_path(&root_path.join(path));
if p.is_dir() { if p.is_dir() {
let test_files = crate::fs::files_in_subtree(p, is_supported); let test_files =
crate::fs::collect_files(vec![p], vec![], is_supported).unwrap();
let test_files_as_urls = test_files let test_files_as_urls = test_files
.iter() .iter()
.map(|f| Url::from_file_path(f).unwrap()) .map(|f| Url::from_file_path(f).unwrap())