// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.

//! This module provides file formatting utilities using
//! [`dprint`](https://github.com/dsherret/dprint).
//!
//! At the moment it is only consumed from the CLI, but in
//! the future it could easily be extended to provide the
//! same functions as ops available in the JS runtime.
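//!
//! Typical invocations that reach this module (illustrative; only the cases
//! handled below are shown):
//!
//! ```text
//! deno fmt                  # walk the current directory and format in place
//! deno fmt --check main.ts  # only report files that are not formatted
//! cat main.ts | deno fmt -  # format stdin and write the result to stdout
//! ```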
use crate::colors;
use crate::diff::diff;
use crate::fs::files_in_subtree;
use crate::op_error::OpError;
use crate::text_encoding;
use deno_core::ErrBox;
use dprint_plugin_typescript as dprint;
use std::fs;
use std::io::stdin;
use std::io::stdout;
use std::io::Read;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, Mutex};

// Unicode byte order mark; stripped before formatting and added back on write.
const BOM_CHAR: char = '\u{FEFF}';

/// Format JavaScript/TypeScript files.
///
/// The `args` and `exclude` lists accept file and directory paths; if `args`
/// is empty, the current directory is recursively walked.
pub async fn format(
  args: Vec<String>,
  check: bool,
  exclude: Vec<String>,
) -> Result<(), ErrBox> {
  if args.len() == 1 && args[0] == "-" {
    return format_stdin(check);
  }
  // collect all files provided.
  let mut target_files = collect_files(args)?;
  if !exclude.is_empty() {
    // collect all files to be ignored
    // and retain only files that should be formatted.
    let ignore_files = collect_files(exclude)?;
    target_files.retain(|f| !ignore_files.contains(&f));
  }
  let config = get_config();
  if check {
    check_source_files(config, target_files).await
  } else {
    format_source_files(config, target_files).await
  }
}
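
// Hypothetical call site (the path and flag values are made up for
// illustration): the CLI handler simply awaits `format` with its parsed
// arguments, e.g.
//
//   format(vec!["src".to_string()], /* check */ false, vec![]).await?;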

async fn check_source_files(
  config: dprint::configuration::Configuration,
  paths: Vec<PathBuf>,
) -> Result<(), ErrBox> {
  let not_formatted_files_count = Arc::new(AtomicUsize::new(0));
  let formatter = Arc::new(dprint::Formatter::new(config));

  // prevent threads outputting at the same time
  let output_lock = Arc::new(Mutex::new(0));

  run_parallelized(paths, {
    let not_formatted_files_count = not_formatted_files_count.clone();
    move |file_path| {
      let file_text = read_file_contents(&file_path)?.text;
      let r = formatter.format_text(&file_path, &file_text);
      match r {
        Ok(formatted_text) => {
          if formatted_text != file_text {
            not_formatted_files_count.fetch_add(1, Ordering::SeqCst);
            let _g = output_lock.lock().unwrap();
            match diff(&file_text, &formatted_text) {
              Ok(diff) => {
                println!();
                println!(
                  "{} {}:",
                  colors::bold("from"),
                  file_path.display().to_string()
                );
                println!("{}", diff);
              }
              Err(e) => {
                eprintln!(
                  "Error generating diff: {}",
                  file_path.to_string_lossy()
                );
                eprintln!(" {}", e);
              }
            }
          }
        }
        Err(e) => {
          let _g = output_lock.lock().unwrap();
          eprintln!("Error checking: {}", file_path.to_string_lossy());
          eprintln!(" {}", e);
        }
      }
      Ok(())
    }
  })
  .await?;

  let not_formatted_files_count =
    not_formatted_files_count.load(Ordering::SeqCst);
  if not_formatted_files_count == 0 {
    Ok(())
  } else {
    Err(
      OpError::other(format!(
        "Found {} not formatted {}",
        not_formatted_files_count,
        files_str(not_formatted_files_count),
      ))
      .into(),
    )
  }
}

async fn format_source_files(
  config: dprint::configuration::Configuration,
  paths: Vec<PathBuf>,
) -> Result<(), ErrBox> {
  let formatted_files_count = Arc::new(AtomicUsize::new(0));
  let formatter = Arc::new(dprint::Formatter::new(config));
  let output_lock = Arc::new(Mutex::new(0)); // prevent threads outputting at the same time

  run_parallelized(paths, {
    let formatted_files_count = formatted_files_count.clone();
    move |file_path| {
      let file_contents = read_file_contents(&file_path)?;
      let r = formatter.format_text(&file_path, &file_contents.text);
      match r {
        Ok(formatted_text) => {
          if formatted_text != file_contents.text {
            write_file_contents(
              &file_path,
              FileContents {
                had_bom: file_contents.had_bom,
                text: formatted_text,
              },
            )?;
            formatted_files_count.fetch_add(1, Ordering::SeqCst);
            let _g = output_lock.lock().unwrap();
            println!("{}", file_path.to_string_lossy());
          }
        }
        Err(e) => {
          let _g = output_lock.lock().unwrap();
          eprintln!("Error formatting: {}", file_path.to_string_lossy());
          eprintln!(" {}", e);
        }
      }
      Ok(())
    }
  })
  .await?;

  let formatted_files_count = formatted_files_count.load(Ordering::SeqCst);
  debug!(
    "Formatted {} {}",
    formatted_files_count,
    files_str(formatted_files_count),
  );
  Ok(())
}

/// Format stdin and write the result to stdout.
/// Treats input as TypeScript.
/// Compatible with the `--check` flag.
fn format_stdin(check: bool) -> Result<(), ErrBox> {
  let mut source = String::new();
  if stdin().read_to_string(&mut source).is_err() {
    return Err(OpError::other("Failed to read from stdin".to_string()).into());
  }
  let formatter = dprint::Formatter::new(get_config());

  // dprint will fall back to JSX parsing if parsing this as a .ts file doesn't work
  match formatter.format_text(&PathBuf::from("_stdin.ts"), &source) {
    Ok(formatted_text) => {
      if check {
        if formatted_text != source {
          println!("Not formatted stdin");
        }
      } else {
        stdout().write_all(formatted_text.as_bytes())?;
      }
    }
    Err(e) => {
      return Err(OpError::other(e).into());
    }
  }
  Ok(())
}
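
// Behavioral note: when `check` is true a mismatch on stdin is only printed
// ("Not formatted stdin") and the function still returns `Ok(())`, whereas
// `check_source_files` turns unformatted files into an error.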

fn files_str(len: usize) -> &'static str {
  if len == 1 {
    "file"
  } else {
    "files"
  }
}

fn is_supported(path: &Path) -> bool {
  let lowercase_ext = path
    .extension()
    .and_then(|e| e.to_str())
    .map(|e| e.to_lowercase());
  if let Some(ext) = lowercase_ext {
    ext == "ts" || ext == "tsx" || ext == "js" || ext == "jsx" || ext == "mjs"
  } else {
    false
  }
}

pub fn collect_files(files: Vec<String>) -> Result<Vec<PathBuf>, ErrBox> {
  let mut target_files: Vec<PathBuf> = vec![];

  if files.is_empty() {
    target_files
      .extend(files_in_subtree(std::env::current_dir()?, is_supported));
  } else {
    for arg in files {
      let p = PathBuf::from(arg);
      if p.is_dir() {
        target_files.extend(files_in_subtree(p.canonicalize()?, is_supported));
      } else {
        target_files.push(p.canonicalize()?);
      };
    }
  }

  Ok(target_files)
}
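
// Illustrative behavior (the paths below are hypothetical): an empty argument
// list walks the current directory for supported files, while explicit
// arguments are canonicalized and directories are recursed into, e.g.
//
//   let all = collect_files(vec![])?;
//   let some = collect_files(vec!["src".to_string(), "mod.ts".to_string()])?;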

fn get_config() -> dprint::configuration::Configuration {
  use dprint::configuration::*;
  ConfigurationBuilder::new().deno().build()
}
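
// Note: no user-supplied configuration is consulted here; the formatter always
// uses dprint's `deno()` preset.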

struct FileContents {
  text: String,
  had_bom: bool,
}

fn read_file_contents(file_path: &PathBuf) -> Result<FileContents, ErrBox> {
  let file_bytes = fs::read(&file_path)?;
  let charset = text_encoding::detect_charset(&file_bytes);
  let file_text = text_encoding::convert_to_utf8(&file_bytes, charset)?;
  let had_bom = file_text.starts_with(BOM_CHAR);
  let text = if had_bom {
    // remove the BOM
    String::from(&file_text[BOM_CHAR.len_utf8()..])
  } else {
    String::from(file_text)
  };

  Ok(FileContents { text, had_bom })
}

fn write_file_contents(
  file_path: &PathBuf,
  file_contents: FileContents,
) -> Result<(), ErrBox> {
  let file_text = if file_contents.had_bom {
    // add back the BOM
    format!("{}{}", BOM_CHAR, file_contents.text)
  } else {
    file_contents.text
  };

  Ok(fs::write(file_path, file_text)?)
}
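
// Sketch of the BOM round trip implemented by the two helpers above (the file
// name is hypothetical): reading strips a leading U+FEFF and remembers it,
// writing adds it back so formatting never drops the marker.
//
//   let path = PathBuf::from("with_bom.ts");
//   let contents = read_file_contents(&path)?;
//   assert!(!contents.text.starts_with(BOM_CHAR));
//   write_file_contents(&path, contents)?; // BOM re-added if it was present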

pub async fn run_parallelized<F>(
  file_paths: Vec<PathBuf>,
  f: F,
) -> Result<(), ErrBox>
where
  F: FnOnce(PathBuf) -> Result<(), ErrBox> + Send + 'static + Clone,
{
  let handles = file_paths.iter().map(|file_path| {
    let f = f.clone();
    let file_path = file_path.clone();
    tokio::task::spawn_blocking(move || f(file_path))
  });
  let join_results = futures::future::join_all(handles).await;

  // find the tasks that panicked and let the user know which files caused them
  let panic_file_paths = join_results
    .iter()
    .enumerate()
    .filter_map(|(i, join_result)| {
      join_result
        .as_ref()
        .err()
        .map(|_| file_paths[i].to_string_lossy())
    })
    .collect::<Vec<_>>();
  if !panic_file_paths.is_empty() {
    panic!("Panic formatting: {}", panic_file_paths.join(", "))
  }

  // check for any errors and if so return the first one
  let mut errors = join_results.into_iter().filter_map(|join_result| {
    join_result
      .ok()
      .map(|handle_result| handle_result.err())
      .flatten()
  });

  if let Some(e) = errors.next() {
    Err(e)
  } else {
    Ok(())
  }
}
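
// Minimal usage sketch (hypothetical closure): each path is handed to the
// closure on tokio's blocking thread pool; panics are reported per file and
// the first error, if any, is returned.
//
//   run_parallelized(paths, |file_path| {
//     println!("{}", file_path.display());
//     Ok(())
//   })
//   .await?;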

#[test]
fn test_is_supported() {
  assert!(!is_supported(Path::new("tests/subdir/redirects")));
  assert!(!is_supported(Path::new("README.md")));
  assert!(is_supported(Path::new("lib/typescript.d.ts")));
  assert!(is_supported(Path::new("cli/tests/001_hello.js")));
  assert!(is_supported(Path::new("cli/tests/002_hello.ts")));
  assert!(is_supported(Path::new("foo.jsx")));
  assert!(is_supported(Path::new("foo.tsx")));
  assert!(is_supported(Path::new("foo.TS")));
  assert!(is_supported(Path::new("foo.TSX")));
  assert!(is_supported(Path::new("foo.JS")));
  assert!(is_supported(Path::new("foo.JSX")));
  assert!(is_supported(Path::new("foo.mjs")));
  assert!(!is_supported(Path::new("foo.mjsx")));
}