From 4e72ca313a8fa1d826783bdc5657937da97d590c Mon Sep 17 00:00:00 2001 From: David Sherret Date: Mon, 15 Jan 2024 19:15:39 -0500 Subject: [PATCH] refactor: use globbing from deno_config (#21925) --- Cargo.lock | 5 +- cli/Cargo.toml | 2 +- cli/args/flags.rs | 162 +++++-------- cli/args/mod.rs | 71 +++--- cli/lsp/config.rs | 52 +++-- cli/lsp/documents.rs | 129 ++++++----- cli/tools/bench/mod.rs | 5 +- cli/tools/bundle.rs | 3 +- cli/tools/coverage/mod.rs | 31 ++- cli/tools/doc.rs | 23 +- cli/tools/fmt.rs | 2 +- cli/tools/lint.rs | 2 +- cli/tools/registry/mod.rs | 8 +- cli/tools/registry/tar.rs | 2 +- cli/tools/test/mod.rs | 5 +- cli/util/fs.rs | 115 ++++------ cli/util/glob.rs | 465 -------------------------------------- cli/util/mod.rs | 1 - 18 files changed, 294 insertions(+), 789 deletions(-) delete mode 100644 cli/util/glob.rs diff --git a/Cargo.lock b/Cargo.lock index eaf124b1ae..858b0c17be 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1128,11 +1128,12 @@ dependencies = [ [[package]] name = "deno_config" -version = "0.6.5" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97979f94af93f388822233278ede930414efa273d6eb495de7680f2a6862a4d3" +checksum = "62edb1811b076bf29670385098bc6e1cfee37dae70b71a665e7a1534098ba805" dependencies = [ "anyhow", + "glob", "indexmap", "jsonc-parser", "log", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index faefe28c14..c36aebd924 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -55,7 +55,7 @@ winres.workspace = true [dependencies] deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "dep_graph", "module_specifier", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] } deno_cache_dir = "=0.6.1" -deno_config = "=0.6.5" +deno_config = "=0.7.1" deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } deno_doc = { version = "=0.89.1", features = ["html"] } deno_emit = "=0.33.0" diff --git a/cli/args/flags.rs b/cli/args/flags.rs index 97cfbcaff8..46560b9bae 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -30,37 +30,8 @@ use super::flags_net; #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct FileFlags { - pub ignore: Vec, - pub include: Vec, -} - -impl FileFlags { - pub fn with_absolute_paths(self, base: &Path) -> Self { - fn to_absolute_path(path: PathBuf, base: &Path) -> PathBuf { - // todo(dsherret): don't store URLs in PathBufs - if path.starts_with("http:") - || path.starts_with("https:") - || path.starts_with("file:") - { - path - } else { - base.join(path) - } - } - - Self { - include: self - .include - .into_iter() - .map(|p| to_absolute_path(p, base)) - .collect(), - ignore: self - .ignore - .into_iter() - .map(|p| to_absolute_path(p, base)) - .collect(), - } - } + pub ignore: Vec, + pub include: Vec, } #[derive(Clone, Debug, Default, Eq, PartialEq)] @@ -75,7 +46,7 @@ pub struct BenchFlags { #[derive(Clone, Debug, Eq, PartialEq)] pub struct BundleFlags { pub source_file: String, - pub out_file: Option, + pub out_file: Option, pub watch: Option, } @@ -181,7 +152,7 @@ pub struct FmtFlags { impl FmtFlags { pub fn is_stdin(&self) -> bool { let args = &self.files.include; - args.len() == 1 && args[0].to_string_lossy() == "-" + args.len() == 1 && args[0] == "-" } } @@ -233,7 +204,7 @@ pub struct LintFlags { impl LintFlags { pub fn is_stdin(&self) -> bool { let args = &self.files.include; - args.len() == 1 && args[0].to_string_lossy() == "-" + args.len() == 1 && args[0] == "-" } } @@ -706,8 +677,12 @@ impl Flags { use 
DenoSubcommand::*; match &self.subcommand { - Fmt(FmtFlags { files, .. }) => Some(files.include.clone()), - Lint(LintFlags { files, .. }) => Some(files.include.clone()), + Fmt(FmtFlags { files, .. }) => { + Some(files.include.iter().map(|p| current_dir.join(p)).collect()) + } + Lint(LintFlags { files, .. }) => { + Some(files.include.iter().map(|p| current_dir.join(p)).collect()) + } Run(RunFlags { script, .. }) => { if let Ok(module_specifier) = resolve_url_or_path(script, current_dir) { if module_specifier.scheme() == "file" @@ -730,6 +705,7 @@ impl Flags { Task(TaskFlags { cwd: Some(path), .. }) => { + // todo(dsherret): Why is this canonicalized? Document why. // attempt to resolve the config file from the task subcommand's // `--cwd` when specified match canonicalize_path(&PathBuf::from(path)) { @@ -1124,8 +1100,7 @@ glob {*_,*.,}bench.{js,mjs,ts,mts,jsx,tsx}: .num_args(1..) .use_value_delimiter(true) .require_equals(true) - .help("Ignore files") - .value_parser(value_parser!(PathBuf)), + .help("Ignore files"), ) .arg( Arg::new("filter") @@ -1139,7 +1114,6 @@ glob {*_,*.,}bench.{js,mjs,ts,mts,jsx,tsx}: Arg::new("files") .help("List of file names to run") .num_args(..) - .value_parser(value_parser!(PathBuf)) .action(ArgAction::Append), ) .arg( @@ -1176,11 +1150,7 @@ If no output file is given, the output is written to standard output: .required(true) .value_hint(ValueHint::FilePath), ) - .arg( - Arg::new("out_file") - .value_parser(value_parser!(PathBuf)) - .value_hint(ValueHint::FilePath), - ) + .arg(Arg::new("out_file").value_hint(ValueHint::FilePath)) .arg(watch_arg(false)) .arg(no_clear_screen_arg()) .arg(executable_ext_arg()) @@ -1291,6 +1261,7 @@ supported in canary. Arg::new("output") .long("output") .short('o') + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) .help("Output file (defaults to $PWD/)") .value_hint(ValueHint::FilePath), @@ -1414,6 +1385,7 @@ Generate html reports from lcov: Arg::new("output") .requires("lcov") .long("output") + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) .help("Output file (defaults to stdout) for lcov") .long_help( @@ -1441,7 +1413,6 @@ Generate html reports from lcov: .arg( Arg::new("files") .num_args(0..) - .value_parser(value_parser!(PathBuf)) .action(ArgAction::Append) .value_hint(ValueHint::AnyPath), ) @@ -1521,6 +1492,7 @@ Show documentation for runtime built-ins: .action(ArgAction::Set) .require_equals(true) .value_hint(ValueHint::DirPath) + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) ) .arg( @@ -1651,7 +1623,6 @@ Ignore formatting a file by adding an ignore comment at the top of the file: .arg( Arg::new("ignore") .long("ignore") - .value_parser(value_parser!(PathBuf)) .num_args(1..) .use_value_delimiter(true) .require_equals(true) @@ -1660,7 +1631,6 @@ Ignore formatting a file by adding an ignore comment at the top of the file: ) .arg( Arg::new("files") - .value_parser(value_parser!(PathBuf)) .num_args(1..) 
.action(ArgAction::Append) .required(false) @@ -1863,6 +1833,7 @@ fn jupyter_subcommand() -> Command { Arg::new("conn") .long("conn") .help("Path to JSON file describing connection parameters, provided by Jupyter") + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) .value_hint(ValueHint::FilePath) .conflicts_with("install")) @@ -1988,7 +1959,6 @@ Ignore linting a file by adding an ignore comment at the top of the file: Arg::new("ignore") .long("ignore") .num_args(1..) - .value_parser(value_parser!(PathBuf)) .use_value_delimiter(true) .require_equals(true) .help("Ignore linting particular source files") @@ -2009,7 +1979,6 @@ Ignore linting a file by adding an ignore comment at the top of the file: ) .arg( Arg::new("files") - .value_parser(value_parser!(PathBuf)) .num_args(1..) .action(ArgAction::Append) .required(false) @@ -2128,7 +2097,6 @@ Directory arguments are expanded to all contained files matching the glob Arg::new("ignore") .long("ignore") .num_args(1..) - .value_parser(value_parser!(PathBuf)) .use_value_delimiter(true) .require_equals(true) .help("Ignore files") @@ -2216,7 +2184,6 @@ Directory arguments are expanded to all contained files matching the glob .help("List of file names to run") .num_args(0..) .action(ArgAction::Append) - .value_parser(value_parser!(PathBuf)) .value_hint(ValueHint::AnyPath), ) .arg( @@ -2283,6 +2250,7 @@ update to a different location, use the --output flag Arg::new("output") .long("output") .help("The path to output the updated version to") + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) .value_hint(ValueHint::FilePath), ) @@ -2337,6 +2305,7 @@ Remote modules and multiple modules may also be specified: Arg::new("output") .long("output") .help("The directory to output the vendored modules to") + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) .value_hint(ValueHint::DirPath), ) @@ -2570,6 +2539,7 @@ fn permission_args(app: Command) -> Command { .require_equals(true) .value_name("PATH") .help(ALLOW_READ_HELP) + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) .value_hint(ValueHint::AnyPath), ) @@ -2581,6 +2551,7 @@ fn permission_args(app: Command) -> Command { .require_equals(true) .value_name("PATH") .help(DENY_READ_HELP) + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) .value_hint(ValueHint::AnyPath), ) @@ -2592,6 +2563,7 @@ fn permission_args(app: Command) -> Command { .require_equals(true) .value_name("PATH") .help(ALLOW_WRITE_HELP) + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) .value_hint(ValueHint::AnyPath), ) @@ -2603,6 +2575,7 @@ fn permission_args(app: Command) -> Command { .require_equals(true) .value_name("PATH") .help(DENY_WRITE_HELP) + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) .value_hint(ValueHint::AnyPath), ) @@ -2713,6 +2686,7 @@ fn permission_args(app: Command) -> Command { .require_equals(true) .value_name("PATH") .help(ALLOW_FFI_HELP) + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) .value_hint(ValueHint::AnyPath), ) @@ -2724,6 +2698,7 @@ fn permission_args(app: Command) -> Command { 
.require_equals(true) .value_name("PATH") .help(DENY_FFI_HELP) + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) .value_hint(ValueHint::AnyPath), ) @@ -2973,6 +2948,7 @@ fn hmr_arg(takes_files: bool) -> Arg { arg .value_name("FILES") .num_args(0..) + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) .use_value_delimiter(true) .require_equals(true) @@ -2999,6 +2975,7 @@ fn watch_arg(takes_files: bool) -> Arg { arg .value_name("FILES") .num_args(0..) + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) .use_value_delimiter(true) .require_equals(true) @@ -3090,6 +3067,7 @@ fn lock_arg() -> Arg { If value is not provided, defaults to \"deno.lock\" in the current working directory.") .num_args(0..=1) + // todo(dsherret): remove value_parser!(PathBuf) and instead parse as string .value_parser(value_parser!(PathBuf)) .value_hint(ValueHint::FilePath) } @@ -3194,7 +3172,7 @@ fn bench_parse(flags: &mut Flags, matches: &mut ArgMatches) { let json = matches.get_flag("json"); - let ignore = match matches.remove_many::("ignore") { + let ignore = match matches.remove_many::("ignore") { Some(f) => f.collect(), None => vec![], }; @@ -3207,7 +3185,7 @@ fn bench_parse(flags: &mut Flags, matches: &mut ArgMatches) { .extend(matches.remove_many::("script_arg").unwrap()); } - let include = if let Some(files) = matches.remove_many::("files") { + let include = if let Some(files) = matches.remove_many::("files") { files.collect() } else { Vec::new() @@ -3232,7 +3210,7 @@ fn bundle_parse(flags: &mut Flags, matches: &mut ArgMatches) { let source_file = matches.remove_one::("source_file").unwrap(); let out_file = - if let Some(out_file) = matches.remove_one::("out_file") { + if let Some(out_file) = matches.remove_one::("out_file") { flags.allow_write = Some(vec![]); Some(out_file) } else { @@ -3320,12 +3298,11 @@ fn completions_parse( } fn coverage_parse(flags: &mut Flags, matches: &mut ArgMatches) { - let default_files = vec![PathBuf::from("coverage")]; - let files = match matches.remove_many::("files") { + let files = match matches.remove_many::("files") { Some(f) => f.collect(), - None => default_files, + None => vec!["coverage".to_string()], // default }; - let ignore = match matches.remove_many::("ignore") { + let ignore = match matches.remove_many::("ignore") { Some(f) => f.collect(), None => vec![], }; @@ -3448,11 +3425,11 @@ fn fmt_parse(flags: &mut Flags, matches: &mut ArgMatches) { config_args_parse(flags, matches); ext_arg_parse(flags, matches); - let include = match matches.remove_many::("files") { + let include = match matches.remove_many::("files") { Some(f) => f.collect(), None => vec![], }; - let ignore = match matches.remove_many::("ignore") { + let ignore = match matches.remove_many::("ignore") { Some(f) => f.collect(), None => vec![], }; @@ -3547,11 +3524,11 @@ fn lsp_parse(flags: &mut Flags, _matches: &mut ArgMatches) { fn lint_parse(flags: &mut Flags, matches: &mut ArgMatches) { config_args_parse(flags, matches); - let files = match matches.remove_many::("files") { + let files = match matches.remove_many::("files") { Some(f) => f.collect(), None => vec![], }; - let ignore = match matches.remove_many::("ignore") { + let ignore = match matches.remove_many::("ignore") { Some(f) => f.collect(), None => vec![], }; @@ -3666,7 +3643,7 @@ fn test_parse(flags: &mut Flags, matches: &mut ArgMatches) { // interactive 
prompts, unless done by user code flags.no_prompt = true; - let ignore = match matches.remove_many::("ignore") { + let ignore = match matches.remove_many::("ignore") { Some(f) => f.collect(), None => vec![], }; @@ -3725,7 +3702,7 @@ fn test_parse(flags: &mut Flags, matches: &mut ArgMatches) { None }; - let include = if let Some(files) = matches.remove_many::("files") { + let include = if let Some(files) = matches.remove_many::("files") { files.collect() } else { Vec::new() @@ -4642,10 +4619,7 @@ mod tests { subcommand: DenoSubcommand::Fmt(FmtFlags { check: false, files: FileFlags { - include: vec![ - PathBuf::from("script_1.ts"), - PathBuf::from("script_2.ts") - ], + include: vec!["script_1.ts".to_string(), "script_2.ts".to_string()], ignore: vec![], }, use_tabs: None, @@ -4771,8 +4745,8 @@ mod tests { subcommand: DenoSubcommand::Fmt(FmtFlags { check: true, files: FileFlags { - include: vec![PathBuf::from("foo.ts")], - ignore: vec![PathBuf::from("bar.js")], + include: vec!["foo.ts".to_string()], + ignore: vec!["bar.js".to_string()], }, use_tabs: None, line_width: None, @@ -4825,7 +4799,7 @@ mod tests { subcommand: DenoSubcommand::Fmt(FmtFlags { check: false, files: FileFlags { - include: vec![PathBuf::from("foo.ts")], + include: vec!["foo.ts".to_string()], ignore: vec![], }, use_tabs: None, @@ -4916,10 +4890,7 @@ mod tests { Flags { subcommand: DenoSubcommand::Lint(LintFlags { files: FileFlags { - include: vec![ - PathBuf::from("script_1.ts"), - PathBuf::from("script_2.ts") - ], + include: vec!["script_1.ts".to_string(), "script_2.ts".to_string(),], ignore: vec![], }, rules: false, @@ -4946,10 +4917,7 @@ mod tests { Flags { subcommand: DenoSubcommand::Lint(LintFlags { files: FileFlags { - include: vec![ - PathBuf::from("script_1.ts"), - PathBuf::from("script_2.ts") - ], + include: vec!["script_1.ts".to_string(), "script_2.ts".to_string()], ignore: vec![], }, rules: false, @@ -4977,10 +4945,7 @@ mod tests { Flags { subcommand: DenoSubcommand::Lint(LintFlags { files: FileFlags { - include: vec![ - PathBuf::from("script_1.ts"), - PathBuf::from("script_2.ts") - ], + include: vec!["script_1.ts".to_string(), "script_2.ts".to_string()], ignore: vec![], }, rules: false, @@ -5006,10 +4971,7 @@ mod tests { subcommand: DenoSubcommand::Lint(LintFlags { files: FileFlags { include: vec![], - ignore: vec![ - PathBuf::from("script_1.ts"), - PathBuf::from("script_2.ts") - ], + ignore: vec!["script_1.ts".to_string(), "script_2.ts".to_string()], }, rules: false, maybe_rules_tags: None, @@ -5103,7 +5065,7 @@ mod tests { Flags { subcommand: DenoSubcommand::Lint(LintFlags { files: FileFlags { - include: vec![PathBuf::from("script_1.ts")], + include: vec!["script_1.ts".to_string()], ignore: vec![], }, rules: false, @@ -5131,7 +5093,7 @@ mod tests { Flags { subcommand: DenoSubcommand::Lint(LintFlags { files: FileFlags { - include: vec![PathBuf::from("script_1.ts")], + include: vec!["script_1.ts".to_string()], ignore: vec![], }, rules: false, @@ -5160,7 +5122,7 @@ mod tests { Flags { subcommand: DenoSubcommand::Lint(LintFlags { files: FileFlags { - include: vec![PathBuf::from("script_1.ts")], + include: vec!["script_1.ts".to_string()], ignore: vec![], }, rules: false, @@ -6091,7 +6053,7 @@ mod tests { Flags { subcommand: DenoSubcommand::Bundle(BundleFlags { source_file: "source.ts".to_string(), - out_file: Some(PathBuf::from("bundle.js")), + out_file: Some("bundle.js".to_string()), watch: Default::default(), }), allow_write: Some(vec![]), @@ -6111,7 +6073,7 @@ mod tests { Flags { subcommand: 
DenoSubcommand::Bundle(BundleFlags { source_file: "source.ts".to_string(), - out_file: Some(PathBuf::from("bundle.js")), + out_file: Some("bundle.js".to_string()), watch: Default::default(), }), type_check_mode: TypeCheckMode::Local, @@ -7081,7 +7043,7 @@ mod tests { filter: Some("- foo".to_string()), allow_none: true, files: FileFlags { - include: vec![PathBuf::from("dir1/"), PathBuf::from("dir2/")], + include: vec!["dir1/".to_string(), "dir2/".to_string()], ignore: vec![], }, shuffle: None, @@ -7410,7 +7372,7 @@ mod tests { allow_none: false, shuffle: None, files: FileFlags { - include: vec![PathBuf::from("./")], + include: vec!["./".to_string()], ignore: vec![], }, concurrent_jobs: None, @@ -7925,7 +7887,7 @@ mod tests { Flags { subcommand: DenoSubcommand::Coverage(CoverageFlags { files: FileFlags { - include: vec![PathBuf::from("foo.json")], + include: vec!["foo.json".to_string()], ignore: vec![], }, include: vec![r"^file:".to_string()], @@ -7951,7 +7913,7 @@ mod tests { Flags { subcommand: DenoSubcommand::Coverage(CoverageFlags { files: FileFlags { - include: vec![PathBuf::from("foo.json")], + include: vec!["foo.json".to_string()], ignore: vec![], }, include: vec![r"^file:".to_string()], @@ -7972,7 +7934,7 @@ mod tests { Flags { subcommand: DenoSubcommand::Coverage(CoverageFlags { files: FileFlags { - include: vec![PathBuf::from("coverage")], + include: vec!["coverage".to_string()], ignore: vec![], }, include: vec![r"^file:".to_string()], @@ -8010,7 +7972,7 @@ mod tests { flags_from_vec(svec!["deno", "lint", "dir/a.js", "dir/b.js"]).unwrap(); assert_eq!( flags.config_path_args(&cwd), - Some(vec![PathBuf::from("dir/a.js"), PathBuf::from("dir/b.js")]) + Some(vec![cwd.join("dir/a.js"), cwd.join("dir/b.js")]) ); let flags = flags_from_vec(svec!["deno", "lint"]).unwrap(); @@ -8020,7 +7982,7 @@ mod tests { flags_from_vec(svec!["deno", "fmt", "dir/a.js", "dir/b.js"]).unwrap(); assert_eq!( flags.config_path_args(&cwd), - Some(vec![PathBuf::from("dir/a.js"), PathBuf::from("dir/b.js")]) + Some(vec![cwd.join("dir/a.js"), cwd.join("dir/b.js")]) ); } @@ -8319,7 +8281,7 @@ mod tests { json: true, no_run: true, files: FileFlags { - include: vec![PathBuf::from("dir1/"), PathBuf::from("dir2/")], + include: vec!["dir1/".to_string(), "dir2/".to_string()], ignore: vec![], }, watch: Default::default(), diff --git a/cli/args/mod.rs b/cli/args/mod.rs index ae48aae50e..121d2b4edb 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -9,6 +9,7 @@ pub mod package_json; pub use self::import_map::resolve_import_map_from_specifier; use self::package_json::PackageJsonDeps; use ::import_map::ImportMap; +use deno_config::glob::PathOrPattern; use deno_core::resolve_url_or_path; use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; use deno_npm::NpmSystemInfo; @@ -16,9 +17,9 @@ use deno_runtime::deno_tls::RootCertStoreProvider; use deno_semver::npm::NpmPackageReqReference; use indexmap::IndexMap; +pub use deno_config::glob::FilePatterns; pub use deno_config::BenchConfig; pub use deno_config::ConfigFile; -pub use deno_config::FilesConfig; pub use deno_config::FmtOptionsConfig; pub use deno_config::JsxImportSourceConfig; pub use deno_config::LintRulesConfig; @@ -69,10 +70,9 @@ use thiserror::Error; use crate::file_fetcher::FileFetcher; use crate::util::fs::canonicalize_path_maybe_not_exists; -use crate::util::glob::FilePatterns; -use crate::util::glob::PathOrPatternSet; use crate::version; +use deno_config::glob::PathOrPatternSet; use deno_config::FmtConfig; use deno_config::LintConfig; use 
deno_config::TestConfig; @@ -1187,16 +1187,13 @@ impl CliOptions { } pub fn resolve_config_excludes(&self) -> Result { - let maybe_files_config = if let Some(config_file) = &self.maybe_config_file + let maybe_config_files = if let Some(config_file) = &self.maybe_config_file { config_file.to_files_config()? } else { None }; - PathOrPatternSet::from_absolute_paths( - maybe_files_config.map(|c| c.exclude).unwrap_or_default(), - ) - .context("Invalid config file exclude pattern.") + Ok(maybe_config_files.map(|f| f.exclude).unwrap_or_default()) } pub fn resolve_test_options( @@ -1647,30 +1644,38 @@ impl StorageKeyResolver { /// over config file, i.e. if there's `files.ignore` in config file /// and `--ignore` CLI flag, only the flag value is taken into account. fn resolve_files( - maybe_files_config: Option, + maybe_files_config: Option, maybe_file_flags: Option, initial_cwd: &Path, ) -> Result { let mut maybe_files_config = maybe_files_config.unwrap_or_default(); if let Some(file_flags) = maybe_file_flags { - let file_flags = file_flags.with_absolute_paths(initial_cwd); if !file_flags.include.is_empty() { - maybe_files_config.include = Some(file_flags.include); + maybe_files_config.include = + Some(PathOrPatternSet::from_relative_path_or_patterns( + initial_cwd, + &file_flags.include, + )?); } if !file_flags.ignore.is_empty() { - maybe_files_config.exclude = file_flags.ignore + maybe_files_config.exclude = + PathOrPatternSet::from_relative_path_or_patterns( + initial_cwd, + &file_flags.ignore, + )?; } } Ok(FilePatterns { include: { let files = match maybe_files_config.include { Some(include) => include, - None => vec![initial_cwd.to_path_buf()], + None => PathOrPatternSet::new(vec![PathOrPattern::Path( + initial_cwd.to_path_buf(), + )]), }; - Some(PathOrPatternSet::from_absolute_paths(files)?) 
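// note: with deno_config 0.7 the config file's include set already arrives as a PathOrPatternSet, so it is passed through as-is instead of being re-parsed from absolute paths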
+ Some(files) }, - exclude: PathOrPatternSet::from_absolute_paths(maybe_files_config.exclude) - .context("Invalid exclude.")?, + exclude: maybe_files_config.exclude, }) } @@ -1882,26 +1887,32 @@ mod test { temp_dir.write("pages/[id].ts", ""); let temp_dir_path = temp_dir.path().as_path(); - let error = resolve_files( - Some(FilesConfig { - include: Some(vec![temp_dir_path.join("data/**********.ts")]), - exclude: vec![], - }), - None, + let error = PathOrPatternSet::from_relative_path_or_patterns( temp_dir_path, + &["data/**********.ts".to_string()], ) .unwrap_err(); assert!(error.to_string().starts_with("Failed to expand glob")); let resolved_files = resolve_files( - Some(FilesConfig { - include: Some(vec![ - temp_dir_path.join("data/test1.?s"), - temp_dir_path.join("nested/foo/*.ts"), - temp_dir_path.join("nested/fizz/*.ts"), - temp_dir_path.join("pages/[id].ts"), - ]), - exclude: vec![temp_dir_path.join("nested/**/*bazz.ts")], + Some(FilePatterns { + include: Some( + PathOrPatternSet::from_relative_path_or_patterns( + temp_dir_path, + &[ + "data/test1.?s".to_string(), + "nested/foo/*.ts".to_string(), + "nested/fizz/*.ts".to_string(), + "pages/[id].ts".to_string(), + ], + ) + .unwrap(), + ), + exclude: PathOrPatternSet::from_relative_path_or_patterns( + temp_dir_path, + &["nested/**/*bazz.ts".to_string()], + ) + .unwrap(), }), None, temp_dir_path, diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index e7839de3be..d0909b926f 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -6,6 +6,8 @@ use crate::lsp::logging::lsp_warn; use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::path::specifier_to_file_path; use deno_ast::MediaType; +use deno_config::glob::PathOrPattern; +use deno_config::glob::PathOrPatternSet; use deno_config::FmtOptionsConfig; use deno_core::parking_lot::Mutex; use deno_core::serde::de::DeserializeOwned; @@ -750,10 +752,7 @@ impl ConfigSnapshot { } } } - if !self.specifier_enabled(specifier) { - return false; - } - true + self.specifier_enabled(specifier) } } @@ -1055,7 +1054,7 @@ impl Config { true } - pub fn get_enabled_paths(&self) -> Vec { + pub fn get_enabled_paths(&self) -> PathOrPatternSet { let mut paths = vec![]; for (workspace_uri, _) in &self.workspace_folders { let Ok(workspace_path) = specifier_to_file_path(workspace_uri) else { @@ -1065,23 +1064,28 @@ impl Config { let settings = self.workspace_settings_for_specifier(workspace_uri); if let Some(enable_paths) = &settings.enable_paths { for path in enable_paths { - paths.push(workspace_path.join(path)); + match PathOrPattern::from_relative(&workspace_path, path) { + Ok(path_or_pattern) => paths.push(path_or_pattern), + Err(err) => { + lsp_log!("Invalid enable path '{}': {:#}", path, err); + } + } } } else { - paths.push(workspace_path); + paths.push(PathOrPattern::Path(workspace_path)); } } paths.sort(); paths.dedup(); - paths + PathOrPatternSet::new(paths) } - pub fn get_disabled_paths(&self) -> Vec { - let mut paths = vec![]; + pub fn get_disabled_paths(&self) -> PathOrPatternSet { + let mut path_or_patterns = vec![]; if let Some(cf) = self.maybe_config_file() { if let Some(files) = cf.to_files_config().ok().flatten() { - for path in files.exclude { - paths.push(path); + for path in files.exclude.into_path_or_patterns() { + path_or_patterns.push(path); } } } @@ -1093,15 +1097,15 @@ impl Config { let settings = self.workspace_settings_for_specifier(workspace_uri); if settings.enable.unwrap_or_else(|| self.has_config_file()) { for path in &settings.disable_paths { - 
paths.push(workspace_path.join(path)); + path_or_patterns.push(PathOrPattern::Path(workspace_path.join(path))); } } else { - paths.push(workspace_path); + path_or_patterns.push(PathOrPattern::Path(workspace_path)); } } - paths.sort(); - paths.dedup(); - paths + path_or_patterns.sort(); + path_or_patterns.dedup(); + PathOrPatternSet::new(path_or_patterns) } pub fn log_file(&self) -> bool { @@ -1587,13 +1591,13 @@ mod tests { assert_eq!( config.get_enabled_paths(), - vec![ - PathBuf::from("/root1/sub_dir"), - PathBuf::from("/root1/sub_dir/other"), - PathBuf::from("/root1/test.ts"), - PathBuf::from("/root2/other.ts"), - PathBuf::from("/root3/"), - ] + PathOrPatternSet::new(vec![ + PathOrPattern::Path(PathBuf::from("/root1/sub_dir")), + PathOrPattern::Path(PathBuf::from("/root1/sub_dir/other")), + PathOrPattern::Path(PathBuf::from("/root1/test.ts")), + PathOrPattern::Path(PathBuf::from("/root2/other.ts")), + PathOrPattern::Path(PathBuf::from("/root3/")), + ]) ); } diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index bff56a6b8b..a03c083876 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -23,14 +23,15 @@ use crate::resolver::CliGraphResolverOptions; use crate::resolver::SloppyImportsFsEntry; use crate::resolver::SloppyImportsResolution; use crate::resolver::SloppyImportsResolver; -use crate::util::glob; -use crate::util::glob::FilePatterns; use crate::util::path::specifier_to_file_path; use crate::util::text_encoding; use deno_ast::MediaType; use deno_ast::ParsedSource; use deno_ast::SourceTextInfo; +use deno_config::glob::FilePatterns; +use deno_config::glob::PathOrPattern; +use deno_config::glob::PathOrPatternSet; use deno_core::error::custom_error; use deno_core::error::AnyError; use deno_core::futures::future; @@ -843,8 +844,8 @@ impl FileSystemDocuments { } pub struct UpdateDocumentConfigOptions<'a> { - pub enabled_paths: Vec, - pub disabled_paths: Vec, + pub enabled_paths: PathOrPatternSet, + pub disabled_paths: PathOrPatternSet, pub document_preload_limit: usize, pub maybe_import_map: Option>, pub maybe_config_file: Option<&'a ConfigFile>, @@ -1318,8 +1319,10 @@ impl Documents { } pub fn update_config(&mut self, options: UpdateDocumentConfigOptions) { + #[allow(clippy::too_many_arguments)] fn calculate_resolver_config_hash( - enabled_paths: &[PathBuf], + enabled_paths: &PathOrPatternSet, + disabled_paths: &PathOrPatternSet, document_preload_limit: usize, maybe_import_map: Option<&import_map::ImportMap>, maybe_jsx_config: Option<&JsxImportSourceConfig>, @@ -1327,14 +1330,27 @@ impl Documents { maybe_package_json_deps: Option<&PackageJsonDeps>, maybe_unstable_flags: Option<&Vec>, ) -> u64 { + fn get_pattern_set_vec(set: &PathOrPatternSet) -> Vec> { + let mut paths = set + .inner() + .iter() + .map(|p| match p { + PathOrPattern::Path(p) => { + Cow::Owned(p.to_string_lossy().to_string()) + } + PathOrPattern::RemoteUrl(p) => Cow::Borrowed(p.as_str()), + PathOrPattern::Pattern(p) => Cow::Borrowed(p.as_str()), + }) + .collect::>(); + // ensure these are sorted so the hashing is deterministic + paths.sort_unstable(); + paths + } + let mut hasher = FastInsecureHasher::default(); hasher.write_hashable(document_preload_limit); - hasher.write_hashable(&{ - // ensure these are sorted so the hashing is deterministic - let mut enabled_paths = enabled_paths.to_vec(); - enabled_paths.sort_unstable(); - enabled_paths - }); + hasher.write_hashable(&get_pattern_set_vec(enabled_paths)); + hasher.write_hashable(&get_pattern_set_vec(disabled_paths)); if let Some(import_map) = 
maybe_import_map { hasher.write_str(&import_map.to_json()); hasher.write_str(import_map.base_url().as_str()); @@ -1372,6 +1388,7 @@ impl Documents { .and_then(|cf| cf.to_maybe_jsx_import_source_config().ok().flatten()); let new_resolver_config_hash = calculate_resolver_config_hash( &options.enabled_paths, + &options.disabled_paths, options.document_preload_limit, options.maybe_import_map.as_deref(), maybe_jsx_config.as_ref(), @@ -1447,8 +1464,8 @@ impl Documents { fn refresh_dependencies( &mut self, - enabled_paths: Vec, - disabled_paths: Vec, + enabled_paths: PathOrPatternSet, + disabled_paths: PathOrPatternSet, document_preload_limit: usize, ) { let resolver = self.resolver.as_graph_resolver(); @@ -1883,8 +1900,8 @@ enum PendingEntry { } struct PreloadDocumentFinderOptions { - enabled_paths: Vec, - disabled_paths: Vec, + enabled_paths: PathOrPatternSet, + disabled_paths: PathOrPatternSet, limit: usize, } @@ -1900,18 +1917,6 @@ struct PreloadDocumentFinder { impl PreloadDocumentFinder { pub fn new(options: PreloadDocumentFinderOptions) -> Self { - fn paths_into_globs_and_paths( - input_paths: Vec, - ) -> glob::PathOrPatternSet { - let mut result = Vec::with_capacity(input_paths.len()); - for path in input_paths { - if let Ok(path_or_pattern) = glob::PathOrPattern::new(path) { - result.push(path_or_pattern); - } - } - glob::PathOrPatternSet::new(result) - } - fn is_allowed_root_dir(dir_path: &Path) -> bool { if dir_path.parent().is_none() { // never search the root directory of a drive @@ -1929,8 +1934,8 @@ impl PreloadDocumentFinder { }; let file_patterns = FilePatterns { - include: Some(paths_into_globs_and_paths(options.enabled_paths)), - exclude: paths_into_globs_and_paths(options.disabled_paths), + include: Some(options.enabled_paths), + exclude: options.disabled_paths, }; let file_patterns_by_base = file_patterns.split_by_base(); @@ -2242,8 +2247,8 @@ console.log(b, "hello deno"); .unwrap(); documents.update_config(UpdateDocumentConfigOptions { - enabled_paths: vec![], - disabled_paths: vec![], + enabled_paths: Default::default(), + disabled_paths: Default::default(), document_preload_limit: 1_000, maybe_import_map: Some(Arc::new(import_map)), maybe_config_file: None, @@ -2284,8 +2289,8 @@ console.log(b, "hello deno"); .unwrap(); documents.update_config(UpdateDocumentConfigOptions { - enabled_paths: vec![], - disabled_paths: vec![], + enabled_paths: Default::default(), + disabled_paths: Default::default(), document_preload_limit: 1_000, maybe_import_map: Some(Arc::new(import_map)), maybe_config_file: None, @@ -2352,17 +2357,17 @@ console.log(b, "hello deno"); temp_dir.write("root3/mod.ts", ""); // no, not provided let mut urls = PreloadDocumentFinder::new(PreloadDocumentFinderOptions { - enabled_paths: vec![ - temp_dir.path().to_path_buf().join("root1"), - temp_dir.path().to_path_buf().join("root2").join("file1.ts"), - temp_dir - .path() - .to_path_buf() - .join("root2") - .join("main.min.ts"), - temp_dir.path().to_path_buf().join("root2").join("folder"), - ], - disabled_paths: Vec::new(), + enabled_paths: PathOrPatternSet::from_relative_path_or_patterns( + temp_dir.path().as_path(), + &[ + "root1".to_string(), + "root2/file1.ts".to_string(), + "root2/main.min.ts".to_string(), + "root2/folder".to_string(), + ], + ) + .unwrap(), + disabled_paths: Default::default(), limit: 1_000, }) .collect::>(); @@ -2392,8 +2397,10 @@ console.log(b, "hello deno"); // now try iterating with a low limit let urls = PreloadDocumentFinder::new(PreloadDocumentFinderOptions { - enabled_paths: 
vec![temp_dir.path().to_path_buf()], - disabled_paths: Vec::new(), + enabled_paths: PathOrPatternSet::new(vec![PathOrPattern::Path( + temp_dir.path().to_path_buf(), + )]), + disabled_paths: Default::default(), limit: 10, // entries and not results }) .collect::>(); @@ -2405,12 +2412,18 @@ console.log(b, "hello deno"); // now try with certain directories and files disabled let mut urls = PreloadDocumentFinder::new(PreloadDocumentFinderOptions { - enabled_paths: vec![temp_dir.path().to_path_buf()], - disabled_paths: vec![ - temp_dir.path().to_path_buf().join("root1"), - temp_dir.path().to_path_buf().join("root2").join("file1.ts"), - temp_dir.path().to_path_buf().join("**/*.js"), // ignore js files - ], + enabled_paths: PathOrPatternSet::new(vec![PathOrPattern::Path( + temp_dir.path().to_path_buf(), + )]), + disabled_paths: PathOrPatternSet::from_relative_path_or_patterns( + temp_dir.path().as_path(), + &[ + "root1".to_string(), + "root2/file1.ts".to_string(), + "**/*.js".to_string(), // ignore js files + ], + ) + .unwrap(), limit: 1_000, }) .collect::>(); @@ -2430,16 +2443,20 @@ console.log(b, "hello deno"); pub fn test_pre_load_document_finder_disallowed_dirs() { if cfg!(windows) { let paths = PreloadDocumentFinder::new(PreloadDocumentFinderOptions { - enabled_paths: vec![PathBuf::from("C:\\")], - disabled_paths: Vec::new(), + enabled_paths: PathOrPatternSet::new(vec![PathOrPattern::Path( + PathBuf::from("C:\\"), + )]), + disabled_paths: Default::default(), limit: 1_000, }) .collect::>(); assert_eq!(paths, vec![]); } else { let paths = PreloadDocumentFinder::new(PreloadDocumentFinderOptions { - enabled_paths: vec![PathBuf::from("/")], - disabled_paths: Vec::new(), + enabled_paths: PathOrPatternSet::new(vec![PathOrPattern::Path( + PathBuf::from("/"), + )]), + disabled_paths: Default::default(), limit: 1_000, }) .collect::>(); diff --git a/cli/tools/bench/mod.rs b/cli/tools/bench/mod.rs index 1eb703813b..146c9e8bd0 100644 --- a/cli/tools/bench/mod.rs +++ b/cli/tools/bench/mod.rs @@ -15,12 +15,12 @@ use crate::tools::test::format_test_error; use crate::tools::test::TestFilter; use crate::util::file_watcher; use crate::util::fs::collect_specifiers; -use crate::util::glob::FilePatterns; -use crate::util::glob::PathOrPattern; use crate::util::path::is_script_ext; use crate::version::get_user_agent; use crate::worker::CliMainWorkerFactory; +use deno_config::glob::FilePatterns; +use deno_config::glob::PathOrPattern; use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::error::JsError; @@ -408,6 +408,7 @@ fn is_supported_bench_path(path: &Path, patterns: &FilePatterns) -> bool { .map(|p| { p.inner().iter().any(|p| match p { PathOrPattern::Path(p) => p == path, + PathOrPattern::RemoteUrl(_) => true, PathOrPattern::Pattern(p) => p.matches_path(path), }) }) diff --git a/cli/tools/bundle.rs b/cli/tools/bundle.rs index d66a993d6b..c703924190 100644 --- a/cli/tools/bundle.rs +++ b/cli/tools/bundle.rs @@ -99,9 +99,10 @@ async fn bundle_action( let out_file = &bundle_flags.out_file; if let Some(out_file) = out_file { + let out_file = cli_options.initial_cwd().join(out_file); let output_bytes = bundle_output.code.as_bytes(); let output_len = output_bytes.len(); - util::fs::write_file(out_file, output_bytes, 0o644)?; + util::fs::write_file(&out_file, output_bytes, 0o644)?; log::info!( "{} {:?} ({})", colors::green("Emit"), diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index c7d84d91d8..ec77c1bb8c 100644 --- a/cli/tools/coverage/mod.rs +++ 
b/cli/tools/coverage/mod.rs @@ -9,12 +9,13 @@ use crate::npm::CliNpmResolver; use crate::tools::fmt::format_json; use crate::tools::test::is_supported_test_path; use crate::util::fs::FileCollector; -use crate::util::glob::FilePatterns; -use crate::util::glob::PathOrPatternSet; use crate::util::text_encoding::source_map_from_code; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; +use deno_config::glob::FilePatterns; +use deno_config::glob::PathOrPattern; +use deno_config::glob::PathOrPatternSet; use deno_core::anyhow::anyhow; use deno_core::anyhow::Context; use deno_core::error::generic_error; @@ -375,19 +376,25 @@ fn collect_coverages( files: FileFlags, initial_cwd: &Path, ) -> Result, AnyError> { - let files = files.with_absolute_paths(initial_cwd); let mut coverages: Vec = Vec::new(); let file_patterns = FilePatterns { include: Some({ - let files = if files.include.is_empty() { - vec![initial_cwd.to_path_buf()] + if files.include.is_empty() { + PathOrPatternSet::new(vec![PathOrPattern::Path( + initial_cwd.to_path_buf(), + )]) } else { - files.include - }; - PathOrPatternSet::from_absolute_paths(files)? + PathOrPatternSet::from_relative_path_or_patterns( + initial_cwd, + &files.include, + )? + } }), - exclude: PathOrPatternSet::from_absolute_paths(files.ignore) - .context("Invalid ignore pattern.")?, + exclude: PathOrPatternSet::from_relative_path_or_patterns( + initial_cwd, + &files.ignore, + ) + .context("Invalid ignore pattern.")?, }; let file_paths = FileCollector::new(|file_path, _| { file_path @@ -463,7 +470,9 @@ pub async fn cover_files( assert!(!coverage_flags.files.include.is_empty()); // Use the first include path as the default output path. - let coverage_root = coverage_flags.files.include[0].clone(); + let coverage_root = cli_options + .initial_cwd() + .join(&coverage_flags.files.include[0]); let script_coverages = collect_coverages(coverage_flags.files, cli_options.initial_cwd())?; if script_coverages.is_empty() { diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs index 5e9f32a8fc..7a51f6356e 100644 --- a/cli/tools/doc.rs +++ b/cli/tools/doc.rs @@ -11,8 +11,8 @@ use crate::factory::CliFactory; use crate::graph_util::graph_lock_or_exit; use crate::tsc::get_types_declaration_file_text; use crate::util::fs::collect_specifiers; -use crate::util::glob::FilePatterns; -use crate::util::glob::PathOrPatternSet; +use deno_config::glob::FilePatterns; +use deno_config::glob::PathOrPatternSet; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; @@ -25,7 +25,6 @@ use deno_graph::ModuleSpecifier; use doc::DocDiagnostic; use indexmap::IndexMap; use std::collections::BTreeMap; -use std::path::PathBuf; use std::rc::Rc; async fn generate_doc_nodes_for_builtin_types( @@ -94,21 +93,9 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> { let module_specifiers = collect_specifiers( FilePatterns { - include: Some(PathOrPatternSet::from_absolute_paths( - source_files - .iter() - .map(|p| { - if p.starts_with("https:") - || p.starts_with("http:") - || p.starts_with("file:") - { - // todo(dsherret): don't store URLs in PathBufs - PathBuf::from(p) - } else { - cli_options.initial_cwd().join(p) - } - }) - .collect(), + include: Some(PathOrPatternSet::from_relative_path_or_patterns( + cli_options.initial_cwd(), + source_files, )?), exclude: Default::default(), }, diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index c35c72844f..ad35615a08 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -19,10 +19,10 @@ use 
crate::util::diff::diff; use crate::util::file_watcher; use crate::util::fs::canonicalize_path; use crate::util::fs::FileCollector; -use crate::util::glob::FilePatterns; use crate::util::path::get_extension; use crate::util::text_encoding; use deno_ast::ParsedSource; +use deno_config::glob::FilePatterns; use deno_core::anyhow::anyhow; use deno_core::anyhow::bail; use deno_core::anyhow::Context; diff --git a/cli/tools/lint.rs b/cli/tools/lint.rs index 52890ae243..20fd12ce2e 100644 --- a/cli/tools/lint.rs +++ b/cli/tools/lint.rs @@ -13,10 +13,10 @@ use crate::tools::fmt::run_parallelized; use crate::util::file_watcher; use crate::util::fs::canonicalize_path; use crate::util::fs::FileCollector; -use crate::util::glob::FilePatterns; use crate::util::path::is_script_ext; use crate::util::sync::AtomicFlag; use deno_ast::MediaType; +use deno_config::glob::FilePatterns; use deno_core::anyhow::bail; use deno_core::error::generic_error; use deno_core::error::AnyError; diff --git a/cli/tools/registry/mod.rs b/cli/tools/registry/mod.rs index d635ccf555..20a1d70aef 100644 --- a/cli/tools/registry/mod.rs +++ b/cli/tools/registry/mod.rs @@ -36,7 +36,6 @@ use crate::tools::registry::graph::resolve_config_file_roots_from_exports; use crate::tools::registry::graph::surface_fast_check_type_graph_errors; use crate::tools::registry::graph::MemberRoots; use crate::util::display::human_size; -use crate::util::glob::PathOrPatternSet; use crate::util::import_map::ImportMapUnfurler; mod api; @@ -127,10 +126,9 @@ async fn prepare_publish( let Some((scope, package_name)) = name.split_once('/') else { bail!("Invalid package name, use '@/ format"); }; - let exclude_patterns = deno_json.to_files_config().and_then(|files| { - PathOrPatternSet::from_absolute_paths(files.unwrap_or_default().exclude) - .context("Invalid config file exclude pattern.") - })?; + let exclude_patterns = deno_json + .to_files_config() + .map(|files| files.unwrap_or_default().exclude)?; let tarball = deno_core::unsync::spawn_blocking(move || { let unfurler = ImportMapUnfurler::new(&import_map); diff --git a/cli/tools/registry/tar.rs b/cli/tools/registry/tar.rs index 218e4f67ee..6eaaf1095f 100644 --- a/cli/tools/registry/tar.rs +++ b/cli/tools/registry/tar.rs @@ -12,8 +12,8 @@ use std::path::Path; use std::path::PathBuf; use tar::Header; -use crate::util::glob::PathOrPatternSet; use crate::util::import_map::ImportMapUnfurler; +use deno_config::glob::PathOrPatternSet; #[derive(Debug, Clone, PartialEq)] pub struct PublishableTarballFile { diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs index 2c226db4d7..d1dc76028a 100644 --- a/cli/tools/test/mod.rs +++ b/cli/tools/test/mod.rs @@ -16,8 +16,6 @@ use crate::module_loader::ModuleLoadPreparer; use crate::ops; use crate::util::file_watcher; use crate::util::fs::collect_specifiers; -use crate::util::glob::FilePatterns; -use crate::util::glob::PathOrPattern; use crate::util::path::get_extension; use crate::util::path::is_script_ext; use crate::util::path::mapped_specifier_for_tsc; @@ -26,6 +24,8 @@ use crate::worker::CliMainWorkerFactory; use deno_ast::swc::common::comments::CommentKind; use deno_ast::MediaType; use deno_ast::SourceRangedForSpanned; +use deno_config::glob::FilePatterns; +use deno_config::glob::PathOrPattern; use deno_core::anyhow; use deno_core::anyhow::bail; use deno_core::anyhow::Context as _; @@ -1065,6 +1065,7 @@ fn is_supported_test_path_predicate( .map(|p| { p.inner().iter().any(|p| match p { PathOrPattern::Path(p) => p == path, + PathOrPattern::RemoteUrl(_) => true, 
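// RemoteUrl is a new PathOrPattern variant in deno_config 0.7's glob module; remote include entries become module specifiers in collect_specifiers (cli/util/fs.rs), so this local-path predicate simply treats them as a match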
PathOrPattern::Pattern(p) => p.matches_path(path), }) }) diff --git a/cli/util/fs.rs b/cli/util/fs.rs index 86b17754bf..b7832f2bbc 100644 --- a/cli/util/fs.rs +++ b/cli/util/fs.rs @@ -1,14 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use deno_core::anyhow::anyhow; -use deno_core::anyhow::Context; -use deno_core::error::AnyError; -pub use deno_core::normalize_path; -use deno_core::unsync::spawn_blocking; -use deno_core::ModuleSpecifier; -use deno_runtime::deno_crypto::rand; -use deno_runtime::deno_fs::FileSystem; -use deno_runtime::deno_node::PathClean; use std::collections::HashSet; use std::env::current_dir; use std::fmt::Write as FmtWrite; @@ -22,15 +13,23 @@ use std::sync::Arc; use std::time::Duration; use walkdir::WalkDir; +use deno_config::glob::FilePatterns; +use deno_config::glob::PathOrPattern; +use deno_config::glob::PathOrPatternSet; +use deno_core::anyhow::anyhow; +use deno_core::anyhow::Context; +use deno_core::error::AnyError; +pub use deno_core::normalize_path; +use deno_core::unsync::spawn_blocking; +use deno_core::ModuleSpecifier; +use deno_runtime::deno_crypto::rand; +use deno_runtime::deno_fs::FileSystem; +use deno_runtime::deno_node::PathClean; + use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::util::progress_bar::ProgressMessagePrompt; -use super::glob::FilePatterns; -use super::glob::PathOrPattern; -use super::glob::PathOrPatternSet; -use super::path::specifier_to_file_path; - /// Writes the file to the file system at a temporary path, then /// renames it to the destination in a single sys call in order /// to never leave the file system in a corrupted state. @@ -359,26 +358,7 @@ pub fn collect_specifiers( for path_or_pattern in path_or_patterns { match path_or_pattern { PathOrPattern::Path(path) => { - // todo(dsherret): we should improve this to not store URLs in a PathBuf - let path_str = path.to_string_lossy(); - let lowercase_path = path_str.to_lowercase(); - if lowercase_path.starts_with("http://") - || lowercase_path.starts_with("https://") - { - // take out the url - let url = ModuleSpecifier::parse(&path_str) - .with_context(|| format!("Invalid URL '{}'", path_str))?; - prepared.push(url); - } else if lowercase_path.starts_with("file://") { - let url = ModuleSpecifier::parse(&path_str) - .with_context(|| format!("Invalid URL '{}'", path_str))?; - let p = specifier_to_file_path(&url)?; - if p.is_dir() { - result.push(PathOrPattern::Path(p)); - } else { - prepared.push(url) - } - } else if path.is_dir() { + if path.is_dir() { result.push(PathOrPattern::Path(path)); } else if !files.exclude.matches_path(&path) { let url = ModuleSpecifier::from_file_path(&path) @@ -386,6 +366,9 @@ pub fn collect_specifiers( prepared.push(url); } } + PathOrPattern::RemoteUrl(remote_url) => { + prepared.push(remote_url); + } PathOrPattern::Pattern(pattern) => { // add it back result.push(PathOrPattern::Pattern(pattern)); @@ -824,16 +807,12 @@ mod tests { create_files(&ignore_dir_path, &ignore_dir_files); let file_patterns = FilePatterns { - include: Some( - PathOrPatternSet::from_absolute_paths( - vec![root_dir_path.to_path_buf()], - ) - .unwrap(), - ), - exclude: PathOrPatternSet::from_absolute_paths(vec![ - ignore_dir_path.to_path_buf() - ]) - .unwrap(), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir_path.to_path_buf(), + )])), + exclude: PathOrPatternSet::new(vec![PathOrPattern::Path( + ignore_dir_path.to_path_buf(), + )]), }; let file_collector = 
FileCollector::new(|path, _| { // exclude dotfiles @@ -892,17 +871,15 @@ mod tests { // test opting out of ignoring by specifying the dir let file_patterns = FilePatterns { - include: Some( - PathOrPatternSet::from_absolute_paths(vec![ - root_dir_path.to_path_buf(), + include: Some(PathOrPatternSet::new(vec![ + PathOrPattern::Path(root_dir_path.to_path_buf()), + PathOrPattern::Path( root_dir_path.to_path_buf().join("child/node_modules/"), - ]) - .unwrap(), - ), - exclude: PathOrPatternSet::from_absolute_paths(vec![ - ignore_dir_path.to_path_buf() - ]) - .unwrap(), + ), + ])), + exclude: PathOrPatternSet::new(vec![PathOrPattern::Path( + ignore_dir_path.to_path_buf(), + )]), }; let result = file_collector.collect_file_patterns(file_patterns).unwrap(); let expected = [ @@ -972,17 +949,19 @@ mod tests { let result = collect_specifiers( FilePatterns { include: Some( - PathOrPatternSet::from_absolute_paths(vec![ - PathBuf::from("http://localhost:8080"), - root_dir_path.to_path_buf(), - PathBuf::from("https://localhost:8080".to_string()), - ]) + PathOrPatternSet::from_relative_path_or_patterns( + root_dir_path.as_path(), + &[ + "http://localhost:8080".to_string(), + "./".to_string(), + "https://localhost:8080".to_string(), + ], + ) .unwrap(), ), - exclude: PathOrPatternSet::from_absolute_paths(vec![ - ignore_dir_path.to_path_buf() - ]) - .unwrap(), + exclude: PathOrPatternSet::new(vec![PathOrPattern::Path( + ignore_dir_path.to_path_buf(), + )]), }, predicate, ) @@ -1018,14 +997,14 @@ mod tests { }; let result = collect_specifiers( FilePatterns { - include: Some( - PathOrPatternSet::from_absolute_paths(vec![PathBuf::from(format!( + include: Some(PathOrPatternSet::new(vec![PathOrPattern::new( + &format!( "{}{}", scheme, root_dir_path.join("child").to_string().replace('\\', "/") - ))]) - .unwrap(), - ), + ), + ) + .unwrap()])), exclude: Default::default(), }, predicate, diff --git a/cli/util/glob.rs b/cli/util/glob.rs deleted file mode 100644 index 4fe8a9a0ac..0000000000 --- a/cli/util/glob.rs +++ /dev/null @@ -1,465 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -use std::path::Path; -use std::path::PathBuf; - -use deno_core::anyhow::Context; -use deno_core::error::AnyError; -use deno_core::normalize_path; -use deno_core::url::Url; -use indexmap::IndexMap; - -use super::path::specifier_to_file_path; - -#[derive(Clone, Default, Debug, Eq, PartialEq)] -pub struct FilePatterns { - pub include: Option, - pub exclude: PathOrPatternSet, -} - -impl FilePatterns { - pub fn matches_specifier(&self, specifier: &Url) -> bool { - let path = match specifier_to_file_path(specifier) { - Ok(path) => path, - Err(_) => return true, - }; - self.matches_path(&path) - } - - pub fn matches_path(&self, path: &Path) -> bool { - // Skip files in the exclude list. - if self.exclude.matches_path(path) { - return false; - } - - // Ignore files not in the include list if it's present. - self - .include - .as_ref() - .map(|m| m.matches_path(path)) - .unwrap_or(true) - } - - /// Creates a collection of `FilePatterns` by base where the containing patterns - /// are only the ones applicable to the base. - /// - /// The order these are returned in is the order that the directory traversal - /// should occur in. 
- pub fn split_by_base(&self) -> Vec<(PathBuf, Self)> { - let Some(include) = &self.include else { - return Vec::new(); - }; - - let mut include_paths = Vec::new(); - let mut include_patterns = Vec::new(); - for path_or_pattern in &include.0 { - match path_or_pattern { - PathOrPattern::Path(path) => include_paths.push((path.is_file(), path)), - PathOrPattern::Pattern(pattern) => include_patterns.push(pattern), - } - } - let include_patterns_by_base_path = include_patterns.into_iter().fold( - IndexMap::new(), - |mut map: IndexMap<_, Vec<_>>, p| { - map.entry(p.base_path()).or_default().push(p); - map - }, - ); - let exclude_by_base_path = self - .exclude - .0 - .iter() - .map(|s| (s.base_path(), s)) - .collect::>(); - let get_applicable_excludes = - |is_file_path: bool, base_path: &PathBuf| -> Vec { - exclude_by_base_path - .iter() - .filter_map(|(exclude_base_path, exclude)| { - match exclude { - PathOrPattern::Path(exclude_path) => { - // For explicitly specified files, ignore when the exclude path starts - // with it. Regardless, include excludes that are on a sub path of the dir. - if is_file_path && base_path.starts_with(exclude_path) - || exclude_path.starts_with(base_path) - { - Some((*exclude).clone()) - } else { - None - } - } - PathOrPattern::Pattern(_) => { - // include globs that's are sub paths or a parent path - if exclude_base_path.starts_with(base_path) - || base_path.starts_with(exclude_base_path) - { - Some((*exclude).clone()) - } else { - None - } - } - } - }) - .collect::>() - }; - - let mut result = Vec::with_capacity( - include_paths.len() + include_patterns_by_base_path.len(), - ); - for (is_file, path) in include_paths { - let applicable_excludes = get_applicable_excludes(is_file, path); - result.push(( - path.clone(), - Self { - include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( - path.clone(), - )])), - exclude: PathOrPatternSet::new(applicable_excludes), - }, - )); - } - - // todo(dsherret): This could be further optimized by not including - // patterns that will only ever match another base. - for base_path in include_patterns_by_base_path.keys() { - let applicable_excludes = get_applicable_excludes(false, base_path); - let mut applicable_includes = Vec::new(); - // get all patterns that apply to the current or ancestor directories - for path in base_path.ancestors() { - if let Some(patterns) = include_patterns_by_base_path.get(path) { - applicable_includes.extend( - patterns - .iter() - .map(|p| PathOrPattern::Pattern((*p).clone())), - ); - } - } - result.push(( - base_path.clone(), - Self { - include: Some(PathOrPatternSet::new(applicable_includes)), - exclude: PathOrPatternSet::new(applicable_excludes), - }, - )); - } - - // Sort by the longest base path first. This ensures that we visit opted into - // nested directories first before visiting the parent directory. The directory - // traverser will handle not going into directories it's already been in. 
- result.sort_by(|a, b| b.0.as_os_str().len().cmp(&a.0.as_os_str().len())); - - result - } -} - -#[derive(Clone, Default, Debug, Eq, PartialEq)] -pub struct PathOrPatternSet(Vec); - -impl PathOrPatternSet { - pub fn new(elements: Vec) -> Self { - Self(elements) - } - - pub fn from_absolute_paths(path: Vec) -> Result { - Ok(Self( - path - .into_iter() - .map(PathOrPattern::new) - .collect::, _>>()?, - )) - } - - pub fn inner(&self) -> &Vec { - &self.0 - } - - pub fn into_path_or_patterns(self) -> Vec { - self.0 - } - - pub fn matches_path(&self, path: &Path) -> bool { - self.0.iter().any(|p| p.matches_path(path)) - } - - pub fn base_paths(&self) -> Vec { - let mut result = Vec::with_capacity(self.0.len()); - for element in &self.0 { - match element { - PathOrPattern::Path(path) => { - result.push(path.to_path_buf()); - } - PathOrPattern::Pattern(pattern) => { - result.push(pattern.base_path()); - } - } - } - result - } -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum PathOrPattern { - Path(PathBuf), - Pattern(GlobPattern), -} - -impl PathOrPattern { - pub fn new(path: PathBuf) -> Result { - let path_str = path.to_string_lossy(); - // todo(dsherret): don't store URLs in PathBufs - if path_str.starts_with("http:") - || path_str.starts_with("https:") - || path_str.starts_with("file:") - { - return Ok(Self::Path(path)); - } - - GlobPattern::new_if_pattern(&path_str).map(|maybe_pattern| { - maybe_pattern - .map(PathOrPattern::Pattern) - .unwrap_or_else(|| PathOrPattern::Path(normalize_path(path))) - }) - } - - pub fn matches_path(&self, path: &Path) -> bool { - match self { - PathOrPattern::Path(p) => path.starts_with(p), - PathOrPattern::Pattern(p) => p.matches_path(path), - } - } - - pub fn base_path(&self) -> PathBuf { - match self { - PathOrPattern::Path(p) => p.clone(), - PathOrPattern::Pattern(p) => p.base_path(), - } - } -} - -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct GlobPattern(glob::Pattern); - -impl GlobPattern { - pub fn new_if_pattern(pattern: &str) -> Result, AnyError> { - if !is_glob_pattern(pattern) { - return Ok(None); - } - Self::new(pattern).map(Some) - } - - pub fn new(pattern: &str) -> Result { - let pattern = escape_brackets(pattern) - .replace('\\', "/") - .replace("/./", "/"); - let pattern = glob::Pattern::new(&pattern) - .with_context(|| format!("Failed to expand glob: \"{}\"", pattern))?; - Ok(Self(pattern)) - } - - pub fn matches_path(&self, path: &Path) -> bool { - self.0.matches_path_with(path, match_options()) - } - - pub fn base_path(&self) -> PathBuf { - let base_path = self - .0 - .as_str() - .split('/') - .take_while(|c| !has_glob_chars(c)) - .collect::>() - .join(std::path::MAIN_SEPARATOR_STR); - PathBuf::from(base_path) - } -} - -pub fn is_glob_pattern(path: &str) -> bool { - !path.starts_with("http:") - && !path.starts_with("https:") - && !path.starts_with("file:") - && has_glob_chars(path) -} - -fn has_glob_chars(pattern: &str) -> bool { - // we don't support [ and ] - pattern.chars().any(|c| matches!(c, '*' | '?')) -} - -fn escape_brackets(pattern: &str) -> String { - // Escape brackets - we currently don't support them, because with introduction - // of glob expansion paths like "pages/[id].ts" would suddenly start giving - // wrong results. We might want to revisit that in the future. 
- pattern.replace('[', "[[]").replace(']', "[]]") -} - -fn match_options() -> glob::MatchOptions { - // Matches what `deno_task_shell` does - glob::MatchOptions { - // false because it should work the same way on case insensitive file systems - case_sensitive: false, - // true because it copies what sh does - require_literal_separator: true, - // true because it copies with sh does—these files are considered "hidden" - require_literal_leading_dot: true, - } -} - -#[cfg(test)] -mod test { - use pretty_assertions::assert_eq; - use test_util::TempDir; - - use super::*; - - // For easier comparisons in tests. - #[derive(Debug, PartialEq, Eq)] - struct ComparableFilePatterns { - include: Option>, - exclude: Vec, - } - - impl ComparableFilePatterns { - pub fn new(root: &Path, file_patterns: &FilePatterns) -> Self { - fn path_or_pattern_to_string(root: &Path, p: &PathOrPattern) -> String { - match p { - PathOrPattern::Path(p) => p - .strip_prefix(root) - .unwrap() - .to_string_lossy() - .replace('\\', "/"), - PathOrPattern::Pattern(p) => p - .0 - .as_str() - .strip_prefix(&format!( - "{}/", - root.to_string_lossy().replace('\\', "/") - )) - .unwrap() - .to_string(), - } - } - - Self { - include: file_patterns.include.as_ref().map(|p| { - p.0 - .iter() - .map(|p| path_or_pattern_to_string(root, p)) - .collect() - }), - exclude: file_patterns - .exclude - .0 - .iter() - .map(|p| path_or_pattern_to_string(root, p)) - .collect(), - } - } - - pub fn from_split( - root: &Path, - patterns_by_base: &[(PathBuf, FilePatterns)], - ) -> Vec<(String, ComparableFilePatterns)> { - patterns_by_base - .iter() - .map(|(base_path, file_patterns)| { - ( - base_path - .strip_prefix(root) - .unwrap() - .to_string_lossy() - .replace('\\', "/"), - ComparableFilePatterns::new(root, file_patterns), - ) - }) - .collect() - } - } - - #[test] - fn should_split_globs_by_base_dir() { - let temp_dir = TempDir::new(); - let patterns = FilePatterns { - include: Some(PathOrPatternSet::new(vec![ - PathOrPattern::Pattern( - GlobPattern::new(&format!( - "{}/inner/**/*.ts", - temp_dir.path().to_string_lossy().replace('\\', "/") - )) - .unwrap(), - ), - PathOrPattern::Pattern( - GlobPattern::new(&format!( - "{}/inner/sub/deeper/**/*.js", - temp_dir.path().to_string_lossy().replace('\\', "/") - )) - .unwrap(), - ), - PathOrPattern::Pattern( - GlobPattern::new(&format!( - "{}/other/**/*.js", - temp_dir.path().to_string_lossy().replace('\\', "/") - )) - .unwrap(), - ), - PathOrPattern::Path(temp_dir.path().join("sub/file.ts").to_path_buf()), - ])), - exclude: PathOrPatternSet::new(vec![ - PathOrPattern::Pattern( - GlobPattern::new(&format!( - "{}/inner/other/**/*.ts", - temp_dir.path().to_string_lossy().replace('\\', "/") - )) - .unwrap(), - ), - PathOrPattern::Path( - temp_dir - .path() - .join("inner/sub/deeper/file.js") - .to_path_buf(), - ), - ]), - }; - let split = ComparableFilePatterns::from_split( - temp_dir.path().as_path(), - &patterns.split_by_base(), - ); - assert_eq!( - split, - vec![ - ( - "inner/sub/deeper".to_string(), - ComparableFilePatterns { - include: Some(vec![ - "inner/sub/deeper/**/*.js".to_string(), - "inner/**/*.ts".to_string(), - ]), - exclude: vec!["inner/sub/deeper/file.js".to_string()], - } - ), - ( - "sub/file.ts".to_string(), - ComparableFilePatterns { - include: Some(vec!["sub/file.ts".to_string()]), - exclude: vec![], - } - ), - ( - "inner".to_string(), - ComparableFilePatterns { - include: Some(vec!["inner/**/*.ts".to_string()]), - exclude: vec![ - "inner/other/**/*.ts".to_string(), - 
"inner/sub/deeper/file.js".to_string(), - ], - } - ), - ( - "other".to_string(), - ComparableFilePatterns { - include: Some(vec!["other/**/*.js".to_string()]), - exclude: vec![], - } - ) - ] - ); - } -} diff --git a/cli/util/mod.rs b/cli/util/mod.rs index 58d647101a..a3f4a5aa42 100644 --- a/cli/util/mod.rs +++ b/cli/util/mod.rs @@ -8,7 +8,6 @@ pub mod display; pub mod draw_thread; pub mod file_watcher; pub mod fs; -pub mod glob; pub mod import_map; pub mod logger; pub mod path;