Mirror of https://github.com/denoland/deno.git (synced 2024-11-25 15:29:32 -05:00)
fix(publish): make include and exclude work (#22720)
1. Stops `deno publish` using custom include/exclude behaviour that differed from the other sub commands.
2. Takes ancestor directories into account when resolving gitignores.
3. Backwards compatible change that adds the ability to un-exclude an excluded path by using a negated glob at a more specific level, for all sub commands (see https://github.com/denoland/deno_config/pull/44).
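To make point 3 concrete, here is a minimal sketch of what an un-exclude could look like in a deno.json; the directory and file names are hypothetical, and only the negated-glob behaviour described above (and in the linked deno_config PR) is assumed:

```jsonc
{
  // exclude a directory for all sub commands...
  "exclude": [
    "fixtures",
    // ...but opt a single file back in with a negated glob
    // at a more specific level (hypothetical path)
    "!fixtures/mod.ts"
  ]
}
```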
parent 2ed984ba3a
commit 2dfc0aca7c
24 changed files with 1017 additions and 222 deletions
Cargo.lock (generated, 4 changed lines)
@@ -1232,9 +1232,9 @@ dependencies = [
 
 [[package]]
 name = "deno_config"
-version = "0.12.0"
+version = "0.14.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ebbc05e20df2d5b8562205f9b0c296bc528e833b0de126d489781952e13d939f"
+checksum = "61c801e30b12aa3f15f59d4d4947621eef34d6798a93f6a5037c0efa26f87a8b"
 dependencies = [
  "anyhow",
  "glob",
@@ -64,7 +64,7 @@ winres.workspace = true
 [dependencies]
 deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
 deno_cache_dir = { workspace = true }
-deno_config = "=0.12.0"
+deno_config = "=0.14.1"
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
 deno_doc = { version = "=0.113.1", features = ["html"] }
 deno_emit = "=0.38.2"
@@ -1258,7 +1258,7 @@ impl CliOptions {
   pub fn resolve_config_excludes(&self) -> Result<PathOrPatternSet, AnyError> {
     let maybe_config_files = if let Some(config_file) = &self.maybe_config_file
     {
-      config_file.to_files_config()?
+      Some(config_file.to_files_config()?)
     } else {
       None
     };
@@ -1750,14 +1750,14 @@ fn resolve_files(
   if let Some(file_flags) = maybe_file_flags {
     if !file_flags.include.is_empty() {
       maybe_files_config.include =
-        Some(PathOrPatternSet::from_relative_path_or_patterns(
+        Some(PathOrPatternSet::from_include_relative_path_or_patterns(
          initial_cwd,
          &file_flags.include,
        )?);
    }
    if !file_flags.ignore.is_empty() {
      maybe_files_config.exclude =
-        PathOrPatternSet::from_relative_path_or_patterns(
+        PathOrPatternSet::from_exclude_relative_path_or_patterns(
          initial_cwd,
          &file_flags.ignore,
        )?;
@@ -1886,7 +1886,7 @@ mod test {
     temp_dir.write("pages/[id].ts", "");
 
     let temp_dir_path = temp_dir.path().as_path();
-    let error = PathOrPatternSet::from_relative_path_or_patterns(
+    let error = PathOrPatternSet::from_include_relative_path_or_patterns(
       temp_dir_path,
       &["data/**********.ts".to_string()],
     )
@@ -1897,7 +1897,7 @@ mod test {
       Some(FilePatterns {
         base: temp_dir_path.to_path_buf(),
         include: Some(
-          PathOrPatternSet::from_relative_path_or_patterns(
+          PathOrPatternSet::from_include_relative_path_or_patterns(
            temp_dir_path,
            &[
              "data/test1.?s".to_string(),
@@ -1908,7 +1908,7 @@ mod test {
         )
         .unwrap(),
       ),
-      exclude: PathOrPatternSet::from_relative_path_or_patterns(
+      exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
        temp_dir_path,
        &["nested/**/*bazz.ts".to_string()],
      )
@@ -1919,7 +1919,7 @@ mod test {
       )
       .unwrap();
 
-    let mut files = FileCollector::new(|_, _| true)
+    let mut files = FileCollector::new(|_| true)
       .ignore_git_folder()
       .ignore_node_modules()
       .ignore_vendor_folder()
@@ -1083,7 +1083,7 @@ impl Config {
   pub fn get_disabled_paths(&self) -> PathOrPatternSet {
     let mut path_or_patterns = vec![];
     if let Some(cf) = self.maybe_config_file() {
-      if let Some(files) = cf.to_files_config().ok().flatten() {
+      if let Ok(files) = cf.to_files_config() {
        for path in files.exclude.into_path_or_patterns() {
          path_or_patterns.push(path);
        }
@@ -1095,7 +1095,14 @@ impl Config {
         continue;
       };
       let settings = self.workspace_settings_for_specifier(workspace_uri);
-      if settings.enable.unwrap_or_else(|| self.has_config_file()) {
+      let is_enabled = settings
+        .enable_paths
+        .as_ref()
+        .map(|p| !p.is_empty())
+        .unwrap_or_else(|| {
+          settings.enable.unwrap_or_else(|| self.has_config_file())
+        });
+      if is_enabled {
         for path in &settings.disable_paths {
           path_or_patterns.push(PathOrPattern::Path(workspace_path.join(path)));
         }
@@ -1177,7 +1184,7 @@ fn specifier_enabled(
   workspace_folders: &[(Url, lsp::WorkspaceFolder)],
 ) -> bool {
   if let Some(cf) = config_file {
-    if let Some(files) = cf.to_files_config().ok().flatten() {
+    if let Ok(files) = cf.to_files_config() {
      if !files.matches_specifier(specifier) {
        return false;
      }
@@ -1341,11 +1341,12 @@ impl Documents {
       .inner()
       .iter()
       .map(|p| match p {
-        PathOrPattern::Path(p) => {
-          Cow::Owned(p.to_string_lossy().to_string())
+        PathOrPattern::Path(p) => p.to_string_lossy(),
+        PathOrPattern::NegatedPath(p) => {
+          Cow::Owned(format!("!{}", p.to_string_lossy()))
         }
         PathOrPattern::RemoteUrl(p) => Cow::Borrowed(p.as_str()),
-        PathOrPattern::Pattern(p) => Cow::Borrowed(p.as_str()),
+        PathOrPattern::Pattern(p) => p.as_str(),
       })
       .collect::<Vec<_>>();
     // ensure these are sorted so the hashing is deterministic
@@ -2061,8 +2062,13 @@ impl Iterator for PreloadDocumentFinder {
       if let Ok(entry) = entry {
         let path = entry.path();
         if let Ok(file_type) = entry.file_type() {
-          if file_patterns.matches_path(&path) {
-            if file_type.is_dir() && is_discoverable_dir(&path) {
+          let is_dir = file_type.is_dir();
+          let path_kind = match is_dir {
+            true => deno_config::glob::PathKind::Directory,
+            false => deno_config::glob::PathKind::File,
+          };
+          if file_patterns.matches_path(&path, path_kind) {
+            if is_dir && is_discoverable_dir(&path) {
               self.pending_entries.push_back(PendingEntry::Dir(
                 path.to_path_buf(),
                 file_patterns.clone(),
@@ -2354,7 +2360,7 @@ console.log(b, "hello deno");
           file_patterns: FilePatterns {
             base: temp_dir.path().to_path_buf(),
             include: Some(
-              PathOrPatternSet::from_relative_path_or_patterns(
+              PathOrPatternSet::from_include_relative_path_or_patterns(
                temp_dir.path().as_path(),
                &[
                  "root1".to_string(),
@@ -2415,7 +2421,7 @@ console.log(b, "hello deno");
           file_patterns: FilePatterns {
             base: temp_dir.path().to_path_buf(),
             include: Default::default(),
-            exclude: PathOrPatternSet::from_relative_path_or_patterns(
+            exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
              temp_dir.path().as_path(),
              &[
                "root1".to_string(),
@@ -14,12 +14,12 @@ use crate::tools::test::format_test_error;
 use crate::tools::test::TestFilter;
 use crate::util::file_watcher;
 use crate::util::fs::collect_specifiers;
+use crate::util::fs::WalkEntry;
 use crate::util::path::is_script_ext;
+use crate::util::path::matches_pattern_or_exact_path;
 use crate::version::get_user_agent;
 use crate::worker::CliMainWorkerFactory;
 
-use deno_config::glob::FilePatterns;
-use deno_config::glob::PathOrPattern;
 use deno_core::error::generic_error;
 use deno_core::error::AnyError;
 use deno_core::error::JsError;
@@ -394,25 +394,16 @@ async fn bench_specifiers(
 }
 
 /// Checks if the path has a basename and extension Deno supports for benches.
-fn is_supported_bench_path(path: &Path, patterns: &FilePatterns) -> bool {
-  if !is_script_ext(path) {
+fn is_supported_bench_path(entry: WalkEntry) -> bool {
+  if !is_script_ext(entry.path) {
     false
-  } else if has_supported_bench_path_name(path) {
+  } else if has_supported_bench_path_name(entry.path) {
     true
-  } else {
+  } else if let Some(include) = &entry.patterns.include {
     // allow someone to explicitly specify a path
-    let matches_exact_path_or_pattern = patterns
-      .include
-      .as_ref()
-      .map(|p| {
-        p.inner().iter().any(|p| match p {
-          PathOrPattern::Path(p) => p == path,
-          PathOrPattern::RemoteUrl(_) => true,
-          PathOrPattern::Pattern(p) => p.matches_path(path),
-        })
-      })
-      .unwrap_or(false);
-    matches_exact_path_or_pattern
+    matches_pattern_or_exact_path(include, entry.path)
+  } else {
+    false
   }
 }
 
@@ -388,23 +388,20 @@ fn collect_coverages(
         initial_cwd.to_path_buf(),
       )])
     } else {
-      PathOrPatternSet::from_relative_path_or_patterns(
+      PathOrPatternSet::from_include_relative_path_or_patterns(
        initial_cwd,
        &files.include,
      )?
    }
  }),
-  exclude: PathOrPatternSet::from_relative_path_or_patterns(
+  exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
    initial_cwd,
    &files.ignore,
  )
  .context("Invalid ignore pattern.")?,
 };
-let file_paths = FileCollector::new(|file_path, _| {
-  file_path
-    .extension()
-    .map(|ext| ext == "json")
-    .unwrap_or(false)
+let file_paths = FileCollector::new(|e| {
+  e.path.extension().map(|ext| ext == "json").unwrap_or(false)
 })
 .ignore_git_folder()
 .ignore_node_modules()
@@ -96,13 +96,15 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
       let module_specifiers = collect_specifiers(
         FilePatterns {
           base: cli_options.initial_cwd().to_path_buf(),
-          include: Some(PathOrPatternSet::from_relative_path_or_patterns(
-            cli_options.initial_cwd(),
-            source_files,
-          )?),
+          include: Some(
+            PathOrPatternSet::from_include_relative_path_or_patterns(
+              cli_options.initial_cwd(),
+              source_files,
+            )?,
+          ),
           exclude: Default::default(),
         },
-        |_, _| true,
+        |_| true,
       )?;
       let graph = module_graph_creator
         .create_graph(GraphKind::TypesOnly, module_specifiers.clone())
@@ -154,7 +154,7 @@ async fn format_files(
 }
 
 fn collect_fmt_files(files: FilePatterns) -> Result<Vec<PathBuf>, AnyError> {
-  FileCollector::new(|path, _| is_supported_ext_fmt(path))
+  FileCollector::new(|e| is_supported_ext_fmt(e.path))
     .ignore_git_folder()
     .ignore_node_modules()
     .ignore_vendor_folder()
@@ -263,7 +263,7 @@ async fn lint_files(
 }
 
 fn collect_lint_files(files: FilePatterns) -> Result<Vec<PathBuf>, AnyError> {
-  FileCollector::new(|path, _| is_script_ext(path))
+  FileCollector::new(|e| is_script_ext(e.path))
    .ignore_git_folder()
    .ignore_node_modules()
    .ignore_vendor_folder()
@@ -2,13 +2,11 @@
 
 use bytes::Bytes;
 use deno_ast::MediaType;
+use deno_ast::ModuleSpecifier;
 use deno_config::glob::FilePatterns;
-use deno_config::glob::PathOrPattern;
 use deno_core::anyhow::Context;
 use deno_core::error::AnyError;
 use deno_core::url::Url;
-use ignore::overrides::OverrideBuilder;
-use ignore::WalkBuilder;
 use sha2::Digest;
 use std::collections::HashSet;
 use std::fmt::Write as FmtWrite;
@@ -18,6 +16,7 @@ use tar::Header;
 
 use crate::cache::LazyGraphSourceParser;
 use crate::tools::registry::paths::PackagePath;
+use crate::util::fs::FileCollector;
 
 use super::diagnostics::PublishDiagnostic;
 use super::diagnostics::PublishDiagnosticsCollector;
@@ -45,52 +44,37 @@ pub fn create_gzipped_tarball(
   unfurler: &SpecifierUnfurler,
   file_patterns: Option<FilePatterns>,
 ) -> Result<PublishableTarball, AnyError> {
+  let file_patterns = file_patterns
+    .unwrap_or_else(|| FilePatterns::new_with_base(dir.to_path_buf()));
   let mut tar = TarGzArchive::new();
   let mut files = vec![];
 
-  let mut paths = HashSet::new();
-
-  let mut ob = OverrideBuilder::new(dir);
-  ob.add("!.git")?.add("!node_modules")?.add("!.DS_Store")?;
-
-  for pattern in file_patterns.as_ref().iter().flat_map(|p| p.include.iter()) {
-    for path_or_pat in pattern.inner() {
-      match path_or_pat {
-        PathOrPattern::Path(p) => ob.add(p.to_str().unwrap())?,
-        PathOrPattern::Pattern(p) => ob.add(p.as_str())?,
-        PathOrPattern::RemoteUrl(_) => continue,
-      };
-    }
-  }
-
-  let overrides = ob.build()?;
-
-  let iterator = WalkBuilder::new(dir)
-    .follow_links(false)
-    .require_git(false)
-    .git_ignore(true)
-    .git_global(true)
-    .git_exclude(true)
-    .overrides(overrides)
-    .filter_entry(move |entry| {
-      let matches_pattern = file_patterns
-        .as_ref()
-        .map(|p| p.matches_path(entry.path()))
-        .unwrap_or(true);
-      matches_pattern
-    })
-    .build();
-
-  for entry in iterator {
-    let entry = entry?;
-
-    let path = entry.path();
-    let Some(file_type) = entry.file_type() else {
-      // entry doesn't have a file type if it corresponds to stdin.
-      continue;
-    };
-
-    let Ok(specifier) = Url::from_file_path(path) else {
+  let iter_paths = FileCollector::new(|e| {
+    if !e.file_type.is_file() {
+      if let Ok(specifier) = ModuleSpecifier::from_file_path(e.path) {
+        diagnostics_collector.push(PublishDiagnostic::UnsupportedFileType {
+          specifier,
+          kind: if e.file_type.is_symlink() {
+            "symlink".to_owned()
+          } else {
+            format!("{:?}", e.file_type)
+          },
+        });
+      }
+      return false;
+    }
+    e.path.file_name().map(|s| s != ".DS_Store").unwrap_or(true)
+  })
+  .ignore_git_folder()
+  .ignore_node_modules()
+  .ignore_vendor_folder()
+  .use_gitignore()
+  .collect_file_patterns(file_patterns)?;
+
+  let mut paths = HashSet::with_capacity(iter_paths.len());
+
+  for path in iter_paths {
+    let Ok(specifier) = Url::from_file_path(&path) else {
       diagnostics_collector
         .to_owned()
         .push(PublishDiagnostic::InvalidPath {
@@ -100,20 +84,20 @@ pub fn create_gzipped_tarball(
       continue;
     };
 
-    if file_type.is_file() {
-      let Ok(relative_path) = path.strip_prefix(dir) else {
-        diagnostics_collector
-          .to_owned()
-          .push(PublishDiagnostic::InvalidPath {
-            path: path.to_path_buf(),
-            message: "path is not in publish directory".to_string(),
-          });
-        continue;
-      };
-
-      let path_str = relative_path.components().fold(
-        "".to_string(),
-        |mut path, component| {
+    let Ok(relative_path) = path.strip_prefix(dir) else {
+      diagnostics_collector
+        .to_owned()
+        .push(PublishDiagnostic::InvalidPath {
+          path: path.to_path_buf(),
+          message: "path is not in publish directory".to_string(),
+        });
+      continue;
+    };
+
+    let path_str =
+      relative_path
+        .components()
+        .fold("".to_string(), |mut path, component| {
           path.push('/');
           match component {
             std::path::Component::Normal(normal) => {
@@ -124,66 +108,55 @@ pub fn create_gzipped_tarball(
             _ => unreachable!(),
           }
           path
-        },
-      );
-
-    match PackagePath::new(path_str.clone()) {
-      Ok(package_path) => {
-        if !paths.insert(package_path) {
-          diagnostics_collector.to_owned().push(
-            PublishDiagnostic::DuplicatePath {
-              path: path.to_path_buf(),
-            },
-          );
-        }
-      }
-      Err(err) => {
-        diagnostics_collector.to_owned().push(
-          PublishDiagnostic::InvalidPath {
-            path: path.to_path_buf(),
-            message: err.to_string(),
-          },
-        );
-      }
-    }
-
-      let content = resolve_content_maybe_unfurling(
-        path,
-        &specifier,
-        unfurler,
-        source_parser,
-        diagnostics_collector,
-      )?;
-
-      let media_type = MediaType::from_specifier(&specifier);
-      if matches!(media_type, MediaType::Jsx | MediaType::Tsx) {
-        diagnostics_collector.push(PublishDiagnostic::UnsupportedJsxTsx {
-          specifier: specifier.clone(),
-        });
-      }
-
-      files.push(PublishableTarballFile {
-        path_str: path_str.clone(),
-        specifier: specifier.clone(),
-        // This hash string matches the checksum computed by registry
-        hash: format!("sha256-{:x}", sha2::Sha256::digest(&content)),
-        size: content.len(),
-      });
-      tar
-        .add_file(format!(".{}", path_str), &content)
-        .with_context(|| {
-          format!("Unable to add file to tarball '{}'", entry.path().display())
-        })?;
-    } else if !file_type.is_dir() {
-      diagnostics_collector.push(PublishDiagnostic::UnsupportedFileType {
-        specifier,
-        kind: if file_type.is_symlink() {
-          "symlink".to_owned()
-        } else {
-          format!("{file_type:?}")
-        },
-      });
-    }
+        });
+
+    match PackagePath::new(path_str.clone()) {
+      Ok(package_path) => {
+        if !paths.insert(package_path) {
+          diagnostics_collector.to_owned().push(
+            PublishDiagnostic::DuplicatePath {
+              path: path.to_path_buf(),
+            },
+          );
+        }
+      }
+      Err(err) => {
+        diagnostics_collector
+          .to_owned()
+          .push(PublishDiagnostic::InvalidPath {
+            path: path.to_path_buf(),
+            message: err.to_string(),
+          });
+      }
+    }
+
+    let content = resolve_content_maybe_unfurling(
+      &path,
+      &specifier,
+      unfurler,
+      source_parser,
+      diagnostics_collector,
+    )?;
+
+    let media_type = MediaType::from_specifier(&specifier);
+    if matches!(media_type, MediaType::Jsx | MediaType::Tsx) {
+      diagnostics_collector.push(PublishDiagnostic::UnsupportedJsxTsx {
+        specifier: specifier.clone(),
+      });
+    }
+
+    files.push(PublishableTarballFile {
+      path_str: path_str.clone(),
+      specifier: specifier.clone(),
+      // This hash string matches the checksum computed by registry
+      hash: format!("sha256-{:x}", sha2::Sha256::digest(&content)),
+      size: content.len(),
+    });
+    tar
+      .add_file(format!(".{}", path_str), &content)
+      .with_context(|| {
+        format!("Unable to add file to tarball '{}'", path.display())
+      })?;
   }
 
   let v = tar.finish().context("Unable to finish tarball")?;
@@ -15,16 +15,17 @@ use crate::module_loader::ModuleLoadPreparer;
 use crate::ops;
 use crate::util::file_watcher;
 use crate::util::fs::collect_specifiers;
+use crate::util::fs::WalkEntry;
 use crate::util::path::get_extension;
 use crate::util::path::is_script_ext;
 use crate::util::path::mapped_specifier_for_tsc;
+use crate::util::path::matches_pattern_or_exact_path;
 use crate::worker::CliMainWorkerFactory;
 
 use deno_ast::swc::common::comments::CommentKind;
 use deno_ast::MediaType;
 use deno_ast::SourceRangedForSpanned;
 use deno_config::glob::FilePatterns;
-use deno_config::glob::PathOrPattern;
 use deno_core::anyhow;
 use deno_core::anyhow::bail;
 use deno_core::anyhow::Context as _;
@@ -1350,28 +1351,16 @@ pub async fn report_tests(
   (Ok(()), receiver)
 }
 
-fn is_supported_test_path_predicate(
-  path: &Path,
-  patterns: &FilePatterns,
-) -> bool {
-  if !is_script_ext(path) {
+fn is_supported_test_path_predicate(entry: WalkEntry) -> bool {
+  if !is_script_ext(entry.path) {
     false
-  } else if has_supported_test_path_name(path) {
+  } else if has_supported_test_path_name(entry.path) {
     true
-  } else {
+  } else if let Some(include) = &entry.patterns.include {
     // allow someone to explicitly specify a path
-    let matches_exact_path_or_pattern = patterns
-      .include
-      .as_ref()
-      .map(|p| {
-        p.inner().iter().any(|p| match p {
-          PathOrPattern::Path(p) => p == path,
-          PathOrPattern::RemoteUrl(_) => true,
-          PathOrPattern::Pattern(p) => p.matches_path(path),
-        })
-      })
-      .unwrap_or(false);
-    matches_exact_path_or_pattern
+    matches_pattern_or_exact_path(include, entry.path)
+  } else {
+    false
   }
 }
 
@@ -1432,7 +1421,7 @@ fn collect_specifiers_with_test_mode(
     collect_specifiers(files.clone(), is_supported_test_path_predicate)?;
 
   if *include_inline {
-    return collect_specifiers(files, |p, _| is_supported_test_ext(p)).map(
+    return collect_specifiers(files, |e| is_supported_test_ext(e.path)).map(
       |specifiers| {
         specifiers
           .into_iter()
@@ -1608,8 +1597,8 @@ pub async fn run_tests_with_watch(
       let module_graph_creator = factory.module_graph_creator().await?;
       let file_fetcher = factory.file_fetcher()?;
       let test_modules = if test_options.doc {
-        collect_specifiers(test_options.files.clone(), |p, _| {
-          is_supported_test_ext(p)
+        collect_specifiers(test_options.files.clone(), |e| {
+          is_supported_test_ext(e.path)
         })
       } else {
         collect_specifiers(
cli/util/fs.rs (104 changed lines)
@@ -3,6 +3,7 @@
 use std::collections::HashSet;
 use std::env::current_dir;
 use std::fmt::Write as FmtWrite;
+use std::fs::FileType;
 use std::fs::OpenOptions;
 use std::io::Error;
 use std::io::ErrorKind;
@@ -26,6 +27,8 @@ use deno_runtime::deno_crypto::rand;
 use deno_runtime::deno_fs::FileSystem;
 use deno_runtime::deno_node::PathClean;
 
+use crate::util::gitignore::DirGitIgnores;
+use crate::util::gitignore::GitIgnoreTree;
 use crate::util::progress_bar::ProgressBar;
 use crate::util::progress_bar::ProgressBarStyle;
 use crate::util::progress_bar::ProgressMessagePrompt;
@@ -244,22 +247,31 @@ pub fn resolve_from_cwd(path: &Path) -> Result<PathBuf, AnyError> {
   Ok(normalize_path(resolved_path))
 }
 
+#[derive(Debug, Clone)]
+pub struct WalkEntry<'a> {
+  pub path: &'a Path,
+  pub file_type: &'a FileType,
+  pub patterns: &'a FilePatterns,
+}
+
 /// Collects file paths that satisfy the given predicate, by recursively walking `files`.
 /// If the walker visits a path that is listed in `ignore`, it skips descending into the directory.
-pub struct FileCollector<TFilter: Fn(&Path, &FilePatterns) -> bool> {
+pub struct FileCollector<TFilter: Fn(WalkEntry) -> bool> {
   file_filter: TFilter,
   ignore_git_folder: bool,
   ignore_node_modules: bool,
   ignore_vendor_folder: bool,
+  use_gitignore: bool,
 }
 
-impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
+impl<TFilter: Fn(WalkEntry) -> bool> FileCollector<TFilter> {
   pub fn new(file_filter: TFilter) -> Self {
     Self {
       file_filter,
       ignore_git_folder: false,
       ignore_node_modules: false,
       ignore_vendor_folder: false,
+      use_gitignore: false,
     }
   }
 
@@ -278,10 +290,46 @@ impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
     self
   }
 
+  pub fn use_gitignore(mut self) -> Self {
+    self.use_gitignore = true;
+    self
+  }
+
   pub fn collect_file_patterns(
     &self,
     file_patterns: FilePatterns,
   ) -> Result<Vec<PathBuf>, AnyError> {
+    fn is_pattern_matched(
+      maybe_git_ignore: Option<&DirGitIgnores>,
+      path: &Path,
+      is_dir: bool,
+      file_patterns: &FilePatterns,
+    ) -> bool {
+      use deno_config::glob::FilePatternsMatch;
+
+      let path_kind = match is_dir {
+        true => deno_config::glob::PathKind::Directory,
+        false => deno_config::glob::PathKind::File,
+      };
+      match file_patterns.matches_path_detail(path, path_kind) {
+        FilePatternsMatch::Passed => {
+          // check gitignore
+          let is_gitignored = maybe_git_ignore
+            .as_ref()
+            .map(|git_ignore| git_ignore.is_ignored(path, is_dir))
+            .unwrap_or(false);
+          !is_gitignored
+        }
+        FilePatternsMatch::PassedOptedOutExclude => true,
+        FilePatternsMatch::Excluded => false,
+      }
+    }
+
+    let mut maybe_git_ignores = if self.use_gitignore {
+      Some(GitIgnoreTree::new(Arc::new(deno_runtime::deno_fs::RealFs)))
+    } else {
+      None
+    };
     let mut target_files = Vec::new();
     let mut visited_paths = HashSet::new();
     let file_patterns_by_base = file_patterns.split_by_base();
@@ -299,20 +347,23 @@ impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
         };
         let file_type = e.file_type();
         let is_dir = file_type.is_dir();
-        let c = e.path().to_path_buf();
-        if file_patterns.exclude.matches_path(&c)
-          || !is_dir
-            && !file_patterns
-              .include
-              .as_ref()
-              .map(|i| i.matches_path(&c))
-              .unwrap_or(true)
-        {
+        let path = e.path().to_path_buf();
+        let maybe_gitignore =
+          maybe_git_ignores.as_mut().and_then(|git_ignores| {
+            let dir_path = if is_dir { &path } else { path.parent()? };
+            git_ignores.get_resolved_git_ignore(dir_path)
+          });
+        if !is_pattern_matched(
+          maybe_gitignore.as_deref(),
+          &path,
+          is_dir,
+          &file_patterns,
+        ) {
           if is_dir {
             iterator.skip_current_dir();
           }
         } else if is_dir {
-          let should_ignore_dir = c
+          let should_ignore_dir = path
             .file_name()
             .map(|dir_name| {
               let dir_name = dir_name.to_string_lossy().to_lowercase();
@@ -323,17 +374,20 @@ impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
               _ => false,
             };
             // allow the user to opt out of ignoring by explicitly specifying the dir
-            file != c && is_ignored_file
+            file != path && is_ignored_file
           })
           .unwrap_or(false)
-          || !visited_paths.insert(c.clone());
+          || !visited_paths.insert(path.clone());
         if should_ignore_dir {
           iterator.skip_current_dir();
         }
-      } else if (self.file_filter)(&c, &file_patterns)
-        && visited_paths.insert(c.clone())
+      } else if (self.file_filter)(WalkEntry {
+        path: &path,
+        file_type: &file_type,
+        patterns: &file_patterns,
+      }) && visited_paths.insert(path.clone())
       {
-        target_files.push(c);
+        target_files.push(path);
       }
     }
   }
@@ -346,7 +400,7 @@ impl<TFilter: Fn(&Path, &FilePatterns) -> bool> FileCollector<TFilter> {
 /// Note: This ignores all .git and node_modules folders.
 pub fn collect_specifiers(
   mut files: FilePatterns,
-  predicate: impl Fn(&Path, &FilePatterns) -> bool,
+  predicate: impl Fn(WalkEntry) -> bool,
 ) -> Result<Vec<ModuleSpecifier>, AnyError> {
   let mut prepared = vec![];
 
@@ -365,6 +419,10 @@ pub fn collect_specifiers(
         prepared.push(url);
       }
     }
+    PathOrPattern::NegatedPath(path) => {
+      // add it back
+      result.push(PathOrPattern::NegatedPath(path));
+    }
     PathOrPattern::RemoteUrl(remote_url) => {
       prepared.push(remote_url);
     }
@@ -819,9 +877,9 @@ mod tests {
       ignore_dir_path.to_path_buf(),
     )]),
   };
-  let file_collector = FileCollector::new(|path, _| {
+  let file_collector = FileCollector::new(|e| {
     // exclude dotfiles
-    path
+    e.path
       .file_name()
       .and_then(|f| f.to_str())
       .map(|f| !f.starts_with('.'))
@@ -943,9 +1001,9 @@ mod tests {
   let ignore_dir_files = ["g.d.ts", ".gitignore"];
   create_files(&ignore_dir_path, &ignore_dir_files);
 
-  let predicate = |path: &Path, _: &FilePatterns| {
+  let predicate = |e: WalkEntry| {
     // exclude dotfiles
-    path
+    e.path
      .file_name()
      .and_then(|f| f.to_str())
      .map(|f| !f.starts_with('.'))
@@ -956,7 +1014,7 @@
   FilePatterns {
     base: root_dir_path.to_path_buf(),
     include: Some(
-      PathOrPatternSet::from_relative_path_or_patterns(
+      PathOrPatternSet::from_include_relative_path_or_patterns(
        root_dir_path.as_path(),
        &[
          "http://localhost:8080".to_string(),
cli/util/gitignore.rs (new file, 151 lines)
@@ -0,0 +1,151 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;

/// Resolved gitignore for a directory.
pub struct DirGitIgnores {
  current: Option<Rc<ignore::gitignore::Gitignore>>,
  parent: Option<Rc<DirGitIgnores>>,
}

impl DirGitIgnores {
  pub fn is_ignored(&self, path: &Path, is_dir: bool) -> bool {
    let mut is_ignored = false;
    if let Some(parent) = &self.parent {
      is_ignored = parent.is_ignored(path, is_dir);
    }
    if let Some(current) = &self.current {
      match current.matched(path, is_dir) {
        ignore::Match::None => {}
        ignore::Match::Ignore(_) => {
          is_ignored = true;
        }
        ignore::Match::Whitelist(_) => {
          is_ignored = false;
        }
      }
    }
    is_ignored
  }
}

/// Resolves gitignores in a directory tree taking into account
/// ancestor gitignores that may be found in a directory.
pub struct GitIgnoreTree {
  fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
  ignores: HashMap<PathBuf, Option<Rc<DirGitIgnores>>>,
}

impl GitIgnoreTree {
  pub fn new(fs: Arc<dyn deno_runtime::deno_fs::FileSystem>) -> Self {
    Self {
      fs,
      ignores: Default::default(),
    }
  }

  pub fn get_resolved_git_ignore(
    &mut self,
    dir_path: &Path,
  ) -> Option<Rc<DirGitIgnores>> {
    self.get_resolved_git_ignore_inner(dir_path, None)
  }

  fn get_resolved_git_ignore_inner(
    &mut self,
    dir_path: &Path,
    maybe_parent: Option<&Path>,
  ) -> Option<Rc<DirGitIgnores>> {
    let maybe_resolved = self.ignores.get(dir_path).cloned();
    if let Some(resolved) = maybe_resolved {
      resolved
    } else {
      let resolved = self.resolve_gitignore_in_dir(dir_path, maybe_parent);
      self.ignores.insert(dir_path.to_owned(), resolved.clone());
      resolved
    }
  }

  fn resolve_gitignore_in_dir(
    &mut self,
    dir_path: &Path,
    maybe_parent: Option<&Path>,
  ) -> Option<Rc<DirGitIgnores>> {
    if let Some(parent) = maybe_parent {
      // stop searching if the parent dir had a .git directory in it
      if self.fs.exists_sync(&parent.join(".git")) {
        return None;
      }
    }

    let parent = dir_path.parent().and_then(|parent| {
      self.get_resolved_git_ignore_inner(parent, Some(dir_path))
    });
    let current = self
      .fs
      .read_text_file_sync(&dir_path.join(".gitignore"))
      .ok()
      .and_then(|text| {
        let mut builder = ignore::gitignore::GitignoreBuilder::new(dir_path);
        for line in text.lines() {
          builder.add_line(None, line).ok()?;
        }
        let gitignore = builder.build().ok()?;
        Some(Rc::new(gitignore))
      });
    if parent.is_none() && current.is_none() {
      None
    } else {
      Some(Rc::new(DirGitIgnores { current, parent }))
    }
  }
}

#[cfg(test)]
mod test {
  use deno_runtime::deno_fs::InMemoryFs;

  use super::*;

  #[test]
  fn git_ignore_tree() {
    let fs = InMemoryFs::default();
    fs.setup_text_files(vec![
      ("/.gitignore".into(), "file.txt".into()),
      ("/sub_dir/.gitignore".into(), "data.txt".into()),
      (
        "/sub_dir/sub_dir/.gitignore".into(),
        "!file.txt\nignore.txt".into(),
      ),
    ]);
    let mut ignore_tree = GitIgnoreTree::new(Arc::new(fs));
    let mut run_test = |path: &str, expected: bool| {
      let path = PathBuf::from(path);
      let gitignore = ignore_tree
        .get_resolved_git_ignore(path.parent().unwrap())
        .unwrap();
      assert_eq!(
        gitignore.is_ignored(&path, /* is_dir */ false),
        expected,
        "Path: {}",
        path.display()
      );
    };
    run_test("/file.txt", true);
    run_test("/other.txt", false);
    run_test("/data.txt", false);
    run_test("/sub_dir/file.txt", true);
    run_test("/sub_dir/other.txt", false);
    run_test("/sub_dir/data.txt", true);
    run_test("/sub_dir/sub_dir/file.txt", false); // unignored up here
    run_test("/sub_dir/sub_dir/sub_dir/file.txt", false);
    run_test("/sub_dir/sub_dir/sub_dir/ignore.txt", true);
    run_test("/sub_dir/sub_dir/ignore.txt", true);
    run_test("/sub_dir/ignore.txt", false);
    run_test("/ignore.txt", false);
  }
}
@@ -8,6 +8,7 @@ pub mod display;
 pub mod draw_thread;
 pub mod file_watcher;
 pub mod fs;
+pub mod gitignore;
 pub mod logger;
 pub mod path;
 pub mod progress_bar;
@@ -6,6 +6,9 @@ use std::path::PathBuf;
 
 use deno_ast::MediaType;
 use deno_ast::ModuleSpecifier;
+use deno_config::glob::PathGlobMatch;
+use deno_config::glob::PathOrPattern;
+use deno_config::glob::PathOrPatternSet;
 use deno_core::error::uri_error;
 use deno_core::error::AnyError;
 
@@ -244,6 +247,38 @@ pub fn root_url_to_safe_local_dirname(root: &ModuleSpecifier) -> PathBuf {
   result
 }
 
+/// Slightly different behaviour than the default matching
+/// where an exact path needs to be matched to be opted-in
+/// rather than just a partial directory match.
+///
+/// This is used by the test and bench filtering.
+pub fn matches_pattern_or_exact_path(
+  path_or_pattern_set: &PathOrPatternSet,
+  path: &Path,
+) -> bool {
+  for p in path_or_pattern_set.inner().iter().rev() {
+    match p {
+      PathOrPattern::Path(p) => {
+        if p == path {
+          return true;
+        }
+      }
+      PathOrPattern::NegatedPath(p) => {
+        if path.starts_with(p) {
+          return false;
+        }
+      }
+      PathOrPattern::RemoteUrl(_) => {}
+      PathOrPattern::Pattern(p) => match p.matches_path(path) {
+        PathGlobMatch::Matched => return true,
+        PathGlobMatch::MatchedNegated => return false,
+        PathGlobMatch::NotMatched => {}
+      },
+    }
+  }
+  false
+}
+
 #[cfg(test)]
 mod test {
   use super::*;
ext/fs/in_memory_fs.rs (new file, 425 lines)
@@ -0,0 +1,425 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

// Allow using Arc for this module.
#![allow(clippy::disallowed_types)]

use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::io::Error;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;

use deno_core::normalize_path;
use deno_core::parking_lot::Mutex;
use deno_io::fs::File;
use deno_io::fs::FsError;
use deno_io::fs::FsResult;
use deno_io::fs::FsStat;

use crate::interface::FsDirEntry;
use crate::interface::FsFileType;
use crate::FileSystem;
use crate::OpenOptions;

#[derive(Debug)]
enum PathEntry {
  Dir,
  File(Vec<u8>),
}

/// A very basic in-memory file system useful for swapping out in
/// the place of a RealFs for testing purposes.
///
/// Please develop this out as you need functionality.
#[derive(Debug, Default)]
pub struct InMemoryFs {
  entries: Mutex<HashMap<PathBuf, Arc<PathEntry>>>,
}

impl InMemoryFs {
  pub fn setup_text_files(&self, files: Vec<(String, String)>) {
    for (path, text) in files {
      let path = PathBuf::from(path);
      self.mkdir_sync(path.parent().unwrap(), true, 0).unwrap();
      self
        .write_file_sync(
          &path,
          OpenOptions::write(true, false, false, None),
          &text.into_bytes(),
        )
        .unwrap();
    }
  }

  fn get_entry(&self, path: &Path) -> Option<Arc<PathEntry>> {
    let path = normalize_path(path);
    self.entries.lock().get(&path).cloned()
  }
}

#[async_trait::async_trait(?Send)]
impl FileSystem for InMemoryFs {
  fn cwd(&self) -> FsResult<PathBuf> {
    Err(FsError::NotSupported)
  }

  fn tmp_dir(&self) -> FsResult<PathBuf> {
    Err(FsError::NotSupported)
  }

  fn chdir(&self, _path: &Path) -> FsResult<()> {
    Err(FsError::NotSupported)
  }

  fn umask(&self, _mask: Option<u32>) -> FsResult<u32> {
    Err(FsError::NotSupported)
  }

  fn open_sync(
    &self,
    _path: &Path,
    _options: OpenOptions,
  ) -> FsResult<Rc<dyn File>> {
    Err(FsError::NotSupported)
  }
  async fn open_async(
    &self,
    path: PathBuf,
    options: OpenOptions,
  ) -> FsResult<Rc<dyn File>> {
    self.open_sync(&path, options)
  }

  fn mkdir_sync(
    &self,
    path: &Path,
    recursive: bool,
    _mode: u32,
  ) -> FsResult<()> {
    let path = normalize_path(path);

    if let Some(parent) = path.parent() {
      let entry = self.entries.lock().get(parent).cloned();
      match entry {
        Some(entry) => match &*entry {
          PathEntry::File(_) => {
            return Err(FsError::Io(Error::new(
              ErrorKind::InvalidInput,
              "Parent is a file",
            )))
          }
          PathEntry::Dir => {}
        },
        None => {
          if recursive {
            self.mkdir_sync(parent, true, 0)?;
          } else {
            return Err(FsError::Io(Error::new(
              ErrorKind::NotFound,
              "Not found",
            )));
          }
        }
      }
    }

    let entry = self.entries.lock().get(&path).cloned();
    match entry {
      Some(entry) => match &*entry {
        PathEntry::File(_) => Err(FsError::Io(Error::new(
          ErrorKind::InvalidInput,
          "Is a file",
        ))),
        PathEntry::Dir => Ok(()),
      },
      None => {
        self.entries.lock().insert(path, Arc::new(PathEntry::Dir));
        Ok(())
      }
    }
  }
  async fn mkdir_async(
    &self,
    path: PathBuf,
    recursive: bool,
    mode: u32,
  ) -> FsResult<()> {
    self.mkdir_sync(&path, recursive, mode)
  }

  fn chmod_sync(&self, _path: &Path, _mode: u32) -> FsResult<()> {
    Err(FsError::NotSupported)
  }
  async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()> {
    self.chmod_sync(&path, mode)
  }

  fn chown_sync(
    &self,
    _path: &Path,
    _uid: Option<u32>,
    _gid: Option<u32>,
  ) -> FsResult<()> {
    Err(FsError::NotSupported)
  }
  async fn chown_async(
    &self,
    path: PathBuf,
    uid: Option<u32>,
    gid: Option<u32>,
  ) -> FsResult<()> {
    self.chown_sync(&path, uid, gid)
  }

  fn remove_sync(&self, _path: &Path, _recursive: bool) -> FsResult<()> {
    Err(FsError::NotSupported)
  }
  async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()> {
    self.remove_sync(&path, recursive)
  }

  fn copy_file_sync(&self, _from: &Path, _to: &Path) -> FsResult<()> {
    Err(FsError::NotSupported)
  }
  async fn copy_file_async(&self, from: PathBuf, to: PathBuf) -> FsResult<()> {
    self.copy_file_sync(&from, &to)
  }

  fn cp_sync(&self, _from: &Path, _to: &Path) -> FsResult<()> {
    Err(FsError::NotSupported)
  }
  async fn cp_async(&self, from: PathBuf, to: PathBuf) -> FsResult<()> {
    self.cp_sync(&from, &to)
  }

  fn stat_sync(&self, path: &Path) -> FsResult<FsStat> {
    let entry = self.get_entry(path);
    match entry {
      Some(entry) => match &*entry {
        PathEntry::Dir => Ok(FsStat {
          is_file: false,
          is_directory: true,
          is_symlink: false,
          size: 0,
          mtime: None,
          atime: None,
          birthtime: None,
          dev: 0,
          ino: 0,
          mode: 0,
          nlink: 0,
          uid: 0,
          gid: 0,
          rdev: 0,
          blksize: 0,
          blocks: 0,
          is_block_device: false,
          is_char_device: false,
          is_fifo: false,
          is_socket: false,
        }),
        PathEntry::File(data) => Ok(FsStat {
          is_file: true,
          is_directory: false,
          is_symlink: false,
          size: data.len() as u64,
          mtime: None,
          atime: None,
          birthtime: None,
          dev: 0,
          ino: 0,
          mode: 0,
          nlink: 0,
          uid: 0,
          gid: 0,
          rdev: 0,
          blksize: 0,
          blocks: 0,
          is_block_device: false,
          is_char_device: false,
          is_fifo: false,
          is_socket: false,
        }),
      },
      None => Err(FsError::Io(Error::new(ErrorKind::NotFound, "Not found"))),
    }
  }
  async fn stat_async(&self, path: PathBuf) -> FsResult<FsStat> {
    self.stat_sync(&path)
  }

  fn lstat_sync(&self, _path: &Path) -> FsResult<FsStat> {
    Err(FsError::NotSupported)
  }
  async fn lstat_async(&self, path: PathBuf) -> FsResult<FsStat> {
    self.lstat_sync(&path)
  }

  fn realpath_sync(&self, _path: &Path) -> FsResult<PathBuf> {
    Err(FsError::NotSupported)
  }
  async fn realpath_async(&self, path: PathBuf) -> FsResult<PathBuf> {
    self.realpath_sync(&path)
  }

  fn read_dir_sync(&self, _path: &Path) -> FsResult<Vec<FsDirEntry>> {
    Err(FsError::NotSupported)
  }
  async fn read_dir_async(&self, path: PathBuf) -> FsResult<Vec<FsDirEntry>> {
    self.read_dir_sync(&path)
  }

  fn rename_sync(&self, _oldpath: &Path, _newpath: &Path) -> FsResult<()> {
    Err(FsError::NotSupported)
  }
  async fn rename_async(
    &self,
    oldpath: PathBuf,
    newpath: PathBuf,
  ) -> FsResult<()> {
    self.rename_sync(&oldpath, &newpath)
  }

  fn link_sync(&self, _oldpath: &Path, _newpath: &Path) -> FsResult<()> {
    Err(FsError::NotSupported)
  }
  async fn link_async(
    &self,
    oldpath: PathBuf,
    newpath: PathBuf,
  ) -> FsResult<()> {
    self.link_sync(&oldpath, &newpath)
  }

  fn symlink_sync(
    &self,
    _oldpath: &Path,
    _newpath: &Path,
    _file_type: Option<FsFileType>,
  ) -> FsResult<()> {
    Err(FsError::NotSupported)
  }
  async fn symlink_async(
    &self,
    oldpath: PathBuf,
    newpath: PathBuf,
    file_type: Option<FsFileType>,
  ) -> FsResult<()> {
    self.symlink_sync(&oldpath, &newpath, file_type)
  }

  fn read_link_sync(&self, _path: &Path) -> FsResult<PathBuf> {
    Err(FsError::NotSupported)
  }
  async fn read_link_async(&self, path: PathBuf) -> FsResult<PathBuf> {
    self.read_link_sync(&path)
  }

  fn truncate_sync(&self, _path: &Path, _len: u64) -> FsResult<()> {
    Err(FsError::NotSupported)
  }
  async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()> {
    self.truncate_sync(&path, len)
  }

  fn utime_sync(
    &self,
    _path: &Path,
    _atime_secs: i64,
    _atime_nanos: u32,
    _mtime_secs: i64,
    _mtime_nanos: u32,
  ) -> FsResult<()> {
    Err(FsError::NotSupported)
  }
  async fn utime_async(
    &self,
    path: PathBuf,
    atime_secs: i64,
    atime_nanos: u32,
    mtime_secs: i64,
    mtime_nanos: u32,
  ) -> FsResult<()> {
    self.utime_sync(&path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
  }

  fn write_file_sync(
    &self,
    path: &Path,
    options: OpenOptions,
    data: &[u8],
  ) -> FsResult<()> {
    let path = normalize_path(path);
    let has_parent_dir = path
      .parent()
      .and_then(|parent| self.get_entry(parent))
      .map(|e| matches!(*e, PathEntry::Dir))
      .unwrap_or(false);
    if !has_parent_dir {
      return Err(FsError::Io(Error::new(
        ErrorKind::NotFound,
        "Parent directory does not exist",
      )));
    }
    let mut entries = self.entries.lock();
    let entry = entries.entry(path.clone());
    match entry {
      Entry::Occupied(mut entry) => {
        if let PathEntry::File(existing_data) = &**entry.get() {
          if options.create_new {
            return Err(FsError::Io(Error::new(
              ErrorKind::AlreadyExists,
              "File already exists",
            )));
          }
          if options.append {
            let mut new_data = existing_data.clone();
            new_data.extend_from_slice(data);
            entry.insert(Arc::new(PathEntry::File(new_data)));
          } else {
            entry.insert(Arc::new(PathEntry::File(data.to_vec())));
          }
          Ok(())
        } else {
          Err(FsError::Io(Error::new(
            ErrorKind::InvalidInput,
            "Not a file",
          )))
        }
      }
      Entry::Vacant(entry) => {
        entry.insert(Arc::new(PathEntry::File(data.to_vec())));
        Ok(())
      }
    }
  }

  async fn write_file_async(
    &self,
    path: PathBuf,
    options: OpenOptions,
    data: Vec<u8>,
  ) -> FsResult<()> {
    self.write_file_sync(&path, options, &data)
  }

  fn read_file_sync(&self, path: &Path) -> FsResult<Vec<u8>> {
    let entry = self.get_entry(path);
    match entry {
      Some(entry) => match &*entry {
        PathEntry::File(data) => Ok(data.clone()),
        PathEntry::Dir => Err(FsError::Io(Error::new(
          ErrorKind::InvalidInput,
          "Is a directory",
        ))),
      },
      None => Err(FsError::Io(Error::new(ErrorKind::NotFound, "Not found"))),
    }
  }
  async fn read_file_async(&self, path: PathBuf) -> FsResult<Vec<u8>> {
    self.read_file_sync(&path)
  }
}
@@ -1,10 +1,12 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

mod in_memory_fs;
mod interface;
mod ops;
mod std_fs;
pub mod sync;

pub use crate::in_memory_fs::InMemoryFs;
pub use crate::interface::FileSystem;
pub use crate::interface::FileSystemRc;
pub use crate::interface::FsDirEntry;

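Most of the trait methods in the new in-memory implementation above are stubs that return FsError::NotSupported; only writes, reads and stat go through the entry map. Below is a rough usage sketch, under assumptions that are not visible in this excerpt: the crate is consumed as deno_fs, OpenOptions is re-exported next to the types above, InMemoryFs and OpenOptions implement Default, and mkdir_sync is implemented with the usual (path, recursive, mode) parameters (write_file_sync requires the parent directory entry to already exist).

// Sketch only: the deno_fs crate name, the OpenOptions re-export, the Default
// impls, and the mkdir_sync signature are assumptions, not shown in this diff.
use std::path::Path;

use deno_fs::FileSystem;
use deno_fs::InMemoryFs;
use deno_fs::OpenOptions;

fn main() {
  let fs = InMemoryFs::default();

  // write_file_sync only checks that the parent entry is PathEntry::Dir;
  // it does not create missing directories itself.
  fs.mkdir_sync(Path::new("/scratch"), true, 0o777).unwrap();

  fs.write_file_sync(
    Path::new("/scratch/a.txt"),
    OpenOptions::default(),
    b"hello",
  )
  .unwrap();

  // append: true extends the stored Vec<u8>; create_new: true would instead
  // fail with AlreadyExists, matching the Occupied arm above.
  fs.write_file_sync(
    Path::new("/scratch/a.txt"),
    OpenOptions {
      append: true,
      ..Default::default()
    },
    b" world",
  )
  .unwrap();

  let bytes = fs.read_file_sync(Path::new("/scratch/a.txt")).unwrap();
  assert_eq!(bytes, b"hello world".to_vec());

  let stat = fs.stat_sync(Path::new("/scratch/a.txt")).unwrap();
  assert!(stat.is_file);
  assert_eq!(stat.size, 11);
}
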
@@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::serde_json::json;
use deno_core::url::Url;
use test_util as util;
use test_util::itest;
@@ -8,6 +9,7 @@ use util::assert_contains;
use util::assert_not_contains;
use util::env_vars_for_npm_tests;
use util::TestContext;
use util::TestContextBuilder;

itest!(overloads {
  args: "bench bench/overloads.ts",
@@ -285,3 +287,32 @@ fn conditionally_loads_type_graph() {
    .run();
  assert_not_contains!(output.combined_output(), "type_reference.d.ts");
}

#[test]
fn opt_out_top_level_exclude_via_bench_unexclude() {
  let context = TestContextBuilder::new().use_temp_cwd().build();
  let temp_dir = context.temp_dir().path();
  temp_dir.join("deno.json").write_json(&json!({
    "bench": {
      "exclude": [ "!excluded.bench.ts" ]
    },
    "exclude": [ "excluded.bench.ts", "actually_excluded.bench.ts" ]
  }));

  temp_dir
    .join("main.bench.ts")
    .write("Deno.bench('test1', () => {});");
  temp_dir
    .join("excluded.bench.ts")
    .write("Deno.bench('test2', () => {});");
  temp_dir
    .join("actually_excluded.bench.ts")
    .write("Deno.bench('test3', () => {});");

  let output = context.new_command().arg("bench").run();
  output.assert_exit_code(0);
  let output = output.combined_output();
  assert_contains!(output, "main.bench.ts");
  assert_contains!(output, "excluded.bench.ts");
  assert_not_contains!(output, "actually_excluded.bench.ts");
}

@@ -1,8 +1,10 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::serde_json::json;
use test_util as util;
use test_util::itest;
use util::assert_contains;
use util::assert_not_contains;
use util::PathRef;
use util::TestContext;
use util::TestContextBuilder;
@@ -351,3 +353,28 @@ fn fmt_with_glob_config_and_flags() {

  assert_contains!(output, "Found 2 not formatted files in 2 files");
}

#[test]
fn opt_out_top_level_exclude_via_fmt_unexclude() {
  let context = TestContextBuilder::new().use_temp_cwd().build();
  let temp_dir = context.temp_dir().path();
  temp_dir.join("deno.json").write_json(&json!({
    "fmt": {
      "exclude": [ "!excluded.ts" ]
    },
    "exclude": [ "excluded.ts", "actually_excluded.ts" ]
  }));

  temp_dir.join("main.ts").write("const a = 1;");
  temp_dir.join("excluded.ts").write("const a = 2;");
  temp_dir
    .join("actually_excluded.ts")
    .write("const a = 2;");

  let output = context.new_command().arg("fmt").run();
  output.assert_exit_code(0);
  let output = output.combined_output();
  assert_contains!(output, "main.ts");
  assert_contains!(output, "excluded.ts");
  assert_not_contains!(output, "actually_excluded.ts");
}

@@ -1,6 +1,8 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::serde_json::json;
use test_util::assert_contains;
use test_util::assert_not_contains;
use test_util::itest;
use test_util::TestContextBuilder;

@@ -252,3 +254,26 @@ itest!(no_slow_types_workspace {
  cwd: Some("lint/no_slow_types_workspace"),
  exit_code: 1,
});

#[test]
fn opt_out_top_level_exclude_via_lint_unexclude() {
  let context = TestContextBuilder::new().use_temp_cwd().build();
  let temp_dir = context.temp_dir().path();
  temp_dir.join("deno.json").write_json(&json!({
    "lint": {
      "exclude": [ "!excluded.ts" ]
    },
    "exclude": [ "excluded.ts", "actually_excluded.ts" ]
  }));

  temp_dir.join("main.ts").write("const a = 1;");
  temp_dir.join("excluded.ts").write("const a = 2;");
  temp_dir.join("actually_excluded.ts").write("const a = 2;");

  let output = context.new_command().arg("lint").run();
  output.assert_exit_code(1);
  let output = output.combined_output();
  assert_contains!(output, "main.ts");
  assert_contains!(output, "excluded.ts");
  assert_not_contains!(output, "actually_excluded.ts");
}

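The bench, fmt and lint tests above, and the test and publish tests further down, all exercise the same rule from the deno_config upgrade: a negated glob in a more specific exclude list re-includes a path that a broader exclude removed. The sketch below illustrates only that precedence; it uses exact string matches and assumes the more specific level's patterns are appended last, whereas the real resolution in deno_config handles globs, ancestor directories and gitignore.

// Simplified illustration of the unexclude precedence the tests above rely on.
// Real resolution lives in deno_config and works on glob patterns, not strings.
fn is_excluded(path: &str, exclude_patterns: &[&str]) -> bool {
  let mut excluded = false;
  for pattern in exclude_patterns {
    if let Some(unexcluded) = pattern.strip_prefix('!') {
      // A later (more specific) negated pattern wins over an earlier exclude.
      if path == unexcluded {
        excluded = false;
      }
    } else if path == *pattern {
      excluded = true;
    }
  }
  excluded
}

fn main() {
  // Top-level exclude plus a "bench"-level negation, as in the bench test above.
  let patterns = [
    "excluded.bench.ts",
    "actually_excluded.bench.ts",
    "!excluded.bench.ts",
  ];
  assert!(!is_excluded("excluded.bench.ts", &patterns));
  assert!(is_excluded("actually_excluded.bench.ts", &patterns));
  assert!(!is_excluded("main.bench.ts", &patterns));
}
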
@@ -1652,11 +1652,9 @@ fn lsp_workspace_disable_enable_paths() {
        .unwrap()
      },
      name: "project".to_string(),
    }])
    }]);
    .set_deno_enable(false);
  },
  json!({ "deno": {
    "enable": false,
    "disablePaths": ["./worker/node.ts"],
    "enablePaths": ["./worker"],
  } }),

@@ -408,7 +408,7 @@ fn ignores_directories() {
}

#[test]
fn includes_directories_with_gitignore() {
fn not_include_gitignored_file_even_if_matched_in_include() {
  let context = publish_context_builder().build();
  let temp_dir = context.temp_dir().path();
  temp_dir.join("deno.json").write_json(&json!({
@@ -416,23 +416,75 @@ fn includes_directories_with_gitignore() {
    "version": "1.0.0",
    "exports": "./main.ts",
    "publish": {
      "include": [ "deno.json", "main.ts" ]
      // won't match ignored because it needs to be
      // unexcluded via a negated glob in exclude
      "include": [ "deno.json", "*.ts" ]
    }
  }));

  temp_dir.join(".gitignore").write("main.ts");
  temp_dir.join(".gitignore").write("ignored.ts");
  temp_dir.join("main.ts").write("");
  temp_dir.join("ignored.ts").write("");

  let output = context
  let output = context.new_command().arg("publish").arg("--dry-run").run();
    .new_command()
    .arg("publish")
    .arg("--token")
    .arg("sadfasdf")
    .run();
  output.assert_exit_code(0);
  let output = output.combined_output();
  assert_contains!(output, "main.ts");
  // it's gitignored
  assert_not_contains!(output, "ignored.ts");
}

#[test]
fn includes_directories_with_gitignore_when_unexcluded() {
  let context = publish_context_builder().build();
  let temp_dir = context.temp_dir().path();
  temp_dir.join("deno.json").write_json(&json!({
    "name": "@foo/bar",
    "version": "1.0.0",
    "exports": "./main.ts",
    "publish": {
      "include": [ "deno.json", "*.ts" ],
      "exclude": [ "!ignored.ts" ]
    }
  }));

  temp_dir.join(".gitignore").write("ignored.ts");
  temp_dir.join("main.ts").write("");
  temp_dir.join("ignored.ts").write("");

  let output = context.new_command().arg("publish").arg("--dry-run").run();
  output.assert_exit_code(0);
  let output = output.combined_output();
  assert_contains!(output, "main.ts");
  assert_contains!(output, "ignored.ts");
}

#[test]
fn includes_unexcluded_sub_dir() {
  let context = publish_context_builder().build();
  let temp_dir = context.temp_dir().path();
  temp_dir.join("deno.json").write_json(&json!({
    "name": "@foo/bar",
    "version": "1.0.0",
    "exports": "./included1.ts",
    "publish": {
      "exclude": [
        "ignored",
        "!ignored/unexcluded",
      ]
    }
  }));

  temp_dir.join("included1.ts").write("");
  temp_dir.join("ignored/unexcluded").create_dir_all();
  temp_dir.join("ignored/ignored.ts").write("");
  temp_dir.join("ignored/unexcluded/included2.ts").write("");

  let output = context.new_command().arg("publish").arg("--dry-run").run();
  output.assert_exit_code(0);
  let output = output.combined_output();
  assert_contains!(output, "included1.ts");
  assert_contains!(output, "included2.ts");
  assert_not_contains!(output, "ignored.ts");
}

@@ -465,7 +517,7 @@ fn includes_directories() {
}

#[test]
fn includes_dotenv() {
fn not_includes_gitignored_dotenv() {
  let context = publish_context_builder().build();
  let temp_dir = context.temp_dir().path();
  temp_dir.join("deno.json").write_json(&json!({
@@ -476,14 +528,9 @@ fn includes_dotenv() {

  temp_dir.join("main.ts").write("");
  temp_dir.join(".env").write("FOO=BAR");
  temp_dir.join(".gitignore").write(".env");

  let output = context
  let output = context.new_command().arg("publish").arg("--dry-run").run();
    .new_command()
    .arg("publish")
    .arg("--token")
    .arg("sadfasdf")
    .arg("--dry-run")
    .run();
  output.assert_exit_code(0);
  let output = output.combined_output();
  assert_contains!(output, "main.ts");

@@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::serde_json::json;
use deno_core::url::Url;
use test_util as util;
use test_util::itest;
@@ -668,3 +669,32 @@ itest!(test_include_relative_pattern_dot_slash {
  output: "test/relative_pattern_dot_slash/output.out",
  cwd: Some("test/relative_pattern_dot_slash"),
});

#[test]
fn opt_out_top_level_exclude_via_test_unexclude() {
  let context = TestContextBuilder::new().use_temp_cwd().build();
  let temp_dir = context.temp_dir().path();
  temp_dir.join("deno.json").write_json(&json!({
    "test": {
      "exclude": [ "!excluded.test.ts" ]
    },
    "exclude": [ "excluded.test.ts", "actually_excluded.test.ts" ]
  }));

  temp_dir
    .join("main.test.ts")
    .write("Deno.test('test1', () => {});");
  temp_dir
    .join("excluded.test.ts")
    .write("Deno.test('test2', () => {});");
  temp_dir
    .join("actually_excluded.test.ts")
    .write("Deno.test('test3', () => {});");

  let output = context.new_command().arg("test").run();
  output.assert_exit_code(0);
  let output = output.combined_output();
  assert_contains!(output, "main.test.ts");
  assert_contains!(output, "excluded.test.ts");
  assert_not_contains!(output, "actually_excluded.test.ts");
}