Mirror of https://github.com/denoland/deno.git (synced 2024-11-21 15:04:11 -05:00)
fix(publish): regression - publishing with vendor folder (#22830)
In https://github.com/denoland/deno/pull/22720/files#diff-d62d85de2a7ffb816cd2fdbaa47e588352f521c7c43d058b75649bbb255e0ae1R70, I copied and pasted from another area of the code and didn't think to remove the part that ignores the vendor folder by default.
This commit is contained in:
parent dff056ae41
commit f8543a9fd8

4 changed files with 84 additions and 20 deletions
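For orientation, here is a minimal standalone sketch of the behavior this commit restores (not the actual Deno code; `should_skip_dir` and `configured_vendor` are made-up names for illustration): a directory should only be dropped from the published tarball when it is the vendor folder that deno.json actually configures, not merely because it is named "vendor".

use std::path::Path;

// Hypothetical helper for illustration only.
fn should_skip_dir(dir: &Path, configured_vendor: Option<&Path>) -> bool {
  configured_vendor == Some(dir)
}

fn main() {
  let vendor = Path::new("/project/vendor");
  // No `"vendor": true` in deno.json: the folder is published like any other.
  assert!(!should_skip_dir(vendor, None));
  // Vendoring enabled: the configured vendor dir is excluded again.
  assert!(should_skip_dir(vendor, Some(vendor)));
  println!("vendor handling sketch ok");
}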
@@ -97,9 +97,9 @@ async fn prepare_publish(
   deno_json: &ConfigFile,
   source_cache: Arc<ParsedSourceCache>,
   graph: Arc<deno_graph::ModuleGraph>,
+  cli_options: Arc<CliOptions>,
   mapped_resolver: Arc<MappedSpecifierResolver>,
   sloppy_imports_resolver: Option<SloppyImportsResolver>,
-  bare_node_builtins: bool,
   diagnostics_collector: &PublishDiagnosticsCollector,
 ) -> Result<Rc<PreparedPublishPackage>, AnyError> {
   let config_path = deno_json.specifier.to_file_path().unwrap();
@@ -145,6 +145,7 @@ async fn prepare_publish(
 
   let diagnostics_collector = diagnostics_collector.clone();
   let tarball = deno_core::unsync::spawn_blocking(move || {
+    let bare_node_builtins = cli_options.unstable_bare_node_builtins();
     let unfurler = SpecifierUnfurler::new(
       &mapped_resolver,
       sloppy_imports_resolver.as_ref(),
@@ -152,6 +153,7 @@ async fn prepare_publish(
     );
     tar::create_gzipped_tarball(
       &dir_path,
+      &cli_options,
       LazyGraphSourceParser::new(&source_cache, &graph),
       &diagnostics_collector,
       &unfurler,
@@ -745,7 +747,6 @@ async fn prepare_packages_for_publishing(
   let type_checker = cli_factory.type_checker().await?;
   let fs = cli_factory.fs();
   let cli_options = cli_factory.cli_options();
-  let bare_node_builtins = cli_options.unstable_bare_node_builtins();
 
   if members.len() > 1 {
     println!("Publishing a workspace...");
@@ -776,15 +777,16 @@ async fn prepare_packages_for_publishing(
         None
       };
       let graph = graph.clone();
+      let cli_options = cli_options.clone();
       async move {
         let package = prepare_publish(
           &member.package_name,
           &member.config_file,
           source_cache.clone(),
           graph,
+          cli_options,
           mapped_resolver,
           sloppy_imports_resolver,
-          bare_node_builtins,
           diagnostics_collector,
         )
         .await
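The hunks above thread `Arc<CliOptions>` into `prepare_publish` and move it into the blocking tarball task, instead of computing `bare_node_builtins` at every call site. A rough sketch of that pattern, assuming a made-up `CliOptions` struct with plain fields (the real type exposes methods such as `unstable_bare_node_builtins()` and `vendor_dir_path()`) and with `std::thread::spawn` standing in for `deno_core::unsync::spawn_blocking`:

use std::path::PathBuf;
use std::sync::Arc;

// Stand-in for illustration; not Deno's CliOptions.
struct CliOptions {
  unstable_bare_node_builtins: bool,
  vendor_dir: Option<PathBuf>,
}

fn main() {
  let cli_options = Arc::new(CliOptions {
    unstable_bare_node_builtins: false,
    vendor_dir: Some(PathBuf::from("vendor")),
  });

  // Clone the Arc and move the clone into the blocking task, like the diff does.
  let cli_options_for_task = cli_options.clone();
  let handle = std::thread::spawn(move || {
    // Everything the task needs is read from the one options object,
    // so new settings (like the vendor dir) need no extra parameters.
    let bare_node_builtins = cli_options_for_task.unstable_bare_node_builtins;
    let vendor_dir = cli_options_for_task.vendor_dir.clone();
    (bare_node_builtins, vendor_dir)
  });

  let (bare, vendor) = handle.join().unwrap();
  println!("bare_node_builtins={bare}, vendor_dir={vendor:?}");
}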
@@ -14,6 +14,7 @@ use std::io::Write;
 use std::path::Path;
 use tar::Header;
 
+use crate::args::CliOptions;
 use crate::cache::LazyGraphSourceParser;
 use crate::tools::registry::paths::PackagePath;
 use crate::util::fs::FileCollector;
@@ -39,6 +40,7 @@ pub struct PublishableTarball {
 
 pub fn create_gzipped_tarball(
   dir: &Path,
+  cli_options: &CliOptions,
   source_parser: LazyGraphSourceParser,
   diagnostics_collector: &PublishDiagnosticsCollector,
   unfurler: &SpecifierUnfurler,
@@ -70,7 +72,7 @@ pub fn create_gzipped_tarball(
     })
     .ignore_git_folder()
     .ignore_node_modules()
-    .ignore_vendor_folder()
+    .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
     .use_gitignore()
     .collect_file_patterns(file_patterns)?;
 
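In the hunks above, the tarball code now asks the options for the configured vendor directory instead of unconditionally calling `.ignore_vendor_folder()`. The `.map(ToOwned::to_owned)` is just an owned/borrowed conversion: `vendor_dir_path()` presumably hands back a borrowed path, while the collector stores an owned `Option<PathBuf>`. A tiny sketch of that conversion:

use std::path::{Path, PathBuf};

fn main() {
  // Borrowed path, e.g. what an accessor like vendor_dir_path() might return.
  let borrowed: Option<&Path> = Some(Path::new("/project/vendor"));
  // `ToOwned::to_owned` turns &Path into PathBuf inside the Option.
  let owned: Option<PathBuf> = borrowed.map(ToOwned::to_owned);
  assert_eq!(owned.as_deref(), borrowed);
  println!("{owned:?}");
}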
@@ -261,6 +261,7 @@ pub struct FileCollector<TFilter: Fn(WalkEntry) -> bool> {
   ignore_git_folder: bool,
   ignore_node_modules: bool,
   ignore_vendor_folder: bool,
+  vendor_folder: Option<PathBuf>,
   use_gitignore: bool,
 }
 
@@ -271,6 +272,7 @@ impl<TFilter: Fn(WalkEntry) -> bool> FileCollector<TFilter> {
       ignore_git_folder: false,
       ignore_node_modules: false,
       ignore_vendor_folder: false,
+      vendor_folder: None,
       use_gitignore: false,
     }
   }
@@ -285,6 +287,11 @@ impl<TFilter: Fn(WalkEntry) -> bool> FileCollector<TFilter> {
     self
   }
 
+  pub fn set_vendor_folder(mut self, vendor_folder: Option<PathBuf>) -> Self {
+    self.vendor_folder = vendor_folder;
+    self
+  }
+
   pub fn ignore_git_folder(mut self) -> Self {
     self.ignore_git_folder = true;
     self
@@ -389,22 +396,10 @@ impl<TFilter: Fn(WalkEntry) -> bool> FileCollector<TFilter> {
           iterator.skip_current_dir();
         }
       } else if is_dir {
-        let should_ignore_dir = path
-          .file_name()
-          .map(|dir_name| {
-            let dir_name = dir_name.to_string_lossy().to_lowercase();
-            let is_ignored_file = match dir_name.as_str() {
-              "node_modules" => self.ignore_node_modules,
-              "vendor" => self.ignore_vendor_folder,
-              ".git" => self.ignore_git_folder,
-              _ => false,
-            };
-            // allow the user to opt out of ignoring by explicitly specifying the dir
-            file != path && is_ignored_file
-          })
-          .unwrap_or(false)
-          || !visited_paths.insert(path.clone());
-        if should_ignore_dir {
+        // allow the user to opt out of ignoring by explicitly specifying the dir
+        let opt_out_ignore = file == path;
+        let should_ignore_dir = !opt_out_ignore && self.is_ignored_dir(&path);
+        if should_ignore_dir || !visited_paths.insert(path.clone()) {
           iterator.skip_current_dir();
         }
       } else if (self.file_filter)(WalkEntry {
@@ -419,6 +414,31 @@ impl<TFilter: Fn(WalkEntry) -> bool> FileCollector<TFilter> {
     }
     Ok(target_files)
   }
 
+  fn is_ignored_dir(&self, path: &Path) -> bool {
+    path
+      .file_name()
+      .map(|dir_name| {
+        let dir_name = dir_name.to_string_lossy().to_lowercase();
+        let is_ignored_file = match dir_name.as_str() {
+          "node_modules" => self.ignore_node_modules,
+          "vendor" => self.ignore_vendor_folder,
+          ".git" => self.ignore_git_folder,
+          _ => false,
+        };
+        is_ignored_file
+      })
+      .unwrap_or(false)
+      || self.is_vendor_folder(path)
+  }
+
+  fn is_vendor_folder(&self, path: &Path) -> bool {
+    self
+      .vendor_folder
+      .as_ref()
+      .map(|vendor_folder| path == *vendor_folder)
+      .unwrap_or(false)
+  }
 }
 
 /// Collects module specifiers that satisfy the given predicate as a file path, by recursively walking `include`.
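The `FileCollector` changes above split the directory check in two: name-based ignoring for `node_modules` and `.git` (and `"vendor"` only when `ignore_vendor_folder` is set, which the publish path no longer does), plus an exact-path comparison against the configured vendor folder. A condensed, self-contained sketch of that logic (not the exact Deno code):

use std::path::{Path, PathBuf};

struct Collector {
  ignore_git_folder: bool,
  ignore_node_modules: bool,
  ignore_vendor_folder: bool,
  vendor_folder: Option<PathBuf>,
}

impl Collector {
  fn is_ignored_dir(&self, path: &Path) -> bool {
    let by_name = path
      .file_name()
      .map(|dir_name| {
        match dir_name.to_string_lossy().to_lowercase().as_str() {
          "node_modules" => self.ignore_node_modules,
          "vendor" => self.ignore_vendor_folder,
          ".git" => self.ignore_git_folder,
          _ => false,
        }
      })
      .unwrap_or(false);
    by_name || self.is_vendor_folder(path)
  }

  fn is_vendor_folder(&self, path: &Path) -> bool {
    self.vendor_folder.as_deref() == Some(path)
  }
}

fn main() {
  let collector = Collector {
    ignore_git_folder: true,
    ignore_node_modules: true,
    ignore_vendor_folder: false, // publish no longer turns this on
    vendor_folder: Some(PathBuf::from("/project/vendor")),
  };
  // A directory that merely happens to be named "vendor" elsewhere is kept...
  assert!(!collector.is_ignored_dir(Path::new("/project/subdir/vendor")));
  // ...but the configured vendor directory itself is still excluded,
  assert!(collector.is_ignored_dir(Path::new("/project/vendor")));
  // and node_modules/.git stay ignored by name.
  assert!(collector.is_ignored_dir(Path::new("/project/node_modules")));
  println!("collector sketch ok");
}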
@@ -589,6 +589,46 @@ fn not_includes_gitignored_dotenv()
   assert_not_contains!(output, ".env");
 }
 
+#[test]
+fn not_includes_vendor_dir_only_when_vendor_true() {
+  let context = publish_context_builder().build();
+  let temp_dir = context.temp_dir().path();
+  temp_dir.join("deno.json").write_json(&json!({
+    "name": "@foo/bar",
+    "version": "1.0.0",
+    "exports": "./main.ts",
+  }));
+
+  temp_dir.join("main.ts").write("");
+  let vendor_folder = temp_dir.join("vendor");
+  vendor_folder.create_dir_all();
+  vendor_folder.join("vendor.ts").write("");
+
+  let publish_cmd = context.new_command().args("publish --dry-run");
+  {
+    let output = publish_cmd.run();
+    output.assert_exit_code(0);
+    let output = output.combined_output();
+    assert_contains!(output, "main.ts");
+    assert_contains!(output, "vendor.ts");
+  }
+
+  // with vendor
+  {
+    temp_dir.join("deno.json").write_json(&json!({
+      "name": "@foo/bar",
+      "version": "1.0.0",
+      "exports": "./main.ts",
+      "vendor": true,
+    }));
+    let output = publish_cmd.run();
+    output.assert_exit_code(0);
+    let output = output.combined_output();
+    assert_contains!(output, "main.ts");
+    assert_not_contains!(output, "vendor.ts");
+  }
+}
+
 fn publish_context_builder() -> TestContextBuilder {
   TestContextBuilder::new()
     .use_http_server()
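The test's two phases hinge on the mapping from `"vendor": true` in deno.json to a concrete vendor directory path next to the config file, which is what `cli_options.vendor_dir_path()` feeds into the collector. A hedged sketch of that mapping; `vendor_dir_from_config` is a made-up helper, not Deno's API:

use std::path::{Path, PathBuf};

// Hypothetical: with vendoring enabled, the vendor dir sits next to deno.json.
fn vendor_dir_from_config(config_dir: &Path, vendor_enabled: bool) -> Option<PathBuf> {
  vendor_enabled.then(|| config_dir.join("vendor"))
}

fn main() {
  let dir = Path::new("/project");
  assert_eq!(vendor_dir_from_config(dir, false), None);
  assert_eq!(
    vendor_dir_from_config(dir, true),
    Some(PathBuf::from("/project/vendor"))
  );
}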