
fix(unstable/tar): skip node_modules, .git, and config "exclude" (#21816)

David Sherret 2024-01-08 18:51:49 -05:00 committed by GitHub
parent ea5c9a3383
commit 0efb17b2cb
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 172 additions and 44 deletions


@@ -1,5 +1,10 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_core::serde_json::json;
use test_util::assert_contains;
use test_util::assert_not_contains;
use test_util::TestContextBuilder;
static TEST_REGISTRY_URL: &str = "http://127.0.0.1:4250";
pub fn env_vars_for_registry() -> Vec<(String, String)> {
@@ -29,3 +34,51 @@ itest!(successful {
http_server: true,
temp_cwd: true,
});
#[test]
fn ignores_directories() {
let context = publish_context_builder().build();
let temp_dir = context.temp_dir().path();
temp_dir.join("deno.json").write_json(&json!({
"name": "@foo/bar",
"version": "1.0.0",
"exclude": [ "ignore" ],
"exports": "main_included.ts"
}));
let ignored_dirs = vec![
temp_dir.join(".git"),
temp_dir.join("node_modules"),
temp_dir.join("ignore"),
];
for ignored_dir in ignored_dirs {
ignored_dir.create_dir_all();
ignored_dir.join("ignored.ts").write("");
}
let sub_dir = temp_dir.join("sub_dir");
sub_dir.create_dir_all();
sub_dir.join("sub_included.ts").write("");
temp_dir.join("main_included.ts").write("");
let output = context
.new_command()
.arg("publish")
.arg("--log-level=debug")
.arg("--token")
.arg("sadfasdf")
.arg(temp_dir)
.run();
output.assert_exit_code(0);
let output = output.combined_output();
assert_contains!(output, "sub_included.ts");
assert_contains!(output, "main_included.ts");
assert_not_contains!(output, "ignored.ts");
}
fn publish_context_builder() -> TestContextBuilder {
TestContextBuilder::new()
.use_http_server()
.envs(env_vars_for_registry())
}
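
The test above covers the config "exclude" entry plus the hard-coded .git and node_modules directories at the package root. Since the tar changes further down check entry.file_name() on every directory they walk, the same skip applies at any depth. A hypothetical follow-up test, reusing the same helpers, could look like this (sketch only, not part of this commit):

#[test]
fn ignores_nested_node_modules() {
  // Hypothetical test: a nested node_modules directory should be pruned by
  // the same file_name() check that handles the top-level one.
  let context = publish_context_builder().build();
  let temp_dir = context.temp_dir().path();
  temp_dir.join("deno.json").write_json(&json!({
    "name": "@foo/bar",
    "version": "1.0.0",
    "exports": "main.ts"
  }));
  temp_dir.join("main.ts").write("");
  let nested_dir = temp_dir.join("sub_dir").join("node_modules");
  nested_dir.create_dir_all();
  nested_dir.join("ignored.ts").write("");
  let output = context
    .new_command()
    .arg("publish")
    .arg("--log-level=debug")
    .arg("--token")
    .arg("sadfasdf")
    .arg(temp_dir)
    .run();
  output.assert_exit_code(0);
  let output = output.combined_output();
  assert_contains!(output, "main.ts");
  assert_not_contains!(output, "ignored.ts");
}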


@@ -1,14 +1,12 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::collections::HashMap;
use std::fmt::Write;
use std::io::IsTerminal;
use std::rc::Rc;
use std::sync::Arc;
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use bytes::Bytes;
use deno_config::ConfigFile;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
@@ -30,6 +28,8 @@ use crate::args::Flags;
use crate::args::PublishFlags;
use crate::factory::CliFactory;
use crate::http_util::HttpClient;
use crate::util::display::human_size;
use crate::util::glob::PathOrPatternSet;
use crate::util::import_map::ImportMapUnfurler;
mod api;
@@ -41,6 +41,8 @@ use auth::get_auth_method;
use auth::AuthMethod;
use publish_order::PublishOrderGraph;
use self::tar::PublishableTarball;
fn ring_bell() {
// ASCII code for the bell character.
print!("\x07");
@@ -50,9 +52,13 @@ struct PreparedPublishPackage {
scope: String,
package: String,
version: String,
tarball_hash: String,
tarball: Bytes,
diagnostics: Vec<String>,
tarball: PublishableTarball,
}
impl PreparedPublishPackage {
pub fn display_name(&self) -> String {
format!("@{}/{}@{}", self.scope, self.package, self.version)
}
}
static SUGGESTED_ENTRYPOINTS: [&str; 4] =
@@ -104,28 +110,23 @@ async fn prepare_publish(
let Some((scope, package_name)) = name.split_once('/') else {
bail!("Invalid package name, use '@<scope_name>/<package_name> format");
};
let exclude_patterns = deno_json.to_files_config().and_then(|files| {
PathOrPatternSet::from_absolute_paths(files.unwrap_or_default().exclude)
.context("Invalid config file exclude pattern.")
})?;
let (tarball, diagnostics) = deno_core::unsync::spawn_blocking(move || {
let tarball = deno_core::unsync::spawn_blocking(move || {
let unfurler = ImportMapUnfurler::new(&import_map);
tar::create_gzipped_tarball(&dir_path, unfurler)
tar::create_gzipped_tarball(&dir_path, &unfurler, &exclude_patterns)
.context("Failed to create a tarball")
})
.await??;
let tarball_hash_bytes: Vec<u8> =
sha2::Sha256::digest(&tarball).iter().cloned().collect();
let mut tarball_hash = "sha256-".to_string();
for byte in tarball_hash_bytes {
write!(&mut tarball_hash, "{:02x}", byte).unwrap();
}
Ok(Rc::new(PreparedPublishPackage {
scope: scope.to_string(),
package: package_name.to_string(),
version: version.to_string(),
tarball_hash,
tarball,
diagnostics,
}))
}
@@ -155,7 +156,7 @@ pub enum Permission<'s> {
/// └╌╌ at file:///dev/foo/bar/foo.ts:4:5
///
/// ```
fn print_diagnostics(diagnostics: Vec<String>) {
fn print_diagnostics(diagnostics: &[String]) {
if !diagnostics.is_empty() {
let len = diagnostics.len();
log::warn!("");
@@ -194,7 +195,7 @@ async fn get_auth_headers(
scope: &package.scope,
package: &package.package,
version: &package.version,
tarball_hash: &package.tarball_hash,
tarball_hash: &package.tarball.hash,
})
.collect::<Vec<_>>();
@@ -461,9 +462,9 @@ async fn perform_publish(
.collect::<Vec<_>>();
let diagnostics = packages
.iter()
.flat_map(|p| p.diagnostics.clone())
.flat_map(|p| p.tarball.diagnostics.iter().cloned())
.collect::<Vec<_>>();
print_diagnostics(diagnostics);
print_diagnostics(&diagnostics);
ensure_scopes_and_packages_exist(
client,
@@ -484,6 +485,19 @@ async fn perform_publish(
for package_name in next_batch {
let package = prepared_package_by_name.remove(&package_name).unwrap();
// todo(dsherret): output something that looks better than this even not in debug
if log::log_enabled!(log::Level::Debug) {
log::debug!("Publishing {}", package.display_name());
for file in &package.tarball.files {
log::debug!(
" Tarball file {} {}",
human_size(file.size as f64),
file.path.display()
);
}
}
let authorization = authorizations
.remove(&(
package.scope.clone(),
@@ -495,8 +509,7 @@ async fn perform_publish(
let registry_url = registry_url.clone();
let http_client = http_client.clone();
futures.spawn(async move {
let display_name =
format!("@{}/{}@{}", package.scope, package.package, package.version);
let display_name = package.display_name();
publish_package(
&http_client,
package,
@@ -548,7 +561,7 @@ async fn publish_package(
.post(url)
.header(reqwest::header::AUTHORIZATION, authorization)
.header(reqwest::header::CONTENT_ENCODING, "gzip")
.body(package.tarball.clone())
.body(package.tarball.bytes.clone())
.send()
.await?;
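
For orientation: the exclude_patterns handed to tar::create_gzipped_tarball above are built from the config file's "exclude" array via PathOrPatternSet::from_absolute_paths, and the tar code consults matches_path for every walked entry. A minimal std-only stand-in that illustrates the matching contract (this is not Deno's crate::util::glob::PathOrPatternSet and ignores glob syntax):

use std::path::{Path, PathBuf};

/// Simplified stand-in: a set of excluded paths rooted at the package directory.
struct ExcludeSet {
  roots: Vec<PathBuf>,
}

impl ExcludeSet {
  /// Resolve config "exclude" entries (e.g. ["ignore"]) against the package root.
  fn from_config(package_root: &Path, exclude: &[&str]) -> Self {
    Self {
      roots: exclude.iter().map(|e| package_root.join(e)).collect(),
    }
  }

  /// True if `path` is an excluded root or lives underneath one.
  fn matches_path(&self, path: &Path) -> bool {
    self.roots.iter().any(|root| path.starts_with(root))
  }
}

fn main() {
  let excludes = ExcludeSet::from_config(Path::new("/pkg"), &["ignore"]);
  assert!(excludes.matches_path(Path::new("/pkg/ignore/ignored.ts")));
  assert!(!excludes.matches_path(Path::new("/pkg/main_included.ts")));
}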


@@ -5,59 +5,108 @@ use deno_core::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::url::Url;
use sha2::Digest;
use std::fmt::Write as FmtWrite;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use tar::Header;
use crate::util::glob::PathOrPatternSet;
use crate::util::import_map::ImportMapUnfurler;
#[derive(Debug, Clone, PartialEq)]
pub struct PublishableTarballFile {
pub path: PathBuf,
pub size: usize,
}
#[derive(Debug, Clone, PartialEq)]
pub struct PublishableTarball {
pub files: Vec<PublishableTarballFile>,
pub diagnostics: Vec<String>,
pub hash: String,
pub bytes: Bytes,
}
pub fn create_gzipped_tarball(
dir: &Path,
// TODO(bartlomieju): this is too specific, factor it out into a callback that
// returns data
unfurler: ImportMapUnfurler,
) -> Result<(Bytes, Vec<String>), AnyError> {
unfurler: &ImportMapUnfurler,
exclude_patterns: &PathOrPatternSet,
) -> Result<PublishableTarball, AnyError> {
let mut tar = TarGzArchive::new();
let dir = dir
.canonicalize()
.map_err(|_| anyhow::anyhow!("Unable to canonicalize path {:?}", dir))?;
let mut diagnostics = vec![];
let mut files = vec![];
for entry in walkdir::WalkDir::new(&dir).follow_links(false) {
let mut iterator = walkdir::WalkDir::new(dir).follow_links(false).into_iter();
while let Some(entry) = iterator.next() {
let entry = entry?;
if exclude_patterns.matches_path(entry.path()) {
if entry.file_type().is_dir() {
iterator.skip_current_dir();
}
continue;
}
if entry.file_type().is_file() {
let url = Url::from_file_path(entry.path())
.map_err(|_| anyhow::anyhow!("Unable to convert path to url"))?;
let relative_path = entry
.path()
.strip_prefix(&dir)
.map_err(|err| anyhow::anyhow!("Unable to strip prefix: {err}"))?;
let relative_path = relative_path.to_str().ok_or_else(|| {
anyhow::anyhow!("Unable to convert path to string {:?}", relative_path)
.strip_prefix(dir)
.map_err(|err| anyhow::anyhow!("Unable to strip prefix: {err:#}"))?;
let relative_path_str = relative_path.to_str().ok_or_else(|| {
anyhow::anyhow!(
"Unable to convert path to string '{}'",
relative_path.display()
)
})?;
let data = std::fs::read(entry.path())
.with_context(|| format!("Unable to read file {:?}", entry.path()))?;
let (content, unfurl_diagnostics) = unfurler
.unfurl(&url, data)
.with_context(|| format!("Unable to unfurl file {:?}", entry.path()))?;
let data = std::fs::read(entry.path()).with_context(|| {
format!("Unable to read file '{}'", entry.path().display())
})?;
files.push(PublishableTarballFile {
path: relative_path.to_path_buf(),
size: data.len(),
});
let (content, unfurl_diagnostics) =
unfurler.unfurl(&url, data).with_context(|| {
format!("Unable to unfurl file '{}'", entry.path().display())
})?;
diagnostics.extend_from_slice(&unfurl_diagnostics);
tar
.add_file(relative_path.to_string(), &content)
.add_file(relative_path_str.to_string(), &content)
.with_context(|| {
format!("Unable to add file to tarball {:?}", entry.path())
format!("Unable to add file to tarball '{}'", entry.path().display())
})?;
} else if entry.file_type().is_dir() {
// skip
if entry.file_name() == ".git" || entry.file_name() == "node_modules" {
iterator.skip_current_dir();
}
} else {
diagnostics
.push(format!("Unsupported file type at path {:?}", entry.path()));
diagnostics.push(format!(
"Unsupported file type at path '{}'",
entry.path().display()
));
}
}
let v = tar.finish().context("Unable to finish tarball")?;
Ok((Bytes::from(v), diagnostics))
let hash_bytes: Vec<u8> = sha2::Sha256::digest(&v).iter().cloned().collect();
let mut hash = "sha256-".to_string();
for byte in hash_bytes {
write!(&mut hash, "{:02x}", byte).unwrap();
}
Ok(PublishableTarball {
files,
diagnostics,
hash,
bytes: Bytes::from(v),
})
}
struct TarGzArchive {
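
The pruning above relies on walkdir's IntoIter::skip_current_dir(), which stops the walk from descending into the most recently yielded directory. A self-contained sketch of just that pattern, assuming the walkdir crate and leaving out the tarball, hashing, and unfurling steps:

use std::path::{Path, PathBuf};

/// Collect files under `dir`, pruning .git and node_modules subtrees.
fn collect_publishable_files(dir: &Path) -> Result<Vec<PathBuf>, walkdir::Error> {
  let mut files = Vec::new();
  let mut iterator = walkdir::WalkDir::new(dir).follow_links(false).into_iter();
  while let Some(entry) = iterator.next() {
    let entry = entry?;
    if entry.file_type().is_dir() {
      if entry.file_name() == ".git" || entry.file_name() == "node_modules" {
        // Nothing beneath this directory will be yielded.
        iterator.skip_current_dir();
      }
    } else if entry.file_type().is_file() {
      files.push(entry.path().to_path_buf());
    }
  }
  Ok(files)
}

fn main() -> Result<(), walkdir::Error> {
  for path in collect_publishable_files(Path::new("."))? {
    println!("{}", path.display());
  }
  Ok(())
}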


@@ -130,6 +130,19 @@ impl TestContextBuilder {
self
}
pub fn envs<I, K, V>(self, vars: I) -> Self
where
I: IntoIterator<Item = (K, V)>,
K: AsRef<str>,
V: AsRef<str>,
{
let mut this = self;
for (key, value) in vars {
this = this.env(key, value);
}
this
}
pub fn env(mut self, key: impl AsRef<str>, value: impl AsRef<str>) -> Self {
self
.envs
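
The new envs helper simply folds the iterator into repeated env() calls; it is what lets publish_context_builder() in the test file above configure the registry env vars in a single builder chain. Usage sketch (inside a test, as above):

// Mirrors publish_context_builder() in the publish test above.
let context = TestContextBuilder::new()
  .use_http_server()
  .envs(env_vars_for_registry())
  .build();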