// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use bytes::Bytes;
use deno_config::glob::PathOrPatternSet;
use deno_core::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::url::Url;
use sha2::Digest;
use std::fmt::Write as FmtWrite;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use tar::Header;

use crate::util::import_map::ImportMapUnfurler;
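
/// A single file recorded in the tarball: its path relative to the package
/// root and its size in bytes.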
#[derive(Debug, Clone, PartialEq)]
pub struct PublishableTarballFile {
  pub path: PathBuf,
  pub size: usize,
}
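
/// A gzipped tarball ready for publishing, along with the list of included
/// files, any diagnostics collected while building it, and a
/// "sha256-<hex>" digest of the compressed bytes.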
#[derive(Debug, Clone, PartialEq)]
pub struct PublishableTarball {
  pub files: Vec<PublishableTarballFile>,
  pub diagnostics: Vec<String>,
  pub hash: String,
  pub bytes: Bytes,
}
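
/// Walks `dir` (without following symlinks), unfurls each file with the
/// provided `ImportMapUnfurler`, and packs the results into a gzipped
/// tarball. Paths matching `exclude_patterns`, as well as `.git` and
/// `node_modules` directories, are skipped.
///
/// A rough usage sketch (not compiled as a doc test; it assumes the caller
/// has already built `unfurler` and `exclude_patterns`):
///
/// ```ignore
/// let tarball = create_gzipped_tarball(&package_dir, &unfurler, &exclude_patterns)?;
/// println!("{} files, hash {}", tarball.files.len(), tarball.hash);
/// ```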
pub fn create_gzipped_tarball(
  dir: &Path,
  // TODO(bartlomieju): this is too specific, factor it out into a callback that
  // returns data
  unfurler: &ImportMapUnfurler,
  exclude_patterns: &PathOrPatternSet,
) -> Result<PublishableTarball, AnyError> {
  let mut tar = TarGzArchive::new();
  let mut diagnostics = vec![];
  let mut files = vec![];

  let mut iterator = walkdir::WalkDir::new(dir).follow_links(false).into_iter();
  while let Some(entry) = iterator.next() {
    let entry = entry?;
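
    // Skip anything matched by the exclude patterns; for an excluded
    // directory, skip its whole subtree.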
    if exclude_patterns.matches_path(entry.path()) {
      if entry.file_type().is_dir() {
        iterator.skip_current_dir();
      }
      continue;
    }
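
    // Regular files are read from disk, run through the unfurler (which may
    // emit diagnostics), and then added to the archive under their path
    // relative to `dir`.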
    if entry.file_type().is_file() {
      let url = Url::from_file_path(entry.path())
        .map_err(|_| anyhow::anyhow!("Unable to convert path to url"))?;
      let relative_path = entry
        .path()
        .strip_prefix(dir)
        .map_err(|err| anyhow::anyhow!("Unable to strip prefix: {err:#}"))?;
      let relative_path_str = relative_path.to_str().ok_or_else(|| {
        anyhow::anyhow!(
          "Unable to convert path to string '{}'",
          relative_path.display()
        )
      })?;
      let data = std::fs::read(entry.path()).with_context(|| {
        format!("Unable to read file '{}'", entry.path().display())
      })?;
      files.push(PublishableTarballFile {
        path: relative_path.to_path_buf(),
        size: data.len(),
      });
      let (content, unfurl_diagnostics) =
        unfurler.unfurl(&url, data).with_context(|| {
          format!("Unable to unfurl file '{}'", entry.path().display())
        })?;

      diagnostics.extend_from_slice(&unfurl_diagnostics);
      tar
        .add_file(relative_path_str.to_string(), &content)
        .with_context(|| {
          format!("Unable to add file to tarball '{}'", entry.path().display())
        })?;
    } else if entry.file_type().is_dir() {
      if entry.file_name() == ".git" || entry.file_name() == "node_modules" {
        iterator.skip_current_dir();
      }
    } else {
      diagnostics.push(format!(
        "Unsupported file type at path '{}'",
        entry.path().display()
      ));
    }
  }
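
  // Gzip the archive, then hash the compressed bytes into the
  // "sha256-<hex>" string stored on the returned `PublishableTarball`.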
  let v = tar.finish().context("Unable to finish tarball")?;
  let hash_bytes: Vec<u8> = sha2::Sha256::digest(&v).iter().cloned().collect();
  let mut hash = "sha256-".to_string();
  for byte in hash_bytes {
    write!(&mut hash, "{:02x}", byte).unwrap();
  }

  Ok(PublishableTarball {
    files,
    diagnostics,
    hash,
    bytes: Bytes::from(v),
  })
}
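
/// In-memory tar builder that gzips the archive when `finish` is called.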
struct TarGzArchive {
  builder: tar::Builder<Vec<u8>>,
}

impl TarGzArchive {
  pub fn new() -> Self {
    Self {
      builder: tar::Builder::new(Vec::new()),
    }
  }
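
  /// Appends `data` under `path` using a fresh GNU tar header sized to the
  /// data.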
  pub fn add_file(
    &mut self,
    path: String,
    data: &[u8],
  ) -> Result<(), AnyError> {
    let mut header = Header::new_gnu();
    header.set_size(data.len() as u64);
    self.builder.append_data(&mut header, &path, data)?;
    Ok(())
  }
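
  /// Finalizes the tar stream and returns the gzip-compressed archive bytes.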
  fn finish(mut self) -> Result<Vec<u8>, AnyError> {
    self.builder.finish()?;
    let bytes = self.builder.into_inner()?;
    let mut gz_bytes = Vec::new();
    let mut encoder = flate2::write::GzEncoder::new(
      &mut gz_bytes,
      flate2::Compression::default(),
    );
    encoder.write_all(&bytes)?;
    encoder.finish()?;
    Ok(gz_bytes)
  }
}