2024-01-01 14:58:21 -05:00
|
|
|
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
2023-11-23 18:38:07 -05:00
|
|
|
|
2023-12-26 08:32:21 -05:00
|
|
|
use bytes::Bytes;
|
2024-02-06 15:57:10 -05:00
|
|
|
use deno_ast::MediaType;
|
2024-03-07 20:16:32 -05:00
|
|
|
use deno_ast::ModuleSpecifier;
|
2024-01-24 15:30:08 -05:00
|
|
|
use deno_config::glob::FilePatterns;
|
2023-11-23 18:38:07 -05:00
|
|
|
use deno_core::anyhow::Context;
|
|
|
|
use deno_core::error::AnyError;
|
|
|
|
use deno_core::url::Url;
|
2024-01-08 18:51:49 -05:00
|
|
|
use sha2::Digest;
|
2024-01-24 16:24:52 -05:00
|
|
|
use std::collections::HashSet;
|
2024-01-08 18:51:49 -05:00
|
|
|
use std::fmt::Write as FmtWrite;
|
2023-11-23 18:38:07 -05:00
|
|
|
use std::io::Write;
|
2023-12-14 04:55:56 -05:00
|
|
|
use std::path::Path;
|
2023-11-23 18:38:07 -05:00
|
|
|
use tar::Header;
|
|
|
|
|
2024-03-09 20:40:53 -05:00
|
|
|
use crate::args::CliOptions;
|
2024-02-06 15:57:10 -05:00
|
|
|
use crate::cache::LazyGraphSourceParser;
|
2024-01-24 16:24:52 -05:00
|
|
|
use crate::tools::registry::paths::PackagePath;
|
2024-03-07 20:16:32 -05:00
|
|
|
use crate::util::fs::FileCollector;
|
2023-11-23 18:38:07 -05:00
|
|
|
|
2024-01-24 08:49:33 -05:00
|
|
|
use super::diagnostics::PublishDiagnostic;
|
|
|
|
use super::diagnostics::PublishDiagnosticsCollector;
|
2024-02-27 10:13:16 -05:00
|
|
|
use super::unfurl::SpecifierUnfurler;
|
2024-01-24 08:49:33 -05:00
|
|
|
|
2024-01-08 18:51:49 -05:00
|
|
|
/// A single file entry recorded while building a [`PublishableTarball`].
#[derive(Debug, Clone, PartialEq)]
pub struct PublishableTarballFile {
  /// Path of the file inside the package, built from the relative path
  /// components with a leading `/` separator per component.
  pub path_str: String,
  /// `file://` URL of the file on disk.
  pub specifier: Url,
  /// Checksum of the (possibly unfurled) file contents, formatted as
  /// `sha256-<lowercase hex>`.
  pub hash: String,
  /// Size of the (possibly unfurled) file contents in bytes.
  pub size: usize,
}
|
|
|
|
|
|
|
|
/// The finished, gzipped tarball for a package publish, along with
/// per-file metadata and an overall checksum.
#[derive(Debug, Clone, PartialEq)]
pub struct PublishableTarball {
  /// All files included in the tarball, sorted by specifier.
  pub files: Vec<PublishableTarballFile>,
  /// Checksum of `bytes`, formatted as `sha256-<lowercase hex>`.
  pub hash: String,
  /// The gzipped tar archive itself.
  pub bytes: Bytes,
}
|
|
|
|
|
2023-11-23 18:38:07 -05:00
|
|
|
pub fn create_gzipped_tarball(
|
2023-12-14 04:55:56 -05:00
|
|
|
dir: &Path,
|
2024-03-09 20:40:53 -05:00
|
|
|
cli_options: &CliOptions,
|
2024-02-06 15:57:10 -05:00
|
|
|
source_parser: LazyGraphSourceParser,
|
2024-01-24 08:49:33 -05:00
|
|
|
diagnostics_collector: &PublishDiagnosticsCollector,
|
2024-02-27 10:13:16 -05:00
|
|
|
unfurler: &SpecifierUnfurler,
|
2024-01-24 15:30:08 -05:00
|
|
|
file_patterns: Option<FilePatterns>,
|
2024-01-08 18:51:49 -05:00
|
|
|
) -> Result<PublishableTarball, AnyError> {
|
2024-03-07 20:16:32 -05:00
|
|
|
let file_patterns = file_patterns
|
|
|
|
.unwrap_or_else(|| FilePatterns::new_with_base(dir.to_path_buf()));
|
2023-11-23 18:38:07 -05:00
|
|
|
let mut tar = TarGzArchive::new();
|
2024-01-08 18:51:49 -05:00
|
|
|
let mut files = vec![];
|
2023-11-23 18:38:07 -05:00
|
|
|
|
2024-03-07 20:16:32 -05:00
|
|
|
let iter_paths = FileCollector::new(|e| {
|
|
|
|
if !e.file_type.is_file() {
|
|
|
|
if let Ok(specifier) = ModuleSpecifier::from_file_path(e.path) {
|
|
|
|
diagnostics_collector.push(PublishDiagnostic::UnsupportedFileType {
|
|
|
|
specifier,
|
|
|
|
kind: if e.file_type.is_symlink() {
|
|
|
|
"symlink".to_owned()
|
|
|
|
} else {
|
|
|
|
format!("{:?}", e.file_type)
|
|
|
|
},
|
|
|
|
});
|
|
|
|
}
|
|
|
|
return false;
|
2024-02-22 21:26:34 -05:00
|
|
|
}
|
2024-03-07 23:16:19 -05:00
|
|
|
e.path
|
|
|
|
.file_name()
|
|
|
|
.map(|s| s != ".DS_Store" && s != ".gitignore")
|
|
|
|
.unwrap_or(true)
|
2024-03-07 20:16:32 -05:00
|
|
|
})
|
|
|
|
.ignore_git_folder()
|
|
|
|
.ignore_node_modules()
|
2024-03-09 20:40:53 -05:00
|
|
|
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
|
2024-03-07 20:16:32 -05:00
|
|
|
.use_gitignore()
|
|
|
|
.collect_file_patterns(file_patterns)?;
|
2024-02-22 21:26:34 -05:00
|
|
|
|
2024-03-07 20:16:32 -05:00
|
|
|
let mut paths = HashSet::with_capacity(iter_paths.len());
|
2024-01-08 18:51:49 -05:00
|
|
|
|
2024-03-07 20:16:32 -05:00
|
|
|
for path in iter_paths {
|
|
|
|
let Ok(specifier) = Url::from_file_path(&path) else {
|
2024-01-24 16:24:52 -05:00
|
|
|
diagnostics_collector
|
|
|
|
.to_owned()
|
|
|
|
.push(PublishDiagnostic::InvalidPath {
|
|
|
|
path: path.to_path_buf(),
|
|
|
|
message: "unable to convert path to url".to_string(),
|
|
|
|
});
|
|
|
|
continue;
|
|
|
|
};
|
|
|
|
|
2024-03-07 20:16:32 -05:00
|
|
|
let Ok(relative_path) = path.strip_prefix(dir) else {
|
|
|
|
diagnostics_collector
|
|
|
|
.to_owned()
|
|
|
|
.push(PublishDiagnostic::InvalidPath {
|
|
|
|
path: path.to_path_buf(),
|
|
|
|
message: "path is not in publish directory".to_string(),
|
|
|
|
});
|
|
|
|
continue;
|
|
|
|
};
|
2024-01-24 16:24:52 -05:00
|
|
|
|
2024-03-07 20:16:32 -05:00
|
|
|
let path_str =
|
|
|
|
relative_path
|
|
|
|
.components()
|
|
|
|
.fold("".to_string(), |mut path, component| {
|
2024-01-24 16:24:52 -05:00
|
|
|
path.push('/');
|
|
|
|
match component {
|
|
|
|
std::path::Component::Normal(normal) => {
|
|
|
|
path.push_str(&normal.to_string_lossy())
|
|
|
|
}
|
|
|
|
std::path::Component::CurDir => path.push('.'),
|
|
|
|
std::path::Component::ParentDir => path.push_str(".."),
|
|
|
|
_ => unreachable!(),
|
|
|
|
}
|
|
|
|
path
|
2024-03-07 20:16:32 -05:00
|
|
|
});
|
|
|
|
|
|
|
|
match PackagePath::new(path_str.clone()) {
|
|
|
|
Ok(package_path) => {
|
|
|
|
if !paths.insert(package_path) {
|
2024-01-24 16:24:52 -05:00
|
|
|
diagnostics_collector.to_owned().push(
|
2024-03-07 20:16:32 -05:00
|
|
|
PublishDiagnostic::DuplicatePath {
|
2024-01-24 16:24:52 -05:00
|
|
|
path: path.to_path_buf(),
|
|
|
|
},
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
2024-03-07 20:16:32 -05:00
|
|
|
Err(err) => {
|
|
|
|
diagnostics_collector
|
|
|
|
.to_owned()
|
|
|
|
.push(PublishDiagnostic::InvalidPath {
|
|
|
|
path: path.to_path_buf(),
|
|
|
|
message: err.to_string(),
|
|
|
|
});
|
2024-02-29 06:54:57 -05:00
|
|
|
}
|
2024-03-07 20:16:32 -05:00
|
|
|
}
|
2024-02-29 06:54:57 -05:00
|
|
|
|
2024-03-07 20:16:32 -05:00
|
|
|
let content = resolve_content_maybe_unfurling(
|
|
|
|
&path,
|
|
|
|
&specifier,
|
|
|
|
unfurler,
|
|
|
|
source_parser,
|
|
|
|
diagnostics_collector,
|
|
|
|
)?;
|
|
|
|
|
|
|
|
let media_type = MediaType::from_specifier(&specifier);
|
|
|
|
if matches!(media_type, MediaType::Jsx | MediaType::Tsx) {
|
|
|
|
diagnostics_collector.push(PublishDiagnostic::UnsupportedJsxTsx {
|
2024-01-24 16:24:52 -05:00
|
|
|
specifier: specifier.clone(),
|
|
|
|
});
|
2023-11-23 18:38:07 -05:00
|
|
|
}
|
2024-03-07 20:16:32 -05:00
|
|
|
|
|
|
|
files.push(PublishableTarballFile {
|
|
|
|
path_str: path_str.clone(),
|
|
|
|
specifier: specifier.clone(),
|
|
|
|
// This hash string matches the checksum computed by registry
|
|
|
|
hash: format!("sha256-{:x}", sha2::Sha256::digest(&content)),
|
|
|
|
size: content.len(),
|
|
|
|
});
|
|
|
|
tar
|
|
|
|
.add_file(format!(".{}", path_str), &content)
|
|
|
|
.with_context(|| {
|
|
|
|
format!("Unable to add file to tarball '{}'", path.display())
|
|
|
|
})?;
|
2023-11-23 18:38:07 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
let v = tar.finish().context("Unable to finish tarball")?;
|
2024-01-08 18:51:49 -05:00
|
|
|
let hash_bytes: Vec<u8> = sha2::Sha256::digest(&v).iter().cloned().collect();
|
|
|
|
let mut hash = "sha256-".to_string();
|
|
|
|
for byte in hash_bytes {
|
|
|
|
write!(&mut hash, "{:02x}", byte).unwrap();
|
|
|
|
}
|
|
|
|
|
2024-02-20 07:30:34 -05:00
|
|
|
files.sort_by(|a, b| a.specifier.cmp(&b.specifier));
|
|
|
|
|
2024-01-08 18:51:49 -05:00
|
|
|
Ok(PublishableTarball {
|
|
|
|
files,
|
|
|
|
hash,
|
|
|
|
bytes: Bytes::from(v),
|
|
|
|
})
|
2023-11-23 18:38:07 -05:00
|
|
|
}
|
|
|
|
|
2024-02-06 15:57:10 -05:00
|
|
|
/// Reads the file at `path` and, when it is a JS/TS module, runs the
/// specifier unfurler over its source before returning the bytes.
/// Non-module files (JSON, wasm, source maps, unknown types) are
/// returned verbatim. Unfurl problems are reported to
/// `diagnostics_collector`; only read/parse failures return `Err`.
fn resolve_content_maybe_unfurling(
  path: &Path,
  specifier: &Url,
  unfurler: &SpecifierUnfurler,
  source_parser: LazyGraphSourceParser,
  diagnostics_collector: &PublishDiagnosticsCollector,
) -> Result<Vec<u8>, AnyError> {
  // Reuse a source already parsed for the module graph when available,
  // so files aren't read and parsed a second time.
  let parsed_source = match source_parser.get_or_parse_source(specifier)? {
    Some(parsed_source) => parsed_source,
    None => {
      let data = std::fs::read(path)
        .with_context(|| format!("Unable to read file '{}'", path.display()))?;
      let media_type = MediaType::from_specifier(specifier);

      match media_type {
        MediaType::JavaScript
        | MediaType::Jsx
        | MediaType::Mjs
        | MediaType::Cjs
        | MediaType::TypeScript
        | MediaType::Mts
        | MediaType::Cts
        | MediaType::Dts
        | MediaType::Dmts
        | MediaType::Dcts
        | MediaType::Tsx => {
          // continue
        }
        MediaType::SourceMap
        | MediaType::Unknown
        | MediaType::Json
        | MediaType::Wasm
        | MediaType::TsBuildInfo => {
          // not unfurlable data
          return Ok(data);
        }
      }

      let text = String::from_utf8(data)?;
      deno_ast::parse_module(deno_ast::ParseParams {
        specifier: specifier.clone(),
        text_info: deno_ast::SourceTextInfo::from_string(text),
        media_type,
        capture_tokens: false,
        maybe_syntax: None,
        scope_analysis: false,
      })?
    }
  };

  log::debug!("Unfurling {}", specifier);
  let mut reporter = |diagnostic| {
    diagnostics_collector.push(PublishDiagnostic::SpecifierUnfurl(diagnostic));
  };
  let content = unfurler.unfurl(specifier, &parsed_source, &mut reporter);
  Ok(content.into_bytes())
}
|
|
|
|
|
2023-11-23 18:38:07 -05:00
|
|
|
/// In-memory tar archive builder whose `finish` step gzip-compresses
/// the result.
struct TarGzArchive {
  // Uncompressed tar stream accumulated in memory.
  builder: tar::Builder<Vec<u8>>,
}
|
|
|
|
|
|
|
|
impl TarGzArchive {
|
|
|
|
pub fn new() -> Self {
|
|
|
|
Self {
|
|
|
|
builder: tar::Builder::new(Vec::new()),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn add_file(
|
|
|
|
&mut self,
|
|
|
|
path: String,
|
|
|
|
data: &[u8],
|
|
|
|
) -> Result<(), AnyError> {
|
|
|
|
let mut header = Header::new_gnu();
|
|
|
|
header.set_size(data.len() as u64);
|
|
|
|
self.builder.append_data(&mut header, &path, data)?;
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
fn finish(mut self) -> Result<Vec<u8>, AnyError> {
|
|
|
|
self.builder.finish()?;
|
|
|
|
let bytes = self.builder.into_inner()?;
|
|
|
|
let mut gz_bytes = Vec::new();
|
|
|
|
let mut encoder = flate2::write::GzEncoder::new(
|
|
|
|
&mut gz_bytes,
|
|
|
|
flate2::Compression::default(),
|
|
|
|
);
|
|
|
|
encoder.write_all(&bytes)?;
|
|
|
|
encoder.finish()?;
|
|
|
|
Ok(gz_bytes)
|
|
|
|
}
|
|
|
|
}
|