// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::collections::HashMap;
use std::collections::HashSet;
use std::io::IsTerminal;
use std::path::Path;
use std::path::PathBuf;
use std::process::Stdio;
use std::rc::Rc;
use std::sync::Arc;

use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use deno_ast::ModuleSpecifier;
use deno_config::workspace::JsrPackageConfig;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::Workspace;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::future::LocalBoxFuture;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::serde_json::Value;
use deno_terminal::colors;
use http_body_util::BodyExt;
use lsp_types::Url;
use serde::Deserialize;
use serde::Serialize;
use sha2::Digest;
use tokio::process::Command;

use crate::args::jsr_api_url;
use crate::args::jsr_url;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::PublishFlags;
use crate::cache::LazyGraphSourceParser;
use crate::cache::ParsedSourceCache;
use crate::factory::CliFactory;
use crate::graph_util::ModuleGraphCreator;
use crate::http_util::HttpClient;
use crate::resolver::SloppyImportsResolver;
use crate::tools::check::CheckOptions;
use crate::tools::lint::collect_no_slow_type_diagnostics;
use crate::tools::registry::diagnostics::PublishDiagnostic;
use crate::tools::registry::diagnostics::PublishDiagnosticsCollector;
use crate::util::display::human_size;

mod api;
mod auth;
mod diagnostics;
mod graph;
mod paths;
mod pm;
mod provenance;
mod publish_order;
mod tar;
mod unfurl;

use auth::get_auth_method;
use auth::AuthMethod;
pub use pm::add;
use publish_order::PublishOrderGraph;
use unfurl::SpecifierUnfurler;

use super::check::TypeChecker;

use self::graph::GraphDiagnosticsCollector;
use self::paths::CollectedPublishPath;
use self::tar::PublishableTarball;
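
/// Entry point for `deno publish`: resolves the JSR packages to publish from
/// the workspace, prepares and validates their tarballs, and uploads them to
/// the registry in dependency order.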
pub async fn publish(
  flags: Arc<Flags>,
  publish_flags: PublishFlags,
) -> Result<(), AnyError> {
  let cli_factory = CliFactory::from_flags(flags);

  let auth_method =
    get_auth_method(publish_flags.token, publish_flags.dry_run)?;

  let cli_options = cli_factory.cli_options()?;
  let directory_path = cli_options.initial_cwd();
  let publish_configs = cli_options.start_dir.jsr_packages_for_publish();
  if publish_configs.is_empty() {
    match cli_options.start_dir.maybe_deno_json() {
      Some(deno_json) => {
        debug_assert!(!deno_json.is_package());
        bail!(
          "Missing 'name', 'version' and 'exports' fields in '{}'.",
          deno_json.specifier
        );
      }
      None => {
        bail!(
          "Couldn't find a deno.json, deno.jsonc, jsr.json or jsr.jsonc configuration file in {}.",
          directory_path.display()
        );
      }
    }
  }
  let specifier_unfurler = Arc::new(SpecifierUnfurler::new(
    if cli_options.unstable_sloppy_imports() {
      Some(SloppyImportsResolver::new(cli_factory.fs().clone()))
    } else {
      None
    },
    cli_options
      .create_workspace_resolver(
        cli_factory.file_fetcher()?,
        PackageJsonDepResolution::Enabled,
      )
      .await?,
    cli_options.unstable_bare_node_builtins(),
  ));

  let diagnostics_collector = PublishDiagnosticsCollector::default();
  let publish_preparer = PublishPreparer::new(
    GraphDiagnosticsCollector::new(cli_factory.parsed_source_cache().clone()),
    cli_factory.module_graph_creator().await?.clone(),
    cli_factory.parsed_source_cache().clone(),
    cli_factory.type_checker().await?.clone(),
    cli_options.clone(),
    specifier_unfurler,
  );

  let prepared_data = publish_preparer
    .prepare_packages_for_publishing(
      publish_flags.allow_slow_types,
      &diagnostics_collector,
      publish_configs,
    )
    .await?;

  diagnostics_collector.print_and_error()?;

  if prepared_data.package_by_name.is_empty() {
    bail!("No packages to publish");
  }

  if std::env::var("DENO_TESTING_DISABLE_GIT_CHECK")
    .ok()
    .is_none()
    && !publish_flags.allow_dirty
  {
    if let Some(dirty_text) =
      check_if_git_repo_dirty(cli_options.initial_cwd()).await
    {
      log::error!("\nUncommitted changes:\n\n{}\n", dirty_text);
      bail!("Aborting due to uncommitted changes. Check in source code or run with --allow-dirty");
    }
  }

  if publish_flags.dry_run {
    for (_, package) in prepared_data.package_by_name {
      log::info!(
        "{} of {} with files:",
        colors::green_bold("Simulating publish"),
        colors::gray(package.display_name()),
      );
      for file in &package.tarball.files {
        log::info!(" {} ({})", file.specifier, human_size(file.size as f64),);
      }
    }
    log::warn!("{} Aborting due to --dry-run", colors::yellow("Warning"));
    return Ok(());
  }

  perform_publish(
    &cli_factory.http_client_provider().get_or_create()?,
    prepared_data.publish_order_graph,
    prepared_data.package_by_name,
    auth_method,
    !publish_flags.no_provenance,
  )
  .await?;

  Ok(())
}
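
/// A package whose tarball has been built and validated and which is ready
/// to be uploaded to the registry.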
struct PreparedPublishPackage {
  scope: String,
  package: String,
  version: String,
  tarball: PublishableTarball,
  config: String,
  exports: HashMap<String, String>,
}

impl PreparedPublishPackage {
  pub fn display_name(&self) -> String {
    format!("@{}/{}@{}", self.scope, self.package, self.version)
  }
}

struct PreparePackagesData {
  publish_order_graph: PublishOrderGraph,
  package_by_name: HashMap<String, Rc<PreparedPublishPackage>>,
}
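
/// Builds and checks the publish module graph, then prepares each workspace
/// member as a `PreparedPublishPackage`.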
struct PublishPreparer {
  graph_diagnostics_collector: GraphDiagnosticsCollector,
  module_graph_creator: Arc<ModuleGraphCreator>,
  source_cache: Arc<ParsedSourceCache>,
  type_checker: Arc<TypeChecker>,
  cli_options: Arc<CliOptions>,
  specifier_unfurler: Arc<SpecifierUnfurler>,
}

impl PublishPreparer {
  pub fn new(
    graph_diagnostics_collector: GraphDiagnosticsCollector,
    module_graph_creator: Arc<ModuleGraphCreator>,
    source_cache: Arc<ParsedSourceCache>,
    type_checker: Arc<TypeChecker>,
    cli_options: Arc<CliOptions>,
    specifier_unfurler: Arc<SpecifierUnfurler>,
  ) -> Self {
    Self {
      graph_diagnostics_collector,
      module_graph_creator,
      source_cache,
      type_checker,
      cli_options,
      specifier_unfurler,
    }
  }

  pub async fn prepare_packages_for_publishing(
    &self,
    allow_slow_types: bool,
    diagnostics_collector: &PublishDiagnosticsCollector,
    publish_configs: Vec<JsrPackageConfig>,
  ) -> Result<PreparePackagesData, AnyError> {
    if publish_configs.len() > 1 {
      log::info!("Publishing a workspace...");
    }

    // create the module graph
    let graph = self
      .build_and_check_graph_for_publish(
        allow_slow_types,
        diagnostics_collector,
        &publish_configs,
      )
      .await?;

    let mut package_by_name = HashMap::with_capacity(publish_configs.len());
    let publish_order_graph =
      publish_order::build_publish_order_graph(&graph, &publish_configs)?;

    let results = publish_configs
      .into_iter()
      .map(|member| {
        let graph = graph.clone();
        async move {
          let package = self
            .prepare_publish(&member, graph, diagnostics_collector)
            .await
            .with_context(|| format!("Failed preparing '{}'.", member.name))?;
          Ok::<_, AnyError>((member.name, package))
        }
        .boxed()
      })
      .collect::<Vec<_>>();
    let results = deno_core::futures::future::join_all(results).await;
    for result in results {
      let (package_name, package) = result?;
      package_by_name.insert(package_name, package);
    }
    Ok(PreparePackagesData {
      publish_order_graph,
      package_by_name,
    })
  }
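
  /// Creates and validates the publish module graph, collects graph
  /// diagnostics, and then either warns about slow types or fast-checks
  /// (and type checks) the public API, depending on `allow_slow_types`.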
  async fn build_and_check_graph_for_publish(
    &self,
    allow_slow_types: bool,
    diagnostics_collector: &PublishDiagnosticsCollector,
    package_configs: &[JsrPackageConfig],
  ) -> Result<Arc<deno_graph::ModuleGraph>, deno_core::anyhow::Error> {
    let build_fast_check_graph = !allow_slow_types;
    let graph = self
      .module_graph_creator
      .create_and_validate_publish_graph(
        package_configs,
        build_fast_check_graph,
      )
      .await?;

    // todo(dsherret): move to lint rule
    self
      .graph_diagnostics_collector
      .collect_diagnostics_for_graph(&graph, diagnostics_collector)?;

    if allow_slow_types {
      log::info!(
        concat!(
          "{} Publishing a library with slow types is not recommended. ",
          "This may lead to poor type checking performance for users of ",
          "your package, may affect the quality of automatic documentation ",
          "generation, and your package will not be shipped with a .d.ts ",
          "file for Node.js users."
        ),
        colors::yellow("Warning"),
      );
      Ok(Arc::new(graph))
    } else if std::env::var("DENO_INTERNAL_FAST_CHECK_OVERWRITE").as_deref()
      == Ok("1")
    {
      if check_if_git_repo_dirty(self.cli_options.initial_cwd())
        .await
        .is_some()
      {
        bail!("When using DENO_INTERNAL_FAST_CHECK_OVERWRITE, the git repo must be in a clean state.");
      }

      for module in graph.modules() {
        if module.specifier().scheme() != "file" {
          continue;
        }
        let Some(js) = module.js() else {
          continue;
        };
        if let Some(module) = js.fast_check_module() {
          std::fs::write(
            js.specifier.to_file_path().unwrap(),
            module.source.as_ref(),
          )?;
        }
      }

      bail!("Exiting due to DENO_INTERNAL_FAST_CHECK_OVERWRITE")
    } else {
      log::info!("Checking for slow types in the public API...");
      let mut any_pkg_had_diagnostics = false;
      for package in package_configs {
        let export_urls = package.config_file.resolve_export_value_urls()?;
        let diagnostics =
          collect_no_slow_type_diagnostics(&graph, &export_urls);
        if !diagnostics.is_empty() {
          any_pkg_had_diagnostics = true;
          for diagnostic in diagnostics {
            diagnostics_collector
              .push(PublishDiagnostic::FastCheck(diagnostic));
          }
        }
      }

      if any_pkg_had_diagnostics {
        Ok(Arc::new(graph))
      } else {
        // fast check passed, type check the output as a temporary measure
        // until we know that it's reliable and stable
        let (graph, check_diagnostics) = self
          .type_checker
          .check_diagnostics(
            graph,
            CheckOptions {
              build_fast_check_graph: false, // already built
              lib: self.cli_options.ts_type_lib_window(),
              log_ignored_options: false,
              reload: self.cli_options.reload_flag(),
              type_check_mode: self.cli_options.type_check_mode(),
            },
          )
          .await?;
        // ignore unused parameter diagnostics that may occur due to fast check
        // not having function body implementations
        let check_diagnostics =
          check_diagnostics.filter(|d| d.include_when_remote());
        if !check_diagnostics.is_empty() {
          bail!(
            concat!(
              "Failed ensuring public API type output is valid.\n\n",
              "{:#}\n\n",
              "You may have discovered a bug in Deno. Please open an issue at: ",
              "https://github.com/denoland/deno/issues/"
            ),
            check_diagnostics
          );
        }
        Ok(graph)
      }
    }
  }
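
  /// Collects the publish paths for a single package (forcing inclusion of
  /// the config and license files), reports diagnostics for excluded modules,
  /// and builds the gzipped tarball on a blocking thread.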
  #[allow(clippy::too_many_arguments)]
  async fn prepare_publish(
    &self,
    package: &JsrPackageConfig,
    graph: Arc<deno_graph::ModuleGraph>,
    diagnostics_collector: &PublishDiagnosticsCollector,
  ) -> Result<Rc<PreparedPublishPackage>, AnyError> {
    static SUGGESTED_ENTRYPOINTS: [&str; 4] =
      ["mod.ts", "mod.js", "index.ts", "index.js"];

    let deno_json = &package.config_file;
    let config_path = deno_json.specifier.to_file_path().unwrap();
    let root_dir = config_path.parent().unwrap().to_path_buf();
    let Some(version) = deno_json.json.version.clone() else {
      bail!("{} is missing the 'version' field", deno_json.specifier);
    };
    if deno_json.json.exports.is_none() {
      let mut suggested_entrypoint = None;

      for entrypoint in SUGGESTED_ENTRYPOINTS {
        if root_dir.join(entrypoint).exists() {
          suggested_entrypoint = Some(entrypoint);
          break;
        }
      }

      let exports_content = format!(
        r#"{{
  "name": "{}",
  "version": "{}",
  "exports": "{}"
}}"#,
        package.name,
        version,
        suggested_entrypoint.unwrap_or("<path_to_entrypoint>")
      );

      bail!(
        "You did not specify an entrypoint for the \"{}\" package in {}. Add an `exports` mapping to the configuration file, e.g.:\n{}",
        package.name,
        deno_json.specifier,
        exports_content
      );
    }
    let Some(name_no_at) = package.name.strip_prefix('@') else {
      bail!("Invalid package name, use the '@<scope_name>/<package_name>' format");
    };
    let Some((scope, name_no_scope)) = name_no_at.split_once('/') else {
      bail!("Invalid package name, use the '@<scope_name>/<package_name>' format");
    };
    let file_patterns = package.member_dir.to_publish_config()?.files;

    let tarball = deno_core::unsync::spawn_blocking({
      let diagnostics_collector = diagnostics_collector.clone();
      let unfurler = self.specifier_unfurler.clone();
      let cli_options = self.cli_options.clone();
      let source_cache = self.source_cache.clone();
      let config_path = config_path.clone();
      move || {
        let root_specifier =
          ModuleSpecifier::from_directory_path(&root_dir).unwrap();
        let mut publish_paths =
          paths::collect_publish_paths(paths::CollectPublishPathsOptions {
            root_dir: &root_dir,
            cli_options: &cli_options,
            diagnostics_collector: &diagnostics_collector,
            file_patterns,
            force_include_paths: vec![config_path],
          })?;
        collect_excluded_module_diagnostics(
          &root_specifier,
          &graph,
          &publish_paths,
          &diagnostics_collector,
        );

        if !has_license_file(publish_paths.iter().map(|p| &p.specifier)) {
          if let Some(license_path) =
            resolve_license_file(&root_dir, cli_options.workspace())
          {
            // force including the license file from the package or workspace root
            publish_paths.push(CollectedPublishPath {
              specifier: ModuleSpecifier::from_file_path(&license_path)
                .unwrap(),
              relative_path: "LICENSE".to_string(),
              maybe_content: Some(std::fs::read(&license_path).with_context(
                || format!("failed reading '{}'.", license_path.display()),
              )?),
              path: license_path,
            });
          } else {
            diagnostics_collector.push(PublishDiagnostic::MissingLicense {
              expected_path: root_dir.join("LICENSE"),
            });
          }
        }

        tar::create_gzipped_tarball(
          publish_paths,
          LazyGraphSourceParser::new(&source_cache, &graph),
          &diagnostics_collector,
          &unfurler,
        )
        .context("Failed to create a tarball")
      }
    })
    .await??;

    log::debug!("Tarball size ({}): {}", package.name, tarball.bytes.len());

    Ok(Rc::new(PreparedPublishPackage {
      scope: scope.to_string(),
      package: name_no_scope.to_string(),
      version: version.to_string(),
      tarball,
      exports: match &deno_json.json.exports {
        Some(Value::Object(exports)) => exports
          .into_iter()
          .map(|(k, v)| (k.to_string(), v.as_str().unwrap().to_string()))
          .collect(),
        Some(Value::String(exports)) => {
          let mut map = HashMap::new();
          map.insert(".".to_string(), exports.to_string());
          map
        }
        _ => HashMap::new(),
      },
      // the config file is always at the root of a publishing dir,
      // so getting the file name is always correct
      config: config_path
        .file_name()
        .unwrap()
        .to_string_lossy()
        .to_string(),
    }))
  }
}

#[derive(Serialize)]
#[serde(tag = "permission")]
pub enum Permission<'s> {
  #[serde(rename = "package/publish", rename_all = "camelCase")]
  VersionPublish {
    scope: &'s str,
    package: &'s str,
    version: &'s str,
    tarball_hash: &'s str,
  },
}
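
/// Resolves an authorization header for every package, keyed by
/// `(scope, package, version)`, using the interactive browser flow, a
/// user-supplied token, or a GitHub Actions OIDC token.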
async fn get_auth_headers(
  client: &HttpClient,
  registry_url: &Url,
  packages: &[Rc<PreparedPublishPackage>],
  auth_method: AuthMethod,
) -> Result<HashMap<(String, String, String), Rc<str>>, AnyError> {
  let permissions = packages
    .iter()
    .map(|package| Permission::VersionPublish {
      scope: &package.scope,
      package: &package.package,
      version: &package.version,
      tarball_hash: &package.tarball.hash,
    })
    .collect::<Vec<_>>();

  let mut authorizations = HashMap::with_capacity(packages.len());

  match auth_method {
    AuthMethod::Interactive => {
      let verifier = uuid::Uuid::new_v4().to_string();
      let challenge = BASE64_STANDARD.encode(sha2::Sha256::digest(&verifier));

      let response = client
        .post_json(
          format!("{}authorizations", registry_url).parse()?,
          &serde_json::json!({
            "challenge": challenge,
            "permissions": permissions,
          }),
        )?
        .send()
        .await
        .context("Failed to create interactive authorization")?;
      let auth =
        api::parse_response::<api::CreateAuthorizationResponse>(response)
          .await
          .context("Failed to create interactive authorization")?;

      let auth_url = format!("{}?code={}", auth.verification_url, auth.code);
      let pkgs_text = if packages.len() > 1 {
        format!("{} packages", packages.len())
      } else {
        format!("@{}/{}", packages[0].scope, packages[0].package)
      };
      log::warn!(
        "Visit {} to authorize publishing of {}",
        colors::cyan(&auth_url),
        pkgs_text,
      );

      ring_bell();
      log::info!("{}", colors::gray("Waiting..."));
      let _ = open::that_detached(&auth_url);

      let interval = std::time::Duration::from_secs(auth.poll_interval);

      loop {
        tokio::time::sleep(interval).await;
        let response = client
          .post_json(
            format!("{}authorizations/exchange", registry_url).parse()?,
            &serde_json::json!({
              "exchangeToken": auth.exchange_token,
              "verifier": verifier,
            }),
          )?
          .send()
          .await
          .context("Failed to exchange authorization")?;
        let res =
          api::parse_response::<api::ExchangeAuthorizationResponse>(response)
            .await;
        match res {
          Ok(res) => {
            log::info!(
              "{} {} {}",
              colors::green("Authorization successful."),
              colors::gray("Authenticated as"),
              colors::cyan(res.user.name)
            );
            let authorization: Rc<str> = format!("Bearer {}", res.token).into();
            for pkg in packages {
              authorizations.insert(
                (pkg.scope.clone(), pkg.package.clone(), pkg.version.clone()),
                authorization.clone(),
              );
            }
            break;
          }
          Err(err) => {
            if err.code == "authorizationPending" {
              continue;
            } else {
              return Err(err).context("Failed to exchange authorization");
            }
          }
        }
      }
    }
    AuthMethod::Token(token) => {
      let authorization: Rc<str> = format!("Bearer {}", token).into();
      for pkg in packages {
        authorizations.insert(
          (pkg.scope.clone(), pkg.package.clone(), pkg.version.clone()),
          authorization.clone(),
        );
      }
    }
    AuthMethod::Oidc(oidc_config) => {
      let mut chunked_packages = packages.chunks(16);
      for permissions in permissions.chunks(16) {
        let audience = json!({ "permissions": permissions }).to_string();
        let url = format!(
          "{}&audience={}",
          oidc_config.url,
          percent_encoding::percent_encode(
            audience.as_bytes(),
            percent_encoding::NON_ALPHANUMERIC
          )
        );

        let response = client
          .get(url.parse()?)?
          .header(
            http::header::AUTHORIZATION,
            format!("Bearer {}", oidc_config.token).parse()?,
          )
          .send()
          .await
          .context("Failed to get OIDC token")?;
        let status = response.status();
        let text = crate::http_util::body_to_string(response)
          .await
          .with_context(|| {
            format!("Failed to get OIDC token: status {}", status)
          })?;
        if !status.is_success() {
          bail!(
            "Failed to get OIDC token: status {}, response: '{}'",
            status,
            text
          );
        }
        let api::OidcTokenResponse { value } = serde_json::from_str(&text)
          .with_context(|| {
            format!(
              "Failed to parse OIDC token: '{}' (status {})",
              text, status
            )
          })?;

        let authorization: Rc<str> = format!("githuboidc {}", value).into();
        for pkg in chunked_packages.next().unwrap() {
          authorizations.insert(
            (pkg.scope.clone(), pkg.package.clone(), pkg.version.clone()),
            authorization.clone(),
          );
        }
      }
    }
  };

  Ok(authorizations)
}

/// Check if both `scope` and `package` already exist; if not, return
/// a URL to the management panel to create them.
async fn check_if_scope_and_package_exist(
  client: &HttpClient,
  registry_api_url: &Url,
  registry_manage_url: &Url,
  scope: &str,
  package: &str,
) -> Result<Option<String>, AnyError> {
  let mut needs_scope = false;
  let mut needs_package = false;

  let response = api::get_scope(client, registry_api_url, scope).await?;
  if response.status() == 404 {
    needs_scope = true;
  }

  let response =
    api::get_package(client, registry_api_url, scope, package).await?;
  if response.status() == 404 {
    needs_package = true;
  }

  if needs_scope || needs_package {
    let create_url = format!(
      "{}new?scope={}&package={}&from=cli",
      registry_manage_url, scope, package
    );
    return Ok(Some(create_url));
  }

  Ok(None)
}
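
/// Verifies that every package's scope and package exist on the registry,
/// prompting the user to create any that are missing (or failing fast when
/// stdin is not a terminal).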
async fn ensure_scopes_and_packages_exist(
  client: &HttpClient,
  registry_api_url: &Url,
  registry_manage_url: &Url,
  packages: &[Rc<PreparedPublishPackage>],
) -> Result<(), AnyError> {
  if !std::io::stdin().is_terminal() {
    let mut missing_packages_lines = vec![];
    for package in packages {
      let maybe_create_package_url = check_if_scope_and_package_exist(
        client,
        registry_api_url,
        registry_manage_url,
        &package.scope,
        &package.package,
      )
      .await?;

      if let Some(create_package_url) = maybe_create_package_url {
        missing_packages_lines.push(format!(" - {}", create_package_url));
      }
    }

    if !missing_packages_lines.is_empty() {
      bail!(
        "The following packages don't exist; follow the links and create them:\n{}",
        missing_packages_lines.join("\n")
      );
    }
    return Ok(());
  }

  for package in packages {
    let maybe_create_package_url = check_if_scope_and_package_exist(
      client,
      registry_api_url,
      registry_manage_url,
      &package.scope,
      &package.package,
    )
    .await?;

    let Some(create_package_url) = maybe_create_package_url else {
      continue;
    };

    ring_bell();
    log::warn!(
      "'@{}/{}' doesn't exist yet. Visit {} to create the package",
      &package.scope,
      &package.package,
      colors::cyan_with_underline(&create_package_url)
    );
    log::warn!("{}", colors::gray("Waiting..."));
    let _ = open::that_detached(&create_package_url);

    let package_api_url = api::get_package_api_url(
      registry_api_url,
      &package.scope,
      &package.package,
    );

    loop {
      tokio::time::sleep(std::time::Duration::from_secs(3)).await;
      let response = client.get(package_api_url.parse()?)?.send().await?;
      if response.status() == 200 {
        let name = format!("@{}/{}", package.scope, package.package);
        log::info!("Package {} created", colors::green(name));
        break;
      }
    }
  }

  Ok(())
}
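
/// Publishes the prepared packages in dependency order, running each batch
/// of independent packages concurrently.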
async fn perform_publish(
  http_client: &HttpClient,
  mut publish_order_graph: PublishOrderGraph,
  mut prepared_package_by_name: HashMap<String, Rc<PreparedPublishPackage>>,
  auth_method: AuthMethod,
  provenance: bool,
) -> Result<(), AnyError> {
  let registry_api_url = jsr_api_url();
  let registry_url = jsr_url();

  let packages = prepared_package_by_name
    .values()
    .cloned()
    .collect::<Vec<_>>();

  ensure_scopes_and_packages_exist(
    http_client,
    registry_api_url,
    registry_url,
    &packages,
  )
  .await?;

  let mut authorizations =
    get_auth_headers(http_client, registry_api_url, &packages, auth_method)
      .await?;

  assert_eq!(prepared_package_by_name.len(), authorizations.len());
  let mut futures: FuturesUnordered<LocalBoxFuture<Result<String, AnyError>>> =
    Default::default();
  loop {
    let next_batch = publish_order_graph.next();

    for package_name in next_batch {
      let package = prepared_package_by_name.remove(&package_name).unwrap();

      // todo(dsherret): output something that looks better than this even not in debug
      if log::log_enabled!(log::Level::Debug) {
        log::debug!("Publishing {}", package.display_name());
        for file in &package.tarball.files {
          log::debug!(
            " Tarball file {} {}",
            human_size(file.size as f64),
            file.specifier
          );
        }
      }

      let authorization = authorizations
        .remove(&(
          package.scope.clone(),
          package.package.clone(),
          package.version.clone(),
        ))
        .unwrap();
      futures.push(
        async move {
          let display_name = package.display_name();
          publish_package(
            http_client,
            package,
            registry_api_url,
            registry_url,
            &authorization,
            provenance,
          )
          .await
          .with_context(|| format!("Failed to publish {}", display_name))?;
          Ok(package_name)
        }
        .boxed_local(),
      );
    }

    let Some(result) = futures.next().await else {
      // done, ensure no circular dependency
      publish_order_graph.ensure_no_pending()?;
      break;
    };

    let package_name = result?;
    publish_order_graph.finish_package(&package_name);
  }

  Ok(())
}
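
/// Uploads a single package tarball, polls the registry's publishing task
/// until it finishes, and attaches Sigstore provenance when enabled.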
async fn publish_package(
  http_client: &HttpClient,
  package: Rc<PreparedPublishPackage>,
  registry_api_url: &Url,
  registry_url: &Url,
  authorization: &str,
  provenance: bool,
) -> Result<(), AnyError> {
  log::info!(
    "{} @{}/{}@{} ...",
    colors::intense_blue("Publishing"),
    package.scope,
    package.package,
    package.version
  );

  let url = format!(
    "{}scopes/{}/packages/{}/versions/{}?config=/{}",
    registry_api_url,
    package.scope,
    package.package,
    package.version,
    package.config
  );

  let body = http_body_util::Full::new(package.tarball.bytes.clone())
    .map_err(|never| match never {})
    .boxed();
  let response = http_client
    .post(url.parse()?, body)?
    .header(
      http::header::AUTHORIZATION,
      authorization.parse().map_err(http::Error::from)?,
    )
    .header(
      http::header::CONTENT_ENCODING,
      "gzip".parse().map_err(http::Error::from)?,
    )
    .send()
    .await?;

  let res = api::parse_response::<api::PublishingTask>(response).await;
  let mut task = match res {
    Ok(task) => task,
    Err(mut err) if err.code == "duplicateVersionPublish" => {
      let task = serde_json::from_value::<api::PublishingTask>(
        err.data.get_mut("task").unwrap().take(),
      )
      .unwrap();
      if task.status == "success" {
        log::info!(
          "{} @{}/{}@{}",
          colors::yellow("Warning: Skipping, already published"),
          package.scope,
          package.package,
          package.version
        );
        return Ok(());
      }
      log::info!(
        "{} @{}/{}@{}",
        colors::yellow("Already uploaded, waiting for publishing"),
        package.scope,
        package.package,
        package.version
      );
      task
    }
    Err(err) => {
      return Err(err).with_context(|| {
        format!(
          "Failed to publish @{}/{} at {}",
          package.scope, package.package, package.version
        )
      })
    }
  };

  let interval = std::time::Duration::from_secs(2);
  while task.status != "success" && task.status != "failure" {
    tokio::time::sleep(interval).await;
    let resp = http_client
      .get(format!("{}publish_status/{}", registry_api_url, task.id).parse()?)?
      .send()
      .await
      .with_context(|| {
        format!(
          "Failed to get publishing status for @{}/{} at {}",
          package.scope, package.package, package.version
        )
      })?;
    task = api::parse_response::<api::PublishingTask>(resp)
      .await
      .with_context(|| {
        format!(
          "Failed to get publishing status for @{}/{} at {}",
          package.scope, package.package, package.version
        )
      })?;
  }

  if let Some(error) = task.error {
    bail!(
      "{} @{}/{} at {}: {}",
      colors::red("Failed to publish"),
      package.scope,
      package.package,
      package.version,
      error.message
    );
  }

  log::info!(
    "{} @{}/{}@{}",
    colors::green("Successfully published"),
    package.scope,
    package.package,
    package.version
  );

  let enable_provenance = std::env::var("DISABLE_JSR_PROVENANCE").is_err()
    && (auth::is_gha() && auth::gha_oidc_token().is_some() && provenance);

  // Enable provenance by default on Github actions with OIDC token
  if enable_provenance {
    // Get the version manifest from the registry
    let meta_url = jsr_url().join(&format!(
      "@{}/{}/{}_meta.json",
      package.scope, package.package, package.version
    ))?;

    let resp = http_client.get(meta_url)?.send().await?;
    let meta_bytes = resp.collect().await?.to_bytes();

    if std::env::var("DISABLE_JSR_MANIFEST_VERIFICATION_FOR_TESTING").is_err() {
      verify_version_manifest(&meta_bytes, &package)?;
    }

    let subject = provenance::Subject {
      name: format!(
        "pkg:jsr/@{}/{}@{}",
        package.scope, package.package, package.version
      ),
      digest: provenance::SubjectDigest {
        sha256: faster_hex::hex_string(&sha2::Sha256::digest(&meta_bytes)),
      },
    };
    let bundle = provenance::generate_provenance(http_client, subject).await?;

    let tlog_entry = &bundle.verification_material.tlog_entries[0];
    log::info!("{}",
      colors::green(format!(
        "Provenance transparency log available at https://search.sigstore.dev/?logIndex={}",
        tlog_entry.log_index
      ))
    );

    // Submit bundle to JSR
    let provenance_url = format!(
      "{}scopes/{}/packages/{}/versions/{}/provenance",
      registry_api_url, package.scope, package.package, package.version
    );
    http_client
      .post_json(provenance_url.parse()?, &json!({ "bundle": bundle }))?
      .header(http::header::AUTHORIZATION, authorization.parse()?)
      .send()
      .await?;
  }

  log::info!(
    "{}",
    colors::gray(format!(
      "Visit {}@{}/{}@{} for details",
      registry_url, package.scope, package.package, package.version
    ))
  );
  Ok(())
}
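
/// Reports a diagnostic for every file-scheme JS/JSON module that is part of
/// the module graph under `root` but was excluded from the published paths.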
fn collect_excluded_module_diagnostics(
  root: &ModuleSpecifier,
  graph: &deno_graph::ModuleGraph,
  publish_paths: &[CollectedPublishPath],
  diagnostics_collector: &PublishDiagnosticsCollector,
) {
  let publish_specifiers = publish_paths
    .iter()
    .map(|path| &path.specifier)
    .collect::<HashSet<_>>();
  let graph_specifiers = graph
    .modules()
    .filter_map(|m| match m {
      deno_graph::Module::Js(_) | deno_graph::Module::Json(_) => {
        Some(m.specifier())
      }
      deno_graph::Module::Npm(_)
      | deno_graph::Module::Node(_)
      | deno_graph::Module::External(_) => None,
    })
    .filter(|s| s.as_str().starts_with(root.as_str()));
  for specifier in graph_specifiers {
    if !publish_specifiers.contains(specifier) {
      diagnostics_collector.push(PublishDiagnostic::ExcludedModule {
        specifier: specifier.clone(),
      });
    }
  }
}

#[derive(Deserialize)]
struct ManifestEntry {
  checksum: String,
}

#[derive(Deserialize)]
struct VersionManifest {
  manifest: HashMap<String, ManifestEntry>,
  exports: HashMap<String, String>,
}
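
/// Checks that the version manifest returned by the registry matches the
/// uploaded tarball: same file set, same checksums, and same exports.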
fn verify_version_manifest(
  meta_bytes: &[u8],
  package: &PreparedPublishPackage,
) -> Result<(), AnyError> {
  let manifest = serde_json::from_slice::<VersionManifest>(meta_bytes)?;
  // Check that nothing was removed from the manifest.
  if manifest.manifest.len() != package.tarball.files.len() {
    bail!(
      "Mismatch in the number of files in the manifest: expected {}, got {}",
      package.tarball.files.len(),
      manifest.manifest.len()
    );
  }

  for (path, entry) in manifest.manifest {
    // Verify each path with the files in the tarball.
    let file = package
      .tarball
      .files
      .iter()
      .find(|f| f.path_str == path.as_str());

    if let Some(file) = file {
      if file.hash != entry.checksum {
        bail!(
          "Checksum mismatch for {}: expected {}, got {}",
          path,
          entry.checksum,
          file.hash
        );
      }
    } else {
      bail!("File {} not found in the tarball", path);
    }
  }

  for (specifier, expected) in &manifest.exports {
    let actual = package.exports.get(specifier).ok_or_else(|| {
      deno_core::anyhow::anyhow!(
        "Export {} not found in the package",
        specifier
      )
    })?;
    if actual != expected {
      bail!(
        "Export {} mismatch: expected {}, got {}",
        specifier,
        expected,
        actual
      );
    }
  }

  Ok(())
}
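
/// Returns the `git status --porcelain` output when the repository has
/// uncommitted changes, or `None` when the tree is clean or git is not
/// installed.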
async fn check_if_git_repo_dirty(cwd: &Path) -> Option<String> {
  let bin_name = if cfg!(windows) { "git.exe" } else { "git" };

  // Check if git exists
  let git_exists = Command::new(bin_name)
    .arg("--version")
    .stderr(Stdio::null())
    .stdout(Stdio::null())
    .status()
    .await
    .map_or(false, |status| status.success());

  if !git_exists {
    return None; // Git is not installed
  }

  // Check if there are uncommitted changes
  let output = Command::new(bin_name)
    .current_dir(cwd)
    .args(["status", "--porcelain"])
    .output()
    .await
    .expect("Failed to execute command");

  let output_str = String::from_utf8_lossy(&output.stdout);
  let text = output_str.trim();
  if text.is_empty() {
    None
  } else {
    Some(text.to_string())
  }
}

static SUPPORTED_LICENSE_FILE_NAMES: [&str; 6] = [
  "LICENSE",
  "LICENSE.md",
  "LICENSE.txt",
  "LICENCE",
  "LICENCE.md",
  "LICENCE.txt",
];
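
/// Resolves a license file by checking the package root first and then the
/// workspace root.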
fn resolve_license_file(
  pkg_root_dir: &Path,
  workspace: &Workspace,
) -> Option<PathBuf> {
  let workspace_root_dir = workspace.root_dir_path();
  let mut dirs = Vec::with_capacity(2);
  dirs.push(pkg_root_dir);
  if workspace_root_dir != pkg_root_dir {
    dirs.push(&workspace_root_dir);
  }
  for dir in dirs {
    for file_name in &SUPPORTED_LICENSE_FILE_NAMES {
      let file_path = dir.join(file_name);
      if file_path.exists() {
        return Some(file_path);
      }
    }
  }
  None
}
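
/// Returns `true` if any of the given specifiers ends with a recognized
/// license file name (case-insensitive).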
fn has_license_file<'a>(
  mut specifiers: impl Iterator<Item = &'a ModuleSpecifier>,
) -> bool {
  let supported_license_files = SUPPORTED_LICENSE_FILE_NAMES
    .iter()
    .map(|s| s.to_lowercase())
    .collect::<HashSet<_>>();
  specifiers.any(|specifier| {
    specifier
      .path()
      .rsplit_once('/')
      .map(|(_, file)| {
        supported_license_files.contains(file.to_lowercase().as_str())
      })
      .unwrap_or(false)
  })
}

#[allow(clippy::print_stderr)]
fn ring_bell() {
  // ASCII code for the bell character.
  eprint!("\x07");
}

#[cfg(test)]
mod tests {
  use deno_ast::ModuleSpecifier;

  use crate::tools::registry::has_license_file;

  use super::tar::PublishableTarball;
  use super::tar::PublishableTarballFile;
  use super::verify_version_manifest;
  use std::collections::HashMap;

  #[test]
  fn test_verify_version_manifest() {
    let meta = r#"{
      "manifest": {
        "mod.ts": {
          "checksum": "abc123"
        }
      },
      "exports": {}
    }"#;

    let meta_bytes = meta.as_bytes();
    let package = super::PreparedPublishPackage {
      scope: "test".to_string(),
      package: "test".to_string(),
      version: "1.0.0".to_string(),
      tarball: PublishableTarball {
        bytes: vec![].into(),
        hash: "abc123".to_string(),
        files: vec![PublishableTarballFile {
          specifier: "file://mod.ts".try_into().unwrap(),
          path_str: "mod.ts".to_string(),
          hash: "abc123".to_string(),
          size: 0,
        }],
      },
      config: "deno.json".to_string(),
      exports: HashMap::new(),
    };

    assert!(verify_version_manifest(meta_bytes, &package).is_ok());
  }

  #[test]
  fn test_verify_version_manifest_missing() {
    let meta = r#"{
      "manifest": {
        "mod.ts": {},
      },
      "exports": {}
    }"#;

    let meta_bytes = meta.as_bytes();
    let package = super::PreparedPublishPackage {
      scope: "test".to_string(),
      package: "test".to_string(),
      version: "1.0.0".to_string(),
      tarball: PublishableTarball {
        bytes: vec![].into(),
        hash: "abc123".to_string(),
        files: vec![PublishableTarballFile {
          specifier: "file://mod.ts".try_into().unwrap(),
          path_str: "mod.ts".to_string(),
          hash: "abc123".to_string(),
          size: 0,
        }],
      },
      config: "deno.json".to_string(),
      exports: HashMap::new(),
    };

    assert!(verify_version_manifest(meta_bytes, &package).is_err());
  }

  #[test]
  fn test_verify_version_manifest_invalid_hash() {
    let meta = r#"{
      "manifest": {
        "mod.ts": {
          "checksum": "lol123"
        },
        "exports": {}
      }
    }"#;

    let meta_bytes = meta.as_bytes();
    let package = super::PreparedPublishPackage {
      scope: "test".to_string(),
      package: "test".to_string(),
      version: "1.0.0".to_string(),
      tarball: PublishableTarball {
        bytes: vec![].into(),
        hash: "abc123".to_string(),
        files: vec![PublishableTarballFile {
          specifier: "file://mod.ts".try_into().unwrap(),
          path_str: "mod.ts".to_string(),
          hash: "abc123".to_string(),
          size: 0,
        }],
      },
      config: "deno.json".to_string(),
      exports: HashMap::new(),
    };

    assert!(verify_version_manifest(meta_bytes, &package).is_err());
  }

  #[test]
  fn test_has_license_files() {
    fn has_license_file_str(expected: &[&str]) -> bool {
      let specifiers = expected
        .iter()
        .map(|s| ModuleSpecifier::parse(s).unwrap())
        .collect::<Vec<_>>();
      has_license_file(specifiers.iter())
    }

    assert!(has_license_file_str(&["file:///LICENSE"]));
    assert!(has_license_file_str(&["file:///license"]));
    assert!(has_license_file_str(&["file:///LICENSE.txt"]));
    assert!(has_license_file_str(&["file:///LICENSE.md"]));
    assert!(has_license_file_str(&["file:///LICENCE"]));
    assert!(has_license_file_str(&["file:///LICENCE.txt"]));
    assert!(has_license_file_str(&["file:///LICENCE.md"]));
    assert!(has_license_file_str(&[
      "file:///other",
      "file:///test/LICENCE.md"
    ]),);
    assert!(!has_license_file_str(&[
      "file:///other",
      "file:///test/tLICENSE"
    ]),);
  }
}