// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::collections::HashMap;
use std::io::IsTerminal;
use std::rc::Rc;
use std::sync::Arc;

use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use deno_config::ConfigFile;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::unsync::JoinHandle;
use deno_core::unsync::JoinSet;
use deno_runtime::colors;
use deno_runtime::deno_fetch::reqwest;
use import_map::ImportMap;
use lsp_types::Url;
use serde::Serialize;
use sha2::Digest;

use crate::args::deno_registry_api_url;
use crate::args::deno_registry_url;
use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::PublishFlags;
use crate::factory::CliFactory;
use crate::graph_util::ModuleGraphBuilder;
use crate::http_util::HttpClient;
use crate::tools::check::CheckOptions;
use crate::tools::registry::graph::get_workspace_member_roots;
use crate::tools::registry::graph::resolve_config_file_roots_from_exports;
use crate::tools::registry::graph::surface_fast_check_type_graph_errors;
use crate::tools::registry::graph::MemberRoots;
use crate::util::display::human_size;
use crate::util::glob::PathOrPatternSet;
use crate::util::import_map::ImportMapUnfurler;

mod api;
mod auth;
mod graph;
mod publish_order;
mod tar;

use auth::get_auth_method;
use auth::AuthMethod;
use publish_order::PublishOrderGraph;

use super::check::TypeChecker;

use self::tar::PublishableTarball;

fn ring_bell() {
  // ASCII code for the bell character.
  print!("\x07");
}

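/// A package that has passed the local validation steps and whose contents
/// have been archived into a tarball, ready to be uploaded to the registry.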
struct PreparedPublishPackage {
  scope: String,
  package: String,
  version: String,
  tarball: PublishableTarball,
}

impl PreparedPublishPackage {
  pub fn display_name(&self) -> String {
    format!("@{}/{}@{}", self.scope, self.package, self.version)
  }
}

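/// Entrypoint file names suggested when a config file has no `exports` field.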
static SUGGESTED_ENTRYPOINTS: [&str; 4] =
  ["mod.ts", "mod.js", "index.ts", "index.js"];

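/// Reads the `name` field from a config file, erroring if it is missing.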
fn get_deno_json_package_name(
  deno_json: &ConfigFile,
) -> Result<String, AnyError> {
  match deno_json.json.name.clone() {
    Some(name) => Ok(name),
    None => bail!("{} is missing 'name' field", deno_json.specifier),
  }
}

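/// Validates the `name`, `version`, and `exports` fields of a package's
/// config file and archives the package directory into a gzipped tarball,
/// unfurling import map specifiers along the way.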
async fn prepare_publish(
  deno_json: &ConfigFile,
  import_map: Arc<ImportMap>,
) -> Result<Rc<PreparedPublishPackage>, AnyError> {
  let config_path = deno_json.specifier.to_file_path().unwrap();
  let dir_path = config_path.parent().unwrap().to_path_buf();
  let Some(version) = deno_json.json.version.clone() else {
    bail!("{} is missing 'version' field", deno_json.specifier);
  };
  let name = get_deno_json_package_name(deno_json)?;
  if deno_json.json.exports.is_none() {
    let mut suggested_entrypoint = None;

    for entrypoint in SUGGESTED_ENTRYPOINTS {
      if dir_path.join(entrypoint).exists() {
        suggested_entrypoint = Some(entrypoint);
        break;
      }
    }

    let exports_content = format!(
      r#"{{
  "name": "{}",
  "version": "{}",
  "exports": "{}"
}}"#,
      name,
      version,
      suggested_entrypoint.unwrap_or("<path_to_entrypoint>")
    );

    bail!(
      "You did not specify an entrypoint to \"{}\" package in {}. Add `exports` mapping in the configuration file, eg:\n{}",
      name,
      deno_json.specifier,
      exports_content
    );
  }
  let Some(name) = name.strip_prefix('@') else {
    bail!("Invalid package name, use '@<scope_name>/<package_name>' format");
  };
  let Some((scope, package_name)) = name.split_once('/') else {
    bail!("Invalid package name, use '@<scope_name>/<package_name>' format");
  };
  let exclude_patterns = deno_json.to_files_config().and_then(|files| {
    PathOrPatternSet::from_absolute_paths(files.unwrap_or_default().exclude)
      .context("Invalid config file exclude pattern.")
  })?;

  let tarball = deno_core::unsync::spawn_blocking(move || {
    let unfurler = ImportMapUnfurler::new(&import_map);
    tar::create_gzipped_tarball(&dir_path, &unfurler, &exclude_patterns)
      .context("Failed to create a tarball")
  })
  .await??;

  log::debug!("Tarball size ({}): {}", name, tarball.bytes.len());

  Ok(Rc::new(PreparedPublishPackage {
    scope: scope.to_string(),
    package: package_name.to_string(),
    version: version.to_string(),
    tarball,
  }))
}

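/// The permission requested from the registry when creating an authorization
/// for publishing a specific package version.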
#[derive(Serialize)]
#[serde(tag = "permission")]
pub enum Permission<'s> {
  #[serde(rename = "package/publish", rename_all = "camelCase")]
  VersionPublish {
    scope: &'s str,
    package: &'s str,
    version: &'s str,
    tarball_hash: &'s str,
  },
}

/// Prints diagnostics like so:
/// ```
///
/// Warning
/// ├╌ Dynamic import was not analyzable...
/// ├╌╌ at file:///dev/foo/bar/foo.ts:4:5
/// |
/// ├╌ Dynamic import was not analyzable...
/// ├╌╌ at file:///dev/foo/bar/foo.ts:4:5
/// |
/// ├╌ Dynamic import was not analyzable...
/// └╌╌ at file:///dev/foo/bar/foo.ts:4:5
///
/// ```
fn print_diagnostics(diagnostics: &[String]) {
  if !diagnostics.is_empty() {
    let len = diagnostics.len();
    log::warn!("");
    log::warn!("{}", crate::colors::yellow("Warning"));
    for (i, diagnostic) in diagnostics.iter().enumerate() {
      let last_diagnostic = i == len - 1;
      let lines = diagnostic.split('\n').collect::<Vec<_>>();
      let lines_len = lines.len();
      if i != 0 {
        log::warn!("|");
      }
      for (j, line) in lines.iter().enumerate() {
        let last_line = j == lines_len - 1;
        if j == 0 {
          log::warn!("├╌ {}", line);
        } else if last_line && last_diagnostic {
          log::warn!("└╌╌ {}", line);
        } else {
          log::warn!("├╌╌ {}", line);
        }
      }
    }
    log::warn!("");
  }
}

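/// Obtains an `Authorization` header value for every package, keyed by
/// `(scope, package, version)`, using the interactive, token, or OIDC
/// authentication flow.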
async fn get_auth_headers(
  client: &reqwest::Client,
  registry_url: String,
  packages: Vec<Rc<PreparedPublishPackage>>,
  auth_method: AuthMethod,
) -> Result<HashMap<(String, String, String), Rc<str>>, AnyError> {
  let permissions = packages
    .iter()
    .map(|package| Permission::VersionPublish {
      scope: &package.scope,
      package: &package.package,
      version: &package.version,
      tarball_hash: &package.tarball.hash,
    })
    .collect::<Vec<_>>();

  let mut authorizations = HashMap::with_capacity(packages.len());

  match auth_method {
    AuthMethod::Interactive => {
      let verifier = uuid::Uuid::new_v4().to_string();
      let challenge = BASE64_STANDARD.encode(sha2::Sha256::digest(&verifier));

      let response = client
        .post(format!("{}authorizations", registry_url))
        .json(&serde_json::json!({
          "challenge": challenge,
          "permissions": permissions,
        }))
        .send()
        .await
        .context("Failed to create interactive authorization")?;
      let auth =
        api::parse_response::<api::CreateAuthorizationResponse>(response)
          .await
          .context("Failed to create interactive authorization")?;

      print!(
        "Visit {} to authorize publishing of",
        colors::cyan(format!("{}?code={}", auth.verification_url, auth.code))
      );
      if packages.len() > 1 {
        println!(" {} packages", packages.len());
      } else {
        println!(" @{}/{}", packages[0].scope, packages[0].package);
      }

      ring_bell();
      println!("{}", colors::gray("Waiting..."));

      let interval = std::time::Duration::from_secs(auth.poll_interval);

      loop {
        tokio::time::sleep(interval).await;
        let response = client
          .post(format!("{}authorizations/exchange", registry_url))
          .json(&serde_json::json!({
            "exchangeToken": auth.exchange_token,
            "verifier": verifier,
          }))
          .send()
          .await
          .context("Failed to exchange authorization")?;
        let res =
          api::parse_response::<api::ExchangeAuthorizationResponse>(response)
            .await;
        match res {
          Ok(res) => {
            println!(
              "{} {} {}",
              colors::green("Authorization successful."),
              colors::gray("Authenticated as"),
              colors::cyan(res.user.name)
            );
            let authorization: Rc<str> = format!("Bearer {}", res.token).into();
            for pkg in &packages {
              authorizations.insert(
                (pkg.scope.clone(), pkg.package.clone(), pkg.version.clone()),
                authorization.clone(),
              );
            }
            break;
          }
          Err(err) => {
            if err.code == "authorizationPending" {
              continue;
            } else {
              return Err(err).context("Failed to exchange authorization");
            }
          }
        }
      }
    }
    AuthMethod::Token(token) => {
      let authorization: Rc<str> = format!("Bearer {}", token).into();
      for pkg in &packages {
        authorizations.insert(
          (pkg.scope.clone(), pkg.package.clone(), pkg.version.clone()),
          authorization.clone(),
        );
      }
    }
    AuthMethod::Oidc(oidc_config) => {
      let mut chunked_packages = packages.chunks(16);
      for permissions in permissions.chunks(16) {
        let audience = json!({ "permissions": permissions }).to_string();
        let url = format!(
          "{}&audience={}",
          oidc_config.url,
          percent_encoding::percent_encode(
            audience.as_bytes(),
            percent_encoding::NON_ALPHANUMERIC
          )
        );

        let response = client
          .get(url)
          .bearer_auth(&oidc_config.token)
          .send()
          .await
          .context("Failed to get OIDC token")?;
        let status = response.status();
        let text = response.text().await.with_context(|| {
          format!("Failed to get OIDC token: status {}", status)
        })?;
        if !status.is_success() {
          bail!(
            "Failed to get OIDC token: status {}, response: '{}'",
            status,
            text
          );
        }
        let api::OidcTokenResponse { value } = serde_json::from_str(&text)
          .with_context(|| {
            format!(
              "Failed to parse OIDC token: '{}' (status {})",
              text, status
            )
          })?;

        let authorization: Rc<str> = format!("githuboidc {}", value).into();
        for pkg in chunked_packages.next().unwrap() {
          authorizations.insert(
            (pkg.scope.clone(), pkg.package.clone(), pkg.version.clone()),
            authorization.clone(),
          );
        }
      }
    }
  };

  Ok(authorizations)
}

/// Checks if both `scope` and `package` already exist; if not, returns
/// a URL to the management panel to create them.
async fn check_if_scope_and_package_exist(
  client: &reqwest::Client,
  registry_api_url: &str,
  registry_manage_url: &str,
  scope: &str,
  package: &str,
) -> Result<Option<String>, AnyError> {
  let mut needs_scope = false;
  let mut needs_package = false;

  let response = api::get_scope(client, registry_api_url, scope).await?;
  if response.status() == 404 {
    needs_scope = true;
  }

  let response =
    api::get_package(client, registry_api_url, scope, package).await?;
  if response.status() == 404 {
    needs_package = true;
  }

  if needs_scope || needs_package {
    let create_url = format!(
      "{}new?scope={}&package={}&from=cli",
      registry_manage_url, scope, package
    );
    return Ok(Some(create_url));
  }

  Ok(None)
}

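/// Verifies that every package's scope and package entry exists in the
/// registry. In an interactive terminal this waits for the user to create the
/// missing ones; otherwise it fails with the creation links.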
async fn ensure_scopes_and_packages_exist(
  client: &reqwest::Client,
  registry_api_url: String,
  registry_manage_url: String,
  packages: Vec<Rc<PreparedPublishPackage>>,
) -> Result<(), AnyError> {
  if !std::io::stdin().is_terminal() {
    let mut missing_packages_lines = vec![];
    for package in packages {
      let maybe_create_package_url = check_if_scope_and_package_exist(
        client,
        &registry_api_url,
        &registry_manage_url,
        &package.scope,
        &package.package,
      )
      .await?;

      if let Some(create_package_url) = maybe_create_package_url {
        missing_packages_lines.push(format!(" - {}", create_package_url));
      }
    }

    if !missing_packages_lines.is_empty() {
      bail!(
        "Following packages don't exist, follow the links and create them:\n{}",
        missing_packages_lines.join("\n")
      );
    }
    return Ok(());
  }

  for package in packages {
    let maybe_create_package_url = check_if_scope_and_package_exist(
      client,
      &registry_api_url,
      &registry_manage_url,
      &package.scope,
      &package.package,
    )
    .await?;

    let Some(create_package_url) = maybe_create_package_url else {
      continue;
    };

    ring_bell();
    println!(
      "'@{}/{}' doesn't exist yet. Visit {} to create the package",
      &package.scope,
      &package.package,
      colors::cyan_with_underline(create_package_url)
    );
    println!("{}", colors::gray("Waiting..."));

    let package_api_url = api::get_package_api_url(
      &registry_api_url,
      &package.scope,
      &package.package,
    );

    loop {
      tokio::time::sleep(std::time::Duration::from_secs(3)).await;
      let response = client.get(&package_api_url).send().await?;
      if response.status() == 200 {
        let name = format!("@{}/{}", package.scope, package.package);
        println!("Package {} created", colors::green(name));
        break;
      }
    }
  }

  Ok(())
}

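/// Publishes the prepared packages in dependency order, uploading the
/// tarballs for each batch concurrently.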
async fn perform_publish(
  http_client: &Arc<HttpClient>,
  mut publish_order_graph: PublishOrderGraph,
  mut prepared_package_by_name: HashMap<String, Rc<PreparedPublishPackage>>,
  auth_method: AuthMethod,
) -> Result<(), AnyError> {
  let client = http_client.client()?;
  let registry_api_url = deno_registry_api_url().to_string();
  let registry_url = deno_registry_url().to_string();

  let packages = prepared_package_by_name
    .values()
    .cloned()
    .collect::<Vec<_>>();
  let diagnostics = packages
    .iter()
    .flat_map(|p| p.tarball.diagnostics.iter().cloned())
    .collect::<Vec<_>>();
  print_diagnostics(&diagnostics);

  ensure_scopes_and_packages_exist(
    client,
    registry_api_url.clone(),
    registry_url.clone(),
    packages.clone(),
  )
  .await?;

  let mut authorizations =
    get_auth_headers(client, registry_api_url.clone(), packages, auth_method)
      .await?;

  assert_eq!(prepared_package_by_name.len(), authorizations.len());
  let mut futures: JoinSet<Result<String, AnyError>> = JoinSet::default();
  loop {
    let next_batch = publish_order_graph.next();

    for package_name in next_batch {
      let package = prepared_package_by_name.remove(&package_name).unwrap();

      // todo(dsherret): output something that looks better than this even when not in debug
      if log::log_enabled!(log::Level::Debug) {
        log::debug!("Publishing {}", package.display_name());
        for file in &package.tarball.files {
          log::debug!(
            " Tarball file {} {}",
            human_size(file.size as f64),
            file.path.display()
          );
        }
      }

      let authorization = authorizations
        .remove(&(
          package.scope.clone(),
          package.package.clone(),
          package.version.clone(),
        ))
        .unwrap();
      let registry_api_url = registry_api_url.clone();
      let registry_url = registry_url.clone();
      let http_client = http_client.clone();
      futures.spawn(async move {
        let display_name = package.display_name();
        publish_package(
          &http_client,
          package,
          &registry_api_url,
          &registry_url,
          &authorization,
        )
        .await
        .with_context(|| format!("Failed to publish {}", display_name))?;
        Ok(package_name)
      });
    }

    let Some(result) = futures.join_next().await else {
      // done, ensure no circular dependency
      publish_order_graph.ensure_no_pending()?;
      break;
    };

    let package_name = result??;
    publish_order_graph.finish_package(&package_name);
  }

  Ok(())
}

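/// Uploads a single package tarball to the registry and polls the resulting
/// publishing task until it succeeds or fails.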
async fn publish_package(
  http_client: &HttpClient,
  package: Rc<PreparedPublishPackage>,
  registry_api_url: &str,
  registry_url: &str,
  authorization: &str,
) -> Result<(), AnyError> {
  let client = http_client.client()?;
  println!(
    "{} @{}/{}@{} ...",
    colors::intense_blue("Publishing"),
    package.scope,
    package.package,
    package.version
  );

  let url = format!(
    "{}scopes/{}/packages/{}/versions/{}",
    registry_api_url, package.scope, package.package, package.version
  );

  let response = client
    .post(url)
    .header(reqwest::header::AUTHORIZATION, authorization)
    .header(reqwest::header::CONTENT_ENCODING, "gzip")
    .body(package.tarball.bytes.clone())
    .send()
    .await?;

  let res = api::parse_response::<api::PublishingTask>(response).await;
  let mut task = match res {
    Ok(task) => task,
    Err(mut err) if err.code == "duplicateVersionPublish" => {
      let task = serde_json::from_value::<api::PublishingTask>(
        err.data.get_mut("task").unwrap().take(),
      )
      .unwrap();
      if task.status == "success" {
        println!(
          "{} @{}/{}@{}",
          colors::green("Skipping, already published"),
          package.scope,
          package.package,
          package.version
        );
        return Ok(());
      }
      println!(
        "{} @{}/{}@{}",
        colors::yellow("Already uploaded, waiting for publishing"),
        package.scope,
        package.package,
        package.version
      );
      task
    }
    Err(err) => {
      return Err(err).with_context(|| {
        format!(
          "Failed to publish @{}/{} at {}",
          package.scope, package.package, package.version
        )
      })
    }
  };

  let interval = std::time::Duration::from_secs(2);
  while task.status != "success" && task.status != "failure" {
    tokio::time::sleep(interval).await;
    let resp = client
      .get(format!("{}publish_status/{}", registry_api_url, task.id))
      .send()
      .await
      .with_context(|| {
        format!(
          "Failed to get publishing status for @{}/{} at {}",
          package.scope, package.package, package.version
        )
      })?;
    task = api::parse_response::<api::PublishingTask>(resp)
      .await
      .with_context(|| {
        format!(
          "Failed to get publishing status for @{}/{} at {}",
          package.scope, package.package, package.version
        )
      })?;
  }

  if let Some(error) = task.error {
    bail!(
      "{} @{}/{} at {}: {}",
      colors::red("Failed to publish"),
      package.scope,
      package.package,
      package.version,
      error.message
    );
  }

  println!(
    "{} @{}/{}@{}",
    colors::green("Successfully published"),
    package.scope,
    package.package,
    package.version
  );
  println!(
    "{}",
    colors::gray(format!(
      "Visit {}@{}/{}@{} for details",
      registry_url, package.scope, package.package, package.version
    ))
  );
  Ok(())
}

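/// Prepares either a single package or every workspace member for publishing,
/// returning the publish order graph together with the prepared packages.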
async fn prepare_packages_for_publishing(
  cli_factory: &CliFactory,
  deno_json: ConfigFile,
  import_map: Arc<ImportMap>,
) -> Result<
  (
    PublishOrderGraph,
    HashMap<String, Rc<PreparedPublishPackage>>,
  ),
  AnyError,
> {
  let maybe_workspace_config = deno_json.to_workspace_config()?;
  let module_graph_builder = cli_factory.module_graph_builder().await?.as_ref();
  let type_checker = cli_factory.type_checker().await?;
  let cli_options = cli_factory.cli_options();

  let Some(workspace_config) = maybe_workspace_config else {
    let roots = resolve_config_file_roots_from_exports(&deno_json)?;
    build_and_check_graph_for_publish(
      module_graph_builder,
      type_checker,
      cli_options,
      &[MemberRoots {
        name: get_deno_json_package_name(&deno_json)?,
        dir_url: deno_json.specifier.join("./").unwrap().clone(),
        exports: roots,
      }],
    )
    .await?;
    let mut prepared_package_by_name = HashMap::with_capacity(1);
    let package = prepare_publish(&deno_json, import_map).await?;
    let package_name = format!("@{}/{}", package.scope, package.package);
    let publish_order_graph =
      PublishOrderGraph::new_single(package_name.clone());
    prepared_package_by_name.insert(package_name, package);
    return Ok((publish_order_graph, prepared_package_by_name));
  };

  println!("Publishing a workspace...");
  // create the module graph
  let roots = get_workspace_member_roots(&workspace_config)?;
  let graph = build_and_check_graph_for_publish(
    module_graph_builder,
    type_checker,
    cli_options,
    &roots,
  )
  .await?;

  let mut prepared_package_by_name =
    HashMap::with_capacity(workspace_config.members.len());
  let publish_order_graph =
    publish_order::build_publish_order_graph(&graph, &roots)?;

  let results = workspace_config
    .members
    .iter()
    .cloned()
    .map(|member| {
      let import_map = import_map.clone();
      deno_core::unsync::spawn(async move {
        let package = prepare_publish(&member.config_file, import_map)
          .await
          .with_context(|| {
            format!("Failed preparing '{}'.", member.package_name)
          })?;
        Ok((member.package_name, package))
      })
    })
    .collect::<Vec<
      JoinHandle<Result<(String, Rc<PreparedPublishPackage>), AnyError>>,
    >>();
  let results = deno_core::futures::future::join_all(results).await;
  for result in results {
    let (package_name, package) = result??;
    prepared_package_by_name.insert(package_name, package);
  }
  Ok((publish_order_graph, prepared_package_by_name))
}

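/// Builds the module graph rooted at every package export, surfaces fast
/// check errors, and type checks the graph before publishing.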
async fn build_and_check_graph_for_publish(
  module_graph_builder: &ModuleGraphBuilder,
  type_checker: &TypeChecker,
  cli_options: &CliOptions,
  packages: &[MemberRoots],
) -> Result<Arc<deno_graph::ModuleGraph>, deno_core::anyhow::Error> {
  let graph = Arc::new(
    module_graph_builder
      .create_graph_with_options(crate::graph_util::CreateGraphOptions {
        // All because we're going to use this same graph to determine the publish order later
        graph_kind: deno_graph::GraphKind::All,
        roots: packages
          .iter()
          .flat_map(|r| r.exports.iter())
          .cloned()
          .collect(),
        workspace_fast_check: true,
        loader: None,
      })
      .await?,
  );
  graph.valid()?;
  log::info!("Checking fast check type graph for errors...");
  surface_fast_check_type_graph_errors(&graph, packages)?;
  log::info!("Ensuring type checks...");
  let diagnostics = type_checker
    .check_diagnostics(
      graph.clone(),
      CheckOptions {
        lib: cli_options.ts_type_lib_window(),
        log_ignored_options: false,
        reload: cli_options.reload_flag(),
      },
    )
    .await?;
  if !diagnostics.is_empty() {
    bail!(
      concat!(
        "{:#}\n\n",
        "You may have discovered a bug in Deno's fast check implementation. ",
        "Fast check is still early days and we would appreciate if you log a ",
        "bug if you believe this is one: https://github.com/denoland/deno/issues/"
      ),
      diagnostics
    );
  }
  Ok(graph)
}

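/// Entry point for `deno publish`: resolves the configuration and import map,
/// prepares the packages, and performs the publish unless `--dry-run` is set.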
pub async fn publish(
  flags: Flags,
  publish_flags: PublishFlags,
) -> Result<(), AnyError> {
  let cli_factory = CliFactory::from_flags(flags).await?;

  let auth_method = get_auth_method(publish_flags.token)?;

  let import_map = cli_factory
    .maybe_import_map()
    .await?
    .clone()
    .unwrap_or_else(|| {
      Arc::new(ImportMap::new(Url::parse("file:///dev/null").unwrap()))
    });

  let directory_path = cli_factory.cli_options().initial_cwd();
  // TODO: doesn't handle jsonc
  let deno_json_path = directory_path.join("deno.json");
  let deno_json = ConfigFile::read(&deno_json_path).with_context(|| {
    format!(
      "Failed to read deno.json file at {}",
      deno_json_path.display()
    )
  })?;

  let (publish_order_graph, prepared_package_by_name) =
    prepare_packages_for_publishing(&cli_factory, deno_json, import_map)
      .await?;

  if prepared_package_by_name.is_empty() {
    bail!("No packages to publish");
  }

  if publish_flags.dry_run {
    log::warn!(
      "{} Aborting due to --dry-run",
      crate::colors::yellow("Warning")
    );
    return Ok(());
  }

  perform_publish(
    cli_factory.http_client(),
    publish_order_graph,
    prepared_package_by_name,
    auth_method,
  )
  .await
}