mirror of
https://github.com/denoland/deno.git
synced 2024-11-25 15:29:32 -05:00
feat(unstable): tar up directory with deno.json (#21228)
Co-authored-by: David Sherret <dsherret@gmail.com> Co-authored-by: Luca Casonato <lucacasonato@yahoo.com> Co-authored-by: Luca Casonato <hello@lcas.dev>
This commit is contained in:
parent
778e4c9710
commit
585cf2de89
11 changed files with 844 additions and 3 deletions
1
Cargo.lock
generated
1
Cargo.lock
generated
|
@ -927,6 +927,7 @@ dependencies = [
|
||||||
"serde",
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
"serde_repr",
|
"serde_repr",
|
||||||
|
"sha2",
|
||||||
"shell-escape",
|
"shell-escape",
|
||||||
"tar",
|
"tar",
|
||||||
"tempfile",
|
"tempfile",
|
||||||
|
|
|
@ -117,6 +117,7 @@ rustyline = { version = "=10.0.0", default-features = false, features = ["custom
|
||||||
rustyline-derive = "=0.7.0"
|
rustyline-derive = "=0.7.0"
|
||||||
serde.workspace = true
|
serde.workspace = true
|
||||||
serde_repr.workspace = true
|
serde_repr.workspace = true
|
||||||
|
sha2.workspace = true
|
||||||
shell-escape = "=0.1.5"
|
shell-escape = "=0.1.5"
|
||||||
tar.workspace = true
|
tar.workspace = true
|
||||||
tempfile.workspace = true
|
tempfile.workspace = true
|
||||||
|
|
|
@ -279,6 +279,12 @@ pub struct VendorFlags {
|
||||||
pub force: bool,
|
pub force: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||||
|
pub struct PublishFlags {
|
||||||
|
pub directory: String,
|
||||||
|
pub token: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||||
pub enum DenoSubcommand {
|
pub enum DenoSubcommand {
|
||||||
Bench(BenchFlags),
|
Bench(BenchFlags),
|
||||||
|
@ -305,6 +311,8 @@ pub enum DenoSubcommand {
|
||||||
Types,
|
Types,
|
||||||
Upgrade(UpgradeFlags),
|
Upgrade(UpgradeFlags),
|
||||||
Vendor(VendorFlags),
|
Vendor(VendorFlags),
|
||||||
|
// TODO:
|
||||||
|
Publish(PublishFlags),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl DenoSubcommand {
|
impl DenoSubcommand {
|
||||||
|
@ -715,7 +723,7 @@ impl Flags {
|
||||||
}
|
}
|
||||||
Bundle(_) | Completions(_) | Doc(_) | Fmt(_) | Init(_) | Install(_)
|
Bundle(_) | Completions(_) | Doc(_) | Fmt(_) | Init(_) | Install(_)
|
||||||
| Uninstall(_) | Jupyter(_) | Lsp | Lint(_) | Types | Upgrade(_)
|
| Uninstall(_) | Jupyter(_) | Lsp | Lint(_) | Types | Upgrade(_)
|
||||||
| Vendor(_) => None,
|
| Vendor(_) | Publish(_) => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -911,6 +919,8 @@ pub fn flags_from_vec(args: Vec<String>) -> clap::error::Result<Flags> {
|
||||||
"uninstall" => uninstall_parse(&mut flags, &mut m),
|
"uninstall" => uninstall_parse(&mut flags, &mut m),
|
||||||
"upgrade" => upgrade_parse(&mut flags, &mut m),
|
"upgrade" => upgrade_parse(&mut flags, &mut m),
|
||||||
"vendor" => vendor_parse(&mut flags, &mut m),
|
"vendor" => vendor_parse(&mut flags, &mut m),
|
||||||
|
// TODO:
|
||||||
|
"do-not-use-publish" => publish_parse(&mut flags, &mut m),
|
||||||
_ => unreachable!(),
|
_ => unreachable!(),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
|
@ -1045,6 +1055,7 @@ fn clap_root() -> Command {
|
||||||
.subcommand(uninstall_subcommand())
|
.subcommand(uninstall_subcommand())
|
||||||
.subcommand(lsp_subcommand())
|
.subcommand(lsp_subcommand())
|
||||||
.subcommand(lint_subcommand())
|
.subcommand(lint_subcommand())
|
||||||
|
.subcommand(publish_subcommand())
|
||||||
.subcommand(repl_subcommand())
|
.subcommand(repl_subcommand())
|
||||||
.subcommand(task_subcommand())
|
.subcommand(task_subcommand())
|
||||||
.subcommand(test_subcommand())
|
.subcommand(test_subcommand())
|
||||||
|
@ -2302,6 +2313,27 @@ Remote modules and multiple modules may also be specified:
|
||||||
.arg(ca_file_arg()))
|
.arg(ca_file_arg()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn publish_subcommand() -> Command {
|
||||||
|
Command::new("do-not-use-publish")
|
||||||
|
.hide(true)
|
||||||
|
.about("Publish a package to the Deno registry")
|
||||||
|
// TODO: .long_about()
|
||||||
|
.defer(|cmd| {
|
||||||
|
cmd.arg(
|
||||||
|
Arg::new("directory")
|
||||||
|
.help(
|
||||||
|
"The directory to the package, or workspace of packages to publish",
|
||||||
|
)
|
||||||
|
.value_hint(ValueHint::DirPath)
|
||||||
|
.required(true),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("token")
|
||||||
|
.help("The API token to use when publishing. If unset, interactive authentication will be used.")
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
fn compile_args(app: Command) -> Command {
|
fn compile_args(app: Command) -> Command {
|
||||||
compile_args_without_check_args(app.arg(no_check_arg()))
|
compile_args_without_check_args(app.arg(no_check_arg()))
|
||||||
}
|
}
|
||||||
|
@ -3722,6 +3754,13 @@ fn vendor_parse(flags: &mut Flags, matches: &mut ArgMatches) {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn publish_parse(flags: &mut Flags, matches: &mut ArgMatches) {
|
||||||
|
flags.subcommand = DenoSubcommand::Publish(PublishFlags {
|
||||||
|
directory: matches.remove_one::<String>("directory").unwrap(),
|
||||||
|
token: matches.remove_one("token"),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
fn compile_args_parse(flags: &mut Flags, matches: &mut ArgMatches) {
|
fn compile_args_parse(flags: &mut Flags, matches: &mut ArgMatches) {
|
||||||
compile_args_without_check_parse(flags, matches);
|
compile_args_without_check_parse(flags, matches);
|
||||||
no_check_arg_parse(flags, matches);
|
no_check_arg_parse(flags, matches);
|
||||||
|
|
2
cli/cache/mod.rs
vendored
2
cli/cache/mod.rs
vendored
|
@ -165,7 +165,7 @@ impl FetchCacher {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static DENO_REGISTRY_URL: Lazy<Url> = Lazy::new(|| {
|
pub(crate) static DENO_REGISTRY_URL: Lazy<Url> = Lazy::new(|| {
|
||||||
let env_var_name = "DENO_REGISTRY_URL";
|
let env_var_name = "DENO_REGISTRY_URL";
|
||||||
if let Ok(registry_url) = std::env::var(env_var_name) {
|
if let Ok(registry_url) = std::env::var(env_var_name) {
|
||||||
// ensure there is a trailing slash for the directory
|
// ensure there is a trailing slash for the directory
|
||||||
|
|
|
@ -259,7 +259,7 @@ impl HttpClient {
|
||||||
result
|
result
|
||||||
}
|
}
|
||||||
|
|
||||||
fn client(&self) -> Result<&reqwest::Client, AnyError> {
|
pub(crate) fn client(&self) -> Result<&reqwest::Client, AnyError> {
|
||||||
self.cell.get_or_try_init(|| {
|
self.cell.get_or_try_init(|| {
|
||||||
create_http_client(
|
create_http_client(
|
||||||
get_user_agent(),
|
get_user_agent(),
|
||||||
|
|
|
@ -204,6 +204,10 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
|
||||||
DenoSubcommand::Vendor(vendor_flags) => spawn_subcommand(async {
|
DenoSubcommand::Vendor(vendor_flags) => spawn_subcommand(async {
|
||||||
tools::vendor::vendor(flags, vendor_flags).await
|
tools::vendor::vendor(flags, vendor_flags).await
|
||||||
}),
|
}),
|
||||||
|
// TODO:
|
||||||
|
DenoSubcommand::Publish(publish_flags) => spawn_subcommand(async {
|
||||||
|
tools::registry::publish(flags, publish_flags).await
|
||||||
|
}),
|
||||||
};
|
};
|
||||||
|
|
||||||
handle.await?
|
handle.await?
|
||||||
|
|
|
@ -12,6 +12,7 @@ pub mod init;
|
||||||
pub mod installer;
|
pub mod installer;
|
||||||
pub mod jupyter;
|
pub mod jupyter;
|
||||||
pub mod lint;
|
pub mod lint;
|
||||||
|
pub mod registry;
|
||||||
pub mod repl;
|
pub mod repl;
|
||||||
pub mod run;
|
pub mod run;
|
||||||
pub mod task;
|
pub mod task;
|
||||||
|
|
513
cli/tools/registry/mod.rs
Normal file
513
cli/tools/registry/mod.rs
Normal file
|
@ -0,0 +1,513 @@
|
||||||
|
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||||
|
|
||||||
|
use std::fmt::Write;
|
||||||
|
use std::io::IsTerminal;
|
||||||
|
use std::path::Path;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use base64::prelude::BASE64_STANDARD;
|
||||||
|
use base64::Engine;
|
||||||
|
use deno_config::ConfigFile;
|
||||||
|
use deno_core::anyhow;
|
||||||
|
use deno_core::anyhow::anyhow;
|
||||||
|
use deno_core::anyhow::bail;
|
||||||
|
use deno_core::anyhow::Context;
|
||||||
|
use deno_core::error::AnyError;
|
||||||
|
use deno_core::serde_json;
|
||||||
|
use deno_core::serde_json::json;
|
||||||
|
use deno_core::url::Url;
|
||||||
|
use deno_runtime::colors;
|
||||||
|
use deno_runtime::deno_fetch::reqwest;
|
||||||
|
use http::header::AUTHORIZATION;
|
||||||
|
use http::header::CONTENT_ENCODING;
|
||||||
|
use hyper::body::Bytes;
|
||||||
|
use import_map::ImportMapWithDiagnostics;
|
||||||
|
use serde::de::DeserializeOwned;
|
||||||
|
use serde::Serialize;
|
||||||
|
use sha2::Digest;
|
||||||
|
|
||||||
|
use crate::args::Flags;
|
||||||
|
use crate::args::PublishFlags;
|
||||||
|
use crate::factory::CliFactory;
|
||||||
|
use crate::http_util::HttpClient;
|
||||||
|
use crate::util::import_map::ImportMapUnfurler;
|
||||||
|
|
||||||
|
mod tar;
|
||||||
|
|
||||||
|
enum AuthMethod {
|
||||||
|
Interactive,
|
||||||
|
Token(String),
|
||||||
|
Oidc(OidcConfig),
|
||||||
|
}
|
||||||
|
|
||||||
|
struct OidcConfig {
|
||||||
|
url: String,
|
||||||
|
token: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
struct PreparedPublishPackage {
|
||||||
|
scope: String,
|
||||||
|
package: String,
|
||||||
|
version: String,
|
||||||
|
tarball_hash: String,
|
||||||
|
tarball: Bytes,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(serde::Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct PublishingTaskError {
|
||||||
|
pub code: String,
|
||||||
|
pub message: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(serde::Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct PublishingTask {
|
||||||
|
pub id: String,
|
||||||
|
pub status: String,
|
||||||
|
pub error: Option<PublishingTaskError>,
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn prepare_publish(
|
||||||
|
initial_cwd: &Path,
|
||||||
|
directory: PathBuf,
|
||||||
|
) -> Result<PreparedPublishPackage, AnyError> {
|
||||||
|
// TODO: handle publishing without deno.json
|
||||||
|
|
||||||
|
let directory_path = initial_cwd.join(directory);
|
||||||
|
// TODO: doesn't handle jsonc
|
||||||
|
let deno_json_path = directory_path.join("deno.json");
|
||||||
|
let deno_json = ConfigFile::read(&deno_json_path).with_context(|| {
|
||||||
|
format!(
|
||||||
|
"Failed to read deno configuration file at {}",
|
||||||
|
deno_json_path.display()
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let Some(version) = deno_json.json.version.clone() else {
|
||||||
|
bail!("{} is missing 'version' field", deno_json_path.display());
|
||||||
|
};
|
||||||
|
let Some(name) = deno_json.json.name.clone() else {
|
||||||
|
bail!("{} is missing 'name' field", deno_json_path.display());
|
||||||
|
};
|
||||||
|
let Some(name) = name.strip_prefix('@') else {
|
||||||
|
bail!("Invalid package name, use '@<scope_name>/<package_name> format");
|
||||||
|
};
|
||||||
|
let Some((scope, package_name)) = name.split_once('/') else {
|
||||||
|
bail!("Invalid package name, use '@<scope_name>/<package_name> format");
|
||||||
|
};
|
||||||
|
|
||||||
|
// TODO: support `importMap` field in deno.json
|
||||||
|
assert!(deno_json.to_import_map_path().is_none());
|
||||||
|
|
||||||
|
let deno_json_url = Url::from_file_path(&deno_json_path)
|
||||||
|
.map_err(|_| anyhow!("deno.json path is not a valid file URL"))?;
|
||||||
|
let ImportMapWithDiagnostics { import_map, .. } =
|
||||||
|
import_map::parse_from_value(
|
||||||
|
&deno_json_url,
|
||||||
|
deno_json.to_import_map_value(),
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let unfurler = ImportMapUnfurler::new(import_map);
|
||||||
|
|
||||||
|
let tarball = tar::create_gzipped_tarball(directory_path, unfurler)
|
||||||
|
.context("Failed to create a tarball")?;
|
||||||
|
|
||||||
|
let tarball_hash_bytes: Vec<u8> =
|
||||||
|
sha2::Sha256::digest(&tarball).iter().cloned().collect();
|
||||||
|
let mut tarball_hash = "sha256-".to_string();
|
||||||
|
for byte in tarball_hash_bytes {
|
||||||
|
write!(&mut tarball_hash, "{:02x}", byte).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(PreparedPublishPackage {
|
||||||
|
scope: scope.to_string(),
|
||||||
|
package: package_name.to_string(),
|
||||||
|
version: version.to_string(),
|
||||||
|
tarball_hash,
|
||||||
|
tarball,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
#[serde(tag = "permission")]
|
||||||
|
pub enum Permission<'s> {
|
||||||
|
#[serde(rename = "package/publish", rename_all = "camelCase")]
|
||||||
|
VersionPublish {
|
||||||
|
scope: &'s str,
|
||||||
|
package: &'s str,
|
||||||
|
version: &'s str,
|
||||||
|
tarball_hash: &'s str,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(serde::Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
struct CreateAuthorizationResponse {
|
||||||
|
verification_url: String,
|
||||||
|
code: String,
|
||||||
|
exchange_token: String,
|
||||||
|
poll_interval: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(serde::Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
struct ExchangeAuthorizationResponse {
|
||||||
|
token: String,
|
||||||
|
user: User,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(serde::Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
struct User {
|
||||||
|
name: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(serde::Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
struct ApiError {
|
||||||
|
pub code: String,
|
||||||
|
pub message: String,
|
||||||
|
#[serde(skip)]
|
||||||
|
pub x_deno_ray: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for ApiError {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
write!(f, "{} ({})", self.message, self.code)?;
|
||||||
|
if let Some(x_deno_ray) = &self.x_deno_ray {
|
||||||
|
write!(f, "[x-deno-ray: {}]", x_deno_ray)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Debug for ApiError {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
std::fmt::Display::fmt(self, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::error::Error for ApiError {}
|
||||||
|
|
||||||
|
async fn parse_response<T: DeserializeOwned>(
|
||||||
|
response: reqwest::Response,
|
||||||
|
) -> Result<T, ApiError> {
|
||||||
|
let status = response.status();
|
||||||
|
let x_deno_ray = response
|
||||||
|
.headers()
|
||||||
|
.get("x-deno-ray")
|
||||||
|
.and_then(|value| value.to_str().ok())
|
||||||
|
.map(|s| s.to_string());
|
||||||
|
let text = response.text().await.unwrap();
|
||||||
|
|
||||||
|
if !status.is_success() {
|
||||||
|
match serde_json::from_str::<ApiError>(&text) {
|
||||||
|
Ok(mut err) => {
|
||||||
|
err.x_deno_ray = x_deno_ray;
|
||||||
|
return Err(err);
|
||||||
|
}
|
||||||
|
Err(_) => {
|
||||||
|
let err = ApiError {
|
||||||
|
code: "unknown".to_string(),
|
||||||
|
message: format!("{}: {}", status, text),
|
||||||
|
x_deno_ray,
|
||||||
|
};
|
||||||
|
return Err(err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
serde_json::from_str(&text).map_err(|err| ApiError {
|
||||||
|
code: "unknown".to_string(),
|
||||||
|
message: format!("Failed to parse response: {}, response: '{}'", err, text),
|
||||||
|
x_deno_ray,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(serde::Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
struct OidcTokenResponse {
|
||||||
|
value: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn perform_publish(
|
||||||
|
http_client: &Arc<HttpClient>,
|
||||||
|
packages: Vec<PreparedPublishPackage>,
|
||||||
|
auth_method: AuthMethod,
|
||||||
|
) -> Result<(), AnyError> {
|
||||||
|
let client = http_client.client()?;
|
||||||
|
let registry_url = crate::cache::DENO_REGISTRY_URL.to_string();
|
||||||
|
|
||||||
|
let authorization = match auth_method {
|
||||||
|
AuthMethod::Interactive => {
|
||||||
|
let verifier = uuid::Uuid::new_v4().to_string();
|
||||||
|
let challenge = BASE64_STANDARD.encode(sha2::Sha256::digest(&verifier));
|
||||||
|
|
||||||
|
let permissions = packages
|
||||||
|
.iter()
|
||||||
|
.map(|package| Permission::VersionPublish {
|
||||||
|
scope: &package.scope,
|
||||||
|
package: &package.package,
|
||||||
|
version: &package.version,
|
||||||
|
tarball_hash: &package.tarball_hash,
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
let response = client
|
||||||
|
.post(format!("{}/authorizations", registry_url))
|
||||||
|
.json(&serde_json::json!({
|
||||||
|
"challenge": challenge,
|
||||||
|
"permissions": permissions,
|
||||||
|
}))
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("Failed to create interactive authorization")?;
|
||||||
|
let auth = parse_response::<CreateAuthorizationResponse>(response)
|
||||||
|
.await
|
||||||
|
.context("Failed to create interactive authorization")?;
|
||||||
|
|
||||||
|
print!(
|
||||||
|
"Visit {} to authorize publishing of",
|
||||||
|
colors::cyan(format!("{}?code={}", auth.verification_url, auth.code))
|
||||||
|
);
|
||||||
|
if packages.len() > 1 {
|
||||||
|
println!(" {} packages", packages.len());
|
||||||
|
} else {
|
||||||
|
println!(" @{}/{}", packages[0].scope, packages[0].package);
|
||||||
|
}
|
||||||
|
|
||||||
|
println!("{}", colors::gray("Waiting..."));
|
||||||
|
|
||||||
|
let interval = std::time::Duration::from_secs(auth.poll_interval);
|
||||||
|
|
||||||
|
loop {
|
||||||
|
tokio::time::sleep(interval).await;
|
||||||
|
let response = client
|
||||||
|
.post(format!("{}/authorizations/exchange", registry_url))
|
||||||
|
.json(&serde_json::json!({
|
||||||
|
"exchangeToken": auth.exchange_token,
|
||||||
|
"verifier": verifier,
|
||||||
|
}))
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("Failed to exchange authorization")?;
|
||||||
|
let res =
|
||||||
|
parse_response::<ExchangeAuthorizationResponse>(response).await;
|
||||||
|
match res {
|
||||||
|
Ok(res) => {
|
||||||
|
println!(
|
||||||
|
"{} {} {}",
|
||||||
|
colors::green("Authorization successful."),
|
||||||
|
colors::gray("Authenticated as"),
|
||||||
|
colors::cyan(res.user.name)
|
||||||
|
);
|
||||||
|
break format!("Bearer {}", res.token);
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
if err.code == "authorizationPending" {
|
||||||
|
continue;
|
||||||
|
} else {
|
||||||
|
return Err(err).context("Failed to exchange authorization");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
AuthMethod::Token(token) => format!("Bearer {}", token),
|
||||||
|
AuthMethod::Oidc(oidc_config) => {
|
||||||
|
let permissions = packages
|
||||||
|
.iter()
|
||||||
|
.map(|package| Permission::VersionPublish {
|
||||||
|
scope: &package.scope,
|
||||||
|
package: &package.package,
|
||||||
|
version: &package.version,
|
||||||
|
tarball_hash: &package.tarball_hash,
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
let audience = json!({ "permissions": permissions }).to_string();
|
||||||
|
|
||||||
|
let url = format!(
|
||||||
|
"{}&audience={}",
|
||||||
|
oidc_config.url,
|
||||||
|
percent_encoding::percent_encode(
|
||||||
|
audience.as_bytes(),
|
||||||
|
percent_encoding::NON_ALPHANUMERIC
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
let response = client
|
||||||
|
.get(url)
|
||||||
|
.bearer_auth(oidc_config.token)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.context("Failed to get OIDC token")?;
|
||||||
|
let status = response.status();
|
||||||
|
let text = response.text().await.with_context(|| {
|
||||||
|
format!("Failed to get OIDC token: status {}", status)
|
||||||
|
})?;
|
||||||
|
if !status.is_success() {
|
||||||
|
bail!(
|
||||||
|
"Failed to get OIDC token: status {}, response: '{}'",
|
||||||
|
status,
|
||||||
|
text
|
||||||
|
);
|
||||||
|
}
|
||||||
|
let OidcTokenResponse { value } = serde_json::from_str(&text)
|
||||||
|
.with_context(|| {
|
||||||
|
format!("Failed to parse OIDC token: '{}' (status {})", text, status)
|
||||||
|
})?;
|
||||||
|
format!("githuboidc {}", value)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
for package in packages {
|
||||||
|
println!(
|
||||||
|
"{} @{}/{}@{} ...",
|
||||||
|
colors::intense_blue("Publishing"),
|
||||||
|
package.scope,
|
||||||
|
package.package,
|
||||||
|
package.version
|
||||||
|
);
|
||||||
|
|
||||||
|
let url = format!(
|
||||||
|
"{}/scopes/{}/packages/{}/versions/{}",
|
||||||
|
registry_url, package.scope, package.package, package.version
|
||||||
|
);
|
||||||
|
|
||||||
|
let response = client
|
||||||
|
.post(url)
|
||||||
|
.header(AUTHORIZATION, &authorization)
|
||||||
|
.header(CONTENT_ENCODING, "gzip")
|
||||||
|
.body(package.tarball)
|
||||||
|
.send()
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut task = parse_response::<PublishingTask>(response)
|
||||||
|
.await
|
||||||
|
.with_context(|| {
|
||||||
|
format!(
|
||||||
|
"Failed to publish @{}/{} at {}",
|
||||||
|
package.scope, package.package, package.version
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let interval = std::time::Duration::from_secs(2);
|
||||||
|
while task.status != "success" && task.status != "failure" {
|
||||||
|
tokio::time::sleep(interval).await;
|
||||||
|
let resp = client
|
||||||
|
.get(format!("{}/publish_status/{}", registry_url, task.id))
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.with_context(|| {
|
||||||
|
format!(
|
||||||
|
"Failed to get publishing status for @{}/{} at {}",
|
||||||
|
package.scope, package.package, package.version
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
task =
|
||||||
|
parse_response::<PublishingTask>(resp)
|
||||||
|
.await
|
||||||
|
.with_context(|| {
|
||||||
|
format!(
|
||||||
|
"Failed to get publishing status for @{}/{} at {}",
|
||||||
|
package.scope, package.package, package.version
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(error) = task.error {
|
||||||
|
bail!(
|
||||||
|
"{} @{}/{} at {}: {}",
|
||||||
|
colors::red("Failed to publish"),
|
||||||
|
package.scope,
|
||||||
|
package.package,
|
||||||
|
package.version,
|
||||||
|
error.message
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
println!(
|
||||||
|
"{} @{}/{}@{}",
|
||||||
|
colors::green("Successfully published"),
|
||||||
|
package.scope,
|
||||||
|
package.package,
|
||||||
|
package.version
|
||||||
|
);
|
||||||
|
println!(
|
||||||
|
"{}/@{}/{}/{}_meta.json",
|
||||||
|
registry_url, package.scope, package.package, package.version
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_gh_oidc_env_vars() -> Option<Result<(String, String), AnyError>> {
|
||||||
|
if std::env::var("GITHUB_ACTIONS").unwrap_or_default() == "true" {
|
||||||
|
let url = std::env::var("ACTIONS_ID_TOKEN_REQUEST_URL");
|
||||||
|
let token = std::env::var("ACTIONS_ID_TOKEN_REQUEST_TOKEN");
|
||||||
|
match (url, token) {
|
||||||
|
(Ok(url), Ok(token)) => Some(Ok((url, token))),
|
||||||
|
(Err(_), Err(_)) => Some(Err(anyhow::anyhow!(
|
||||||
|
"No means to authenticate. Pass a token to `--token`, or enable tokenless publishing from GitHub Actions using OIDC. Learn more at https://deno.co/ghoidc"
|
||||||
|
))),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn publish(
|
||||||
|
flags: Flags,
|
||||||
|
publish_flags: PublishFlags,
|
||||||
|
) -> Result<(), AnyError> {
|
||||||
|
let cli_factory = CliFactory::from_flags(flags).await?;
|
||||||
|
|
||||||
|
let auth_method = match publish_flags.token {
|
||||||
|
Some(token) => AuthMethod::Token(token),
|
||||||
|
None => match get_gh_oidc_env_vars() {
|
||||||
|
Some(Ok((url, token))) => AuthMethod::Oidc(OidcConfig { url, token }),
|
||||||
|
Some(Err(err)) => return Err(err),
|
||||||
|
None if std::io::stdin().is_terminal() => AuthMethod::Interactive,
|
||||||
|
None => {
|
||||||
|
bail!("No means to authenticate. Pass a token to `--token`.")
|
||||||
|
}
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
let initial_cwd =
|
||||||
|
std::env::current_dir().with_context(|| "Failed getting cwd.")?;
|
||||||
|
|
||||||
|
let directory_path = initial_cwd.join(publish_flags.directory);
|
||||||
|
// TODO: doesn't handle jsonc
|
||||||
|
let deno_json_path = directory_path.join("deno.json");
|
||||||
|
let deno_json = ConfigFile::read(&deno_json_path).with_context(|| {
|
||||||
|
format!(
|
||||||
|
"Failed to read deno.json file at {}",
|
||||||
|
deno_json_path.display()
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut packages =
|
||||||
|
Vec::with_capacity(std::cmp::max(1, deno_json.json.workspaces.len()));
|
||||||
|
|
||||||
|
let members = &deno_json.json.workspaces;
|
||||||
|
if members.is_empty() {
|
||||||
|
packages.push(prepare_publish(&initial_cwd, directory_path).await?);
|
||||||
|
} else {
|
||||||
|
println!("Publishing a workspace...");
|
||||||
|
for member in members {
|
||||||
|
let member_dir = directory_path.join(member);
|
||||||
|
packages.push(prepare_publish(&initial_cwd, member_dir).await?);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if packages.is_empty() {
|
||||||
|
bail!("No packages to publish");
|
||||||
|
}
|
||||||
|
|
||||||
|
perform_publish(cli_factory.http_client(), packages, auth_method).await
|
||||||
|
}
|
85
cli/tools/registry/tar.rs
Normal file
85
cli/tools/registry/tar.rs
Normal file
|
@ -0,0 +1,85 @@
|
||||||
|
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||||
|
|
||||||
|
use deno_core::anyhow;
|
||||||
|
use deno_core::anyhow::Context;
|
||||||
|
use deno_core::error::AnyError;
|
||||||
|
use deno_core::url::Url;
|
||||||
|
use hyper::body::Bytes;
|
||||||
|
use std::io::Write;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use tar::Header;
|
||||||
|
|
||||||
|
use crate::util::import_map::ImportMapUnfurler;
|
||||||
|
|
||||||
|
pub fn create_gzipped_tarball(
|
||||||
|
dir: PathBuf,
|
||||||
|
// TODO(bartlomieju): this is too specific, factor it out into a callback that
|
||||||
|
// returns data
|
||||||
|
unfurler: ImportMapUnfurler,
|
||||||
|
) -> Result<Bytes, AnyError> {
|
||||||
|
let mut tar = TarGzArchive::new();
|
||||||
|
let dir_url = Url::from_directory_path(&dir).unwrap();
|
||||||
|
|
||||||
|
for entry in walkdir::WalkDir::new(dir).follow_links(false) {
|
||||||
|
let entry = entry?;
|
||||||
|
|
||||||
|
if entry.file_type().is_file() {
|
||||||
|
let url = Url::from_file_path(entry.path())
|
||||||
|
.map_err(|_| anyhow::anyhow!("Invalid file path {:?}", entry.path()))?;
|
||||||
|
let relative_path = dir_url
|
||||||
|
.make_relative(&url)
|
||||||
|
.expect("children can be relative to parent");
|
||||||
|
let data = std::fs::read(entry.path())
|
||||||
|
.with_context(|| format!("Unable to read file {:?}", entry.path()))?;
|
||||||
|
let content = unfurler
|
||||||
|
.unfurl(&url, data)
|
||||||
|
.with_context(|| format!("Unable to unfurl file {:?}", entry.path()))?;
|
||||||
|
tar.add_file(relative_path, &content).with_context(|| {
|
||||||
|
format!("Unable to add file to tarball {:?}", entry.path())
|
||||||
|
})?;
|
||||||
|
} else if entry.file_type().is_dir() {
|
||||||
|
// skip
|
||||||
|
} else {
|
||||||
|
log::warn!("Unsupported file type at path {:?}", entry.path());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let v = tar.finish().context("Unable to finish tarball")?;
|
||||||
|
Ok(Bytes::from(v))
|
||||||
|
}
|
||||||
|
|
||||||
|
struct TarGzArchive {
|
||||||
|
builder: tar::Builder<Vec<u8>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TarGzArchive {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
builder: tar::Builder::new(Vec::new()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_file(
|
||||||
|
&mut self,
|
||||||
|
path: String,
|
||||||
|
data: &[u8],
|
||||||
|
) -> Result<(), AnyError> {
|
||||||
|
let mut header = Header::new_gnu();
|
||||||
|
header.set_size(data.len() as u64);
|
||||||
|
self.builder.append_data(&mut header, &path, data)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn finish(mut self) -> Result<Vec<u8>, AnyError> {
|
||||||
|
self.builder.finish()?;
|
||||||
|
let bytes = self.builder.into_inner()?;
|
||||||
|
let mut gz_bytes = Vec::new();
|
||||||
|
let mut encoder = flate2::write::GzEncoder::new(
|
||||||
|
&mut gz_bytes,
|
||||||
|
flate2::Compression::default(),
|
||||||
|
);
|
||||||
|
encoder.write_all(&bytes)?;
|
||||||
|
encoder.finish()?;
|
||||||
|
Ok(gz_bytes)
|
||||||
|
}
|
||||||
|
}
|
196
cli/util/import_map.rs
Normal file
196
cli/util/import_map.rs
Normal file
|
@ -0,0 +1,196 @@
|
||||||
|
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||||
|
|
||||||
|
use deno_ast::ParsedSource;
|
||||||
|
use deno_core::error::AnyError;
|
||||||
|
use deno_core::ModuleSpecifier;
|
||||||
|
use deno_graph::DefaultModuleAnalyzer;
|
||||||
|
use deno_graph::MediaType;
|
||||||
|
use deno_graph::TypeScriptReference;
|
||||||
|
use import_map::ImportMap;
|
||||||
|
|
||||||
|
pub struct ImportMapUnfurler {
|
||||||
|
import_map: ImportMap,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ImportMapUnfurler {
|
||||||
|
pub fn new(import_map: ImportMap) -> Self {
|
||||||
|
Self { import_map }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn unfurl(
|
||||||
|
&self,
|
||||||
|
url: &ModuleSpecifier,
|
||||||
|
data: Vec<u8>,
|
||||||
|
) -> Result<Vec<u8>, AnyError> {
|
||||||
|
let media_type = MediaType::from_specifier(url);
|
||||||
|
|
||||||
|
match media_type {
|
||||||
|
MediaType::JavaScript
|
||||||
|
| MediaType::Jsx
|
||||||
|
| MediaType::Mjs
|
||||||
|
| MediaType::Cjs
|
||||||
|
| MediaType::TypeScript
|
||||||
|
| MediaType::Mts
|
||||||
|
| MediaType::Cts
|
||||||
|
| MediaType::Dts
|
||||||
|
| MediaType::Dmts
|
||||||
|
| MediaType::Dcts
|
||||||
|
| MediaType::Tsx => {
|
||||||
|
// continue
|
||||||
|
}
|
||||||
|
MediaType::SourceMap
|
||||||
|
| MediaType::Unknown
|
||||||
|
| MediaType::Json
|
||||||
|
| MediaType::Wasm
|
||||||
|
| MediaType::TsBuildInfo => {
|
||||||
|
// not unfurlable data
|
||||||
|
return Ok(data);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let text = String::from_utf8(data)?;
|
||||||
|
let parsed_source = deno_ast::parse_module(deno_ast::ParseParams {
|
||||||
|
specifier: url.to_string(),
|
||||||
|
text_info: deno_ast::SourceTextInfo::from_string(text),
|
||||||
|
media_type,
|
||||||
|
capture_tokens: false,
|
||||||
|
maybe_syntax: None,
|
||||||
|
scope_analysis: false,
|
||||||
|
})?;
|
||||||
|
let mut text_changes = Vec::new();
|
||||||
|
let module_info = DefaultModuleAnalyzer::module_info(&parsed_source);
|
||||||
|
let mut analyze_specifier =
|
||||||
|
|specifier: &str, range: &deno_graph::PositionRange| {
|
||||||
|
let resolved = self.import_map.resolve(specifier, url);
|
||||||
|
if let Ok(resolved) = resolved {
|
||||||
|
let new_text = if resolved.scheme() == "file" {
|
||||||
|
format!("./{}", url.make_relative(&resolved).unwrap())
|
||||||
|
} else {
|
||||||
|
resolved.to_string()
|
||||||
|
};
|
||||||
|
text_changes.push(deno_ast::TextChange {
|
||||||
|
range: to_range(&parsed_source, range),
|
||||||
|
new_text,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
for dep in &module_info.dependencies {
|
||||||
|
analyze_specifier(&dep.specifier, &dep.specifier_range);
|
||||||
|
}
|
||||||
|
for ts_ref in &module_info.ts_references {
|
||||||
|
let specifier_with_range = match ts_ref {
|
||||||
|
TypeScriptReference::Path(range) => range,
|
||||||
|
TypeScriptReference::Types(range) => range,
|
||||||
|
};
|
||||||
|
analyze_specifier(
|
||||||
|
&specifier_with_range.text,
|
||||||
|
&specifier_with_range.range,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
for specifier_with_range in &module_info.jsdoc_imports {
|
||||||
|
analyze_specifier(
|
||||||
|
&specifier_with_range.text,
|
||||||
|
&specifier_with_range.range,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if let Some(specifier_with_range) = &module_info.jsx_import_source {
|
||||||
|
analyze_specifier(
|
||||||
|
&specifier_with_range.text,
|
||||||
|
&specifier_with_range.range,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
Ok(
|
||||||
|
deno_ast::apply_text_changes(
|
||||||
|
parsed_source.text_info().text_str(),
|
||||||
|
text_changes,
|
||||||
|
)
|
||||||
|
.into_bytes(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Test-only convenience wrapper: runs [`Self::unfurl`] on `data` and
/// decodes the resulting bytes as a UTF-8 `String`.
///
/// Returns an error if unfurling fails or the output is not valid UTF-8.
#[cfg(test)]
fn unfurl_to_string(
  &self,
  url: &ModuleSpecifier,
  data: Vec<u8>,
) -> Result<String, AnyError> {
  let unfurled_bytes = self.unfurl(url, data)?;
  Ok(String::from_utf8(unfurled_bytes)?)
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn to_range(
|
||||||
|
parsed_source: &ParsedSource,
|
||||||
|
range: &deno_graph::PositionRange,
|
||||||
|
) -> std::ops::Range<usize> {
|
||||||
|
let mut range = range
|
||||||
|
.as_source_range(parsed_source.text_info())
|
||||||
|
.as_byte_range(parsed_source.text_info().range().start);
|
||||||
|
let text = &parsed_source.text_info().text_str()[range.clone()];
|
||||||
|
if text.starts_with('"') || text.starts_with('\'') {
|
||||||
|
range.start += 1;
|
||||||
|
}
|
||||||
|
if text.ends_with('"') || text.ends_with('\'') {
|
||||||
|
range.end -= 1;
|
||||||
|
}
|
||||||
|
range
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
  use super::*;
  use deno_ast::ModuleSpecifier;
  use deno_core::serde_json::json;
  use import_map::ImportMapWithDiagnostics;
  use pretty_assertions::assert_eq;

  #[test]
  fn test_unfurling() {
    // Build an import map equivalent to a deno.json with bare-specifier,
    // directory, and exact-file mappings.
    let deno_json_url =
      ModuleSpecifier::parse("file:///dev/deno.json").unwrap();
    let config = json!({
      "imports": {
        "express": "npm:express@5",
        "lib/": "./lib/",
        "fizz": "./fizz/mod.ts"
      }
    });
    let ImportMapWithDiagnostics { import_map, .. } =
      import_map::parse_from_value(&deno_json_url, config).unwrap();
    let unfurler = ImportMapUnfurler::new(import_map);

    // A TypeScript module gets its mapped specifiers rewritten.
    {
      let source_code = r#"import express from "express";"
import foo from "lib/foo.ts";
import bar from "lib/bar.ts";
import fizz from "fizz";
"#;
      let ts_specifier =
        ModuleSpecifier::parse("file:///dev/mod.ts").unwrap();
      let actual = unfurler
        .unfurl_to_string(&ts_specifier, source_code.as_bytes().to_vec())
        .unwrap();
      let expected_source = r#"import express from "npm:express@5";"
import foo from "./lib/foo.ts";
import bar from "./lib/bar.ts";
import fizz from "./fizz/mod.ts";
"#;
      assert_eq!(actual, expected_source);
    }

    // A file with an "unknown" media type must pass through unchanged.
    {
      let source_code = r#"import express from "express";"
import foo from "lib/foo.ts";
import bar from "lib/bar.ts";
import fizz from "fizz";
"#;
      let extensionless_specifier =
        ModuleSpecifier::parse("file:///dev/mod").unwrap();
      let actual = unfurler
        .unfurl_to_string(
          &extensionless_specifier,
          source_code.as_bytes().to_vec(),
        )
        .unwrap();
      assert_eq!(actual, source_code);
    }
  }
}
|
|
@ -9,6 +9,7 @@ pub mod draw_thread;
|
||||||
pub mod file_watcher;
|
pub mod file_watcher;
|
||||||
pub mod fs;
|
pub mod fs;
|
||||||
pub mod glob;
|
pub mod glob;
|
||||||
|
pub mod import_map;
|
||||||
pub mod logger;
|
pub mod logger;
|
||||||
pub mod path;
|
pub mod path;
|
||||||
pub mod progress_bar;
|
pub mod progress_bar;
|
||||||
|
|
Loading…
Reference in a new issue