From 585cf2de89f1d253bc14e0835790445d14e324f3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= <biwanczuk@gmail.com>
Date: Fri, 24 Nov 2023 00:38:07 +0100
Subject: [PATCH] feat(unstable): tar up directory with deno.json (#21228)

Co-authored-by: David Sherret <dsherret@gmail.com>
Co-authored-by: Luca Casonato <hello@lcas.dev>
Co-authored-by: Luca Casonato <lucacasonato@yahoo.com>
---
 Cargo.lock                |   1 +
 cli/Cargo.toml            |   1 +
 cli/args/flags.rs         |  41 ++-
 cli/cache/mod.rs          |   2 +-
 cli/http_util.rs          |   2 +-
 cli/main.rs               |   4 +
 cli/tools/mod.rs          |   1 +
 cli/tools/registry/mod.rs | 513 ++++++++++++++++++++++++++++++++++++++
 cli/tools/registry/tar.rs |  85 +++++++
 cli/util/import_map.rs    | 196 +++++++++++++++
 cli/util/mod.rs           |   1 +
 11 files changed, 844 insertions(+), 3 deletions(-)
 create mode 100644 cli/tools/registry/mod.rs
 create mode 100644 cli/tools/registry/tar.rs
 create mode 100644 cli/util/import_map.rs

diff --git a/Cargo.lock b/Cargo.lock
index d976e18ed0..670e19007d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -927,6 +927,7 @@ dependencies = [
  "serde",
  "serde_json",
  "serde_repr",
+ "sha2",
  "shell-escape",
  "tar",
  "tempfile",
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index 3581e703f2..b4849e6125 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -117,6 +117,7 @@ rustyline = { version = "=10.0.0", default-features = false, features = ["custom
 rustyline-derive = "=0.7.0"
 serde.workspace = true
 serde_repr.workspace = true
+sha2.workspace = true
 shell-escape = "=0.1.5"
 tar.workspace = true
 tempfile.workspace = true
diff --git a/cli/args/flags.rs b/cli/args/flags.rs
index d8d761da60..4f5d82be84 100644
--- a/cli/args/flags.rs
+++ b/cli/args/flags.rs
@@ -279,6 +279,12 @@ pub struct VendorFlags {
   pub force: bool,
 }
 
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct PublishFlags {
+  pub directory: String,
+  pub token: Option<String>,
+}
+
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub enum DenoSubcommand {
   Bench(BenchFlags),
@@ -305,6 +311,8 @@ pub enum DenoSubcommand {
   Types,
   Upgrade(UpgradeFlags),
   Vendor(VendorFlags),
+  // TODO:
+  Publish(PublishFlags),
 }
 
 impl DenoSubcommand {
@@ -715,7 +723,7 @@ impl Flags {
       }
       Bundle(_) | Completions(_) | Doc(_) | Fmt(_) | Init(_) | Install(_)
       | Uninstall(_) | Jupyter(_) | Lsp | Lint(_) | Types | Upgrade(_)
-      | Vendor(_) => None,
+      | Vendor(_) | Publish(_) => None,
     }
   }
 
@@ -911,6 +919,8 @@ pub fn flags_from_vec(args: Vec<String>) -> clap::error::Result<Flags> {
       "uninstall" => uninstall_parse(&mut flags, &mut m),
       "upgrade" => upgrade_parse(&mut flags, &mut m),
       "vendor" => vendor_parse(&mut flags, &mut m),
+      // TODO:
+      "do-not-use-publish" => publish_parse(&mut flags, &mut m),
       _ => unreachable!(),
     }
   } else {
@@ -1045,6 +1055,7 @@ fn clap_root() -> Command {
     .subcommand(uninstall_subcommand())
     .subcommand(lsp_subcommand())
     .subcommand(lint_subcommand())
+    .subcommand(publish_subcommand())
     .subcommand(repl_subcommand())
     .subcommand(task_subcommand())
     .subcommand(test_subcommand())
@@ -2302,6 +2313,27 @@ Remote modules and multiple modules may also be specified:
       .arg(ca_file_arg()))
 }
 
+fn publish_subcommand() -> Command {
+  Command::new("do-not-use-publish")
+    .hide(true)
+    .about("Publish a package to the Deno registry")
+    // TODO: .long_about()
+    .defer(|cmd| {
+      cmd.arg(
+        Arg::new("directory")
+          .help(
+            "The directory to the package, or workspace of packages to publish",
+          )
+          .value_hint(ValueHint::DirPath)
+          .required(true),
+      )
+      .arg(
+        Arg::new("token")
+          .help("The API token to use when publishing. If unset, interactive authentication will be used.")
+      )
+    })
+}
+
 fn compile_args(app: Command) -> Command {
   compile_args_without_check_args(app.arg(no_check_arg()))
 }
@@ -3722,6 +3754,13 @@ fn vendor_parse(flags: &mut Flags, matches: &mut ArgMatches) {
   });
 }
 
+fn publish_parse(flags: &mut Flags, matches: &mut ArgMatches) {
+  flags.subcommand = DenoSubcommand::Publish(PublishFlags {
+    directory: matches.remove_one::<String>("directory").unwrap(),
+    token: matches.remove_one("token"),
+  });
+}
+
 fn compile_args_parse(flags: &mut Flags, matches: &mut ArgMatches) {
   compile_args_without_check_parse(flags, matches);
   no_check_arg_parse(flags, matches);
diff --git a/cli/cache/mod.rs b/cli/cache/mod.rs
index 526236ace6..81c1260987 100644
--- a/cli/cache/mod.rs
+++ b/cli/cache/mod.rs
@@ -165,7 +165,7 @@ impl FetchCacher {
   }
 }
 
-static DENO_REGISTRY_URL: Lazy<Url> = Lazy::new(|| {
+pub(crate) static DENO_REGISTRY_URL: Lazy<Url> = Lazy::new(|| {
   let env_var_name = "DENO_REGISTRY_URL";
   if let Ok(registry_url) = std::env::var(env_var_name) {
     // ensure there is a trailing slash for the directory
diff --git a/cli/http_util.rs b/cli/http_util.rs
index e90e0ee96d..e121f2a50f 100644
--- a/cli/http_util.rs
+++ b/cli/http_util.rs
@@ -259,7 +259,7 @@ impl HttpClient {
     result
   }
 
-  fn client(&self) -> Result<&reqwest::Client, AnyError> {
+  pub(crate) fn client(&self) -> Result<&reqwest::Client, AnyError> {
     self.cell.get_or_try_init(|| {
       create_http_client(
         get_user_agent(),
diff --git a/cli/main.rs b/cli/main.rs
index 6c5a78e617..1fa4ee8512 100644
--- a/cli/main.rs
+++ b/cli/main.rs
@@ -204,6 +204,10 @@ async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
     DenoSubcommand::Vendor(vendor_flags) => spawn_subcommand(async {
       tools::vendor::vendor(flags, vendor_flags).await
     }),
+    // TODO:
+    DenoSubcommand::Publish(publish_flags) => spawn_subcommand(async {
+      tools::registry::publish(flags, publish_flags).await
+    }),
   };
 
   handle.await?
diff --git a/cli/tools/mod.rs b/cli/tools/mod.rs
index 13a37adddb..83d934742d 100644
--- a/cli/tools/mod.rs
+++ b/cli/tools/mod.rs
@@ -12,6 +12,7 @@ pub mod init;
 pub mod installer;
 pub mod jupyter;
 pub mod lint;
+pub mod registry;
 pub mod repl;
 pub mod run;
 pub mod task;
diff --git a/cli/tools/registry/mod.rs b/cli/tools/registry/mod.rs
new file mode 100644
index 0000000000..aff3b5e10a
--- /dev/null
+++ b/cli/tools/registry/mod.rs
@@ -0,0 +1,513 @@
+// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+
+use std::fmt::Write;
+use std::io::IsTerminal;
+use std::path::Path;
+use std::path::PathBuf;
+use std::sync::Arc;
+
+use base64::prelude::BASE64_STANDARD;
+use base64::Engine;
+use deno_config::ConfigFile;
+use deno_core::anyhow;
+use deno_core::anyhow::anyhow;
+use deno_core::anyhow::bail;
+use deno_core::anyhow::Context;
+use deno_core::error::AnyError;
+use deno_core::serde_json;
+use deno_core::serde_json::json;
+use deno_core::url::Url;
+use deno_runtime::colors;
+use deno_runtime::deno_fetch::reqwest;
+use http::header::AUTHORIZATION;
+use http::header::CONTENT_ENCODING;
+use hyper::body::Bytes;
+use import_map::ImportMapWithDiagnostics;
+use serde::de::DeserializeOwned;
+use serde::Serialize;
+use sha2::Digest;
+
+use crate::args::Flags;
+use crate::args::PublishFlags;
+use crate::factory::CliFactory;
+use crate::http_util::HttpClient;
+use crate::util::import_map::ImportMapUnfurler;
+
+mod tar;
+
+enum AuthMethod {
+  Interactive,
+  Token(String),
+  Oidc(OidcConfig),
+}
+
+struct OidcConfig {
+  url: String,
+  token: String,
+}
+
+struct PreparedPublishPackage {
+  scope: String,
+  package: String,
+  version: String,
+  tarball_hash: String,
+  tarball: Bytes,
+}
+
+#[derive(serde::Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct PublishingTaskError {
+  pub code: String,
+  pub message: String,
+}
+
+#[derive(serde::Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct PublishingTask {
+  pub id: String,
+  pub status: String,
+  pub error: Option<PublishingTaskError>,
+}
+
+async fn prepare_publish(
+  initial_cwd: &Path,
+  directory: PathBuf,
+) -> Result<PreparedPublishPackage, AnyError> {
+  // TODO: handle publishing without deno.json
+
+  let directory_path = initial_cwd.join(directory);
+  // TODO: doesn't handle jsonc
+  let deno_json_path = directory_path.join("deno.json");
+  let deno_json = ConfigFile::read(&deno_json_path).with_context(|| {
+    format!(
+      "Failed to read deno configuration file at {}",
+      deno_json_path.display()
+    )
+  })?;
+
+  let Some(version) = deno_json.json.version.clone() else {
+    bail!("{} is missing 'version' field", deno_json_path.display());
+  };
+  let Some(name) = deno_json.json.name.clone() else {
+    bail!("{} is missing 'name' field", deno_json_path.display());
+  };
+  let Some(name) = name.strip_prefix('@') else {
+    bail!("Invalid package name, use '@<scope_name>/<package_name> format");
+  };
+  let Some((scope, package_name)) = name.split_once('/') else {
+    bail!("Invalid package name, use '@<scope_name>/<package_name> format");
+  };
+
+  // TODO: support `importMap` field in deno.json
+  assert!(deno_json.to_import_map_path().is_none());
+
+  let deno_json_url = Url::from_file_path(&deno_json_path)
+    .map_err(|_| anyhow!("deno.json path is not a valid file URL"))?;
+  let ImportMapWithDiagnostics { import_map, .. } =
+    import_map::parse_from_value(
+      &deno_json_url,
+      deno_json.to_import_map_value(),
+    )?;
+
+  let unfurler = ImportMapUnfurler::new(import_map);
+
+  let tarball = tar::create_gzipped_tarball(directory_path, unfurler)
+    .context("Failed to create a tarball")?;
+
+  let tarball_hash_bytes: Vec<u8> =
+    sha2::Sha256::digest(&tarball).iter().cloned().collect();
+  let mut tarball_hash = "sha256-".to_string();
+  for byte in tarball_hash_bytes {
+    write!(&mut tarball_hash, "{:02x}", byte).unwrap();
+  }
+
+  Ok(PreparedPublishPackage {
+    scope: scope.to_string(),
+    package: package_name.to_string(),
+    version: version.to_string(),
+    tarball_hash,
+    tarball,
+  })
+}
+
+#[derive(Serialize)]
+#[serde(tag = "permission")]
+pub enum Permission<'s> {
+  #[serde(rename = "package/publish", rename_all = "camelCase")]
+  VersionPublish {
+    scope: &'s str,
+    package: &'s str,
+    version: &'s str,
+    tarball_hash: &'s str,
+  },
+}
+
+#[derive(serde::Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct CreateAuthorizationResponse {
+  verification_url: String,
+  code: String,
+  exchange_token: String,
+  poll_interval: u64,
+}
+
+#[derive(serde::Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct ExchangeAuthorizationResponse {
+  token: String,
+  user: User,
+}
+
+#[derive(serde::Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct User {
+  name: String,
+}
+
+#[derive(serde::Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct ApiError {
+  pub code: String,
+  pub message: String,
+  #[serde(skip)]
+  pub x_deno_ray: Option<String>,
+}
+
+impl std::fmt::Display for ApiError {
+  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+    write!(f, "{} ({})", self.message, self.code)?;
+    if let Some(x_deno_ray) = &self.x_deno_ray {
+      write!(f, "[x-deno-ray: {}]", x_deno_ray)?;
+    }
+    Ok(())
+  }
+}
+
+impl std::fmt::Debug for ApiError {
+  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+    std::fmt::Display::fmt(self, f)
+  }
+}
+
+impl std::error::Error for ApiError {}
+
+async fn parse_response<T: DeserializeOwned>(
+  response: reqwest::Response,
+) -> Result<T, ApiError> {
+  let status = response.status();
+  let x_deno_ray = response
+    .headers()
+    .get("x-deno-ray")
+    .and_then(|value| value.to_str().ok())
+    .map(|s| s.to_string());
+  let text = response.text().await.unwrap();
+
+  if !status.is_success() {
+    match serde_json::from_str::<ApiError>(&text) {
+      Ok(mut err) => {
+        err.x_deno_ray = x_deno_ray;
+        return Err(err);
+      }
+      Err(_) => {
+        let err = ApiError {
+          code: "unknown".to_string(),
+          message: format!("{}: {}", status, text),
+          x_deno_ray,
+        };
+        return Err(err);
+      }
+    }
+  }
+
+  serde_json::from_str(&text).map_err(|err| ApiError {
+    code: "unknown".to_string(),
+    message: format!("Failed to parse response: {}, response: '{}'", err, text),
+    x_deno_ray,
+  })
+}
+
+#[derive(serde::Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct OidcTokenResponse {
+  value: String,
+}
+
+async fn perform_publish(
+  http_client: &Arc<HttpClient>,
+  packages: Vec<PreparedPublishPackage>,
+  auth_method: AuthMethod,
+) -> Result<(), AnyError> {
+  let client = http_client.client()?;
+  let registry_url = crate::cache::DENO_REGISTRY_URL.to_string();
+
+  let authorization = match auth_method {
+    AuthMethod::Interactive => {
+      let verifier = uuid::Uuid::new_v4().to_string();
+      let challenge = BASE64_STANDARD.encode(sha2::Sha256::digest(&verifier));
+
+      let permissions = packages
+        .iter()
+        .map(|package| Permission::VersionPublish {
+          scope: &package.scope,
+          package: &package.package,
+          version: &package.version,
+          tarball_hash: &package.tarball_hash,
+        })
+        .collect::<Vec<_>>();
+
+      let response = client
+        .post(format!("{}/authorizations", registry_url))
+        .json(&serde_json::json!({
+          "challenge": challenge,
+          "permissions": permissions,
+        }))
+        .send()
+        .await
+        .context("Failed to create interactive authorization")?;
+      let auth = parse_response::<CreateAuthorizationResponse>(response)
+        .await
+        .context("Failed to create interactive authorization")?;
+
+      print!(
+        "Visit {} to authorize publishing of",
+        colors::cyan(format!("{}?code={}", auth.verification_url, auth.code))
+      );
+      if packages.len() > 1 {
+        println!(" {} packages", packages.len());
+      } else {
+        println!(" @{}/{}", packages[0].scope, packages[0].package);
+      }
+
+      println!("{}", colors::gray("Waiting..."));
+
+      let interval = std::time::Duration::from_secs(auth.poll_interval);
+
+      loop {
+        tokio::time::sleep(interval).await;
+        let response = client
+          .post(format!("{}/authorizations/exchange", registry_url))
+          .json(&serde_json::json!({
+            "exchangeToken": auth.exchange_token,
+            "verifier": verifier,
+          }))
+          .send()
+          .await
+          .context("Failed to exchange authorization")?;
+        let res =
+          parse_response::<ExchangeAuthorizationResponse>(response).await;
+        match res {
+          Ok(res) => {
+            println!(
+              "{} {} {}",
+              colors::green("Authorization successful."),
+              colors::gray("Authenticated as"),
+              colors::cyan(res.user.name)
+            );
+            break format!("Bearer {}", res.token);
+          }
+          Err(err) => {
+            if err.code == "authorizationPending" {
+              continue;
+            } else {
+              return Err(err).context("Failed to exchange authorization");
+            }
+          }
+        }
+      }
+    }
+    AuthMethod::Token(token) => format!("Bearer {}", token),
+    AuthMethod::Oidc(oidc_config) => {
+      let permissions = packages
+        .iter()
+        .map(|package| Permission::VersionPublish {
+          scope: &package.scope,
+          package: &package.package,
+          version: &package.version,
+          tarball_hash: &package.tarball_hash,
+        })
+        .collect::<Vec<_>>();
+      let audience = json!({ "permissions": permissions }).to_string();
+
+      let url = format!(
+        "{}&audience={}",
+        oidc_config.url,
+        percent_encoding::percent_encode(
+          audience.as_bytes(),
+          percent_encoding::NON_ALPHANUMERIC
+        )
+      );
+
+      let response = client
+        .get(url)
+        .bearer_auth(oidc_config.token)
+        .send()
+        .await
+        .context("Failed to get OIDC token")?;
+      let status = response.status();
+      let text = response.text().await.with_context(|| {
+        format!("Failed to get OIDC token: status {}", status)
+      })?;
+      if !status.is_success() {
+        bail!(
+          "Failed to get OIDC token: status {}, response: '{}'",
+          status,
+          text
+        );
+      }
+      let OidcTokenResponse { value } = serde_json::from_str(&text)
+        .with_context(|| {
+          format!("Failed to parse OIDC token: '{}' (status {})", text, status)
+        })?;
+      format!("githuboidc {}", value)
+    }
+  };
+
+  for package in packages {
+    println!(
+      "{} @{}/{}@{} ...",
+      colors::intense_blue("Publishing"),
+      package.scope,
+      package.package,
+      package.version
+    );
+
+    let url = format!(
+      "{}/scopes/{}/packages/{}/versions/{}",
+      registry_url, package.scope, package.package, package.version
+    );
+
+    let response = client
+      .post(url)
+      .header(AUTHORIZATION, &authorization)
+      .header(CONTENT_ENCODING, "gzip")
+      .body(package.tarball)
+      .send()
+      .await?;
+
+    let mut task = parse_response::<PublishingTask>(response)
+      .await
+      .with_context(|| {
+        format!(
+          "Failed to publish @{}/{} at {}",
+          package.scope, package.package, package.version
+        )
+      })?;
+
+    let interval = std::time::Duration::from_secs(2);
+    while task.status != "success" && task.status != "failure" {
+      tokio::time::sleep(interval).await;
+      let resp = client
+        .get(format!("{}/publish_status/{}", registry_url, task.id))
+        .send()
+        .await
+        .with_context(|| {
+          format!(
+            "Failed to get publishing status for @{}/{} at {}",
+            package.scope, package.package, package.version
+          )
+        })?;
+      task = parse_response::<PublishingTask>(resp)
+        .await
+        .with_context(|| {
+          format!(
+            "Failed to get publishing status for @{}/{} at {}",
+            package.scope, package.package, package.version
+          )
+        })?;
+    }
+
+    if let Some(error) = task.error {
+      bail!(
+        "{} @{}/{} at {}: {}",
+        colors::red("Failed to publish"),
+        package.scope,
+        package.package,
+        package.version,
+        error.message
+      );
+    }
+
+    println!(
+      "{} @{}/{}@{}",
+      colors::green("Successfully published"),
+      package.scope,
+      package.package,
+      package.version
+    );
+    println!(
+      "{}/@{}/{}/{}_meta.json",
+      registry_url, package.scope, package.package, package.version
+    );
+  }
+
+  Ok(())
+}
+
+fn get_gh_oidc_env_vars() -> Option<Result<(String, String), AnyError>> {
+  if std::env::var("GITHUB_ACTIONS").unwrap_or_default() == "true" {
+    let url = std::env::var("ACTIONS_ID_TOKEN_REQUEST_URL");
+    let token = std::env::var("ACTIONS_ID_TOKEN_REQUEST_TOKEN");
+    match (url, token) {
+      (Ok(url), Ok(token)) => Some(Ok((url, token))),
+      (Err(_), Err(_)) => Some(Err(anyhow::anyhow!(
+        "No means to authenticate. Pass a token to `--token`, or enable tokenless publishing from GitHub Actions using OIDC. Learn more at https://deno.co/ghoidc"
+      ))),
+      _ => None,
+    }
+  } else {
+    None
+  }
+}
+
+pub async fn publish(
+  flags: Flags,
+  publish_flags: PublishFlags,
+) -> Result<(), AnyError> {
+  let cli_factory = CliFactory::from_flags(flags).await?;
+
+  let auth_method = match publish_flags.token {
+    Some(token) => AuthMethod::Token(token),
+    None => match get_gh_oidc_env_vars() {
+      Some(Ok((url, token))) => AuthMethod::Oidc(OidcConfig { url, token }),
+      Some(Err(err)) => return Err(err),
+      None if std::io::stdin().is_terminal() => AuthMethod::Interactive,
+      None => {
+        bail!("No means to authenticate. Pass a token to `--token`.")
+      }
+    },
+  };
+
+  let initial_cwd =
+    std::env::current_dir().with_context(|| "Failed getting cwd.")?;
+
+  let directory_path = initial_cwd.join(publish_flags.directory);
+  // TODO: doesn't handle jsonc
+  let deno_json_path = directory_path.join("deno.json");
+  let deno_json = ConfigFile::read(&deno_json_path).with_context(|| {
+    format!(
+      "Failed to read deno.json file at {}",
+      deno_json_path.display()
+    )
+  })?;
+
+  let mut packages =
+    Vec::with_capacity(std::cmp::max(1, deno_json.json.workspaces.len()));
+
+  let members = &deno_json.json.workspaces;
+  if members.is_empty() {
+    packages.push(prepare_publish(&initial_cwd, directory_path).await?);
+  } else {
+    println!("Publishing a workspace...");
+    for member in members {
+      let member_dir = directory_path.join(member);
+      packages.push(prepare_publish(&initial_cwd, member_dir).await?);
+    }
+  }
+
+  if packages.is_empty() {
+    bail!("No packages to publish");
+  }
+
+  perform_publish(cli_factory.http_client(), packages, auth_method).await
+}
diff --git a/cli/tools/registry/tar.rs b/cli/tools/registry/tar.rs
new file mode 100644
index 0000000000..e8097357d0
--- /dev/null
+++ b/cli/tools/registry/tar.rs
@@ -0,0 +1,85 @@
+// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+
+use deno_core::anyhow;
+use deno_core::anyhow::Context;
+use deno_core::error::AnyError;
+use deno_core::url::Url;
+use hyper::body::Bytes;
+use std::io::Write;
+use std::path::PathBuf;
+use tar::Header;
+
+use crate::util::import_map::ImportMapUnfurler;
+
+pub fn create_gzipped_tarball(
+  dir: PathBuf,
+  // TODO(bartlomieju): this is too specific, factor it out into a callback that
+  // returns data
+  unfurler: ImportMapUnfurler,
+) -> Result<Bytes, AnyError> {
+  let mut tar = TarGzArchive::new();
+  let dir_url = Url::from_directory_path(&dir).unwrap();
+
+  for entry in walkdir::WalkDir::new(dir).follow_links(false) {
+    let entry = entry?;
+
+    if entry.file_type().is_file() {
+      let url = Url::from_file_path(entry.path())
+        .map_err(|_| anyhow::anyhow!("Invalid file path {:?}", entry.path()))?;
+      let relative_path = dir_url
+        .make_relative(&url)
+        .expect("children can be relative to parent");
+      let data = std::fs::read(entry.path())
+        .with_context(|| format!("Unable to read file {:?}", entry.path()))?;
+      let content = unfurler
+        .unfurl(&url, data)
+        .with_context(|| format!("Unable to unfurl file {:?}", entry.path()))?;
+      tar.add_file(relative_path, &content).with_context(|| {
+        format!("Unable to add file to tarball {:?}", entry.path())
+      })?;
+    } else if entry.file_type().is_dir() {
+      // skip
+    } else {
+      log::warn!("Unsupported file type at path {:?}", entry.path());
+    }
+  }
+
+  let v = tar.finish().context("Unable to finish tarball")?;
+  Ok(Bytes::from(v))
+}
+
+struct TarGzArchive {
+  builder: tar::Builder<Vec<u8>>,
+}
+
+impl TarGzArchive {
+  pub fn new() -> Self {
+    Self {
+      builder: tar::Builder::new(Vec::new()),
+    }
+  }
+
+  pub fn add_file(
+    &mut self,
+    path: String,
+    data: &[u8],
+  ) -> Result<(), AnyError> {
+    let mut header = Header::new_gnu();
+    header.set_size(data.len() as u64);
+    self.builder.append_data(&mut header, &path, data)?;
+    Ok(())
+  }
+
+  fn finish(mut self) -> Result<Vec<u8>, AnyError> {
+    self.builder.finish()?;
+    let bytes = self.builder.into_inner()?;
+    let mut gz_bytes = Vec::new();
+    let mut encoder = flate2::write::GzEncoder::new(
+      &mut gz_bytes,
+      flate2::Compression::default(),
+    );
+    encoder.write_all(&bytes)?;
+    encoder.finish()?;
+    Ok(gz_bytes)
+  }
+}
diff --git a/cli/util/import_map.rs b/cli/util/import_map.rs
new file mode 100644
index 0000000000..e8fea1e033
--- /dev/null
+++ b/cli/util/import_map.rs
@@ -0,0 +1,196 @@
+// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
+
+use deno_ast::ParsedSource;
+use deno_core::error::AnyError;
+use deno_core::ModuleSpecifier;
+use deno_graph::DefaultModuleAnalyzer;
+use deno_graph::MediaType;
+use deno_graph::TypeScriptReference;
+use import_map::ImportMap;
+
+pub struct ImportMapUnfurler {
+  import_map: ImportMap,
+}
+
+impl ImportMapUnfurler {
+  pub fn new(import_map: ImportMap) -> Self {
+    Self { import_map }
+  }
+
+  pub fn unfurl(
+    &self,
+    url: &ModuleSpecifier,
+    data: Vec<u8>,
+  ) -> Result<Vec<u8>, AnyError> {
+    let media_type = MediaType::from_specifier(url);
+
+    match media_type {
+      MediaType::JavaScript
+      | MediaType::Jsx
+      | MediaType::Mjs
+      | MediaType::Cjs
+      | MediaType::TypeScript
+      | MediaType::Mts
+      | MediaType::Cts
+      | MediaType::Dts
+      | MediaType::Dmts
+      | MediaType::Dcts
+      | MediaType::Tsx => {
+        // continue
+      }
+      MediaType::SourceMap
+      | MediaType::Unknown
+      | MediaType::Json
+      | MediaType::Wasm
+      | MediaType::TsBuildInfo => {
+        // not unfurlable data
+        return Ok(data);
+      }
+    }
+
+    let text = String::from_utf8(data)?;
+    let parsed_source = deno_ast::parse_module(deno_ast::ParseParams {
+      specifier: url.to_string(),
+      text_info: deno_ast::SourceTextInfo::from_string(text),
+      media_type,
+      capture_tokens: false,
+      maybe_syntax: None,
+      scope_analysis: false,
+    })?;
+    let mut text_changes = Vec::new();
+    let module_info = DefaultModuleAnalyzer::module_info(&parsed_source);
+    let mut analyze_specifier =
+      |specifier: &str, range: &deno_graph::PositionRange| {
+        let resolved = self.import_map.resolve(specifier, url);
+        if let Ok(resolved) = resolved {
+          let new_text = if resolved.scheme() == "file" {
+            format!("./{}", url.make_relative(&resolved).unwrap())
+          } else {
+            resolved.to_string()
+          };
+          text_changes.push(deno_ast::TextChange {
+            range: to_range(&parsed_source, range),
+            new_text,
+          });
+        }
+      };
+    for dep in &module_info.dependencies {
+      analyze_specifier(&dep.specifier, &dep.specifier_range);
+    }
+    for ts_ref in &module_info.ts_references {
+      let specifier_with_range = match ts_ref {
+        TypeScriptReference::Path(range) => range,
+        TypeScriptReference::Types(range) => range,
+      };
+      analyze_specifier(
+        &specifier_with_range.text,
+        &specifier_with_range.range,
+      );
+    }
+    for specifier_with_range in &module_info.jsdoc_imports {
+      analyze_specifier(
+        &specifier_with_range.text,
+        &specifier_with_range.range,
+      );
+    }
+    if let Some(specifier_with_range) = &module_info.jsx_import_source {
+      analyze_specifier(
+        &specifier_with_range.text,
+        &specifier_with_range.range,
+      );
+    }
+    Ok(
+      deno_ast::apply_text_changes(
+        parsed_source.text_info().text_str(),
+        text_changes,
+      )
+      .into_bytes(),
+    )
+  }
+
+  #[cfg(test)]
+  fn unfurl_to_string(
+    &self,
+    url: &ModuleSpecifier,
+    data: Vec<u8>,
+  ) -> Result<String, AnyError> {
+    let data = self.unfurl(url, data)?;
+    let content = String::from_utf8(data)?;
+    Ok(content)
+  }
+}
+
+fn to_range(
+  parsed_source: &ParsedSource,
+  range: &deno_graph::PositionRange,
+) -> std::ops::Range<usize> {
+  let mut range = range
+    .as_source_range(parsed_source.text_info())
+    .as_byte_range(parsed_source.text_info().range().start);
+  let text = &parsed_source.text_info().text_str()[range.clone()];
+  if text.starts_with('"') || text.starts_with('\'') {
+    range.start += 1;
+  }
+  if text.ends_with('"') || text.ends_with('\'') {
+    range.end -= 1;
+  }
+  range
+}
+
+#[cfg(test)]
+mod tests {
+  use super::*;
+  use deno_ast::ModuleSpecifier;
+  use deno_core::serde_json::json;
+  use import_map::ImportMapWithDiagnostics;
+  use pretty_assertions::assert_eq;
+
+  #[test]
+  fn test_unfurling() {
+    let deno_json_url =
+      ModuleSpecifier::parse("file:///dev/deno.json").unwrap();
+    let value = json!({
+      "imports": {
+        "express": "npm:express@5",
+        "lib/": "./lib/",
+        "fizz": "./fizz/mod.ts"
+      }
+    });
+    let ImportMapWithDiagnostics { import_map, .. } =
+      import_map::parse_from_value(&deno_json_url, value).unwrap();
+    let unfurler = ImportMapUnfurler::new(import_map);
+
+    // Unfurling TS file should apply changes.
+    {
+      let source_code = r#"import express from "express";"
+import foo from "lib/foo.ts";
+import bar from "lib/bar.ts";
+import fizz from "fizz";
+"#;
+      let specifier = ModuleSpecifier::parse("file:///dev/mod.ts").unwrap();
+      let unfurled_source = unfurler
+        .unfurl_to_string(&specifier, source_code.as_bytes().to_vec())
+        .unwrap();
+      let expected_source = r#"import express from "npm:express@5";"
+import foo from "./lib/foo.ts";
+import bar from "./lib/bar.ts";
+import fizz from "./fizz/mod.ts";
+"#;
+      assert_eq!(unfurled_source, expected_source);
+    }
+
+    // Unfurling file with "unknown" media type should leave it as is
+    {
+      let source_code = r#"import express from "express";"
+import foo from "lib/foo.ts";
+import bar from "lib/bar.ts";
+import fizz from "fizz";
+"#;
+      let specifier = ModuleSpecifier::parse("file:///dev/mod").unwrap();
+      let unfurled_source = unfurler
+        .unfurl_to_string(&specifier, source_code.as_bytes().to_vec())
+        .unwrap();
+      assert_eq!(unfurled_source, source_code);
+    }
+  }
+}
diff --git a/cli/util/mod.rs b/cli/util/mod.rs
index 0c160dbc81..f7de7583bd 100644
--- a/cli/util/mod.rs
+++ b/cli/util/mod.rs
@@ -9,6 +9,7 @@ pub mod draw_thread;
 pub mod file_watcher;
 pub mod fs;
 pub mod glob;
+pub mod import_map;
 pub mod logger;
 pub mod path;
 pub mod progress_bar;