1
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2024-11-24 15:19:26 -05:00

feat: add initial internal npm client and dependency resolver (#15446)

This commit is contained in:
David Sherret 2022-08-10 15:23:58 -04:00 committed by GitHub
parent d0ffa0beb5
commit d9fae38d1e
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
11 changed files with 1536 additions and 82 deletions

56
Cargo.lock generated
View file

@ -819,6 +819,7 @@ dependencies = [
"eszip",
"fancy-regex",
"flaky_test",
"flate2",
"fwdansi",
"google-storage1",
"http",
@ -841,10 +842,11 @@ dependencies = [
"ring",
"rustyline",
"rustyline-derive",
"semver-parser 0.10.2",
"semver 1.0.13",
"serde",
"serde_repr",
"shell-escape",
"tar",
"tempfile",
"test_util",
"text-size",
@ -3029,15 +3031,6 @@ version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
[[package]]
name = "pest"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53"
dependencies = [
"ucd-trie",
]
[[package]]
name = "petgraph"
version = "0.6.2"
@ -3572,7 +3565,7 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
"semver 1.0.10",
"semver 1.0.13",
]
[[package]]
@ -3760,14 +3753,14 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
dependencies = [
"semver-parser 0.7.0",
"semver-parser",
]
[[package]]
name = "semver"
version = "1.0.10"
version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a41d061efea015927ac527063765e73601444cdc344ba855bc7bd44578b25e1c"
checksum = "93f6841e709003d68bb2deee8c343572bf446003ec20a583e76f7b15cebf3711"
[[package]]
name = "semver-parser"
@ -3775,15 +3768,6 @@ version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
[[package]]
name = "semver-parser"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7"
dependencies = [
"pest",
]
[[package]]
name = "serde"
version = "1.0.141"
@ -4531,6 +4515,17 @@ dependencies = [
"libc",
]
[[package]]
name = "tar"
version = "0.4.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b55807c0344e1e6c04d7c965f5289c39a8d94ae23ed5c0b57aabac549f871c6"
dependencies = [
"filetime",
"libc",
"xattr",
]
[[package]]
name = "tempfile"
version = "3.3.0"
@ -5007,12 +5002,6 @@ version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
[[package]]
name = "ucd-trie"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c"
[[package]]
name = "unic-char-property"
version = "0.9.0"
@ -5606,6 +5595,15 @@ dependencies = [
"toml",
]
[[package]]
name = "xattr"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d1526bbe5aaeb5eb06885f4d987bcdfa5e23187055de9b83fe00156a821fabc"
dependencies = [
"libc",
]
[[package]]
name = "yup-oauth2"
version = "6.7.0"

View file

@ -71,6 +71,7 @@ encoding_rs = "=0.8.31"
env_logger = "=0.9.0"
eszip = "=0.23.0"
fancy-regex = "=0.10.0"
flate2 = "=1.0.24"
http = "=0.2.6"
import_map = "=0.12.1"
indexmap = "1.8.1"
@ -90,10 +91,11 @@ ring = "=0.16.20"
rustyline = { version = "=10.0.0", default-features = false, features = ["custom-bindings"] }
rustyline-derive = "=0.7.0"
secure_tempfile = { version = "=3.3.0", package = "tempfile" } # different name to discourage use in tests
semver-parser = "=0.10.2"
semver = "=1.0.13"
serde = { version = "=1.0.141", features = ["derive"] }
serde_repr = "=0.1.8"
shell-escape = "=0.1.5"
tar = "=0.4.38"
text-size = "=1.1.0"
text_lines = "=0.6.0"
tokio = { version = "=1.19", features = ["full"] }

View file

@ -447,6 +447,48 @@ pub fn path_with_stem_suffix(path: &Path, suffix: &str) -> PathBuf {
}
}
/// Gets if the provided character is not supported on all
/// kinds of file systems.
pub fn is_banned_path_char(c: char) -> bool {
  // these characters are rejected in path components on some
  // file systems (notably Windows), so ban them everywhere
  "<>:\"|?*".contains(c)
}
/// Gets a safe local directory name for the provided url.
///
/// For example:
/// https://deno.land:8080/path -> deno.land_8080/path
pub fn root_url_to_safe_local_dirname(root: &ModuleSpecifier) -> PathBuf {
  fn is_banned_segment_char(c: char) -> bool {
    // in addition to the generally banned characters, a path
    // separator can't appear within a single directory name
    c == '/' || c == '\\' || is_banned_path_char(c)
  }

  fn sanitize_segment(text: &str) -> String {
    text
      .chars()
      .map(|c| if is_banned_segment_char(c) { '_' } else { c })
      .collect()
  }

  // the first path component combines the domain and optional port
  // (ex. `deno.land_8080`)
  let mut first_component = String::new();
  if let Some(domain) = root.domain() {
    first_component.push_str(&sanitize_segment(domain));
  }
  if let Some(port) = root.port() {
    if !first_component.is_empty() {
      first_component.push('_');
    }
    first_component.push_str(&port.to_string());
  }

  // then one sanitized directory per non-empty url path segment
  let mut dir = PathBuf::from(first_component);
  if let Some(segments) = root.path_segments() {
    for segment in segments {
      if !segment.is_empty() {
        dir = dir.join(sanitize_segment(segment));
      }
    }
  }
  dir
}
#[cfg(test)]
mod tests {
use super::*;

View file

@ -23,6 +23,8 @@ mod lockfile;
mod logger;
mod lsp;
mod module_loader;
#[allow(unused)]
mod npm;
mod ops;
mod proc_state;
mod resolver;

224
cli/npm/cache.rs Normal file
View file

@ -0,0 +1,224 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
use std::fs;
use std::path::PathBuf;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_runtime::colors;
use deno_runtime::deno_fetch::reqwest;
use crate::deno_dir::DenoDir;
use crate::fs_util;
use super::tarball::verify_and_extract_tarball;
use super::NpmPackageId;
use super::NpmPackageVersionDistInfo;
/// Name of the marker file that, while present inside a package folder,
/// indicates the package's tarball extraction has not completed
/// successfully (or is still in progress in another process).
pub const NPM_PACKAGE_SYNC_LOCK_FILENAME: &str = ".deno_sync_lock";

/// Read-only view over the root directory of the local npm cache
/// (`<deno dir>/npm` when created via `from_deno_dir`).
#[derive(Clone, Debug)]
pub struct ReadonlyNpmCache {
  // root directory of the npm cache
  root_dir: PathBuf,
  // cached url representation of the root directory
  root_dir_url: Url,
}

// todo(dsherret): implementing Default for this is error prone because someone
// might accidentally use the default implementation instead of getting the
// correct location of the deno dir, which might be provided via a CLI argument.
// That said, the rest of the LSP code does this at the moment and so this code
// copies that.
impl Default for ReadonlyNpmCache {
  fn default() -> Self {
    // This only gets used when creating the tsc runtime and for testing, and so
    // it shouldn't ever actually access the DenoDir, so it doesn't support a
    // custom root.
    Self::from_deno_dir(&crate::deno_dir::DenoDir::new(None).unwrap())
  }
}
impl ReadonlyNpmCache {
  pub fn new(root_dir: PathBuf) -> Self {
    let root_dir_url = Url::from_directory_path(&root_dir).unwrap();
    Self {
      root_dir,
      root_dir_url,
    }
  }

  pub fn from_deno_dir(dir: &DenoDir) -> Self {
    Self::new(dir.root.join("npm"))
  }

  /// Folder that caches the exact package version
  /// (ex. `<root>/<registry>/chalk/5.0.1`).
  pub fn package_folder(
    &self,
    id: &NpmPackageId,
    registry_url: &Url,
  ) -> PathBuf {
    self
      .package_name_folder(&id.name, registry_url)
      .join(id.version.to_string())
  }

  /// Folder that holds all the cached versions of a package name.
  pub fn package_name_folder(&self, name: &str, registry_url: &Url) -> PathBuf {
    let mut dir = self
      .root_dir
      .join(fs_util::root_url_to_safe_local_dirname(registry_url));
    // ensure backslashes are used on windows
    for part in name.split('/') {
      dir = dir.join(part);
    }
    dir
  }

  /// Resolves the package id of the cached package that contains the
  /// provided specifier, erroring when the specifier is not in the cache.
  pub fn resolve_package_id_from_specifier(
    &self,
    specifier: &ModuleSpecifier,
    registry_url: &Url,
  ) -> Result<NpmPackageId, AnyError> {
    match self.maybe_resolve_package_id_from_specifier(specifier, registry_url)
    {
      Some(id) => Ok(id),
      None => bail!("could not find npm package for '{}'", specifier),
    }
  }

  fn maybe_resolve_package_id_from_specifier(
    &self,
    specifier: &ModuleSpecifier,
    registry_url: &Url,
  ) -> Option<NpmPackageId> {
    let registry_root_dir = self
      .root_dir_url
      .join(&format!(
        "{}/",
        fs_util::root_url_to_safe_local_dirname(registry_url)
          .to_string_lossy()
          .replace('\\', "/")
      ))
      // this not succeeding indicates a fatal issue, so unwrap
      .unwrap();
    let relative_url = registry_root_dir.make_relative(specifier)?;
    if relative_url.starts_with("../") {
      return None;
    }

    // examples:
    // * chalk/5.0.1/
    // * @types/chalk/5.0.1/
    let is_scoped_package = relative_url.starts_with('@');
    let mut parts = relative_url
      .split('/')
      .take(if is_scoped_package { 3 } else { 2 })
      .collect::<Vec<_>>();
    // return `None` instead of panicking when the path has fewer
    // segments than expected or the version segment doesn't parse
    let version = parts.pop()?;
    let name = parts.join("/");
    Some(NpmPackageId {
      name,
      version: semver::Version::parse(version).ok()?,
    })
  }
}
/// Stores a single copy of npm packages in a cache.
#[derive(Clone, Debug)]
pub struct NpmCache(ReadonlyNpmCache);

impl NpmCache {
  pub fn new(root_dir: PathBuf) -> Self {
    Self(ReadonlyNpmCache::new(root_dir))
  }

  pub fn from_deno_dir(dir: &DenoDir) -> Self {
    Self(ReadonlyNpmCache::from_deno_dir(dir))
  }

  /// Gets a read-only clone of this cache's state.
  pub fn as_readonly(&self) -> ReadonlyNpmCache {
    self.0.clone()
  }

  /// Ensures the package is downloaded and extracted into the cache,
  /// fetching the tarball from `dist.tarball` when missing (or when a
  /// previous extraction did not complete).
  pub async fn ensure_package(
    &self,
    id: &NpmPackageId,
    dist: &NpmPackageVersionDistInfo,
    registry_url: &Url,
  ) -> Result<(), AnyError> {
    let package_folder = self.0.package_folder(id, registry_url);
    if package_folder.exists()
      // if this file exists, then the package didn't successfully extract
      // the first time, or another process is currently extracting the zip file
      && !package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME).exists()
    {
      return Ok(());
    }

    log::log!(
      log::Level::Info,
      "{} {}",
      colors::green("Download"),
      dist.tarball,
    );
    let response = reqwest::get(&dist.tarball).await?;

    if response.status() == 404 {
      bail!("Could not find npm package tarball at: {}", dist.tarball);
    } else if !response.status().is_success() {
      bail!("Bad response: {:?}", response.status());
    } else {
      let bytes = response.bytes().await?;
      match verify_and_extract_tarball(id, &bytes, dist, &package_folder) {
        Ok(()) => Ok(()),
        Err(err) => {
          // extraction failed, so best-effort remove the package folder to
          // avoid leaving a partially extracted package in the cache
          if let Err(remove_err) = fs::remove_dir_all(&package_folder) {
            if remove_err.kind() != std::io::ErrorKind::NotFound {
              bail!(
                concat!(
                  "Failed verifying and extracting npm tarball for {}, then ",
                  "failed cleaning up package cache folder.\n\nOriginal ",
                  "error:\n\n{}\n\nRemove error:\n\n{}\n\nPlease manually ",
                  "delete this folder or you will run into issues using this ",
                  "package in the future:\n\n{}"
                ),
                id,
                err,
                remove_err,
                package_folder.display(),
              );
            }
          }
          Err(err)
        }
      }
    }
  }

  /// Folder of the cached package version (delegates to the inner
  /// `ReadonlyNpmCache`).
  pub fn package_folder(
    &self,
    id: &NpmPackageId,
    registry_url: &Url,
  ) -> PathBuf {
    self.0.package_folder(id, registry_url)
  }

  /// Folder that holds all the cached versions of a package name.
  pub fn package_name_folder(&self, name: &str, registry_url: &Url) -> PathBuf {
    self.0.package_name_folder(name, registry_url)
  }

  /// Resolves the id of the cached package containing the provided specifier.
  pub fn resolve_package_id_from_specifier(
    &self,
    specifier: &ModuleSpecifier,
    registry_url: &Url,
  ) -> Result<NpmPackageId, AnyError> {
    self
      .0
      .resolve_package_id_from_specifier(specifier, registry_url)
  }
}

248
cli/npm/mod.rs Normal file
View file

@ -0,0 +1,248 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
mod cache;
mod registry;
mod resolution;
mod tarball;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::url::Url;
pub use resolution::NpmPackageId;
pub use resolution::NpmPackageReference;
pub use resolution::NpmPackageReq;
pub use resolution::NpmResolutionPackage;
use cache::NpmCache;
use registry::NpmPackageVersionDistInfo;
use registry::NpmRegistryApi;
use resolution::NpmResolution;
use crate::deno_dir::DenoDir;
use self::cache::ReadonlyNpmCache;
use self::resolution::NpmResolutionSnapshot;
/// Information about the local npm package.
pub struct LocalNpmPackageInfo {
  /// Unique identifier.
  pub id: NpmPackageId,
  /// Local folder path of the npm package.
  pub folder_path: PathBuf,
}

/// Resolves npm packages to their local folder and identifier.
pub trait NpmPackageResolver {
  /// Resolves an npm package from a Deno module.
  fn resolve_package_from_deno_module(
    &self,
    pkg_req: &NpmPackageReq,
  ) -> Result<LocalNpmPackageInfo, AnyError>;

  /// Resolves an npm package from an npm package referrer.
  fn resolve_package_from_package(
    &self,
    name: &str,
    referrer: &ModuleSpecifier,
  ) -> Result<LocalNpmPackageInfo, AnyError>;

  /// Resolve the root folder of the package the provided specifier is in.
  ///
  /// This will error when the provided specifier is not in an npm package.
  fn resolve_package_from_specifier(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Result<LocalNpmPackageInfo, AnyError>;

  /// Gets if the provided specifier is in an npm package.
  fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
    self.resolve_package_from_specifier(specifier).is_ok()
  }
}
/// Resolves npm packages using a single global cache of packages
/// (see `NpmCache`).
#[derive(Clone, Debug)]
pub struct GlobalNpmPackageResolver {
  cache: NpmCache,
  resolution: Arc<NpmResolution>,
  // base url of the registry the cache folders are keyed by
  registry_url: Url,
}

impl GlobalNpmPackageResolver {
  pub fn new(root_cache_dir: PathBuf, reload: bool) -> Self {
    Self::from_cache(NpmCache::new(root_cache_dir), reload)
  }

  pub fn from_deno_dir(dir: &DenoDir, reload: bool) -> Self {
    Self::from_cache(NpmCache::from_deno_dir(dir), reload)
  }

  fn from_cache(cache: NpmCache, reload: bool) -> Self {
    let api = NpmRegistryApi::new(cache.clone(), reload);
    let registry_url = api.base_url().to_owned();
    let resolution = Arc::new(NpmResolution::new(api));

    Self {
      cache,
      resolution,
      registry_url,
    }
  }

  /// If the resolver has resolved any npm packages.
  pub fn has_packages(&self) -> bool {
    self.resolution.has_packages()
  }

  /// Gets all the packages.
  pub fn all_packages(&self) -> Vec<NpmResolutionPackage> {
    self.resolution.all_packages()
  }

  /// Adds a package requirement to the resolver.
  pub async fn add_package_reqs(
    &self,
    packages: Vec<NpmPackageReq>,
  ) -> Result<(), AnyError> {
    self.resolution.add_package_reqs(packages).await
  }

  /// Caches all the packages in parallel.
  pub async fn cache_packages(&self) -> Result<(), AnyError> {
    // spawn one task per package so downloads/extractions run concurrently
    let handles = self.resolution.all_packages().into_iter().map(|package| {
      let cache = self.cache.clone();
      let registry_url = self.registry_url.clone();
      tokio::task::spawn(async move {
        cache
          .ensure_package(&package.id, &package.dist, &registry_url)
          .await
          .with_context(|| {
            format!("Failed caching npm package '{}'.", package.id)
          })
      })
    });
    let results = futures::future::join_all(handles).await;
    for result in results {
      // surface the first error
      result??;
    }
    Ok(())
  }

  /// Builds the local package info for an already resolved package id.
  fn local_package_info(&self, id: &NpmPackageId) -> LocalNpmPackageInfo {
    LocalNpmPackageInfo {
      folder_path: self.cache.package_folder(id, &self.registry_url),
      id: id.clone(),
    }
  }

  /// Creates an inner clone.
  pub fn snapshot(&self) -> NpmPackageResolverSnapshot {
    NpmPackageResolverSnapshot {
      cache: self.cache.as_readonly(),
      snapshot: self.resolution.snapshot(),
      registry_url: self.registry_url.clone(),
    }
  }
}
impl NpmPackageResolver for GlobalNpmPackageResolver {
  fn resolve_package_from_deno_module(
    &self,
    pkg_req: &NpmPackageReq,
  ) -> Result<LocalNpmPackageInfo, AnyError> {
    let pkg = self.resolution.resolve_package_from_deno_module(pkg_req)?;
    Ok(self.local_package_info(&pkg.id))
  }

  fn resolve_package_from_package(
    &self,
    name: &str,
    referrer: &ModuleSpecifier,
  ) -> Result<LocalNpmPackageInfo, AnyError> {
    // the referrer must be a specifier within the cache directory so
    // its package id (name and version) can be determined from its path
    let referrer_pkg_id = self
      .cache
      .resolve_package_id_from_specifier(referrer, &self.registry_url)?;
    let pkg = self
      .resolution
      .resolve_package_from_package(name, &referrer_pkg_id)?;
    Ok(self.local_package_info(&pkg.id))
  }

  fn resolve_package_from_specifier(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Result<LocalNpmPackageInfo, AnyError> {
    let pkg_id = self
      .cache
      .resolve_package_id_from_specifier(specifier, &self.registry_url)?;
    Ok(self.local_package_info(&pkg_id))
  }
}
/// Snapshot of a `GlobalNpmPackageResolver`'s state, created via
/// `GlobalNpmPackageResolver::snapshot`.
#[derive(Clone, Debug)]
pub struct NpmPackageResolverSnapshot {
  cache: ReadonlyNpmCache,
  snapshot: NpmResolutionSnapshot,
  registry_url: Url,
}

// todo(dsherret): implementing Default for this is error prone, but
// necessary for the LSP. We should remove this Default implementation.
// See comment on `ReadonlyNpmCache` for more details.
impl Default for NpmPackageResolverSnapshot {
  fn default() -> Self {
    Self {
      cache: Default::default(),
      snapshot: Default::default(),
      registry_url: NpmRegistryApi::default_url(),
    }
  }
}

impl NpmPackageResolverSnapshot {
  /// Builds the local package info for an already resolved package id.
  fn local_package_info(&self, id: &NpmPackageId) -> LocalNpmPackageInfo {
    LocalNpmPackageInfo {
      folder_path: self.cache.package_folder(id, &self.registry_url),
      id: id.clone(),
    }
  }
}
impl NpmPackageResolver for NpmPackageResolverSnapshot {
  fn resolve_package_from_deno_module(
    &self,
    pkg_req: &NpmPackageReq,
  ) -> Result<LocalNpmPackageInfo, AnyError> {
    let pkg = self.snapshot.resolve_package_from_deno_module(pkg_req)?;
    Ok(self.local_package_info(&pkg.id))
  }

  fn resolve_package_from_package(
    &self,
    name: &str,
    referrer: &ModuleSpecifier,
  ) -> Result<LocalNpmPackageInfo, AnyError> {
    // the referrer must be a specifier within the cache directory so
    // its package id (name and version) can be determined from its path
    let referrer_pkg_id = self
      .cache
      .resolve_package_id_from_specifier(referrer, &self.registry_url)?;
    let pkg = self
      .snapshot
      .resolve_package_from_package(name, &referrer_pkg_id)?;
    Ok(self.local_package_info(&pkg.id))
  }

  fn resolve_package_from_specifier(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Result<LocalNpmPackageInfo, AnyError> {
    let pkg_id = self
      .cache
      .resolve_package_id_from_specifier(specifier, &self.registry_url)?;
    Ok(self.local_package_info(&pkg_id))
  }
}

323
cli/npm/registry.rs Normal file
View file

@ -0,0 +1,323 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;
use std::sync::Arc;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::serde::Deserialize;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_runtime::deno_fetch::reqwest;
use serde::Serialize;
use crate::fs_util;
use crate::http_cache::CACHE_PERM;
use super::cache::NpmCache;
use super::resolution::NpmVersionMatcher;
// npm registry docs: https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md

/// Deserialized registry response for a package name (subset of fields).
#[derive(Deserialize, Serialize, Clone)]
pub struct NpmPackageInfo {
  pub name: String,
  /// Map of version string to that version's information.
  pub versions: HashMap<String, NpmPackageVersionInfo>,
}

/// A parsed entry from a package version's dependency map.
pub struct NpmDependencyEntry {
  /// Specifier the package uses to refer to this dependency, which may
  /// differ from the dependency's real package name (for `npm:` aliases).
  pub bare_specifier: String,
  /// Real package name of the dependency.
  pub name: String,
  pub version_req: NpmVersionReq,
}

#[derive(Deserialize, Serialize, Clone)]
pub struct NpmPackageVersionInfo {
  pub version: String,
  pub dist: NpmPackageVersionDistInfo,
  // Bare specifier to version (ex. `"typescript": "^3.0.1") or possibly
  // package and version (ex. `"typescript-3.0.1": "npm:typescript@3.0.1"`).
  #[serde(default)]
  pub dependencies: HashMap<String, String>,
}
impl NpmPackageVersionInfo {
pub fn dependencies_as_entries(
&self,
) -> Result<Vec<NpmDependencyEntry>, AnyError> {
fn entry_as_bare_specifier_and_reference(
entry: (&String, &String),
) -> Result<NpmDependencyEntry, AnyError> {
let bare_specifier = entry.0.clone();
let (name, version_req) =
if let Some(package_and_version) = entry.1.strip_prefix("npm:") {
if let Some((name, version)) = package_and_version.rsplit_once('@') {
(name.to_string(), version.to_string())
} else {
bail!("could not find @ symbol in npm url '{}'", entry.1);
}
} else {
(entry.0.clone(), entry.1.clone())
};
let version_req = NpmVersionReq::parse(&version_req)
.with_context(|| format!("Dependency: {}", bare_specifier))?;
Ok(NpmDependencyEntry {
bare_specifier,
name,
version_req,
})
}
self
.dependencies
.iter()
.map(entry_as_bare_specifier_and_reference)
.collect::<Result<Vec<_>, AnyError>>()
}
}
/// Distribution (tarball) information for a specific package version.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct NpmPackageVersionDistInfo {
  /// URL to the tarball.
  pub tarball: String,
  pub shasum: String,
  pub integrity: Option<String>,
}

/// Client for fetching package information from an npm registry,
/// caching responses in memory and on disk.
#[derive(Clone)]
pub struct NpmRegistryApi {
  base_url: Url,
  cache: NpmCache,
  // in-memory cache of fetched package info; a `None` value means the
  // registry returned a 404 for that package name
  mem_cache: Arc<Mutex<HashMap<String, Option<NpmPackageInfo>>>>,
  // when true, skip the file cache and always hit the registry
  reload: bool,
}
impl NpmRegistryApi {
  /// Url of the public npm registry.
  pub fn default_url() -> Url {
    Url::parse("https://registry.npmjs.org").unwrap()
  }

  pub fn new(cache: NpmCache, reload: bool) -> Self {
    Self::from_base(Self::default_url(), cache, reload)
  }

  pub fn from_base(base_url: Url, cache: NpmCache, reload: bool) -> Self {
    Self {
      base_url,
      cache,
      mem_cache: Default::default(),
      reload,
    }
  }

  pub fn base_url(&self) -> &Url {
    &self.base_url
  }

  /// Gets the info for a package name, erroring when it doesn't exist.
  pub async fn package_info(
    &self,
    name: &str,
  ) -> Result<NpmPackageInfo, AnyError> {
    let maybe_package_info = self.maybe_package_info(name).await?;
    match maybe_package_info {
      Some(package_info) => Ok(package_info),
      None => bail!("package '{}' does not exist", name),
    }
  }

  /// Gets the info for a package name, returning `None` when the
  /// registry reports the package doesn't exist (404).
  pub async fn maybe_package_info(
    &self,
    name: &str,
  ) -> Result<Option<NpmPackageInfo>, AnyError> {
    // check the in-memory cache first
    let maybe_info = self.mem_cache.lock().get(name).cloned();
    if let Some(info) = maybe_info {
      Ok(info)
    } else {
      let mut maybe_package_info = None;
      if !self.reload {
        // attempt to load from the file cache
        maybe_package_info = self.load_file_cached_package_info(name);
      }
      if maybe_package_info.is_none() {
        maybe_package_info = self
          .load_package_info_from_registry(name)
          .await
          .with_context(|| {
            format!("Error getting response at {}", self.get_package_url(name))
          })?;
      }

      // Not worth the complexity to ensure multiple in-flight requests
      // for the same package only request once because with how this is
      // used that should never happen.
      let mut mem_cache = self.mem_cache.lock();
      Ok(match mem_cache.get(name) {
        // another thread raced here, so use its result instead
        Some(info) => info.clone(),
        None => {
          mem_cache.insert(name.to_string(), maybe_package_info.clone());
          maybe_package_info
        }
      })
    }
  }

  /// Loads the package info from the local file cache, returning `None`
  /// when it's missing or (in release builds) unreadable.
  fn load_file_cached_package_info(
    &self,
    name: &str,
  ) -> Option<NpmPackageInfo> {
    let file_cache_path = self.get_package_file_cache_path(name);
    let file_text = fs::read_to_string(file_cache_path).ok()?;
    match serde_json::from_str(&file_text) {
      Ok(result) => Some(result),
      Err(err) => {
        // panic in debug builds to surface cache format changes early;
        // in release builds just refetch from the registry
        if cfg!(debug_assertions) {
          panic!("could not deserialize: {:#}", err);
        } else {
          None
        }
      }
    }
  }

  /// Best-effort write of the package info to the local file cache.
  fn save_package_info_to_file_cache(
    &self,
    name: &str,
    package_info: &NpmPackageInfo,
  ) {
    let file_cache_path = self.get_package_file_cache_path(name);
    let file_text = serde_json::to_string_pretty(&package_info).unwrap();
    // errors are ignored; the file cache is an optimization
    let _ignore =
      fs_util::atomic_write_file(&file_cache_path, file_text, CACHE_PERM);
  }

  /// Fetches the package info from the registry, falling back to the
  /// file cache when the network request itself fails.
  async fn load_package_info_from_registry(
    &self,
    name: &str,
  ) -> Result<Option<NpmPackageInfo>, AnyError> {
    let response = match reqwest::get(self.get_package_url(name)).await {
      Ok(response) => response,
      Err(err) => {
        // attempt to use the local cache
        if let Some(info) = self.load_file_cached_package_info(name) {
          return Ok(Some(info));
        } else {
          return Err(err.into());
        }
      }
    };

    if response.status() == 404 {
      Ok(None)
    } else if !response.status().is_success() {
      bail!("Bad response: {:?}", response.status());
    } else {
      let bytes = response.bytes().await?;
      let package_info = serde_json::from_slice(&bytes)?;
      self.save_package_info_to_file_cache(name, &package_info);
      Ok(Some(package_info))
    }
  }

  fn get_package_url(&self, name: &str) -> Url {
    self.base_url.join(name).unwrap()
  }

  /// Path of the `registry.json` file cache for the package name.
  fn get_package_file_cache_path(&self, name: &str) -> PathBuf {
    let name_folder_path = self.cache.package_name_folder(name, &self.base_url);
    name_folder_path.join("registry.json")
  }
}
/// A version requirement found in an npm package's dependencies.
pub struct NpmVersionReq {
raw_text: String,
comparators: Vec<semver::VersionReq>,
}
impl NpmVersionReq {
pub fn parse(text: &str) -> Result<NpmVersionReq, AnyError> {
// semver::VersionReq doesn't support spaces between comparators
// and it doesn't support using || for "OR", so we pre-process
// the version requirement in order to make this work.
let raw_text = text.to_string();
let part_texts = text.split("||").collect::<Vec<_>>();
let mut comparators = Vec::with_capacity(part_texts.len());
for part in part_texts {
comparators.push(npm_version_req_parse_part(part)?);
}
Ok(NpmVersionReq {
raw_text,
comparators,
})
}
}
impl NpmVersionMatcher for NpmVersionReq {
  fn matches(&self, version: &semver::Version) -> bool {
    // the requirement matches when any `||` separated part matches
    self.comparators.iter().any(|c| c.matches(version))
  }

  fn version_text(&self) -> String {
    self.raw_text.to_string()
  }
}
/// Parses a single (no `||`) npm version requirement by rewriting it
/// into a form that `semver::VersionReq` accepts: space separated
/// comparators (ex. `>= 2.1.2 < 3.0.0`) become comma separated
/// (ex. `>=2.1.2,<3.0.0`).
fn npm_version_req_parse_part(
  text: &str,
) -> Result<semver::VersionReq, AnyError> {
  let text = text.trim();
  let mut final_text = String::new();
  let mut chars = text.chars().enumerate().peekable();
  while let Some((i, c)) = chars.next() {
    let is_greater_or_less_than = c == '<' || c == '>';
    if is_greater_or_less_than || c == '=' {
      if i > 0 {
        // a comparator operator after the first character starts a new
        // comparator, so add a comma to make semver::VersionReq parse it
        final_text = final_text.trim().to_string();
        final_text.push(',');
      }
      final_text.push(c);
      // keep a trailing `=` attached to `<` or `>` (ex. `>=`)
      if is_greater_or_less_than && matches!(chars.peek(), Some((_, '='))) {
        final_text.push(chars.next().unwrap().1);
      }
    } else {
      final_text.push(c);
    }
  }
  Ok(semver::VersionReq::parse(&final_text)?)
}
#[cfg(test)]
mod test {
  use super::*;

  // small helper so the tests can match against version strings directly
  struct NpmVersionReqTester(NpmVersionReq);

  impl NpmVersionReqTester {
    fn matches(&self, version: &str) -> bool {
      self.0.matches(&semver::Version::parse(version).unwrap())
    }
  }

  #[test]
  pub fn npm_version_req_ranges() {
    // exercises both a space separated range and a `||` alternative
    let tester = NpmVersionReqTester(
      NpmVersionReq::parse(">= 2.1.2 < 3.0.0 || 5.x").unwrap(),
    );
    assert!(!tester.matches("2.1.1"));
    assert!(tester.matches("2.1.2"));
    assert!(tester.matches("2.9.9"));
    assert!(!tester.matches("3.0.0"));
    assert!(tester.matches("5.0.0"));
    assert!(tester.matches("5.1.0"));
    assert!(!tester.matches("6.1.0"));
  }
}

466
cli/npm/resolution.rs Normal file
View file

@ -0,0 +1,466 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
use std::cmp::Ordering;
use std::collections::HashMap;
use std::collections::VecDeque;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::RwLock;
use super::registry::NpmPackageInfo;
use super::registry::NpmPackageVersionDistInfo;
use super::registry::NpmPackageVersionInfo;
use super::registry::NpmRegistryApi;
/// The version matcher used for npm schemed urls is more strict than
/// the one used by npm packages.
pub trait NpmVersionMatcher {
  fn matches(&self, version: &semver::Version) -> bool;
  /// Human readable text of the requirement.
  fn version_text(&self) -> String;
}

/// A parsed `npm:` specifier (ex. `npm:chalk@5`).
#[derive(Clone, Debug, Default)]
pub struct NpmPackageReference {
  pub req: NpmPackageReq,
  /// Sub path within the package (not yet supported, so always `None`).
  pub sub_path: Option<String>,
}

/// A package name with an optional version requirement.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash)]
pub struct NpmPackageReq {
  pub name: String,
  /// `None` matches any non-prerelease version (see the
  /// `NpmVersionMatcher` implementation).
  pub version_req: Option<semver::VersionReq>,
}
impl std::fmt::Display for NpmPackageReq {
  /// Formats as `name` or `name@req` when a version requirement exists.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    match &self.version_req {
      Some(req) => write!(f, "{}@{}", self.name, req),
      None => write!(f, "{}", self.name),
    }
  }
}

impl NpmVersionMatcher for NpmPackageReq {
  fn matches(&self, version: &semver::Version) -> bool {
    match &self.version_req {
      Some(req) => req.matches(version),
      // no requirement specified, so match any non-prerelease version
      None => version.pre.is_empty(),
    }
  }

  fn version_text(&self) -> String {
    self
      .version_req
      .as_ref()
      // use Display's to_string instead of the redundant format!("{}", v)
      .map(|v| v.to_string())
      .unwrap_or_else(|| "non-prerelease".to_string())
  }
}
impl NpmPackageReference {
  pub fn from_specifier(
    specifier: &ModuleSpecifier,
  ) -> Result<NpmPackageReference, AnyError> {
    Self::from_str(specifier.as_str())
  }

  /// Parses a reference like `npm:chalk@5`, `npm:chalk` or
  /// `npm:@scope/pkg`, erroring when the `npm:` prefix is missing.
  pub fn from_str(specifier: &str) -> Result<NpmPackageReference, AnyError> {
    let specifier = match specifier.strip_prefix("npm:") {
      Some(s) => s,
      None => {
        bail!("Not an npm specifier: '{}'", specifier);
      }
    };
    let (name, version_req) = match specifier.rsplit_once('@') {
      // an empty name means the `@` was the leading scope marker of a
      // scoped package without a version (ex. `npm:@scope/pkg`), in which
      // case the whole text is the package name
      Some((name, version_req)) if !name.is_empty() => (
        name,
        match semver::VersionReq::parse(version_req) {
          Ok(v) => Some(v),
          Err(_) => None, // not a version requirement
        },
      ),
      _ => (specifier, None),
    };
    Ok(NpmPackageReference {
      req: NpmPackageReq {
        name: name.to_string(),
        version_req,
      },
      // todo: implement and support this
      sub_path: None,
    })
  }
}
impl std::fmt::Display for NpmPackageReference {
  /// Formats as `req` or `req/sub_path` when a sub path is present.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    match &self.sub_path {
      Some(sub_path) => write!(f, "{}/{}", self.req, sub_path),
      None => write!(f, "{}", self.req),
    }
  }
}
/// Unique identifier (name and exact version) of a resolved npm package.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct NpmPackageId {
  pub name: String,
  pub version: semver::Version,
}

impl NpmPackageId {
  /// Gets the scope portion (ex. `@scope`) of the package name, if any.
  pub fn scope(&self) -> Option<&str> {
    if self.name.starts_with('@') && self.name.contains('/') {
      self.name.split('/').next()
    } else {
      None
    }
  }
}

impl std::fmt::Display for NpmPackageId {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    // formats as `name@version` (ex. `chalk@5.0.1`)
    write!(f, "{}@{}", self.name, self.version)
  }
}
/// A fully resolved package along with its resolved dependencies.
#[derive(Debug, Clone)]
pub struct NpmResolutionPackage {
  pub id: NpmPackageId,
  pub dist: NpmPackageVersionDistInfo,
  /// Key is what the package refers to the other package as,
  /// which could be different from the package name.
  pub dependencies: HashMap<String, NpmPackageId>,
}

/// Snapshot of the resolved npm packages.
#[derive(Debug, Clone, Default)]
pub struct NpmResolutionSnapshot {
  // package requirement to the version it resolved to
  package_reqs: HashMap<NpmPackageReq, semver::Version>,
  // package name to every resolved version of that name
  packages_by_name: HashMap<String, Vec<semver::Version>>,
  // package id to its resolved information
  packages: HashMap<NpmPackageId, NpmResolutionPackage>,
}
impl NpmResolutionSnapshot {
  /// Resolve a node package from a deno module.
  pub fn resolve_package_from_deno_module(
    &self,
    req: &NpmPackageReq,
  ) -> Result<&NpmResolutionPackage, AnyError> {
    match self.package_reqs.get(req) {
      // the unwrap assumes every entry in `package_reqs` has a matching
      // entry in `packages` (both are inserted together during resolution)
      Some(version) => Ok(
        self
          .packages
          .get(&NpmPackageId {
            name: req.name.clone(),
            version: version.clone(),
          })
          .unwrap(),
      ),
      None => bail!("could not find npm package directory for '{}'", req),
    }
  }

  /// Resolves the package that `referrer` refers to as `name` in its
  /// dependency map.
  pub fn resolve_package_from_package(
    &self,
    name: &str,
    referrer: &NpmPackageId,
  ) -> Result<&NpmResolutionPackage, AnyError> {
    match self.packages.get(referrer) {
      Some(referrer_package) => match referrer_package.dependencies.get(name) {
        Some(id) => Ok(self.packages.get(id).unwrap()),
        None => {
          bail!(
            "could not find package '{}' referenced by '{}'",
            name,
            referrer
          )
        }
      },
      None => bail!("could not find referrer package '{}'", referrer),
    }
  }

  /// Gets a clone of every resolved package.
  pub fn all_packages(&self) -> Vec<NpmResolutionPackage> {
    self.packages.values().cloned().collect()
  }

  /// Gets the highest already-resolved version of `name` that satisfies
  /// the provided matcher, if any.
  pub fn resolve_best_package_version(
    &self,
    name: &str,
    version_matcher: &impl NpmVersionMatcher,
  ) -> Option<semver::Version> {
    let mut maybe_best_version: Option<&semver::Version> = None;
    if let Some(versions) = self.packages_by_name.get(name) {
      for version in versions {
        if version_matcher.matches(version) {
          let is_best_version = maybe_best_version
            .as_ref()
            .map(|best_version| (*best_version).cmp(version).is_lt())
            .unwrap_or(true);
          if is_best_version {
            maybe_best_version = Some(version);
          }
        }
      }
    }
    maybe_best_version.cloned()
  }
}
/// Mutable npm package resolution state shared behind a lock.
pub struct NpmResolution {
  api: NpmRegistryApi,
  snapshot: RwLock<NpmResolutionSnapshot>,
  // NOTE(review): field name has a typo ("sempahore"); uses may exist
  // beyond this view, so it is left as-is here
  update_sempahore: tokio::sync::Semaphore,
}

impl std::fmt::Debug for NpmResolution {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    // debug output only includes the snapshot (the api is omitted)
    let snapshot = self.snapshot.read();
    f.debug_struct("NpmResolution")
      .field("snapshot", &snapshot)
      .finish()
  }
}
impl NpmResolution {
  /// Creates an empty resolution backed by the provided registry api.
  pub fn new(api: NpmRegistryApi) -> Self {
    Self {
      api,
      snapshot: Default::default(),
      update_sempahore: tokio::sync::Semaphore::new(1),
    }
  }

  /// Resolves the provided package requirements and their transitive
  /// dependencies, merging the result into the current snapshot.
  ///
  /// Resolution is breadth-first: top level requirements are handled
  /// first, then dependencies one tree level at a time. An existing
  /// resolved version is reused when it satisfies a requirement;
  /// otherwise the registry is consulted for the best match.
  pub async fn add_package_reqs(
    &self,
    mut packages: Vec<NpmPackageReq>,
  ) -> Result<(), AnyError> {
    // multiple packages are resolved in alphabetical order
    packages.sort_by(|a, b| a.name.cmp(&b.name));

    // only allow one thread in here at a time
    let _permit = self.update_sempahore.acquire().await.unwrap();
    // work on a clone of the snapshot; readers keep seeing a consistent
    // state until the single write at the end of this method
    let mut snapshot = self.snapshot.read().clone();
    // queue of (package id, its dependency entries) still to resolve
    let mut pending_dependencies = VecDeque::new();

    // go over the top level packages first, then down the
    // tree one level at a time through all the branches
    for package_ref in packages {
      if snapshot.package_reqs.contains_key(&package_ref) {
        // skip analyzing this package, as there's already a matching top level package
        continue;
      }

      // inspect the list of current packages
      if let Some(version) =
        snapshot.resolve_best_package_version(&package_ref.name, &package_ref)
      {
        snapshot.package_reqs.insert(package_ref, version);
        continue; // done, no need to continue
      }

      // no existing best version, so resolve the current packages
      let info = self.api.package_info(&package_ref.name).await?;
      let version_and_info = get_resolved_package_version_and_info(
        &package_ref.name,
        &package_ref,
        info,
        None,
      )?;
      let id = NpmPackageId {
        name: package_ref.name.clone(),
        version: version_and_info.version.clone(),
      };
      let dependencies = version_and_info
        .info
        .dependencies_as_entries()
        .with_context(|| format!("Package: {}", id))?;
      // queue the dependencies for the breadth-first pass below
      pending_dependencies.push_back((id.clone(), dependencies));
      snapshot.packages.insert(
        id.clone(),
        NpmResolutionPackage {
          id,
          dist: version_and_info.info.dist,
          // filled in as this package's dependencies get resolved
          dependencies: Default::default(),
        },
      );
      snapshot
        .packages_by_name
        .entry(package_ref.name.clone())
        .or_default()
        .push(version_and_info.version.clone());
      snapshot
        .package_reqs
        .insert(package_ref, version_and_info.version);
    }

    // now go down through the dependencies by tree depth
    while let Some((parent_package_id, mut deps)) =
      pending_dependencies.pop_front()
    {
      // sort the dependencies alphabetically by name then by version descending
      deps.sort_by(|a, b| match a.name.cmp(&b.name) {
        // sort by newest to oldest
        Ordering::Equal => b
          .version_req
          .version_text()
          .cmp(&a.version_req.version_text()),
        ordering => ordering,
      });

      // now resolve them
      for dep in deps {
        // check if an existing dependency matches this
        let id = if let Some(version) =
          snapshot.resolve_best_package_version(&dep.name, &dep.version_req)
        {
          NpmPackageId {
            name: dep.name.clone(),
            version,
          }
        } else {
          // get the information
          let info = self.api.package_info(&dep.name).await?;
          let version_and_info = get_resolved_package_version_and_info(
            &dep.name,
            &dep.version_req,
            info,
            None,
          )?;
          let dependencies = version_and_info
            .info
            .dependencies_as_entries()
            .with_context(|| {
              format!("Package: {}@{}", dep.name, version_and_info.version)
            })?;
          let id = NpmPackageId {
            name: dep.name.clone(),
            version: version_and_info.version.clone(),
          };
          // this dependency's own dependencies form the next tree level
          pending_dependencies.push_back((id.clone(), dependencies));
          snapshot.packages.insert(
            id.clone(),
            NpmResolutionPackage {
              id: id.clone(),
              dist: version_and_info.info.dist,
              dependencies: Default::default(),
            },
          );
          snapshot
            .packages_by_name
            .entry(dep.name.clone())
            .or_default()
            .push(id.version.clone());
          id
        };

        // add this version as a dependency of the package
        snapshot
          .packages
          .get_mut(&parent_package_id)
          .unwrap()
          .dependencies
          .insert(dep.bare_specifier.clone(), id);
      }
    }

    // publish the updated snapshot for readers
    *self.snapshot.write() = snapshot;
    Ok(())
  }

  /// Resolves a package by `name` as a dependency of `referrer`,
  /// returning a cloned copy of the resolved package.
  pub fn resolve_package_from_package(
    &self,
    name: &str,
    referrer: &NpmPackageId,
  ) -> Result<NpmResolutionPackage, AnyError> {
    self
      .snapshot
      .read()
      .resolve_package_from_package(name, referrer)
      .cloned()
  }

  /// Resolve a node package from a deno module.
  pub fn resolve_package_from_deno_module(
    &self,
    package: &NpmPackageReq,
  ) -> Result<NpmResolutionPackage, AnyError> {
    self
      .snapshot
      .read()
      .resolve_package_from_deno_module(package)
      .cloned()
  }

  /// Returns a cloned list of every resolved package in the snapshot.
  pub fn all_packages(&self) -> Vec<NpmResolutionPackage> {
    self.snapshot.read().all_packages()
  }

  /// Returns true when at least one package has been resolved.
  pub fn has_packages(&self) -> bool {
    !self.snapshot.read().packages.is_empty()
  }

  /// Returns a clone of the current resolution snapshot.
  pub fn snapshot(&self) -> NpmResolutionSnapshot {
    self.snapshot.read().clone()
  }
}
/// A resolved package version paired with the registry metadata for
/// that exact version.
#[derive(Clone)]
struct VersionAndInfo {
  version: semver::Version,
  info: NpmPackageVersionInfo,
}
/// Picks the highest version of `pkg_name` in the registry `info` that
/// satisfies `version_matcher`, returning that version together with its
/// registry metadata.
///
/// `parent` is the package that required this one (used only to improve
/// the error message) or `None` for a top level requirement.
///
/// # Errors
///
/// Fails when a version string in the registry data is not valid semver
/// or when no version satisfies the matcher.
fn get_resolved_package_version_and_info(
  pkg_name: &str,
  version_matcher: &impl NpmVersionMatcher,
  info: NpmPackageInfo,
  parent: Option<&NpmPackageId>,
) -> Result<VersionAndInfo, AnyError> {
  let mut maybe_best_version: Option<VersionAndInfo> = None;
  for version_info in info.versions.into_values() {
    let version = semver::Version::parse(&version_info.version)?;
    if version_matcher.matches(&version) {
      // keep the highest matching version seen so far
      let is_best_version = maybe_best_version
        .as_ref()
        .map(|best_version| best_version.version.cmp(&version).is_lt())
        .unwrap_or(true);
      if is_best_version {
        maybe_best_version = Some(VersionAndInfo {
          version,
          info: version_info,
        });
      }
    }
  }

  match maybe_best_version {
    Some(v) => Ok(v),
    // If the package isn't found, it likely means that the user needs to use
    // `--reload` to get the latest npm package information. Although it seems
    // like we could make this smart by fetching the latest information for
    // this package here, we really need a full restart. There could be very
    // interesting bugs that occur if this package's version was resolved by
    // something previous using the old information, then now being smart here
    // causes a new fetch of the package information, meaning this time the
    // previous resolution of this package's version resolved to an older
    // version, but next time to a different version because it has new information.
    None => bail!(
      concat!(
        "Could not find package '{}' matching {}{}. ",
        "Try retrieving the latest npm package information by running with --reload",
      ),
      pkg_name,
      version_matcher.version_text(),
      match parent {
        Some(id) => format!(" as specified in {}", id),
        None => String::new(),
      }
    ),
  }
}

188
cli/npm/tarball.rs Normal file
View file

@ -0,0 +1,188 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
use std::collections::HashSet;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use flate2::read::GzDecoder;
use tar::Archive;
use tar::EntryType;
use super::cache::NPM_PACKAGE_SYNC_LOCK_FILENAME;
use super::registry::NpmPackageVersionDistInfo;
use super::NpmPackageId;
/// Verifies the integrity of the tarball `data` for `package` and, when
/// the checksum matches, extracts it into `output_folder`.
///
/// A sync lock file is written into the folder for the duration of the
/// extraction so partially extracted directories are not treated as
/// valid packages; it is removed only after extraction succeeds.
pub fn verify_and_extract_tarball(
  package: &NpmPackageId,
  data: &[u8],
  dist_info: &NpmPackageVersionDistInfo,
  output_folder: &Path,
) -> Result<(), AnyError> {
  match &dist_info.integrity {
    Some(integrity) => verify_tarball_integrity(package, data, integrity)?,
    None => {
      // todo(dsherret): check shasum here
      bail!(
        "Errored on '{}': npm packages with no integrity are not implemented.",
        package
      );
    }
  }

  fs::create_dir_all(output_folder).with_context(|| {
    format!("Error creating '{}'.", output_folder.display())
  })?;

  // This sync lock file is a way to ensure that partially created
  // npm package directories aren't considered valid. This could maybe
  // be a bit smarter in the future to not bother extracting here
  // if another process has taken the lock in the past X seconds and
  // wait for the other process to finish (it could try to create the
  // file with `create_new(true)` then if it exists, check the metadata
  // then wait until the other process finishes with a timeout), but
  // for now this is good enough.
  let sync_lock_path = output_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME);
  let open_result = fs::OpenOptions::new()
    .write(true)
    .create(true)
    .open(&sync_lock_path);
  if let Err(err) = open_result {
    bail!(
      concat!(
        "Error creating package sync lock file at '{}'. ",
        "Maybe try manually deleting this folder.\n\n{:#}",
      ),
      output_folder.display(),
      err
    );
  }

  extract_tarball(data, output_folder)?;
  // extraction succeeded, so only now delete the lock file
  let _ignore = std::fs::remove_file(&sync_lock_path);
  Ok(())
}
/// Checks that the sha512 digest of `data` matches the npm registry
/// integrity string (`"<algo>-<base64 digest>"`) for `package`.
///
/// NOTE(review): both sides of the comparison are lowercased, making the
/// base64 comparison case-insensitive — presumably to tolerate registry
/// formatting differences; confirm this is intentional.
fn verify_tarball_integrity(
  package: &NpmPackageId,
  data: &[u8],
  npm_integrity: &str,
) -> Result<(), AnyError> {
  use ring::digest::Context;
  use ring::digest::SHA512;

  // split "<algo>-<checksum>"; anything without a dash is unsupported
  let (hash_kind, checksum) = match npm_integrity.split_once('-') {
    Some(parts) => parts,
    None => bail!(
      "Not implemented integrity kind for {}: {}",
      package,
      npm_integrity
    ),
  };
  let algo = match hash_kind {
    "sha512" => &SHA512,
    other => bail!(
      "Not implemented hash function for {}: {}",
      package,
      other
    ),
  };
  let expected_checksum = checksum.to_lowercase();

  let mut hash_ctx = Context::new(algo);
  hash_ctx.update(data);
  let digest = hash_ctx.finish();
  let tarball_checksum = base64::encode(digest.as_ref()).to_lowercase();
  if tarball_checksum != expected_checksum {
    bail!(
      "Tarball checksum did not match what was provided by npm registry for {}.\n\nExpected: {}\nActual: {}",
      package,
      expected_checksum,
      tarball_checksum,
    )
  }
  Ok(())
}
/// Extracts a gzipped npm tarball from `data` into `output_folder`,
/// rejecting entries whose directory canonicalizes outside the output
/// folder (path traversal guard).
fn extract_tarball(data: &[u8], output_folder: &Path) -> Result<(), AnyError> {
  fs::create_dir_all(output_folder)?;
  // canonicalize so the traversal check below compares resolved paths
  let output_folder = fs::canonicalize(output_folder)?;
  let tar = GzDecoder::new(data);
  let mut archive = Archive::new(tar);
  archive.set_overwrite(true);
  archive.set_preserve_permissions(true);
  // directories already created (and traversal-checked) this run
  let mut created_dirs = HashSet::new();

  for entry in archive.entries()? {
    let mut entry = entry?;
    let path = entry.path()?;
    let entry_type = entry.header().entry_type();
    // skip the first component which will be either "package" or the name of the package
    let relative_path = path.components().skip(1).collect::<PathBuf>();
    let absolute_path = output_folder.join(relative_path);
    // the directory to create: the entry itself for directories,
    // otherwise the containing directory of the file
    let dir_path = if entry_type == EntryType::Directory {
      absolute_path.as_path()
    } else {
      absolute_path.parent().unwrap()
    };
    // create and verify each directory only once
    if created_dirs.insert(dir_path.to_path_buf()) {
      fs::create_dir_all(&dir_path)?;
      let canonicalized_dir = fs::canonicalize(&dir_path)?;
      if !canonicalized_dir.starts_with(&output_folder) {
        bail!(
          "Extracted directory '{}' of npm tarball was not in output directory.",
          canonicalized_dir.display()
        )
      }
    }
    // only regular files are unpacked; other entry types (symlinks,
    // hard links, etc.) are skipped — NOTE(review): presumably
    // intentional for safety, confirm symlinks are never needed
    if entry.header().entry_type() == EntryType::Regular {
      entry.unpack(&absolute_path)?;
    }
  }
  Ok(())
}
#[cfg(test)]
mod test {
  use super::*;

  #[test]
  pub fn test_verify_tarball() {
    let package_id = NpmPackageId {
      name: "package".to_string(),
      version: semver::Version::parse("1.0.0").unwrap(),
    };
    // lowercased base64 sha512 digest of empty input
    let actual_checksum =
      "z4phnx7vul3xvchq1m2ab9yg5aulvxxcg/spidns6c5h0ne8xyxysp+dgnkhfuwvy7kxvudbeoglodj6+sfapg==";
    // integrity string without a recognized "<algo>-" prefix
    assert_eq!(
      verify_tarball_integrity(&package_id, &Vec::new(), "test")
        .unwrap_err()
        .to_string(),
      "Not implemented integrity kind for package@1.0.0: test",
    );
    // unsupported hash algorithm
    assert_eq!(
      verify_tarball_integrity(&package_id, &Vec::new(), "sha1-test")
        .unwrap_err()
        .to_string(),
      "Not implemented hash function for package@1.0.0: sha1",
    );
    // checksum mismatch reports both expected and actual values
    assert_eq!(
      verify_tarball_integrity(&package_id, &Vec::new(), "sha512-test")
        .unwrap_err()
        .to_string(),
      format!("Tarball checksum did not match what was provided by npm registry for package@1.0.0.\n\nExpected: test\nActual: {}", actual_checksum),
    );
    // matching checksum succeeds
    assert!(verify_tarball_integrity(
      &package_id,
      &Vec::new(),
      &format!("sha512-{}", actual_checksum)
    )
    .is_ok());
  }
}

View file

@ -9,7 +9,6 @@ use deno_core::futures::StreamExt;
use deno_runtime::deno_fetch::reqwest;
use deno_runtime::deno_fetch::reqwest::Client;
use once_cell::sync::Lazy;
use semver_parser::version::parse as semver_parse;
use std::env;
use std::fs;
use std::io::Write;
@ -48,7 +47,8 @@ pub async fn upgrade(upgrade_flags: UpgradeFlags) -> Result<(), AnyError> {
&& !regex::Regex::new("^[0-9a-f]{40}$")?.is_match(&passed_version)
{
bail!("Invalid commit hash passed");
} else if !upgrade_flags.canary && semver_parse(&passed_version).is_err()
} else if !upgrade_flags.canary
&& semver::Version::parse(&passed_version).is_err()
{
bail!("Invalid semver passed");
}
@ -83,8 +83,8 @@ pub async fn upgrade(upgrade_flags: UpgradeFlags) -> Result<(), AnyError> {
latest_hash.truncate(7);
crate::version::GIT_COMMIT_HASH == latest_hash
} else if !crate::version::is_canary() {
let current = semver_parse(&crate::version::deno()).unwrap();
let latest = semver_parse(&latest_version).unwrap();
let current = semver::Version::parse(&crate::version::deno()).unwrap();
let latest = semver::Version::parse(&latest_version).unwrap();
current >= latest
} else {
false

View file

@ -8,6 +8,7 @@ use deno_ast::ModuleSpecifier;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use crate::fs_util;
use crate::fs_util::path_with_stem_suffix;
/// Partitions the provided specifiers by the non-path and non-query parts of a specifier.
@ -29,24 +30,7 @@ pub fn partition_by_root_specifiers<'a>(
/// Gets the directory name to use for the provided root.
pub fn dir_name_for_root(root: &ModuleSpecifier) -> PathBuf {
let mut result = String::new();
if let Some(domain) = root.domain() {
result.push_str(&sanitize_segment(domain));
}
if let Some(port) = root.port() {
if !result.is_empty() {
result.push('_');
}
result.push_str(&port.to_string());
}
let mut result = PathBuf::from(result);
if let Some(segments) = root.path_segments() {
for segment in segments.filter(|s| !s.is_empty()) {
result = result.join(sanitize_segment(segment));
}
}
result
fs_util::root_url_to_safe_local_dirname(root)
}
/// Gets a unique file path given the provided file path
@ -90,25 +74,16 @@ pub fn is_remote_specifier_text(text: &str) -> bool {
pub fn sanitize_filepath(text: &str) -> String {
text
.chars()
.map(|c| if is_banned_path_char(c) { '_' } else { c })
.map(|c| {
if fs_util::is_banned_path_char(c) {
'_'
} else {
c
}
})
.collect()
}
fn is_banned_path_char(c: char) -> bool {
matches!(c, '<' | '>' | ':' | '"' | '|' | '?' | '*')
}
fn sanitize_segment(text: &str) -> String {
text
.chars()
.map(|c| if is_banned_segment_char(c) { '_' } else { c })
.collect()
}
fn is_banned_segment_char(c: char) -> bool {
matches!(c, '/' | '\\') || is_banned_path_char(c)
}
#[cfg(test)]
mod test {
use super::*;
@ -202,20 +177,6 @@ mod test {
assert_eq!(output, expected);
}
#[test]
fn should_get_dir_name_root() {
run_test("http://deno.land/x/test", "deno.land/x/test");
run_test("http://localhost", "localhost");
run_test("http://localhost/test%20:test", "localhost/test%20_test");
fn run_test(specifier: &str, expected: &str) {
assert_eq!(
dir_name_for_root(&ModuleSpecifier::parse(specifier).unwrap()),
PathBuf::from(expected)
);
}
}
#[test]
fn test_unique_path() {
let mut paths = HashSet::new();