mirror of https://github.com/denoland/deno.git (synced 2024-12-22 07:14:47 -05:00)
feat(unstable/npm): support peer dependencies (#16561)
This adds support for peer dependencies in npm packages.

1. If not found higher in the tree (ancestors and ancestor siblings), peer dependencies are resolved like a regular dependency, similar to npm 7.
2. Optional peer dependencies are only resolved if found higher in the tree.
3. This creates "copy packages", or duplicates of a package, when a package has a different resolution due to peer dependency resolution (see https://pnpm.io/how-peers-are-resolved). Unlike pnpm, though, duplicates of packages have `_1`, `_2`, etc. appended to the package version in the directory name in order to minimize the chance of hitting the max file path limit on Windows. This is done for both the local "node_modules" directory and the global npm cache, and the files are hard linked in this case to reduce hard drive space.

This is a first pass and the code is definitely less efficient than it could be.

Closes #15823
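The two resolution rules above can be condensed into a small sketch. This is an editorial illustration only, not code from this commit (the real logic lives in the new cli/npm/resolution/graph.rs); the `resolve_peer` helper and its generic `Id` type are hypothetical.

// Hypothetical sketch of rules 1 and 2 above, not the actual graph code.
fn resolve_peer<Id>(
  found_in_ancestors: Option<Id>,
  is_optional_peer: bool,
  resolve_like_regular_dep: impl FnOnce() -> Id,
) -> Option<Id> {
  match (found_in_ancestors, is_optional_peer) {
    // a match higher in the tree (ancestors and their siblings) always wins
    (Some(id), _) => Some(id),
    // a required peer falls back to normal resolution (npm 7 behavior)
    (None, false) => Some(resolve_like_regular_dep()),
    // an optional peer is only resolved when found higher in the tree
    (None, true) => None,
  }
}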
Parent: 2c72e8d5f4
Commit: cbb3f85433
38 changed files with 4331 additions and 1380 deletions
Cargo.lock (generated), 8 changed lines:
@@ -1227,9 +1227,9 @@ dependencies = [
 
 [[package]]
 name = "deno_task_shell"
-version = "0.7.0"
+version = "0.7.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a275d3f78e828b4adddf20a472d9ac1927ac311aac48dca869bb8653d5a4a0b9"
+checksum = "e8ad1e1002ecf8bafcb9b968bf19856ba4fe0e6c0c73b3404565bb29b15aae2c"
 dependencies = [
  "anyhow",
  "futures",
@@ -2803,9 +2803,9 @@ dependencies = [
 
 [[package]]
 name = "monch"
-version = "0.2.1"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5e2e282addadb529bb31700f7d184797382fa2eb18384986aad78d117eaf0c4"
+checksum = "f13de1c3edc9a5b9dc3a1029f56e9ab3eba34640010aff4fc01044c42ef67afa"
 
 [[package]]
 name = "naga"
@@ -57,7 +57,7 @@ deno_emit = "0.10.0"
 deno_graph = "0.37.1"
 deno_lint = { version = "0.34.0", features = ["docs"] }
 deno_runtime = { version = "0.83.0", path = "../runtime" }
-deno_task_shell = "0.7.0"
+deno_task_shell = "0.7.2"
 napi_sym = { path = "./napi_sym", version = "0.5.0" }
 
 atty = "=0.2.14"
@@ -86,7 +86,7 @@ libc = "=0.2.126"
 log = { version = "=0.4.17", features = ["serde"] }
 lsp-types = "=0.93.2" # used by tower-lsp and "proposed" feature is unstable in patch releases
 mitata = "=0.0.7"
-monch = "=0.2.1"
+monch = "=0.4.0"
 notify = "=5.0.0"
 once_cell = "=1.14.0"
 os_pipe = "=1.0.1"
@@ -15,6 +15,7 @@ use std::io::ErrorKind;
 use std::io::Write;
 use std::path::Path;
 use std::path::PathBuf;
+use std::time::Duration;
 use walkdir::WalkDir;
 
 pub fn atomic_write_file<T: AsRef<[u8]>>(
@@ -357,6 +358,84 @@ pub fn copy_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> {
   Ok(())
 }
 
+/// Hardlinks the files in one directory to another directory.
+///
+/// Note: Does not handle symlinks.
+pub fn hard_link_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> {
+  std::fs::create_dir_all(&to)
+    .with_context(|| format!("Creating {}", to.display()))?;
+  let read_dir = std::fs::read_dir(&from)
+    .with_context(|| format!("Reading {}", from.display()))?;
+
+  for entry in read_dir {
+    let entry = entry?;
+    let file_type = entry.file_type()?;
+    let new_from = from.join(entry.file_name());
+    let new_to = to.join(entry.file_name());
+
+    if file_type.is_dir() {
+      hard_link_dir_recursive(&new_from, &new_to).with_context(|| {
+        format!("Dir {} to {}", new_from.display(), new_to.display())
+      })?;
+    } else if file_type.is_file() {
+      // note: chance for race conditions here between attempting to create,
+      // then removing, then attempting to create. There doesn't seem to be
+      // a way to hard link with overwriting in Rust, but maybe there is some
+      // way with platform specific code. The workaround here is to handle
+      // scenarios where something else might create or remove files.
+      if let Err(err) = std::fs::hard_link(&new_from, &new_to) {
+        if err.kind() == ErrorKind::AlreadyExists {
+          if let Err(err) = std::fs::remove_file(&new_to) {
+            if err.kind() == ErrorKind::NotFound {
+              // Assume another process/thread created this hard link to the file we are wanting
+              // to remove then sleep a little bit to let the other process/thread move ahead
+              // faster to reduce contention.
+              std::thread::sleep(Duration::from_millis(10));
+            } else {
+              return Err(err).with_context(|| {
+                format!(
+                  "Removing file to hard link {} to {}",
+                  new_from.display(),
+                  new_to.display()
+                )
+              });
+            }
+          }
+
+          // Always attempt to recreate the hardlink. In contention scenarios, the other process
+          // might have been killed or exited after removing the file, but before creating the hardlink
+          if let Err(err) = std::fs::hard_link(&new_from, &new_to) {
+            // Assume another process/thread created this hard link to the file we are wanting
+            // to now create then sleep a little bit to let the other process/thread move ahead
+            // faster to reduce contention.
+            if err.kind() == ErrorKind::AlreadyExists {
+              std::thread::sleep(Duration::from_millis(10));
+            } else {
+              return Err(err).with_context(|| {
+                format!(
+                  "Hard linking {} to {}",
+                  new_from.display(),
+                  new_to.display()
+                )
+              });
+            }
+          }
+        } else {
+          return Err(err).with_context(|| {
+            format!(
+              "Hard linking {} to {}",
+              new_from.display(),
+              new_to.display()
+            )
+          });
+        }
+      }
+    }
+  }
+
+  Ok(())
+}
+
 pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), AnyError> {
   let err_mapper = |err: Error| {
     Error::new(
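A minimal usage sketch of the new helper, not part of the diff; the paths are invented, and `fs_util` and `AnyError` are assumed to be the crate's own items shown above. In the commit this is called when creating the `_N` copy folders in the global cache.

// Sketch only: mirror an already extracted package folder into a "_1" copy
// folder by hard linking every file, so the copy costs almost no disk space.
fn make_copy_folder() -> Result<(), AnyError> {
  let original = std::path::Path::new("registry.npmjs.org/chalk/5.0.0");
  let copy = std::path::Path::new("registry.npmjs.org/chalk/5.0.0_1");
  fs_util::hard_link_dir_recursive(original, copy)
}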
@@ -16,6 +16,7 @@ use std::rc::Rc;
 use std::sync::Arc;
 
 use crate::args::ConfigFile;
+use crate::npm::NpmPackageId;
 use crate::npm::NpmPackageReq;
 use crate::npm::NpmResolutionPackage;
 use crate::tools::fmt::format_json;
@@ -40,7 +41,7 @@ pub struct NpmPackageInfo {
 
 #[derive(Clone, Debug, Default, Serialize, Deserialize)]
 pub struct NpmContent {
-  /// Mapping between requests for npm packages and resolved specifiers, eg.
+  /// Mapping between requests for npm packages and resolved packages, eg.
   /// {
   ///   "chalk": "chalk@5.0.0"
   ///   "react@17": "react@17.0.1"
@@ -269,7 +270,7 @@ impl Lockfile {
     &mut self,
     package: &NpmResolutionPackage,
   ) -> Result<(), LockfileError> {
-    let specifier = package.id.serialize_for_lock_file();
+    let specifier = package.id.as_serialized();
     if let Some(package_info) = self.content.npm.packages.get(&specifier) {
       let integrity = package
         .dist
@@ -286,7 +287,7 @@ This could be caused by:
   * the source itself may be corrupt
 
 Use \"--lock-write\" flag to regenerate the lockfile at \"{}\".",
-          package.id, self.filename.display()
+          package.id.display(), self.filename.display()
         )));
       }
     } else {
@@ -300,7 +301,7 @@ Use \"--lock-write\" flag to regenerate the lockfile at \"{}\".",
     let dependencies = package
       .dependencies
       .iter()
-      .map(|(name, id)| (name.to_string(), id.serialize_for_lock_file()))
+      .map(|(name, id)| (name.to_string(), id.as_serialized()))
       .collect::<BTreeMap<String, String>>();
 
     let integrity = package
@@ -309,7 +310,7 @@ Use \"--lock-write\" flag to regenerate the lockfile at \"{}\".",
       .as_ref()
       .unwrap_or(&package.dist.shasum);
     self.content.npm.packages.insert(
-      package.id.serialize_for_lock_file(),
+      package.id.as_serialized(),
       NpmPackageInfo {
         integrity: integrity.to_string(),
         dependencies,
@@ -321,12 +322,13 @@ Use \"--lock-write\" flag to regenerate the lockfile at \"{}\".",
   pub fn insert_npm_specifier(
     &mut self,
     package_req: &NpmPackageReq,
-    version: String,
+    package_id: &NpmPackageId,
   ) {
-    self.content.npm.specifiers.insert(
-      package_req.to_string(),
-      format!("{}@{}", package_req.name, version),
-    );
+    self
+      .content
+      .npm
+      .specifiers
+      .insert(package_req.to_string(), package_id.as_serialized());
     self.has_content_changed = true;
   }
 }
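A sketch of the new call shape, not part of the diff; the values are invented, and the `NpmPackageId` fields are the ones shown in the test hunks below. The lockfile now records the fully serialized package id for each specifier instead of a hand-built "<name>@<version>" string.

// Sketch only: record a resolved npm specifier in the lockfile.
fn record_specifier(lockfile: &mut Lockfile) -> Result<(), AnyError> {
  let req = NpmPackageReq::from_str("nanoid@3")?;
  let id = NpmPackageId {
    name: "nanoid".to_string(),
    version: NpmVersion::parse("3.3.4").unwrap(),
    peer_dependencies: Vec::new(),
  };
  // content.npm.specifiers now maps "nanoid@3" to id.as_serialized()
  lockfile.insert_npm_specifier(&req, &id);
  Ok(())
}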
@@ -559,10 +561,12 @@ mod tests {
         id: NpmPackageId {
           name: "nanoid".to_string(),
           version: NpmVersion::parse("3.3.4").unwrap(),
+          peer_dependencies: Vec::new(),
         },
+        copy_index: 0,
         dist: NpmPackageVersionDistInfo {
           tarball: "foo".to_string(),
           shasum: "foo".to_string(),
           integrity: Some("sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==".to_string())
         },
         dependencies: HashMap::new(),
@@ -574,10 +578,12 @@ mod tests {
         id: NpmPackageId {
           name: "picocolors".to_string(),
           version: NpmVersion::parse("1.0.0").unwrap(),
+          peer_dependencies: Vec::new(),
         },
+        copy_index: 0,
         dist: NpmPackageVersionDistInfo {
           tarball: "foo".to_string(),
           shasum: "foo".to_string(),
           integrity: Some("sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==".to_string())
         },
         dependencies: HashMap::new(),
@@ -590,10 +596,12 @@ mod tests {
         id: NpmPackageId {
           name: "source-map-js".to_string(),
           version: NpmVersion::parse("1.0.2").unwrap(),
+          peer_dependencies: Vec::new(),
         },
+        copy_index: 0,
         dist: NpmPackageVersionDistInfo {
           tarball: "foo".to_string(),
           shasum: "foo".to_string(),
           integrity: Some("sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==".to_string())
         },
         dependencies: HashMap::new(),
@@ -606,7 +614,9 @@ mod tests {
         id: NpmPackageId {
           name: "source-map-js".to_string(),
           version: NpmVersion::parse("1.0.2").unwrap(),
+          peer_dependencies: Vec::new(),
         },
+        copy_index: 0,
         dist: NpmPackageVersionDistInfo {
           tarball: "foo".to_string(),
           shasum: "foo".to_string(),
@@ -71,7 +71,7 @@ use crate::fs_util;
 use crate::graph_util::graph_valid;
 use crate::npm::NpmCache;
 use crate::npm::NpmPackageResolver;
-use crate::npm::NpmRegistryApi;
+use crate::npm::RealNpmRegistryApi;
 use crate::proc_state::import_map_from_text;
 use crate::proc_state::ProcState;
 use crate::progress_bar::ProgressBar;
@@ -258,7 +258,7 @@ impl Inner {
       ts_server.clone(),
     );
     let assets = Assets::new(ts_server.clone());
-    let registry_url = NpmRegistryApi::default_url();
+    let registry_url = RealNpmRegistryApi::default_url();
     // Use an "only" cache setting in order to make the
     // user do an explicit "cache" command and prevent
     // the cache from being filled with lots of packages while
@@ -270,7 +270,7 @@ impl Inner {
       cache_setting.clone(),
       progress_bar.clone(),
     );
-    let api = NpmRegistryApi::new(
+    let api = RealNpmRegistryApi::new(
       registry_url,
       npm_cache.clone(),
       cache_setting,
cli/npm/cache.rs, 336 changed lines:
@@ -21,7 +21,6 @@ use crate::progress_bar::ProgressBar;
 use super::registry::NpmPackageVersionDistInfo;
 use super::semver::NpmVersion;
 use super::tarball::verify_and_extract_tarball;
-use super::NpmPackageId;
 
 /// For some of the tests, we want downloading of packages
 /// to be deterministic so that the output is always the same
@@ -29,7 +28,107 @@ pub fn should_sync_download() -> bool {
   std::env::var("DENO_UNSTABLE_NPM_SYNC_DOWNLOAD") == Ok("1".to_string())
 }
 
-pub const NPM_PACKAGE_SYNC_LOCK_FILENAME: &str = ".deno_sync_lock";
+const NPM_PACKAGE_SYNC_LOCK_FILENAME: &str = ".deno_sync_lock";
+
+pub fn with_folder_sync_lock(
+  package: (&str, &NpmVersion),
+  output_folder: &Path,
+  action: impl FnOnce() -> Result<(), AnyError>,
+) -> Result<(), AnyError> {
+  fn inner(
+    output_folder: &Path,
+    action: impl FnOnce() -> Result<(), AnyError>,
+  ) -> Result<(), AnyError> {
+    fs::create_dir_all(output_folder).with_context(|| {
+      format!("Error creating '{}'.", output_folder.display())
+    })?;
+
+    // This sync lock file is a way to ensure that partially created
+    // npm package directories aren't considered valid. This could maybe
+    // be a bit smarter in the future to not bother extracting here
+    // if another process has taken the lock in the past X seconds and
+    // wait for the other process to finish (it could try to create the
+    // file with `create_new(true)` then if it exists, check the metadata
+    // then wait until the other process finishes with a timeout), but
+    // for now this is good enough.
+    let sync_lock_path = output_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME);
+    match fs::OpenOptions::new()
+      .write(true)
+      .create(true)
+      .open(&sync_lock_path)
+    {
+      Ok(_) => {
+        action()?;
+        // extraction succeeded, so only now delete this file
+        let _ignore = std::fs::remove_file(&sync_lock_path);
+        Ok(())
+      }
+      Err(err) => {
+        bail!(
+          concat!(
+            "Error creating package sync lock file at '{}'. ",
+            "Maybe try manually deleting this folder.\n\n{:#}",
+          ),
+          output_folder.display(),
+          err
+        );
+      }
+    }
+  }
+
+  match inner(output_folder, action) {
+    Ok(()) => Ok(()),
+    Err(err) => {
+      if let Err(remove_err) = fs::remove_dir_all(&output_folder) {
+        if remove_err.kind() != std::io::ErrorKind::NotFound {
+          bail!(
+            concat!(
+              "Failed setting up package cache directory for {}@{}, then ",
+              "failed cleaning it up.\n\nOriginal error:\n\n{}\n\n",
+              "Remove error:\n\n{}\n\nPlease manually ",
+              "delete this folder or you will run into issues using this ",
+              "package in the future:\n\n{}"
+            ),
+            package.0,
+            package.1,
+            err,
+            remove_err,
+            output_folder.display(),
+          );
+        }
+      }
+      Err(err)
+    }
+  }
+}
+
+pub struct NpmPackageCacheFolderId {
+  pub name: String,
+  pub version: NpmVersion,
+  /// Peer dependency resolution may require us to have duplicate copies
+  /// of the same package.
+  pub copy_index: usize,
+}
+
+impl NpmPackageCacheFolderId {
+  pub fn with_no_count(&self) -> Self {
+    Self {
+      name: self.name.clone(),
+      version: self.version.clone(),
+      copy_index: 0,
+    }
+  }
+}
+
+impl std::fmt::Display for NpmPackageCacheFolderId {
+  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+    write!(f, "{}@{}", self.name, self.version)?;
+    if self.copy_index > 0 {
+      write!(f, "_{}", self.copy_index)?;
+    }
+    Ok(())
+  }
+}
 
 #[derive(Clone, Debug)]
 pub struct ReadonlyNpmCache {
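Given the Display impl above, a cache folder id only renders its copy index for duplicates. A small sketch, not part of the diff (the package name and version are arbitrary):

fn folder_id_display_example() {
  let original = NpmPackageCacheFolderId {
    name: "react".to_string(),
    version: NpmVersion::parse("16.14.0").unwrap(),
    copy_index: 0,
  };
  // with_no_count() clones the id with copy_index 0; override it for the copy
  let copy = NpmPackageCacheFolderId {
    copy_index: 1,
    ..original.with_no_count()
  };
  assert_eq!(original.to_string(), "react@16.14.0");
  assert_eq!(copy.to_string(), "react@16.14.0_1");
}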
@@ -78,32 +177,49 @@ impl ReadonlyNpmCache {
     Self::new(dir.root.join("npm"))
   }
 
-  pub fn package_folder(
+  pub fn package_folder_for_id(
     &self,
-    id: &NpmPackageId,
+    id: &NpmPackageCacheFolderId,
+    registry_url: &Url,
+  ) -> PathBuf {
+    if id.copy_index == 0 {
+      self.package_folder_for_name_and_version(
+        &id.name,
+        &id.version,
+        registry_url,
+      )
+    } else {
+      self
+        .package_name_folder(&id.name, registry_url)
+        .join(format!("{}_{}", id.version, id.copy_index))
+    }
+  }
+
+  pub fn package_folder_for_name_and_version(
+    &self,
+    name: &str,
+    version: &NpmVersion,
     registry_url: &Url,
   ) -> PathBuf {
     self
-      .package_name_folder(&id.name, registry_url)
-      .join(id.version.to_string())
+      .package_name_folder(name, registry_url)
+      .join(version.to_string())
   }
 
   pub fn package_name_folder(&self, name: &str, registry_url: &Url) -> PathBuf {
     let mut dir = self.registry_folder(registry_url);
-    let mut parts = name.split('/').map(Cow::Borrowed).collect::<Vec<_>>();
-    // package names were not always enforced to be lowercase and so we need
-    // to ensure package names, which are therefore case sensitive, are stored
-    // on a case insensitive file system to not have conflicts. We do this by
-    // first putting it in a "_" folder then hashing the package name.
+    let parts = name.split('/').map(Cow::Borrowed).collect::<Vec<_>>();
     if name.to_lowercase() != name {
-      let last_part = parts.last_mut().unwrap();
-      *last_part = Cow::Owned(crate::checksum::gen(&[last_part.as_bytes()]));
-      // We can't just use the hash as part of the directory because it may
-      // have a collision with an actual package name in case someone wanted
-      // to name an actual package that. To get around this, put all these
-      // in a folder called "_" since npm packages can't start with an underscore
-      // and there is no package currently called just "_".
-      dir = dir.join("_");
+      // Lowercase package names introduce complications.
+      // When implementing this ensure:
+      // 1. It works on case insensitive filesystems. ex. JSON should not
+      // conflict with json... yes you read that right, those are separate
+      // packages.
+      // 2. We can figure out the package id from the path. This is used
+      // in resolve_package_id_from_specifier
+      // Probably use a hash of the package name at `npm/-/<hash>` then create
+      // a mapping for these package names.
+      todo!("deno currently doesn't support npm package names that are not all lowercase");
     }
     // ensure backslashes are used on windows
     for part in parts {
@@ -118,23 +234,24 @@ impl ReadonlyNpmCache {
       .join(fs_util::root_url_to_safe_local_dirname(registry_url))
   }
 
-  pub fn resolve_package_id_from_specifier(
+  pub fn resolve_package_folder_id_from_specifier(
     &self,
     specifier: &ModuleSpecifier,
     registry_url: &Url,
-  ) -> Result<NpmPackageId, AnyError> {
-    match self.maybe_resolve_package_id_from_specifier(specifier, registry_url)
+  ) -> Result<NpmPackageCacheFolderId, AnyError> {
+    match self
+      .maybe_resolve_package_folder_id_from_specifier(specifier, registry_url)
     {
       Some(id) => Ok(id),
       None => bail!("could not find npm package for '{}'", specifier),
     }
   }
 
-  fn maybe_resolve_package_id_from_specifier(
+  fn maybe_resolve_package_folder_id_from_specifier(
     &self,
     specifier: &ModuleSpecifier,
     registry_url: &Url,
-  ) -> Option<NpmPackageId> {
+  ) -> Option<NpmPackageCacheFolderId> {
     let registry_root_dir = self
       .root_dir_url
       .join(&format!(
@@ -153,6 +270,7 @@ impl ReadonlyNpmCache {
     // examples:
     // * chalk/5.0.1/
     // * @types/chalk/5.0.1/
+    // * some-package/5.0.1_1/ -- where the `_1` (/_\d+/) is a copy of the folder for peer deps
     let is_scoped_package = relative_url.starts_with('@');
     let mut parts = relative_url
       .split('/')
@@ -163,11 +281,19 @@ impl ReadonlyNpmCache {
     if parts.len() < 2 {
       return None;
     }
-    let version = parts.pop().unwrap();
+    let version_part = parts.pop().unwrap();
     let name = parts.join("/");
-    NpmVersion::parse(version)
-      .ok()
-      .map(|version| NpmPackageId { name, version })
+    let (version, copy_index) =
+      if let Some((version, copy_count)) = version_part.split_once('_') {
+        (version, copy_count.parse::<usize>().ok()?)
+      } else {
+        (version_part, 0)
+      };
+    Some(NpmPackageCacheFolderId {
+      name,
+      version: NpmVersion::parse(version).ok()?,
+      copy_index,
+    })
   }
 
   pub fn get_cache_location(&self) -> PathBuf {
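The `_N` suffix parsing above can be seen in isolation; a sketch, not part of the diff, of the two cases the version folder can take:

fn copy_index_parse_example() {
  // The original folder has no suffix, so split_once finds no underscore...
  assert_eq!("1.0.2".split_once('_'), None);
  // ...while a peer-dependency copy splits into the version and its copy index.
  assert_eq!("1.0.2_1".split_once('_'), Some(("1.0.2", "1")));
  // So "chalk/5.0.1/" maps to copy_index 0 and "some-package/5.0.1_1/" to copy_index 1.
}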
@@ -202,28 +328,38 @@ impl NpmCache {
 
   pub async fn ensure_package(
     &self,
-    id: &NpmPackageId,
+    package: (&str, &NpmVersion),
     dist: &NpmPackageVersionDistInfo,
     registry_url: &Url,
   ) -> Result<(), AnyError> {
     self
-      .ensure_package_inner(id, dist, registry_url)
+      .ensure_package_inner(package, dist, registry_url)
       .await
-      .with_context(|| format!("Failed caching npm package '{}'.", id))
+      .with_context(|| {
+        format!("Failed caching npm package '{}@{}'.", package.0, package.1)
+      })
+  }
+
+  pub fn should_use_cache_for_npm_package(&self, package_name: &str) -> bool {
+    self.cache_setting.should_use_for_npm_package(package_name)
   }
 
   async fn ensure_package_inner(
     &self,
-    id: &NpmPackageId,
+    package: (&str, &NpmVersion),
     dist: &NpmPackageVersionDistInfo,
     registry_url: &Url,
   ) -> Result<(), AnyError> {
-    let package_folder = self.readonly.package_folder(id, registry_url);
+    let package_folder = self.readonly.package_folder_for_name_and_version(
+      package.0,
+      package.1,
+      registry_url,
+    );
     if package_folder.exists()
       // if this file exists, then the package didn't successfully extract
       // the first time, or another process is currently extracting the zip file
       && !package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME).exists()
-      && self.cache_setting.should_use_for_npm_package(&id.name)
+      && self.should_use_cache_for_npm_package(package.0)
     {
       return Ok(());
     } else if self.cache_setting == CacheSetting::Only {
@@ -231,7 +367,7 @@ impl NpmCache {
         "NotCached",
         format!(
           "An npm specifier not found in cache: \"{}\", --cached-only is specified.",
-          id.name
+          &package.0
         )
       )
     );
@@ -256,38 +392,66 @@ impl NpmCache {
     } else {
       let bytes = response.bytes().await?;
 
-      match verify_and_extract_tarball(id, &bytes, dist, &package_folder) {
-        Ok(()) => Ok(()),
-        Err(err) => {
-          if let Err(remove_err) = fs::remove_dir_all(&package_folder) {
-            if remove_err.kind() != std::io::ErrorKind::NotFound {
-              bail!(
-                concat!(
-                  "Failed verifying and extracting npm tarball for {}, then ",
-                  "failed cleaning up package cache folder.\n\nOriginal ",
-                  "error:\n\n{}\n\nRemove error:\n\n{}\n\nPlease manually ",
-                  "delete this folder or you will run into issues using this ",
-                  "package in the future:\n\n{}"
-                ),
-                id,
-                err,
-                remove_err,
-                package_folder.display(),
-              );
-            }
-          }
-          Err(err)
-        }
-      }
+      verify_and_extract_tarball(package, &bytes, dist, &package_folder)
     }
   }
 
-  pub fn package_folder(
+  /// Ensures a copy of the package exists in the global cache.
+  ///
+  /// This assumes that the original package folder being hard linked
+  /// from exists before this is called.
+  pub fn ensure_copy_package(
     &self,
-    id: &NpmPackageId,
+    id: &NpmPackageCacheFolderId,
+    registry_url: &Url,
+  ) -> Result<(), AnyError> {
+    assert_ne!(id.copy_index, 0);
+    let package_folder = self.readonly.package_folder_for_id(id, registry_url);
+
+    if package_folder.exists()
+      // if this file exists, then the package didn't successfully extract
+      // the first time, or another process is currently extracting the zip file
+      && !package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME).exists()
+      && self.cache_setting.should_use_for_npm_package(&id.name)
+    {
+      return Ok(());
+    }
+
+    let original_package_folder = self
+      .readonly
+      .package_folder_for_name_and_version(&id.name, &id.version, registry_url);
+    with_folder_sync_lock(
+      (id.name.as_str(), &id.version),
+      &package_folder,
+      || {
+        fs_util::hard_link_dir_recursive(
+          &original_package_folder,
+          &package_folder,
+        )
+      },
+    )?;
+    Ok(())
+  }
+
+  pub fn package_folder_for_id(
+    &self,
+    id: &NpmPackageCacheFolderId,
     registry_url: &Url,
   ) -> PathBuf {
-    self.readonly.package_folder(id, registry_url)
+    self.readonly.package_folder_for_id(id, registry_url)
+  }
+
+  pub fn package_folder_for_name_and_version(
+    &self,
+    name: &str,
+    version: &NpmVersion,
+    registry_url: &Url,
+  ) -> PathBuf {
+    self.readonly.package_folder_for_name_and_version(
+      name,
+      version,
+      registry_url,
+    )
   }
 
   pub fn package_name_folder(&self, name: &str, registry_url: &Url) -> PathBuf {
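A hedged sketch, not part of the diff, of how a duplicate created for peer-dependency resolution ends up in the cache; the `cache` and `registry_url` values are assumed to exist, the package name and version are made up, and the original `react-dom@18.2.0` folder must already have been extracted.

// Sketch only: the second copy of react-dom lives next to the original at
// <registry>/react-dom/18.2.0_2 and is hard linked from it.
fn ensure_copy_example(
  cache: &NpmCache,
  registry_url: &Url,
) -> Result<(), AnyError> {
  let folder_id = NpmPackageCacheFolderId {
    name: "react-dom".to_string(),
    version: NpmVersion::parse("18.2.0").unwrap(),
    copy_index: 2,
  };
  cache.ensure_copy_package(&folder_id, registry_url)?;
  assert!(cache
    .package_folder_for_id(&folder_id, registry_url)
    .ends_with("react-dom/18.2.0_2"));
  Ok(())
}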
@@ -298,14 +462,14 @@ impl NpmCache {
     self.readonly.registry_folder(registry_url)
   }
 
-  pub fn resolve_package_id_from_specifier(
+  pub fn resolve_package_folder_id_from_specifier(
     &self,
     specifier: &ModuleSpecifier,
     registry_url: &Url,
-  ) -> Result<NpmPackageId, AnyError> {
+  ) -> Result<NpmPackageCacheFolderId, AnyError> {
     self
       .readonly
-      .resolve_package_id_from_specifier(specifier, registry_url)
+      .resolve_package_folder_id_from_specifier(specifier, registry_url)
   }
 }
 
@@ -314,8 +478,8 @@ mod test {
   use deno_core::url::Url;
 
   use super::ReadonlyNpmCache;
+  use crate::npm::cache::NpmPackageCacheFolderId;
   use crate::npm::semver::NpmVersion;
-  use crate::npm::NpmPackageId;
 
   #[test]
   fn should_get_lowercase_package_folder() {
@@ -323,12 +487,12 @@ mod test {
     let cache = ReadonlyNpmCache::new(root_dir.clone());
     let registry_url = Url::parse("https://registry.npmjs.org/").unwrap();
 
-    // all lowercase should be as-is
     assert_eq!(
-      cache.package_folder(
-        &NpmPackageId {
+      cache.package_folder_for_id(
+        &NpmPackageCacheFolderId {
           name: "json".to_string(),
           version: NpmVersion::parse("1.2.5").unwrap(),
+          copy_index: 0,
         },
         &registry_url,
       ),
@@ -337,44 +501,20 @@ mod test {
         .join("json")
         .join("1.2.5"),
     );
-  }
 
-  #[test]
-  fn should_handle_non_all_lowercase_package_names() {
-    // it was possible at one point for npm packages to not just be lowercase
-    let root_dir = crate::deno_dir::DenoDir::new(None).unwrap().root;
-    let cache = ReadonlyNpmCache::new(root_dir.clone());
-    let registry_url = Url::parse("https://registry.npmjs.org/").unwrap();
-    let json_uppercase_hash =
-      "db1a21a0bc2ef8fbe13ac4cf044e8c9116d29137d5ed8b916ab63dcb2d4290df";
     assert_eq!(
-      cache.package_folder(
-        &NpmPackageId {
-          name: "JSON".to_string(),
+      cache.package_folder_for_id(
+        &NpmPackageCacheFolderId {
+          name: "json".to_string(),
           version: NpmVersion::parse("1.2.5").unwrap(),
+          copy_index: 1,
         },
         &registry_url,
       ),
       root_dir
         .join("registry.npmjs.org")
-        .join("_")
-        .join(json_uppercase_hash)
-        .join("1.2.5"),
-    );
-    assert_eq!(
-      cache.package_folder(
-        &NpmPackageId {
-          name: "@types/JSON".to_string(),
-          version: NpmVersion::parse("1.2.5").unwrap(),
-        },
-        &registry_url,
-      ),
-      root_dir
-        .join("registry.npmjs.org")
-        .join("_")
-        .join("@types")
-        .join(json_uppercase_hash)
-        .join("1.2.5"),
+        .join("json")
+        .join("1.2.5_1"),
     );
   }
 }
@@ -13,6 +13,7 @@ pub use cache::NpmCache;
 #[cfg(test)]
 pub use registry::NpmPackageVersionDistInfo;
 pub use registry::NpmRegistryApi;
+pub use registry::RealNpmRegistryApi;
 pub use resolution::NpmPackageId;
 pub use resolution::NpmPackageReference;
 pub use resolution::NpmPackageReq;
@@ -1,5 +1,6 @@
 // Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
 
+use std::cmp::Ordering;
 use std::collections::HashMap;
 use std::fs;
 use std::io::ErrorKind;
@@ -10,6 +11,8 @@ use deno_core::anyhow::bail;
 use deno_core::anyhow::Context;
 use deno_core::error::custom_error;
 use deno_core::error::AnyError;
+use deno_core::futures::future::BoxFuture;
+use deno_core::futures::FutureExt;
 use deno_core::parking_lot::Mutex;
 use deno_core::serde::Deserialize;
 use deno_core::serde_json;
@@ -24,11 +27,13 @@ use crate::http_cache::CACHE_PERM;
 use crate::progress_bar::ProgressBar;
 
 use super::cache::NpmCache;
+use super::resolution::NpmVersionMatcher;
+use super::semver::NpmVersion;
 use super::semver::NpmVersionReq;
 
 // npm registry docs: https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md
 
-#[derive(Debug, Deserialize, Serialize, Clone)]
+#[derive(Debug, Default, Deserialize, Serialize, Clone)]
 pub struct NpmPackageInfo {
   pub name: String,
   pub versions: HashMap<String, NpmPackageVersionInfo>,
@@ -36,13 +41,59 @@ pub struct NpmPackageInfo {
   pub dist_tags: HashMap<String, String>,
 }
 
+#[derive(Debug, Eq, PartialEq)]
+pub enum NpmDependencyEntryKind {
+  Dep,
+  Peer,
+  OptionalPeer,
+}
+
+impl NpmDependencyEntryKind {
+  pub fn is_optional(&self) -> bool {
+    matches!(self, NpmDependencyEntryKind::OptionalPeer)
+  }
+}
+
+#[derive(Debug, Eq, PartialEq)]
 pub struct NpmDependencyEntry {
+  pub kind: NpmDependencyEntryKind,
   pub bare_specifier: String,
   pub name: String,
   pub version_req: NpmVersionReq,
+  /// When the dependency is also marked as a peer dependency,
+  /// use this entry to resolve the dependency when it can't
+  /// be resolved as a peer dependency.
+  pub peer_dep_version_req: Option<NpmVersionReq>,
+}
+
+impl PartialOrd for NpmDependencyEntry {
+  fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+    Some(self.cmp(other))
+  }
+}
+
+impl Ord for NpmDependencyEntry {
+  fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+    // sort the dependencies alphabetically by name then by version descending
+    match self.name.cmp(&other.name) {
+      // sort by newest to oldest
+      Ordering::Equal => other
+        .version_req
+        .version_text()
+        .cmp(&self.version_req.version_text()),
+      ordering => ordering,
+    }
+  }
 }
 
 #[derive(Debug, Default, Deserialize, Serialize, Clone)]
+pub struct NpmPeerDependencyMeta {
+  #[serde(default)]
+  optional: bool,
+}
+
+#[derive(Debug, Default, Deserialize, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
 pub struct NpmPackageVersionInfo {
   pub version: String,
   pub dist: NpmPackageVersionDistInfo,
@@ -50,14 +101,19 @@ pub struct NpmPackageVersionInfo {
   // package and version (ex. `"typescript-3.0.1": "npm:typescript@3.0.1"`).
   #[serde(default)]
   pub dependencies: HashMap<String, String>,
+  #[serde(default)]
+  pub peer_dependencies: HashMap<String, String>,
+  #[serde(default)]
+  pub peer_dependencies_meta: HashMap<String, NpmPeerDependencyMeta>,
 }
 
 impl NpmPackageVersionInfo {
   pub fn dependencies_as_entries(
     &self,
   ) -> Result<Vec<NpmDependencyEntry>, AnyError> {
-    fn entry_as_bare_specifier_and_reference(
+    fn parse_dep_entry(
       entry: (&String, &String),
+      kind: NpmDependencyEntryKind,
     ) -> Result<NpmDependencyEntry, AnyError> {
       let bare_specifier = entry.0.clone();
       let (name, version_req) =
@@ -78,21 +134,46 @@ impl NpmPackageVersionInfo {
         )
       })?;
       Ok(NpmDependencyEntry {
+        kind,
         bare_specifier,
         name,
        version_req,
+        peer_dep_version_req: None,
       })
     }
 
-    self
-      .dependencies
-      .iter()
-      .map(entry_as_bare_specifier_and_reference)
-      .collect::<Result<Vec<_>, AnyError>>()
+    let mut result = HashMap::with_capacity(
+      self.dependencies.len() + self.peer_dependencies.len(),
+    );
+    for entry in &self.peer_dependencies {
+      let is_optional = self
+        .peer_dependencies_meta
+        .get(entry.0)
+        .map(|d| d.optional)
+        .unwrap_or(false);
+      let kind = match is_optional {
+        true => NpmDependencyEntryKind::OptionalPeer,
+        false => NpmDependencyEntryKind::Peer,
+      };
+      let entry = parse_dep_entry(entry, kind)?;
+      result.insert(entry.bare_specifier.clone(), entry);
+    }
+    for entry in &self.dependencies {
+      let entry = parse_dep_entry(entry, NpmDependencyEntryKind::Dep)?;
+      // people may define a dependency as a peer dependency as well,
+      // so in those cases, attempt to resolve as a peer dependency,
+      // but then use this dependency version requirement otherwise
+      if let Some(peer_dep_entry) = result.get_mut(&entry.bare_specifier) {
+        peer_dep_entry.peer_dep_version_req = Some(entry.version_req);
+      } else {
+        result.insert(entry.bare_specifier.clone(), entry);
+      }
+    }
+    Ok(result.into_values().collect())
   }
 }
 
-#[derive(Debug, Default, Clone, Serialize, Deserialize)]
+#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Eq)]
 pub struct NpmPackageVersionDistInfo {
   /// URL to the tarball.
   pub tarball: String,
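A sketch, not part of the diff, of how the merge above behaves when a package lists the same name under both `dependencies` and `peerDependencies`; the package names and version ranges are invented, while the structs and methods are the ones defined in this file.

fn merged_peer_entry_example() -> Result<(), AnyError> {
  // "react" appears as both a regular and a peer dependency, so a single
  // Peer entry comes back with the regular range kept as a fallback.
  let info = NpmPackageVersionInfo {
    version: "1.0.0".to_string(),
    dependencies: HashMap::from([("react".to_string(), "^16.14.0".to_string())]),
    peer_dependencies: HashMap::from([("react".to_string(), ">=16".to_string())]),
    ..Default::default()
  };
  let entries = info.dependencies_as_entries()?;
  assert_eq!(entries.len(), 1);
  assert_eq!(entries[0].kind, NpmDependencyEntryKind::Peer);
  assert!(entries[0].peer_dep_version_req.is_some());
  Ok(())
}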
@@ -100,16 +181,50 @@ pub struct NpmPackageVersionDistInfo {
   pub integrity: Option<String>,
 }
 
-#[derive(Clone)]
-pub struct NpmRegistryApi {
-  base_url: Url,
-  cache: NpmCache,
-  mem_cache: Arc<Mutex<HashMap<String, Option<NpmPackageInfo>>>>,
-  cache_setting: CacheSetting,
-  progress_bar: ProgressBar,
+pub trait NpmRegistryApi: Clone + Sync + Send + 'static {
+  fn maybe_package_info(
+    &self,
+    name: &str,
+  ) -> BoxFuture<'static, Result<Option<NpmPackageInfo>, AnyError>>;
+
+  fn package_info(
+    &self,
+    name: &str,
+  ) -> BoxFuture<'static, Result<NpmPackageInfo, AnyError>> {
+    let api = self.clone();
+    let name = name.to_string();
+    async move {
+      let maybe_package_info = api.maybe_package_info(&name).await?;
+      match maybe_package_info {
+        Some(package_info) => Ok(package_info),
+        None => bail!("npm package '{}' does not exist", name),
+      }
+    }
+    .boxed()
+  }
+
+  fn package_version_info(
+    &self,
+    name: &str,
+    version: &NpmVersion,
+  ) -> BoxFuture<'static, Result<Option<NpmPackageVersionInfo>, AnyError>> {
+    let api = self.clone();
+    let name = name.to_string();
+    let version = version.to_string();
+    async move {
+      // todo(dsherret): this could be optimized to not clone the
+      // entire package info in the case of the RealNpmRegistryApi
+      let mut package_info = api.package_info(&name).await?;
+      Ok(package_info.versions.remove(&version))
+    }
+    .boxed()
+  }
 }
 
-impl NpmRegistryApi {
+#[derive(Clone)]
+pub struct RealNpmRegistryApi(Arc<RealNpmRegistryApiInner>);
+
+impl RealNpmRegistryApi {
   pub fn default_url() -> Url {
     let env_var_name = "DENO_NPM_REGISTRY";
     if let Ok(registry_url) = std::env::var(env_var_name) {
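Because callers are now generic over the trait, the same code can run against the real registry or the test registry defined later in this file. A hedged sketch of such a caller, not part of the diff; the `print_dist_tarball` helper is hypothetical.

// Sketch only: look up a version's info through any NpmRegistryApi impl.
async fn print_dist_tarball<A: NpmRegistryApi>(
  api: &A,
  name: &str,
  version: &NpmVersion,
) -> Result<(), AnyError> {
  if let Some(version_info) = api.package_version_info(name, version).await? {
    println!("{}", version_info.dist.tarball);
  }
  Ok(())
}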
@@ -135,30 +250,40 @@ impl NpmRegistryApi {
     cache_setting: CacheSetting,
     progress_bar: ProgressBar,
   ) -> Self {
-    Self {
+    Self(Arc::new(RealNpmRegistryApiInner {
       base_url,
       cache,
       mem_cache: Default::default(),
       cache_setting,
       progress_bar,
-    }
+    }))
   }
 
   pub fn base_url(&self) -> &Url {
-    &self.base_url
+    &self.0.base_url
   }
+}
 
-  pub async fn package_info(
+impl NpmRegistryApi for RealNpmRegistryApi {
+  fn maybe_package_info(
     &self,
     name: &str,
-  ) -> Result<NpmPackageInfo, AnyError> {
-    let maybe_package_info = self.maybe_package_info(name).await?;
-    match maybe_package_info {
-      Some(package_info) => Ok(package_info),
-      None => bail!("npm package '{}' does not exist", name),
-    }
+  ) -> BoxFuture<'static, Result<Option<NpmPackageInfo>, AnyError>> {
+    let api = self.clone();
+    let name = name.to_string();
+    async move { api.0.maybe_package_info(&name).await }.boxed()
   }
+}
+
+struct RealNpmRegistryApiInner {
+  base_url: Url,
+  cache: NpmCache,
+  mem_cache: Mutex<HashMap<String, Option<NpmPackageInfo>>>,
+  cache_setting: CacheSetting,
+  progress_bar: ProgressBar,
+}
 
+impl RealNpmRegistryApiInner {
   pub async fn maybe_package_info(
     &self,
     name: &str,
@@ -331,3 +456,100 @@ impl NpmRegistryApi {
     name_folder_path.join("registry.json")
   }
 }
+
+/// Note: This test struct is not thread safe for setup
+/// purposes. Construct everything on the same thread.
+#[cfg(test)]
+#[derive(Clone, Default)]
+pub struct TestNpmRegistryApi {
+  package_infos: Arc<Mutex<HashMap<String, NpmPackageInfo>>>,
+}
+
+#[cfg(test)]
+impl TestNpmRegistryApi {
+  pub fn add_package_info(&self, name: &str, info: NpmPackageInfo) {
+    let previous = self.package_infos.lock().insert(name.to_string(), info);
+    assert!(previous.is_none());
+  }
+
+  pub fn ensure_package(&self, name: &str) {
+    if !self.package_infos.lock().contains_key(name) {
+      self.add_package_info(
+        name,
+        NpmPackageInfo {
+          name: name.to_string(),
+          ..Default::default()
+        },
+      );
+    }
+  }
+
+  pub fn ensure_package_version(&self, name: &str, version: &str) {
+    self.ensure_package(name);
+    let mut infos = self.package_infos.lock();
+    let info = infos.get_mut(name).unwrap();
+    if !info.versions.contains_key(version) {
+      info.versions.insert(
+        version.to_string(),
+        NpmPackageVersionInfo {
+          version: version.to_string(),
+          ..Default::default()
+        },
+      );
+    }
+  }
+
+  pub fn add_dependency(
+    &self,
+    package_from: (&str, &str),
+    package_to: (&str, &str),
+  ) {
+    let mut infos = self.package_infos.lock();
+    let info = infos.get_mut(package_from.0).unwrap();
+    let version = info.versions.get_mut(package_from.1).unwrap();
+    version
+      .dependencies
+      .insert(package_to.0.to_string(), package_to.1.to_string());
+  }
+
+  pub fn add_peer_dependency(
+    &self,
+    package_from: (&str, &str),
+    package_to: (&str, &str),
+  ) {
+    let mut infos = self.package_infos.lock();
+    let info = infos.get_mut(package_from.0).unwrap();
+    let version = info.versions.get_mut(package_from.1).unwrap();
+    version
+      .peer_dependencies
+      .insert(package_to.0.to_string(), package_to.1.to_string());
+  }
+
+  pub fn add_optional_peer_dependency(
+    &self,
+    package_from: (&str, &str),
+    package_to: (&str, &str),
+  ) {
+    let mut infos = self.package_infos.lock();
+    let info = infos.get_mut(package_from.0).unwrap();
+    let version = info.versions.get_mut(package_from.1).unwrap();
+    version
+      .peer_dependencies
+      .insert(package_to.0.to_string(), package_to.1.to_string());
+    version.peer_dependencies_meta.insert(
+      package_to.0.to_string(),
+      NpmPeerDependencyMeta { optional: true },
+    );
+  }
+}
+
+#[cfg(test)]
+impl NpmRegistryApi for TestNpmRegistryApi {
+  fn maybe_package_info(
+    &self,
+    name: &str,
+  ) -> BoxFuture<'static, Result<Option<NpmPackageInfo>, AnyError>> {
+    let result = self.package_infos.lock().get(name).cloned();
+    Box::pin(deno_core::futures::future::ready(Ok(result)))
+  }
+}
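A sketch, not part of the diff, of how the resolution tests can build a small peer-dependency graph with this helper; the package names and version ranges are made up.

// Sketch only: package-a depends on package-b, which declares package-peer
// as a required peer dependency that package-a also provides.
fn build_test_registry() -> TestNpmRegistryApi {
  let api = TestNpmRegistryApi::default();
  api.ensure_package_version("package-a", "1.0.0");
  api.ensure_package_version("package-b", "2.0.0");
  api.ensure_package_version("package-peer", "3.0.0");
  api.add_dependency(("package-a", "1.0.0"), ("package-b", "^2"));
  api.add_dependency(("package-a", "1.0.0"), ("package-peer", "^3"));
  api.add_peer_dependency(("package-b", "2.0.0"), ("package-peer", "^3"));
  api
}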
File diff suppressed because it is too large

cli/npm/resolution/graph.rs (new file, 2033 lines)
File diff suppressed because it is too large

cli/npm/resolution/mod.rs (new file, 676 lines):
@@ -0,0 +1,676 @@
+// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
+
+use std::collections::HashMap;
+use std::collections::HashSet;
+
+use deno_ast::ModuleSpecifier;
+use deno_core::anyhow::bail;
+use deno_core::anyhow::Context;
+use deno_core::error::generic_error;
+use deno_core::error::AnyError;
+use deno_core::futures;
+use deno_core::parking_lot::RwLock;
+use serde::Deserialize;
+use serde::Serialize;
+
+use crate::lockfile::Lockfile;
+
+use self::graph::GraphDependencyResolver;
+use self::snapshot::NpmPackagesPartitioned;
+
+use super::cache::should_sync_download;
+use super::cache::NpmPackageCacheFolderId;
+use super::registry::NpmPackageVersionDistInfo;
+use super::registry::RealNpmRegistryApi;
+use super::semver::NpmVersion;
+use super::semver::SpecifierVersionReq;
+use super::NpmRegistryApi;
+
+mod graph;
+mod snapshot;
+
+use graph::Graph;
+pub use snapshot::NpmResolutionSnapshot;
+
+/// The version matcher used for npm schemed urls is more strict than
+/// the one used by npm packages and so we represent either via a trait.
+pub trait NpmVersionMatcher {
+  fn tag(&self) -> Option<&str>;
+  fn matches(&self, version: &NpmVersion) -> bool;
+  fn version_text(&self) -> String;
+}
+
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
+pub struct NpmPackageReference {
+  pub req: NpmPackageReq,
+  pub sub_path: Option<String>,
+}
+
+impl NpmPackageReference {
+  pub fn from_specifier(
+    specifier: &ModuleSpecifier,
+  ) -> Result<NpmPackageReference, AnyError> {
+    Self::from_str(specifier.as_str())
+  }
+
+  pub fn from_str(specifier: &str) -> Result<NpmPackageReference, AnyError> {
+    let specifier = match specifier.strip_prefix("npm:") {
+      Some(s) => s,
+      None => {
+        bail!("Not an npm specifier: {}", specifier);
+      }
+    };
+    let parts = specifier.split('/').collect::<Vec<_>>();
+    let name_part_len = if specifier.starts_with('@') { 2 } else { 1 };
+    if parts.len() < name_part_len {
+      return Err(generic_error(format!("Not a valid package: {}", specifier)));
+    }
+    let name_parts = &parts[0..name_part_len];
+    let last_name_part = &name_parts[name_part_len - 1];
+    let (name, version_req) = if let Some(at_index) = last_name_part.rfind('@')
+    {
+      let version = &last_name_part[at_index + 1..];
+      let last_name_part = &last_name_part[..at_index];
+      let version_req = SpecifierVersionReq::parse(version)
+        .with_context(|| "Invalid version requirement.")?;
+      let name = if name_part_len == 1 {
+        last_name_part.to_string()
+      } else {
+        format!("{}/{}", name_parts[0], last_name_part)
+      };
+      (name, Some(version_req))
+    } else {
+      (name_parts.join("/"), None)
+    };
+    let sub_path = if parts.len() == name_parts.len() {
+      None
+    } else {
+      Some(parts[name_part_len..].join("/"))
+    };
+
+    if let Some(sub_path) = &sub_path {
+      if let Some(at_index) = sub_path.rfind('@') {
+        let (new_sub_path, version) = sub_path.split_at(at_index);
+        let msg = format!(
+          "Invalid package specifier 'npm:{}/{}'. Did you mean to write 'npm:{}{}/{}'?",
+          name, sub_path, name, version, new_sub_path
+        );
+        return Err(generic_error(msg));
+      }
+    }
+
+    Ok(NpmPackageReference {
+      req: NpmPackageReq { name, version_req },
+      sub_path,
+    })
+  }
+}
+
+impl std::fmt::Display for NpmPackageReference {
+  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+    if let Some(sub_path) = &self.sub_path {
+      write!(f, "npm:{}/{}", self.req, sub_path)
+    } else {
+      write!(f, "npm:{}", self.req)
+    }
+  }
+}
+
#[derive(
|
||||||
|
Clone, Debug, Default, PartialEq, Eq, Hash, Serialize, Deserialize,
|
||||||
|
)]
|
||||||
|
pub struct NpmPackageReq {
|
||||||
|
pub name: String,
|
||||||
|
pub version_req: Option<SpecifierVersionReq>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for NpmPackageReq {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
match &self.version_req {
|
||||||
|
Some(req) => write!(f, "{}@{}", self.name, req),
|
||||||
|
None => write!(f, "{}", self.name),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl NpmPackageReq {
|
||||||
|
pub fn from_str(text: &str) -> Result<Self, AnyError> {
|
||||||
|
// probably should do something more targetted in the future
|
||||||
|
let reference = NpmPackageReference::from_str(&format!("npm:{}", text))?;
|
||||||
|
Ok(reference.req)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl NpmVersionMatcher for NpmPackageReq {
|
||||||
|
fn tag(&self) -> Option<&str> {
|
||||||
|
match &self.version_req {
|
||||||
|
Some(version_req) => version_req.tag(),
|
||||||
|
None => Some("latest"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn matches(&self, version: &NpmVersion) -> bool {
|
||||||
|
match self.version_req.as_ref() {
|
||||||
|
Some(req) => {
|
||||||
|
assert_eq!(self.tag(), None);
|
||||||
|
match req.range() {
|
||||||
|
Some(range) => range.satisfies(version),
|
||||||
|
None => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => version.pre.is_empty(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn version_text(&self) -> String {
|
||||||
|
self
|
||||||
|
.version_req
|
||||||
|
.as_ref()
|
||||||
|
.map(|v| format!("{}", v))
|
||||||
|
.unwrap_or_else(|| "non-prerelease".to_string())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
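To make the matcher semantics above concrete, here is a minimal test sketch (not part of the commit; the package name and versions are illustrative, and it assumes it sits in the `tests` module at the bottom of this file): a versioned requirement matches through its semver range, while a bare requirement only accepts non-prerelease versions.

#[test]
fn package_req_version_matching() {
  // "example" is a made-up package name
  let req = NpmPackageReq::from_str("example@^1.2").unwrap();
  assert!(req.matches(&NpmVersion::parse("1.3.0").unwrap()));
  assert!(!req.matches(&NpmVersion::parse("2.0.0").unwrap()));

  // no version requirement: any non-prerelease version is acceptable
  let bare = NpmPackageReq::from_str("example").unwrap();
  assert!(bare.matches(&NpmVersion::parse("1.3.0").unwrap()));
  assert!(!bare.matches(&NpmVersion::parse("1.3.0-alpha.1").unwrap()));
}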
#[derive(
  Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Hash, Serialize, Deserialize,
)]
pub struct NpmPackageId {
  pub name: String,
  pub version: NpmVersion,
  pub peer_dependencies: Vec<NpmPackageId>,
}

impl NpmPackageId {
  #[allow(unused)]
  pub fn scope(&self) -> Option<&str> {
    if self.name.starts_with('@') && self.name.contains('/') {
      self.name.split('/').next()
    } else {
      None
    }
  }

  pub fn as_serialized(&self) -> String {
    self.as_serialized_with_level(0)
  }

  fn as_serialized_with_level(&self, level: usize) -> String {
    // WARNING: This should not change because it's used in the lockfile
    let mut result = format!(
      "{}@{}",
      if level == 0 {
        self.name.to_string()
      } else {
        self.name.replace('/', "+")
      },
      self.version
    );
    for peer in &self.peer_dependencies {
      // unfortunately we can't do something like `_3` when
      // this gets deep because npm package names can start
      // with a number
      result.push_str(&"_".repeat(level + 1));
      result.push_str(&peer.as_serialized_with_level(level + 1));
    }
    result
  }

  pub fn from_serialized(id: &str) -> Result<Self, AnyError> {
    use monch::*;

    fn parse_name(input: &str) -> ParseResult<&str> {
      if_not_empty(substring(move |input| {
        for (pos, c) in input.char_indices() {
          // first character might be a scope, so skip it
          if pos > 0 && c == '@' {
            return Ok((&input[pos..], ()));
          }
        }
        ParseError::backtrace()
      }))(input)
    }

    fn parse_version(input: &str) -> ParseResult<&str> {
      if_not_empty(substring(skip_while(|c| c != '_')))(input)
    }

    fn parse_name_and_version(
      input: &str,
    ) -> ParseResult<(String, NpmVersion)> {
      let (input, name) = parse_name(input)?;
      let (input, _) = ch('@')(input)?;
      let at_version_input = input;
      let (input, version) = parse_version(input)?;
      match NpmVersion::parse(version) {
        Ok(version) => Ok((input, (name.to_string(), version))),
        Err(err) => ParseError::fail(at_version_input, format!("{:#}", err)),
      }
    }

    fn parse_level_at_level<'a>(
      level: usize,
    ) -> impl Fn(&'a str) -> ParseResult<'a, ()> {
      fn parse_level(input: &str) -> ParseResult<usize> {
        let level = input.chars().take_while(|c| *c == '_').count();
        Ok((&input[level..], level))
      }

      move |input| {
        let (input, parsed_level) = parse_level(input)?;
        if parsed_level == level {
          Ok((input, ()))
        } else {
          ParseError::backtrace()
        }
      }
    }

    fn parse_peers_at_level<'a>(
      level: usize,
    ) -> impl Fn(&'a str) -> ParseResult<'a, Vec<NpmPackageId>> {
      move |mut input| {
        let mut peers = Vec::new();
        while let Ok((level_input, _)) = parse_level_at_level(level)(input) {
          input = level_input;
          let peer_result = parse_id_at_level(level)(input)?;
          input = peer_result.0;
          peers.push(peer_result.1);
        }
        Ok((input, peers))
      }
    }

    fn parse_id_at_level<'a>(
      level: usize,
    ) -> impl Fn(&'a str) -> ParseResult<'a, NpmPackageId> {
      move |input| {
        let (input, (name, version)) = parse_name_and_version(input)?;
        let name = if level > 0 {
          name.replace('+', "/")
        } else {
          name
        };
        let (input, peer_dependencies) =
          parse_peers_at_level(level + 1)(input)?;
        Ok((
          input,
          NpmPackageId {
            name,
            version,
            peer_dependencies,
          },
        ))
      }
    }

    with_failure_handling(parse_id_at_level(0))(id)
      .with_context(|| format!("Invalid npm package id '{}'.", id))
  }

  pub fn display(&self) -> String {
    // Don't implement std::fmt::Display because we don't
    // want this to be used by accident in certain scenarios.
    format!("{}@{}", self.name, self.version)
  }
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct NpmResolutionPackage {
  pub id: NpmPackageId,
  /// The peer dependency resolution can differ for the same
  /// package (name and version) depending on where it is in
  /// the resolution tree. This copy index indicates which
  /// copy of the package this is.
  pub copy_index: usize,
  pub dist: NpmPackageVersionDistInfo,
  /// Key is what the package refers to the other package as,
  /// which could be different from the package name.
  pub dependencies: HashMap<String, NpmPackageId>,
}

impl NpmResolutionPackage {
  pub fn get_package_cache_folder_id(&self) -> NpmPackageCacheFolderId {
    NpmPackageCacheFolderId {
      name: self.id.name.clone(),
      version: self.id.version.clone(),
      copy_index: self.copy_index,
    }
  }
}

pub struct NpmResolution {
  api: RealNpmRegistryApi,
  snapshot: RwLock<NpmResolutionSnapshot>,
  update_sempahore: tokio::sync::Semaphore,
}

impl std::fmt::Debug for NpmResolution {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    let snapshot = self.snapshot.read();
    f.debug_struct("NpmResolution")
      .field("snapshot", &snapshot)
      .finish()
  }
}

impl NpmResolution {
  pub fn new(
    api: RealNpmRegistryApi,
    initial_snapshot: Option<NpmResolutionSnapshot>,
  ) -> Self {
    Self {
      api,
      snapshot: RwLock::new(initial_snapshot.unwrap_or_default()),
      update_sempahore: tokio::sync::Semaphore::new(1),
    }
  }

  pub async fn add_package_reqs(
    &self,
    package_reqs: Vec<NpmPackageReq>,
  ) -> Result<(), AnyError> {
    // only allow one thread in here at a time
    let _permit = self.update_sempahore.acquire().await.unwrap();
    let snapshot = self.snapshot.read().clone();

    let snapshot = self
      .add_package_reqs_to_snapshot(package_reqs, snapshot)
      .await?;

    *self.snapshot.write() = snapshot;
    Ok(())
  }

  pub async fn set_package_reqs(
    &self,
    package_reqs: HashSet<NpmPackageReq>,
  ) -> Result<(), AnyError> {
    // only allow one thread in here at a time
    let _permit = self.update_sempahore.acquire().await.unwrap();
    let snapshot = self.snapshot.read().clone();

    let has_removed_package = !snapshot
      .package_reqs
      .keys()
      .all(|req| package_reqs.contains(req));
    // if any packages were removed, we need to completely recreate the npm resolution snapshot
    let snapshot = if has_removed_package {
      NpmResolutionSnapshot::default()
    } else {
      snapshot
    };
    let snapshot = self
      .add_package_reqs_to_snapshot(
        package_reqs.into_iter().collect(),
        snapshot,
      )
      .await?;

    *self.snapshot.write() = snapshot;

    Ok(())
  }

  async fn add_package_reqs_to_snapshot(
    &self,
    mut package_reqs: Vec<NpmPackageReq>,
    snapshot: NpmResolutionSnapshot,
  ) -> Result<NpmResolutionSnapshot, AnyError> {
    // convert the snapshot to a traversable graph
    let mut graph = Graph::from_snapshot(snapshot);

    // multiple packages are resolved in alphabetical order
    package_reqs.sort_by(|a, b| a.name.cmp(&b.name));

    // go over the top level packages first, then down the
    // tree one level at a time through all the branches
    let mut unresolved_tasks = Vec::with_capacity(package_reqs.len());
    for package_req in package_reqs {
      if graph.has_package_req(&package_req) {
        // skip analyzing this package, as there's already a matching top level package
        continue;
      }

      // no existing best version, so resolve the current packages
      let api = self.api.clone();
      let maybe_info = if should_sync_download() {
        // for deterministic test output
        Some(api.package_info(&package_req.name).await)
      } else {
        None
      };
      unresolved_tasks.push(tokio::task::spawn(async move {
        let info = match maybe_info {
          Some(info) => info?,
          None => api.package_info(&package_req.name).await?,
        };
        Result::<_, AnyError>::Ok((package_req, info))
      }));
    }

    let mut resolver = GraphDependencyResolver::new(&mut graph, &self.api);

    for result in futures::future::join_all(unresolved_tasks).await {
      let (package_req, info) = result??;
      resolver.add_package_req(&package_req, info)?;
    }

    resolver.resolve_pending().await?;

    graph.into_snapshot(&self.api).await
  }

  pub fn resolve_package_from_id(
    &self,
    id: &NpmPackageId,
  ) -> Option<NpmResolutionPackage> {
    self.snapshot.read().package_from_id(id).cloned()
  }

  pub fn resolve_package_cache_folder_id_from_id(
    &self,
    id: &NpmPackageId,
  ) -> Option<NpmPackageCacheFolderId> {
    self
      .snapshot
      .read()
      .package_from_id(id)
      .map(|p| p.get_package_cache_folder_id())
  }

  pub fn resolve_package_from_package(
    &self,
    name: &str,
    referrer: &NpmPackageCacheFolderId,
  ) -> Result<NpmResolutionPackage, AnyError> {
    self
      .snapshot
      .read()
      .resolve_package_from_package(name, referrer)
      .cloned()
  }

  /// Resolve a node package from a deno module.
  pub fn resolve_package_from_deno_module(
    &self,
    package: &NpmPackageReq,
  ) -> Result<NpmResolutionPackage, AnyError> {
    self
      .snapshot
      .read()
      .resolve_package_from_deno_module(package)
      .cloned()
  }

  pub fn all_packages(&self) -> Vec<NpmResolutionPackage> {
    self.snapshot.read().all_packages()
  }

  pub fn all_packages_partitioned(&self) -> NpmPackagesPartitioned {
    self.snapshot.read().all_packages_partitioned()
  }

  pub fn has_packages(&self) -> bool {
    !self.snapshot.read().packages.is_empty()
  }

  pub fn snapshot(&self) -> NpmResolutionSnapshot {
    self.snapshot.read().clone()
  }

  pub fn lock(
    &self,
    lockfile: &mut Lockfile,
    snapshot: &NpmResolutionSnapshot,
  ) -> Result<(), AnyError> {
    for (package_req, package_id) in snapshot.package_reqs.iter() {
      lockfile.insert_npm_specifier(package_req, package_id);
    }
    for package in self.all_packages() {
      lockfile.check_or_insert_npm_package(&package)?;
    }
    Ok(())
  }
}

#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn parse_npm_package_ref() {
    assert_eq!(
      NpmPackageReference::from_str("npm:@package/test").unwrap(),
      NpmPackageReference {
        req: NpmPackageReq {
          name: "@package/test".to_string(),
          version_req: None,
        },
        sub_path: None,
      }
    );

    assert_eq!(
      NpmPackageReference::from_str("npm:@package/test@1").unwrap(),
      NpmPackageReference {
        req: NpmPackageReq {
          name: "@package/test".to_string(),
          version_req: Some(SpecifierVersionReq::parse("1").unwrap()),
        },
        sub_path: None,
      }
    );

    assert_eq!(
      NpmPackageReference::from_str("npm:@package/test@~1.1/sub_path").unwrap(),
      NpmPackageReference {
        req: NpmPackageReq {
          name: "@package/test".to_string(),
          version_req: Some(SpecifierVersionReq::parse("~1.1").unwrap()),
        },
        sub_path: Some("sub_path".to_string()),
      }
    );

    assert_eq!(
      NpmPackageReference::from_str("npm:@package/test/sub_path").unwrap(),
      NpmPackageReference {
        req: NpmPackageReq {
          name: "@package/test".to_string(),
          version_req: None,
        },
        sub_path: Some("sub_path".to_string()),
      }
    );

    assert_eq!(
      NpmPackageReference::from_str("npm:test").unwrap(),
      NpmPackageReference {
        req: NpmPackageReq {
          name: "test".to_string(),
          version_req: None,
        },
        sub_path: None,
      }
    );

    assert_eq!(
      NpmPackageReference::from_str("npm:test@^1.2").unwrap(),
      NpmPackageReference {
        req: NpmPackageReq {
          name: "test".to_string(),
          version_req: Some(SpecifierVersionReq::parse("^1.2").unwrap()),
        },
        sub_path: None,
      }
    );

    assert_eq!(
      NpmPackageReference::from_str("npm:test@~1.1/sub_path").unwrap(),
      NpmPackageReference {
        req: NpmPackageReq {
          name: "test".to_string(),
          version_req: Some(SpecifierVersionReq::parse("~1.1").unwrap()),
        },
        sub_path: Some("sub_path".to_string()),
      }
    );

    assert_eq!(
      NpmPackageReference::from_str("npm:@package/test/sub_path").unwrap(),
      NpmPackageReference {
        req: NpmPackageReq {
          name: "@package/test".to_string(),
          version_req: None,
        },
        sub_path: Some("sub_path".to_string()),
      }
    );

    assert_eq!(
      NpmPackageReference::from_str("npm:@package")
        .err()
        .unwrap()
        .to_string(),
      "Not a valid package: @package"
    );
  }

  #[test]
  fn serialize_npm_package_id() {
    let id = NpmPackageId {
      name: "pkg-a".to_string(),
      version: NpmVersion::parse("1.2.3").unwrap(),
      peer_dependencies: vec![
        NpmPackageId {
          name: "pkg-b".to_string(),
          version: NpmVersion::parse("3.2.1").unwrap(),
          peer_dependencies: vec![
            NpmPackageId {
              name: "pkg-c".to_string(),
              version: NpmVersion::parse("1.3.2").unwrap(),
              peer_dependencies: vec![],
            },
            NpmPackageId {
              name: "pkg-d".to_string(),
              version: NpmVersion::parse("2.3.4").unwrap(),
              peer_dependencies: vec![],
            },
          ],
        },
        NpmPackageId {
          name: "pkg-e".to_string(),
          version: NpmVersion::parse("2.3.1").unwrap(),
          peer_dependencies: vec![NpmPackageId {
            name: "pkg-f".to_string(),
            version: NpmVersion::parse("2.3.1").unwrap(),
            peer_dependencies: vec![],
          }],
        },
      ],
    };
    let serialized = id.as_serialized();
    assert_eq!(serialized, "pkg-a@1.2.3_pkg-b@3.2.1__pkg-c@1.3.2__pkg-d@2.3.4_pkg-e@2.3.1__pkg-f@2.3.1");
    assert_eq!(NpmPackageId::from_serialized(&serialized).unwrap(), id);
  }
}
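As a small illustration of the serialized id format above (a sketch, not part of the commit; "pkg-a"/"pkg-b" are made-up names): peer dependencies are encoded behind underscore separators, while display() deliberately shows only the name and version.

#[test]
fn package_id_display_omits_peers() {
  let id = NpmPackageId::from_serialized("pkg-a@1.2.3_pkg-b@3.2.1").unwrap();
  assert_eq!(id.peer_dependencies.len(), 1);
  // the peer suffix does not appear in the display form
  assert_eq!(id.display(), "pkg-a@1.2.3");
}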
470  cli/npm/resolution/snapshot.rs  Normal file
@@ -0,0 +1,470 @@
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.

use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::Arc;

use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::parking_lot::Mutex;
use serde::Deserialize;
use serde::Serialize;

use crate::lockfile::Lockfile;
use crate::npm::cache::should_sync_download;
use crate::npm::cache::NpmPackageCacheFolderId;
use crate::npm::registry::NpmPackageVersionDistInfo;
use crate::npm::registry::NpmRegistryApi;
use crate::npm::registry::RealNpmRegistryApi;

use super::NpmPackageId;
use super::NpmPackageReq;
use super::NpmResolutionPackage;
use super::NpmVersionMatcher;

/// Packages partitioned by if they are "copy" packages or not.
pub struct NpmPackagesPartitioned {
  pub packages: Vec<NpmResolutionPackage>,
  /// Since peer dependency resolution occurs based on ancestors and ancestor
  /// siblings, this may sometimes cause the same package (name and version)
  /// to have different dependencies based on where it appears in the tree.
  /// For these packages, we create a "copy package" or duplicate of the package
  /// whose dependencies are that of where in the tree they've resolved to.
  pub copy_packages: Vec<NpmResolutionPackage>,
}

impl NpmPackagesPartitioned {
  pub fn into_all(self) -> Vec<NpmResolutionPackage> {
    let mut packages = self.packages;
    packages.extend(self.copy_packages);
    packages
  }
}

#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct NpmResolutionSnapshot {
  #[serde(with = "map_to_vec")]
  pub(super) package_reqs: HashMap<NpmPackageReq, NpmPackageId>,
  pub(super) packages_by_name: HashMap<String, Vec<NpmPackageId>>,
  #[serde(with = "map_to_vec")]
  pub(super) packages: HashMap<NpmPackageId, NpmResolutionPackage>,
}

// This is done so the maps with non-string keys get serialized and deserialized as vectors.
// Adapted from: https://github.com/serde-rs/serde/issues/936#issuecomment-302281792
mod map_to_vec {
  use std::collections::HashMap;

  use serde::de::Deserialize;
  use serde::de::Deserializer;
  use serde::ser::Serializer;
  use serde::Serialize;

  pub fn serialize<S, K: Serialize, V: Serialize>(
    map: &HashMap<K, V>,
    serializer: S,
  ) -> Result<S::Ok, S::Error>
  where
    S: Serializer,
  {
    serializer.collect_seq(map.iter())
  }

  pub fn deserialize<
    'de,
    D,
    K: Deserialize<'de> + Eq + std::hash::Hash,
    V: Deserialize<'de>,
  >(
    deserializer: D,
  ) -> Result<HashMap<K, V>, D::Error>
  where
    D: Deserializer<'de>,
  {
    let mut map = HashMap::new();
    for (key, value) in Vec::<(K, V)>::deserialize(deserializer)? {
      map.insert(key, value);
    }
    Ok(map)
  }
}
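Since serde_json refuses to serialize maps whose keys are not strings, the map_to_vec helper stores those maps as arrays of (key, value) pairs. A minimal round-trip sketch (not from the commit; it assumes deno_core::serde_json and the tests module at the bottom of this file):

#[test]
fn snapshot_serializes_keyed_maps_as_pairs() {
  let snapshot = NpmResolutionSnapshot::default();
  let json = deno_core::serde_json::to_string(&snapshot).unwrap();
  // the struct-keyed maps come out as (empty) arrays rather than objects
  assert!(json.contains(r#""package_reqs":[]"#));
  assert!(json.contains(r#""packages":[]"#));
  let back: NpmResolutionSnapshot =
    deno_core::serde_json::from_str(&json).unwrap();
  assert!(back.packages.is_empty());
}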
impl NpmResolutionSnapshot {
  /// Resolve a node package from a deno module.
  pub fn resolve_package_from_deno_module(
    &self,
    req: &NpmPackageReq,
  ) -> Result<&NpmResolutionPackage, AnyError> {
    match self.package_reqs.get(req) {
      Some(id) => Ok(self.packages.get(id).unwrap()),
      None => bail!("could not find npm package directory for '{}'", req),
    }
  }

  pub fn top_level_packages(&self) -> Vec<NpmPackageId> {
    self
      .package_reqs
      .values()
      .cloned()
      .collect::<HashSet<_>>()
      .into_iter()
      .collect::<Vec<_>>()
  }

  pub fn package_from_id(
    &self,
    id: &NpmPackageId,
  ) -> Option<&NpmResolutionPackage> {
    self.packages.get(id)
  }

  pub fn resolve_package_from_package(
    &self,
    name: &str,
    referrer: &NpmPackageCacheFolderId,
  ) -> Result<&NpmResolutionPackage, AnyError> {
    // todo(dsherret): do we need an additional hashmap to get this quickly?
    let referrer_package = self
      .packages_by_name
      .get(&referrer.name)
      .and_then(|packages| {
        packages
          .iter()
          .filter(|p| p.version == referrer.version)
          .filter_map(|id| {
            let package = self.packages.get(id)?;
            if package.copy_index == referrer.copy_index {
              Some(package)
            } else {
              None
            }
          })
          .next()
      })
      .ok_or_else(|| {
        anyhow!("could not find referrer npm package '{}'", referrer)
      })?;

    let name = name_without_path(name);
    if let Some(id) = referrer_package.dependencies.get(name) {
      return Ok(self.packages.get(id).unwrap());
    }

    if referrer_package.id.name == name {
      return Ok(referrer_package);
    }

    // TODO(bartlomieju): this should use a reverse lookup table in the
    // snapshot instead of resolving best version again.
    let req = NpmPackageReq {
      name: name.to_string(),
      version_req: None,
    };

    if let Some(id) = self.resolve_best_package_id(name, &req) {
      if let Some(pkg) = self.packages.get(&id) {
        return Ok(pkg);
      }
    }

    bail!(
      "could not find npm package '{}' referenced by '{}'",
      name,
      referrer
    )
  }

  pub fn all_packages(&self) -> Vec<NpmResolutionPackage> {
    self.packages.values().cloned().collect()
  }

  pub fn all_packages_partitioned(&self) -> NpmPackagesPartitioned {
    let mut packages = self.all_packages();
    let mut copy_packages = Vec::with_capacity(packages.len() / 2); // at most 1 copy for every package

    // partition out any packages that are "copy" packages
    for i in (0..packages.len()).rev() {
      if packages[i].copy_index > 0 {
        copy_packages.push(packages.swap_remove(i));
      }
    }

    NpmPackagesPartitioned {
      packages,
      copy_packages,
    }
  }

  pub fn resolve_best_package_id(
    &self,
    name: &str,
    version_matcher: &impl NpmVersionMatcher,
  ) -> Option<NpmPackageId> {
    // todo(dsherret): this is not exactly correct because some ids
    // will be better than others due to peer dependencies
    let mut maybe_best_id: Option<&NpmPackageId> = None;
    if let Some(ids) = self.packages_by_name.get(name) {
      for id in ids {
        if version_matcher.matches(&id.version) {
          let is_best_version = maybe_best_id
            .as_ref()
            .map(|best_id| best_id.version.cmp(&id.version).is_lt())
            .unwrap_or(true);
          if is_best_version {
            maybe_best_id = Some(id);
          }
        }
      }
    }
    maybe_best_id.cloned()
  }

  pub async fn from_lockfile(
    lockfile: Arc<Mutex<Lockfile>>,
    api: &RealNpmRegistryApi,
  ) -> Result<Self, AnyError> {
    let mut package_reqs: HashMap<NpmPackageReq, NpmPackageId>;
    let mut packages_by_name: HashMap<String, Vec<NpmPackageId>>;
    let mut packages: HashMap<NpmPackageId, NpmResolutionPackage>;
    let mut copy_index_resolver: SnapshotPackageCopyIndexResolver;

    {
      let lockfile = lockfile.lock();

      // pre-allocate collections
      package_reqs =
        HashMap::with_capacity(lockfile.content.npm.specifiers.len());
      let packages_len = lockfile.content.npm.packages.len();
      packages = HashMap::with_capacity(packages_len);
      packages_by_name = HashMap::with_capacity(packages_len); // close enough
      copy_index_resolver =
        SnapshotPackageCopyIndexResolver::with_capacity(packages_len);
      let mut verify_ids = HashSet::with_capacity(packages_len);

      // collect the specifiers to version mappings
      for (key, value) in &lockfile.content.npm.specifiers {
        let package_req = NpmPackageReq::from_str(key)
          .with_context(|| format!("Unable to parse npm specifier: {}", key))?;
        let package_id = NpmPackageId::from_serialized(value)?;
        package_reqs.insert(package_req, package_id.clone());
        verify_ids.insert(package_id.clone());
      }

      // then the packages
      for (key, value) in &lockfile.content.npm.packages {
        let package_id = NpmPackageId::from_serialized(key)?;

        // collect the dependencies
        let mut dependencies = HashMap::default();

        packages_by_name
          .entry(package_id.name.to_string())
          .or_default()
          .push(package_id.clone());

        for (name, specifier) in &value.dependencies {
          let dep_id = NpmPackageId::from_serialized(specifier)?;
          dependencies.insert(name.to_string(), dep_id.clone());
          verify_ids.insert(dep_id);
        }

        let package = NpmResolutionPackage {
          id: package_id.clone(),
          copy_index: copy_index_resolver.resolve(&package_id),
          // temporary dummy value
          dist: NpmPackageVersionDistInfo {
            tarball: "foobar".to_string(),
            shasum: "foobar".to_string(),
            integrity: Some("foobar".to_string()),
          },
          dependencies,
        };

        packages.insert(package_id, package);
      }

      // verify that all these ids exist in packages
      for id in &verify_ids {
        if !packages.contains_key(id) {
          bail!(
            "the lockfile is corrupt. You can recreate it with --lock-write"
          );
        }
      }
    }

    let mut unresolved_tasks = Vec::with_capacity(packages_by_name.len());

    // cache the package names in parallel in the registry api
    // unless synchronous download should occur
    if should_sync_download() {
      let mut package_names = packages_by_name.keys().collect::<Vec<_>>();
      package_names.sort();
      for package_name in package_names {
        api.package_info(package_name).await?;
      }
    } else {
      for package_name in packages_by_name.keys() {
        let package_name = package_name.clone();
        let api = api.clone();
        unresolved_tasks.push(tokio::task::spawn(async move {
          api.package_info(&package_name).await?;
          Result::<_, AnyError>::Ok(())
        }));
      }
    }
    for result in futures::future::join_all(unresolved_tasks).await {
      result??;
    }

    // ensure the dist is set for each package
    for package in packages.values_mut() {
      // this will read from the memory cache now
      let version_info = match api
        .package_version_info(&package.id.name, &package.id.version)
        .await?
      {
        Some(version_info) => version_info,
        None => {
          bail!("could not find '{}' specified in the lockfile. Maybe try again with --reload", package.id.display());
        }
      };
      package.dist = version_info.dist;
    }

    Ok(Self {
      package_reqs,
      packages_by_name,
      packages,
    })
  }
}
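A sketch of how resolve_best_package_id behaves (illustrative, not part of the commit; it assumes NpmVersion is imported into this file's tests module, e.g. via `use crate::npm::semver::NpmVersion;`): among the versions registered for a name, the highest one that satisfies the matcher wins.

#[test]
fn resolve_best_package_id_prefers_highest_match() {
  // "pkg" and the versions are made-up values
  fn id(version: &str) -> NpmPackageId {
    NpmPackageId {
      name: "pkg".to_string(),
      version: NpmVersion::parse(version).unwrap(),
      peer_dependencies: Vec::new(),
    }
  }
  let mut snapshot = NpmResolutionSnapshot::default();
  snapshot
    .packages_by_name
    .insert("pkg".to_string(), vec![id("1.1.0"), id("1.2.0"), id("2.0.0")]);
  let req = NpmPackageReq::from_str("pkg@^1.1").unwrap();
  // 2.0.0 does not satisfy ^1.1, so 1.2.0 is the best match
  assert_eq!(
    snapshot.resolve_best_package_id("pkg", &req),
    Some(id("1.2.0"))
  );
}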
pub struct SnapshotPackageCopyIndexResolver {
  packages_to_copy_index: HashMap<NpmPackageId, usize>,
  package_name_version_to_copy_count: HashMap<(String, String), usize>,
}

impl SnapshotPackageCopyIndexResolver {
  pub fn with_capacity(capacity: usize) -> Self {
    Self {
      packages_to_copy_index: HashMap::with_capacity(capacity),
      package_name_version_to_copy_count: HashMap::with_capacity(capacity), // close enough
    }
  }

  pub fn from_map_with_capacity(
    mut packages_to_copy_index: HashMap<NpmPackageId, usize>,
    capacity: usize,
  ) -> Self {
    let mut package_name_version_to_copy_count =
      HashMap::with_capacity(capacity); // close enough
    if capacity > packages_to_copy_index.len() {
      packages_to_copy_index.reserve(capacity - packages_to_copy_index.len());
    }

    for (id, index) in &packages_to_copy_index {
      let entry = package_name_version_to_copy_count
        .entry((id.name.to_string(), id.version.to_string()))
        .or_insert(0);
      if *entry < *index {
        *entry = *index;
      }
    }
    Self {
      packages_to_copy_index,
      package_name_version_to_copy_count,
    }
  }

  pub fn resolve(&mut self, id: &NpmPackageId) -> usize {
    if let Some(index) = self.packages_to_copy_index.get(id) {
      *index
    } else {
      let index = *self
        .package_name_version_to_copy_count
        .entry((id.name.to_string(), id.version.to_string()))
        .and_modify(|count| {
          *count += 1;
        })
        .or_insert(0);
      self.packages_to_copy_index.insert(id.clone(), index);
      index
    }
  }
}

fn name_without_path(name: &str) -> &str {
  let mut search_start_index = 0;
  if name.starts_with('@') {
    if let Some(slash_index) = name.find('/') {
      search_start_index = slash_index + 1;
    }
  }
  if let Some(slash_index) = &name[search_start_index..].find('/') {
    // get the name up until the path slash
    &name[0..search_start_index + slash_index]
  } else {
    name
  }
}

#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn test_name_without_path() {
    assert_eq!(name_without_path("foo"), "foo");
    assert_eq!(name_without_path("@foo/bar"), "@foo/bar");
    assert_eq!(name_without_path("@foo/bar/baz"), "@foo/bar");
    assert_eq!(name_without_path("@hello"), "@hello");
  }

  #[test]
  fn test_copy_index_resolver() {
    let mut copy_index_resolver =
      SnapshotPackageCopyIndexResolver::with_capacity(10);
    assert_eq!(
      copy_index_resolver
        .resolve(&NpmPackageId::from_serialized("package@1.0.0").unwrap()),
      0
    );
    assert_eq!(
      copy_index_resolver
        .resolve(&NpmPackageId::from_serialized("package@1.0.0").unwrap()),
      0
    );
    assert_eq!(
      copy_index_resolver.resolve(
        &NpmPackageId::from_serialized("package@1.0.0_package-b@1.0.0")
          .unwrap()
      ),
      1
    );
    assert_eq!(
      copy_index_resolver.resolve(
        &NpmPackageId::from_serialized(
          "package@1.0.0_package-b@1.0.0__package-c@2.0.0"
        )
        .unwrap()
      ),
      2
    );
    assert_eq!(
      copy_index_resolver.resolve(
        &NpmPackageId::from_serialized("package@1.0.0_package-b@1.0.0")
          .unwrap()
      ),
      1
    );
    assert_eq!(
      copy_index_resolver
        .resolve(&NpmPackageId::from_serialized("package-b@1.0.0").unwrap()),
      0
    );
  }
}
@@ -70,13 +70,19 @@ pub async fn cache_packages(
     // and we want the output to be deterministic
     packages.sort_by(|a, b| a.id.cmp(&b.id));
   }

   let mut handles = Vec::with_capacity(packages.len());
   for package in packages {
+    assert_eq!(package.copy_index, 0); // the caller should not provide any of these
     let cache = cache.clone();
     let registry_url = registry_url.clone();
     let handle = tokio::task::spawn(async move {
       cache
-        .ensure_package(&package.id, &package.dist, &registry_url)
+        .ensure_package(
+          (package.id.name.as_str(), &package.id.version),
+          &package.dist,
+          &registry_url,
+        )
         .await
     });
     if sync_download {
@@ -23,7 +23,7 @@ use crate::npm::resolvers::common::cache_packages;
 use crate::npm::NpmCache;
 use crate::npm::NpmPackageId;
 use crate::npm::NpmPackageReq;
-use crate::npm::NpmRegistryApi;
+use crate::npm::RealNpmRegistryApi;

 use super::common::ensure_registry_read_permission;
 use super::common::InnerNpmPackageResolver;

@@ -39,7 +39,7 @@ pub struct GlobalNpmPackageResolver {
 impl GlobalNpmPackageResolver {
   pub fn new(
     cache: NpmCache,
-    api: NpmRegistryApi,
+    api: RealNpmRegistryApi,
     initial_snapshot: Option<NpmResolutionSnapshot>,
   ) -> Self {
     let registry_url = api.base_url().to_owned();

@@ -53,7 +53,13 @@ impl GlobalNpmPackageResolver {
   }

   fn package_folder(&self, id: &NpmPackageId) -> PathBuf {
-    self.cache.package_folder(id, &self.registry_url)
+    let folder_id = self
+      .resolution
+      .resolve_package_cache_folder_id_from_id(id)
+      .unwrap();
+    self
+      .cache
+      .package_folder_for_id(&folder_id, &self.registry_url)
   }
 }

@@ -74,7 +80,7 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver {
   ) -> Result<PathBuf, AnyError> {
     let referrer_pkg_id = self
       .cache
-      .resolve_package_id_from_specifier(referrer, &self.registry_url)?;
+      .resolve_package_folder_id_from_specifier(referrer, &self.registry_url)?;
     let pkg_result = self
       .resolution
       .resolve_package_from_package(name, &referrer_pkg_id);

@@ -105,10 +111,15 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver {
     &self,
     specifier: &ModuleSpecifier,
   ) -> Result<PathBuf, AnyError> {
-    let pkg_id = self
-      .cache
-      .resolve_package_id_from_specifier(specifier, &self.registry_url)?;
-    Ok(self.package_folder(&pkg_id))
+    let pkg_folder_id = self.cache.resolve_package_folder_id_from_specifier(
+      specifier,
+      &self.registry_url,
+    )?;
+    Ok(
+      self
+        .cache
+        .package_folder_for_id(&pkg_folder_id, &self.registry_url),
+    )
   }

   fn package_size(&self, package_id: &NpmPackageId) -> Result<u64, AnyError> {

@@ -162,10 +173,22 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver {
 async fn cache_packages_in_resolver(
   resolver: &GlobalNpmPackageResolver,
 ) -> Result<(), AnyError> {
+  let package_partitions = resolver.resolution.all_packages_partitioned();
+
   cache_packages(
-    resolver.resolution.all_packages(),
+    package_partitions.packages,
     &resolver.cache,
     &resolver.registry_url,
   )
-  .await
+  .await?;
+
+  // create the copy package folders
+  for copy in package_partitions.copy_packages {
+    resolver.cache.ensure_copy_package(
+      &copy.get_package_cache_folder_id(),
+      &resolver.registry_url,
+    )?;
+  }
+
+  Ok(())
 }
@@ -24,12 +24,14 @@ use tokio::task::JoinHandle;
 use crate::fs_util;
 use crate::lockfile::Lockfile;
 use crate::npm::cache::should_sync_download;
+use crate::npm::cache::NpmPackageCacheFolderId;
 use crate::npm::resolution::NpmResolution;
 use crate::npm::resolution::NpmResolutionSnapshot;
 use crate::npm::NpmCache;
 use crate::npm::NpmPackageId;
 use crate::npm::NpmPackageReq;
-use crate::npm::NpmRegistryApi;
+use crate::npm::NpmResolutionPackage;
+use crate::npm::RealNpmRegistryApi;

 use super::common::ensure_registry_read_permission;
 use super::common::InnerNpmPackageResolver;

@@ -48,7 +50,7 @@ pub struct LocalNpmPackageResolver {
 impl LocalNpmPackageResolver {
   pub fn new(
     cache: NpmCache,
-    api: NpmRegistryApi,
+    api: RealNpmRegistryApi,
     node_modules_folder: PathBuf,
     initial_snapshot: Option<NpmResolutionSnapshot>,
   ) -> Self {

@@ -101,6 +103,35 @@ impl LocalNpmPackageResolver {
     // it's within the directory, so use it
     specifier.to_file_path().ok()
   }
+
+  fn get_package_id_folder(
+    &self,
+    package_id: &NpmPackageId,
+  ) -> Result<PathBuf, AnyError> {
+    match self.resolution.resolve_package_from_id(package_id) {
+      Some(package) => Ok(self.get_package_id_folder_from_package(&package)),
+      None => bail!(
+        "Could not find package information for '{}'",
+        package_id.as_serialized()
+      ),
+    }
+  }
+
+  fn get_package_id_folder_from_package(
+    &self,
+    package: &NpmResolutionPackage,
+  ) -> PathBuf {
+    // package is stored at:
+    // node_modules/.deno/<package_cache_folder_id_folder_name>/node_modules/<package_name>
+    self
+      .root_node_modules_path
+      .join(".deno")
+      .join(get_package_folder_id_folder_name(
+        &package.get_package_cache_folder_id(),
+      ))
+      .join("node_modules")
+      .join(&package.id.name)
+  }
 }

 impl InnerNpmPackageResolver for LocalNpmPackageResolver {

@@ -108,19 +139,8 @@ impl InnerNpmPackageResolver for LocalNpmPackageResolver {
     &self,
     pkg_req: &NpmPackageReq,
   ) -> Result<PathBuf, AnyError> {
-    let resolved_package =
-      self.resolution.resolve_package_from_deno_module(pkg_req)?;
-
-    // it might be at the full path if there are duplicate names
-    let fully_resolved_folder_path = join_package_name(
-      &self.root_node_modules_path,
-      &resolved_package.id.to_string(),
-    );
-    Ok(if fully_resolved_folder_path.exists() {
-      fully_resolved_folder_path
-    } else {
-      join_package_name(&self.root_node_modules_path, &resolved_package.id.name)
-    })
+    let package = self.resolution.resolve_package_from_deno_module(pkg_req)?;
+    Ok(self.get_package_id_folder_from_package(&package))
   }

   fn resolve_package_folder_from_package(

@@ -178,19 +198,9 @@ impl InnerNpmPackageResolver for LocalNpmPackageResolver {
   }

   fn package_size(&self, package_id: &NpmPackageId) -> Result<u64, AnyError> {
-    match self.resolution.resolve_package_from_id(package_id) {
-      Some(package) => Ok(fs_util::dir_size(
-        // package is stored at:
-        // node_modules/.deno/<package_id>/node_modules/<package_name>
-        &self
-          .root_node_modules_path
-          .join(".deno")
-          .join(package.id.to_string())
-          .join("node_modules")
-          .join(package.id.name),
-      )?),
-      None => bail!("Could not find package folder for '{}'", package_id),
-    }
+    let package_folder_path = self.get_package_id_folder(package_id)?;
+
+    Ok(fs_util::dir_size(&package_folder_path)?)
   }

   fn has_packages(&self) -> bool {

@@ -255,10 +265,6 @@ async fn sync_resolution_with_fs(
   registry_url: &Url,
   root_node_modules_dir_path: &Path,
 ) -> Result<(), AnyError> {
-  fn get_package_folder_name(package_id: &NpmPackageId) -> String {
-    package_id.to_string().replace('/', "+")
-  }
-
   let deno_local_registry_dir = root_node_modules_dir_path.join(".deno");
   fs::create_dir_all(&deno_local_registry_dir).with_context(|| {
     format!("Creating '{}'", deno_local_registry_dir.display())

@@ -267,34 +273,45 @@ async fn sync_resolution_with_fs(
   // 1. Write all the packages out the .deno directory.
   //
   // Copy (hardlink in future) <global_registry_cache>/<package_id>/ to
-  // node_modules/.deno/<package_id>/node_modules/<package_name>
+  // node_modules/.deno/<package_folder_id_folder_name>/node_modules/<package_name>
   let sync_download = should_sync_download();
-  let mut all_packages = snapshot.all_packages();
+  let mut package_partitions = snapshot.all_packages_partitioned();
   if sync_download {
     // we're running the tests not with --quiet
     // and we want the output to be deterministic
-    all_packages.sort_by(|a, b| a.id.cmp(&b.id));
+    package_partitions.packages.sort_by(|a, b| a.id.cmp(&b.id));
   }
   let mut handles: Vec<JoinHandle<Result<(), AnyError>>> =
-    Vec::with_capacity(all_packages.len());
-  for package in &all_packages {
-    let folder_name = get_package_folder_name(&package.id);
+    Vec::with_capacity(package_partitions.packages.len());
+  for package in &package_partitions.packages {
+    let folder_name =
+      get_package_folder_id_folder_name(&package.get_package_cache_folder_id());
     let folder_path = deno_local_registry_dir.join(&folder_name);
-    let initialized_file = folder_path.join("deno_initialized");
-    if !initialized_file.exists() {
+    let initialized_file = folder_path.join(".initialized");
+    if !cache.should_use_cache_for_npm_package(&package.id.name)
+      || !initialized_file.exists()
+    {
       let cache = cache.clone();
       let registry_url = registry_url.clone();
       let package = package.clone();
       let handle = tokio::task::spawn(async move {
         cache
-          .ensure_package(&package.id, &package.dist, &registry_url)
+          .ensure_package(
+            (&package.id.name, &package.id.version),
+            &package.dist,
+            &registry_url,
+          )
          .await?;
         let sub_node_modules = folder_path.join("node_modules");
         let package_path =
           join_package_name(&sub_node_modules, &package.id.name);
         fs::create_dir_all(&package_path)
           .with_context(|| format!("Creating '{}'", folder_path.display()))?;
-        let cache_folder = cache.package_folder(&package.id, &registry_url);
+        let cache_folder = cache.package_folder_for_name_and_version(
+          &package.id.name,
+          &package.id.version,
+          &registry_url,
+        );
         // for now copy, but in the future consider hard linking
         fs_util::copy_dir_recursive(&cache_folder, &package_path)?;
         // write out a file that indicates this folder has been initialized
@ -314,16 +331,51 @@ async fn sync_resolution_with_fs(
|
||||||
result??; // surface the first error
|
result??; // surface the first error
|
||||||
}
|
}
|
||||||
|
|
||||||
// 2. Symlink all the dependencies into the .deno directory.
|
// 2. Create any "copy" packages, which are used for peer dependencies
|
||||||
|
for package in &package_partitions.copy_packages {
|
||||||
|
let package_cache_folder_id = package.get_package_cache_folder_id();
|
||||||
|
let destination_path = deno_local_registry_dir
|
||||||
|
.join(&get_package_folder_id_folder_name(&package_cache_folder_id));
|
||||||
|
+        let initialized_file = destination_path.join(".initialized");
+        if !initialized_file.exists() {
+          let sub_node_modules = destination_path.join("node_modules");
+          let package_path =
+            join_package_name(&sub_node_modules, &package.id.name);
+          fs::create_dir_all(&package_path).with_context(|| {
+            format!("Creating '{}'", destination_path.display())
+          })?;
+          let source_path = join_package_name(
+            &deno_local_registry_dir
+              .join(&get_package_folder_id_folder_name(
+                &package_cache_folder_id.with_no_count(),
+              ))
+              .join("node_modules"),
+            &package.id.name,
+          );
+          fs_util::hard_link_dir_recursive(&source_path, &package_path)?;
+          // write out a file that indicates this folder has been initialized
+          fs::write(initialized_file, "")?;
+        }
+      }
+
+  let all_packages = package_partitions.into_all();
+
+  // 3. Symlink all the dependencies into the .deno directory.
   //
   // Symlink node_modules/.deno/<package_id>/node_modules/<dep_name> to
   // node_modules/.deno/<dep_id>/node_modules/<dep_package_name>
   for package in &all_packages {
     let sub_node_modules = deno_local_registry_dir
-      .join(&get_package_folder_name(&package.id))
+      .join(&get_package_folder_id_folder_name(
+        &package.get_package_cache_folder_id(),
+      ))
       .join("node_modules");
     for (name, dep_id) in &package.dependencies {
-      let dep_folder_name = get_package_folder_name(dep_id);
+      let dep_cache_folder_id = snapshot
+        .package_from_id(dep_id)
+        .unwrap()
+        .get_package_cache_folder_id();
+      let dep_folder_name =
+        get_package_folder_id_folder_name(&dep_cache_folder_id);
       let dep_folder_path = join_package_name(
         &deno_local_registry_dir
           .join(dep_folder_name)

@@ -337,7 +389,7 @@ async fn sync_resolution_with_fs(
     }
   }
 
-  // 3. Create all the packages in the node_modules folder, which are symlinks.
+  // 4. Create all the packages in the node_modules folder, which are symlinks.
   //
   // Symlink node_modules/<package_name> to
   // node_modules/.deno/<package_id>/node_modules/<package_name>

@@ -353,29 +405,41 @@ async fn sync_resolution_with_fs(
     let root_folder_name = if found_names.insert(package_id.name.clone()) {
       package_id.name.clone()
     } else if is_top_level {
-      package_id.to_string()
+      package_id.display()
     } else {
       continue; // skip, already handled
     };
-    let local_registry_package_path = deno_local_registry_dir
-      .join(&get_package_folder_name(&package_id))
-      .join("node_modules")
-      .join(&package_id.name);
+    let package = snapshot.package_from_id(&package_id).unwrap();
+    let local_registry_package_path = join_package_name(
+      &deno_local_registry_dir
+        .join(&get_package_folder_id_folder_name(
+          &package.get_package_cache_folder_id(),
+        ))
+        .join("node_modules"),
+      &package_id.name,
+    );
 
     symlink_package_dir(
       &local_registry_package_path,
       &join_package_name(root_node_modules_dir_path, &root_folder_name),
     )?;
-    if let Some(package) = snapshot.package_from_id(&package_id) {
-      for id in package.dependencies.values() {
-        pending_packages.push_back((id.clone(), false));
-      }
+    for id in package.dependencies.values() {
+      pending_packages.push_back((id.clone(), false));
     }
   }
 
   Ok(())
 }
 
+fn get_package_folder_id_folder_name(id: &NpmPackageCacheFolderId) -> String {
+  let copy_str = if id.copy_index == 0 {
+    "".to_string()
+  } else {
+    format!("_{}", id.copy_index)
+  };
+  format!("{}@{}{}", id.name, id.version, copy_str).replace('/', "+")
+}
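A quick sketch of what the folder names produced by the added get_package_folder_id_folder_name come out to in practice. The struct below is only an illustrative stand-in for NpmPackageCacheFolderId (its real definition is not part of this hunk); the name/version/copy_index fields and the formatting mirror the added function above:

  // Illustrative stand-in; only the fields the folder-name logic needs.
  struct CacheFolderId {
    name: String,      // e.g. "@denotest/peer-dep-test-grandchild"
    version: String,   // e.g. "1.0.0"
    copy_index: usize, // 0 = original package, 1+ = peer-dependency copies
  }

  fn folder_name(id: &CacheFolderId) -> String {
    let copy_str = if id.copy_index == 0 {
      "".to_string()
    } else {
      format!("_{}", id.copy_index)
    };
    // '/' cannot appear in a directory name, so the scope separator becomes '+'
    format!("{}@{}{}", id.name, id.version, copy_str).replace('/', "+")
  }

  fn main() {
    let original = CacheFolderId {
      name: "@denotest/peer-dep-test-grandchild".to_string(),
      version: "1.0.0".to_string(),
      copy_index: 0,
    };
    // "@denotest+peer-dep-test-grandchild@1.0.0"
    println!("{}", folder_name(&original));

    // "@denotest+peer-dep-test-grandchild@1.0.0_1"
    let copy = CacheFolderId { copy_index: 1, ..original };
    println!("{}", folder_name(&copy));
  }

The "_1" suffix is exactly what the integration test later in this diff asserts for the copied grandchild package, both in the global cache and under node_modules/.deno.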
 
 fn symlink_package_dir(
   old_path: &Path,
   new_path: &Path,

@@ -6,6 +6,7 @@ mod local;
 
 use deno_ast::ModuleSpecifier;
 use deno_core::anyhow::bail;
+use deno_core::anyhow::Context;
 use deno_core::error::custom_error;
 use deno_core::error::AnyError;
 use deno_core::parking_lot::Mutex;

@@ -29,8 +30,8 @@ use self::local::LocalNpmPackageResolver;
 use super::NpmCache;
 use super::NpmPackageId;
 use super::NpmPackageReq;
-use super::NpmRegistryApi;
 use super::NpmResolutionSnapshot;
+use super::RealNpmRegistryApi;
 
 const RESOLUTION_STATE_ENV_VAR_NAME: &str =
   "DENO_DONT_USE_INTERNAL_NODE_COMPAT_STATE";

@@ -71,7 +72,7 @@ pub struct NpmPackageResolver {
   no_npm: bool,
   inner: Arc<dyn InnerNpmPackageResolver>,
   local_node_modules_path: Option<PathBuf>,
-  api: NpmRegistryApi,
+  api: RealNpmRegistryApi,
   cache: NpmCache,
   maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
 }

@@ -90,7 +91,7 @@ impl std::fmt::Debug for NpmPackageResolver {
 impl NpmPackageResolver {
   pub fn new(
     cache: NpmCache,
-    api: NpmRegistryApi,
+    api: RealNpmRegistryApi,
     unstable: bool,
     no_npm: bool,
     local_node_modules_path: Option<PathBuf>,

@@ -112,7 +113,14 @@ impl NpmPackageResolver {
     lockfile: Arc<Mutex<Lockfile>>,
   ) -> Result<(), AnyError> {
     let snapshot =
-      NpmResolutionSnapshot::from_lockfile(lockfile.clone(), &self.api).await?;
+      NpmResolutionSnapshot::from_lockfile(lockfile.clone(), &self.api)
+        .await
+        .with_context(|| {
+          format!(
+            "failed reading lockfile '{}'",
+            lockfile.lock().filename.display()
+          )
+        })?;
     self.maybe_lockfile = Some(lockfile);
     if let Some(node_modules_folder) = &self.local_node_modules_path {
       self.inner = Arc::new(LocalNpmPackageResolver::new(

@@ -133,7 +141,7 @@ impl NpmPackageResolver {
 
   fn new_with_maybe_snapshot(
     cache: NpmCache,
-    api: NpmRegistryApi,
+    api: RealNpmRegistryApi,
     unstable: bool,
     no_npm: bool,
     local_node_modules_path: Option<PathBuf>,
 
@@ -1,38 +0,0 @@
-// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
-
-use deno_core::anyhow::bail;
-use deno_core::error::AnyError;
-use monch::ParseError;
-use monch::ParseErrorFailure;
-use monch::ParseResult;
-
-pub fn with_failure_handling<'a, T>(
-  combinator: impl Fn(&'a str) -> ParseResult<T>,
-) -> impl Fn(&'a str) -> Result<T, AnyError> {
-  move |input| match combinator(input) {
-    Ok((input, result)) => {
-      if !input.is_empty() {
-        error_for_failure(fail_for_trailing_input(input))
-      } else {
-        Ok(result)
-      }
-    }
-    Err(ParseError::Backtrace) => {
-      error_for_failure(fail_for_trailing_input(input))
-    }
-    Err(ParseError::Failure(e)) => error_for_failure(e),
-  }
-}
-
-fn error_for_failure<T>(e: ParseErrorFailure) -> Result<T, AnyError> {
-  bail!(
-    "{}\n {}\n ~",
-    e.message,
-    // truncate the output to prevent wrapping in the console
-    e.input.chars().take(60).collect::<String>()
-  )
-}
-
-fn fail_for_trailing_input(input: &str) -> ParseErrorFailure {
-  ParseErrorFailure::new(input, "Unexpected character.")
-}

@@ -11,7 +11,6 @@ use serde::Serialize;
 
 use crate::npm::resolution::NpmVersionMatcher;
 
-use self::errors::with_failure_handling;
 use self::range::Partial;
 use self::range::VersionBoundKind;
 use self::range::VersionRange;

@@ -20,7 +19,6 @@ use self::range::VersionRangeSet;
 use self::range::XRange;
 pub use self::specifier::SpecifierVersionReq;
 
-mod errors;
 mod range;
 mod specifier;

@@ -6,7 +6,6 @@ use monch::*;
 use serde::Deserialize;
 use serde::Serialize;
 
-use super::errors::with_failure_handling;
 use super::range::Partial;
 use super::range::VersionRange;
 use super::range::XRange;

@@ -6,18 +6,17 @@ use std::path::Path;
 use std::path::PathBuf;
 
 use deno_core::anyhow::bail;
-use deno_core::anyhow::Context;
 use deno_core::error::AnyError;
 use flate2::read::GzDecoder;
 use tar::Archive;
 use tar::EntryType;
 
-use super::cache::NPM_PACKAGE_SYNC_LOCK_FILENAME;
+use super::cache::with_folder_sync_lock;
 use super::registry::NpmPackageVersionDistInfo;
-use super::NpmPackageId;
+use super::semver::NpmVersion;
 
 pub fn verify_and_extract_tarball(
-  package: &NpmPackageId,
+  package: (&str, &NpmVersion),
   data: &[u8],
   dist_info: &NpmPackageVersionDistInfo,
   output_folder: &Path,

@@ -27,50 +26,19 @@ pub fn verify_and_extract_tarball(
   } else {
     // todo(dsherret): check shasum here
     bail!(
-      "Errored on '{}': npm packages with no integrity are not implemented.",
-      package
+      "Errored on '{}@{}': npm packages with no integrity are not implemented.",
+      package.0,
+      package.1,
     );
   }
 
-  fs::create_dir_all(output_folder).with_context(|| {
-    format!("Error creating '{}'.", output_folder.display())
-  })?;
-
-  // This sync lock file is a way to ensure that partially created
-  // npm package directories aren't considered valid. This could maybe
-  // be a bit smarter in the future to not bother extracting here
-  // if another process has taken the lock in the past X seconds and
-  // wait for the other process to finish (it could try to create the
-  // file with `create_new(true)` then if it exists, check the metadata
-  // then wait until the other process finishes with a timeout), but
-  // for now this is good enough.
-  let sync_lock_path = output_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME);
-  match fs::OpenOptions::new()
-    .write(true)
-    .create(true)
-    .open(&sync_lock_path)
-  {
-    Ok(_) => {
-      extract_tarball(data, output_folder)?;
-      // extraction succeeded, so only now delete this file
-      let _ignore = std::fs::remove_file(&sync_lock_path);
-      Ok(())
-    }
-    Err(err) => {
-      bail!(
-        concat!(
-          "Error creating package sync lock file at '{}'. ",
-          "Maybe try manually deleting this folder.\n\n{:#}",
-        ),
-        output_folder.display(),
-        err
-      );
-    }
-  }
+  with_folder_sync_lock(package, output_folder, || {
+    extract_tarball(data, output_folder)
+  })
 }
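For readers tracking where the removed lock-file handling went: the new code above routes extraction through with_folder_sync_lock from the cache module (see the changed import at the top of this file), whose implementation is not shown in this diff. The following is only a rough sketch, reconstructed from the removed lines, of the shape such a helper could take; the lock-file name constant and the (&str, &str) package tuple are placeholders, not the real definitions:

  use std::fs;
  use std::path::Path;

  use deno_core::anyhow::bail;
  use deno_core::error::AnyError;

  // Placeholder value; the real constant stays in the cache module.
  const SYNC_LOCK_FILENAME: &str = ".deno_sync_lock";

  // Hypothetical shape only: create the folder, hold a marker file while the
  // action runs, and remove the marker only after the action succeeds, so a
  // half-extracted package directory is never treated as valid.
  fn with_folder_sync_lock_sketch(
    package: (&str, &str),
    output_folder: &Path,
    action: impl FnOnce() -> Result<(), AnyError>,
  ) -> Result<(), AnyError> {
    fs::create_dir_all(output_folder)?;
    let sync_lock_path = output_folder.join(SYNC_LOCK_FILENAME);
    match fs::OpenOptions::new()
      .write(true)
      .create(true)
      .open(&sync_lock_path)
    {
      Ok(_) => {
        action()?;
        // only delete the marker once the action has finished
        let _ignore = fs::remove_file(&sync_lock_path);
        Ok(())
      }
      Err(err) => bail!(
        "Error creating package sync lock file at '{}' for {}@{}. Maybe try manually deleting this folder.\n\n{:#}",
        output_folder.display(),
        package.0,
        package.1,
        err
      ),
    }
  }

  fn main() -> Result<(), AnyError> {
    let out = std::env::temp_dir().join("with_folder_sync_lock_sketch");
    with_folder_sync_lock_sketch(("example-package", "1.0.0"), &out, || Ok(()))
  }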
 
 fn verify_tarball_integrity(
-  package: &NpmPackageId,
+  package: (&str, &NpmVersion),
   data: &[u8],
   npm_integrity: &str,
 ) -> Result<(), AnyError> {

@@ -81,16 +49,18 @@ fn verify_tarball_integrity(
       let algo = match hash_kind {
         "sha512" => &SHA512,
         hash_kind => bail!(
-          "Not implemented hash function for {}: {}",
-          package,
+          "Not implemented hash function for {}@{}: {}",
+          package.0,
+          package.1,
           hash_kind
         ),
       };
       (algo, checksum.to_lowercase())
     }
     None => bail!(
-      "Not implemented integrity kind for {}: {}",
-      package,
+      "Not implemented integrity kind for {}@{}: {}",
+      package.0,
+      package.1,
       npm_integrity
     ),
   };

@@ -101,8 +71,9 @@ fn verify_tarball_integrity(
   let tarball_checksum = base64::encode(digest.as_ref()).to_lowercase();
   if tarball_checksum != expected_checksum {
     bail!(
-      "Tarball checksum did not match what was provided by npm registry for {}.\n\nExpected: {}\nActual: {}",
-      package,
+      "Tarball checksum did not match what was provided by npm registry for {}@{}.\n\nExpected: {}\nActual: {}",
+      package.0,
+      package.1,
       expected_checksum,
       tarball_checksum,
     )
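The two hunks above only touch error messages, but they read more easily with the whole checksum flow in view. A minimal standalone sketch of that flow, using the same ring and base64 calls that appear in the hunk; the (&str, &str) package tuple and the split_once parsing are simplifications for illustration, not this file's exact code:

  use deno_core::anyhow::bail;
  use deno_core::error::AnyError;
  use ring::digest::{digest, SHA512};

  // Only handles the "sha512-<base64>" integrity form, like the code above.
  fn check_integrity(
    package: (&str, &str),
    data: &[u8],
    npm_integrity: &str,
  ) -> Result<(), AnyError> {
    let expected = match npm_integrity.split_once('-') {
      Some(("sha512", checksum)) => checksum.to_lowercase(),
      _ => bail!(
        "Not implemented integrity kind for {}@{}: {}",
        package.0,
        package.1,
        npm_integrity
      ),
    };
    let actual = base64::encode(digest(&SHA512, data).as_ref()).to_lowercase();
    if actual != expected {
      bail!(
        "Tarball checksum did not match what was provided by npm registry for {}@{}.\n\nExpected: {}\nActual: {}",
        package.0,
        package.1,
        expected,
        actual
      );
    }
    Ok(())
  }

  fn main() -> Result<(), AnyError> {
    // The SHA-512 of empty data, lowercased, is the same value used as
    // `actual_checksum` in the updated test below.
    let integrity = format!(
      "sha512-{}",
      base64::encode(digest(&SHA512, b"").as_ref()).to_lowercase()
    );
    check_integrity(("package", "1.0.0"), b"", &integrity)
  }

The lowercasing on both sides also explains why the test's expected checksum literal is written entirely in lowercase.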
@@ -162,32 +133,31 @@ mod test {
 
   #[test]
   pub fn test_verify_tarball() {
-    let package_id = NpmPackageId {
-      name: "package".to_string(),
-      version: NpmVersion::parse("1.0.0").unwrap(),
-    };
+    let package_name = "package".to_string();
+    let package_version = NpmVersion::parse("1.0.0").unwrap();
+    let package = (package_name.as_str(), &package_version);
     let actual_checksum =
       "z4phnx7vul3xvchq1m2ab9yg5aulvxxcg/spidns6c5h0ne8xyxysp+dgnkhfuwvy7kxvudbeoglodj6+sfapg==";
     assert_eq!(
-      verify_tarball_integrity(&package_id, &Vec::new(), "test")
+      verify_tarball_integrity(package, &Vec::new(), "test")
        .unwrap_err()
        .to_string(),
      "Not implemented integrity kind for package@1.0.0: test",
    );
    assert_eq!(
-      verify_tarball_integrity(&package_id, &Vec::new(), "sha1-test")
+      verify_tarball_integrity(package, &Vec::new(), "sha1-test")
        .unwrap_err()
        .to_string(),
      "Not implemented hash function for package@1.0.0: sha1",
    );
    assert_eq!(
-      verify_tarball_integrity(&package_id, &Vec::new(), "sha512-test")
+      verify_tarball_integrity(package, &Vec::new(), "sha512-test")
        .unwrap_err()
        .to_string(),
      format!("Tarball checksum did not match what was provided by npm registry for package@1.0.0.\n\nExpected: test\nActual: {}", actual_checksum),
    );
    assert!(verify_tarball_integrity(
-      &package_id,
+      package,
      &Vec::new(),
      &format!("sha512-{}", actual_checksum)
    )

@@ -26,7 +26,7 @@ use crate::node::NodeResolution;
 use crate::npm::NpmCache;
 use crate::npm::NpmPackageReference;
 use crate::npm::NpmPackageResolver;
-use crate::npm::NpmRegistryApi;
+use crate::npm::RealNpmRegistryApi;
 use crate::progress_bar::ProgressBar;
 use crate::resolver::CliResolver;
 use crate::tools::check;

@@ -211,13 +211,13 @@ impl ProcState {
     let emit_cache = EmitCache::new(dir.gen_cache.clone());
     let parsed_source_cache =
       ParsedSourceCache::new(Some(dir.dep_analysis_db_file_path()));
-    let registry_url = NpmRegistryApi::default_url();
+    let registry_url = RealNpmRegistryApi::default_url();
     let npm_cache = NpmCache::from_deno_dir(
       &dir,
       cli_options.cache_setting(),
       progress_bar.clone(),
     );
-    let api = NpmRegistryApi::new(
+    let api = RealNpmRegistryApi::new(
       registry_url,
       npm_cache.clone(),
       cli_options.cache_setting(),

@@ -1002,7 +1002,7 @@ fn lock_file_missing_top_level_package() {
   let stderr = String::from_utf8(output.stderr).unwrap();
   assert_eq!(
     stderr,
-    "error: the lockfile (deno.lock) is corrupt. You can recreate it with --lock-write\n"
+    "error: failed reading lockfile 'deno.lock'\n\nCaused by:\n the lockfile is corrupt. You can recreate it with --lock-write\n"
   );
 }
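The new expected stderr is a direct consequence of the .with_context(...) wrapper added to the lockfile loading in the resolver hunk earlier in this diff: anyhow-style errors print the outer context first and the original error under a "Caused by:" heading. A small illustrative sketch (not the CLI's actual error-reporting path; the message strings are copied from the hunks above):

  use deno_core::anyhow::{anyhow, Context};
  use deno_core::error::AnyError;

  fn read_lockfile_sketch() -> Result<(), AnyError> {
    let inner: Result<(), AnyError> = Err(anyhow!(
      "the lockfile is corrupt. You can recreate it with --lock-write"
    ));
    // Same pattern as the resolver change: wrap the underlying error with a
    // "failed reading lockfile '...'" context.
    inner.with_context(|| format!("failed reading lockfile '{}'", "deno.lock"))
  }

  fn main() {
    if let Err(err) = read_lockfile_sketch() {
      // The {:?} formatting prints the context message, a blank line, and then
      // a "Caused by:" section containing the wrapped error, which is the
      // shape the integration test above now asserts against.
      eprintln!("error: {:?}", err);
    }
  }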
@@ -1054,6 +1054,182 @@ fn auto_discover_lock_file() {
   ));
 }
 
+#[test]
+fn peer_deps_with_copied_folders_and_lockfile() {
+  let _server = http_server();
+
+  let deno_dir = util::new_deno_dir();
+  let temp_dir = util::TempDir::new();
+
+  // write empty config file
+  temp_dir.write("deno.json", "{}");
+  let test_folder_path = test_util::testdata_path()
+    .join("npm")
+    .join("peer_deps_with_copied_folders");
+  let main_contents =
+    std::fs::read_to_string(test_folder_path.join("main.ts")).unwrap();
+  temp_dir.write("./main.ts", main_contents);
+
+  let deno = util::deno_cmd_with_deno_dir(&deno_dir)
+    .current_dir(temp_dir.path())
+    .arg("run")
+    .arg("--unstable")
+    .arg("-A")
+    .arg("main.ts")
+    .envs(env_vars())
+    .stdout(Stdio::piped())
+    .stderr(Stdio::piped())
+    .spawn()
+    .unwrap();
+  let output = deno.wait_with_output().unwrap();
+  assert!(output.status.success());
+
+  let expected_output =
+    std::fs::read_to_string(test_folder_path.join("main.out")).unwrap();
+
+  assert_eq!(String::from_utf8(output.stderr).unwrap(), expected_output);
+
+  assert!(temp_dir.path().join("deno.lock").exists());
+  let grandchild_path = deno_dir
+    .path()
+    .join("npm")
+    .join("localhost_4545")
+    .join("npm")
+    .join("registry")
+    .join("@denotest")
+    .join("peer-dep-test-grandchild");
+  assert!(grandchild_path.join("1.0.0").exists());
+  assert!(grandchild_path.join("1.0.0_1").exists()); // copy folder, which is hardlinked
+
+  // run again
+  let deno = util::deno_cmd_with_deno_dir(&deno_dir)
+    .current_dir(temp_dir.path())
+    .arg("run")
+    .arg("--unstable")
+    .arg("-A")
+    .arg("main.ts")
+    .envs(env_vars())
+    .stdout(Stdio::piped())
+    .stderr(Stdio::piped())
+    .spawn()
+    .unwrap();
+  let output = deno.wait_with_output().unwrap();
+  assert_eq!(String::from_utf8(output.stderr).unwrap(), "1\n2\n");
+  assert!(output.status.success());
+
+  let deno = util::deno_cmd_with_deno_dir(&deno_dir)
+    .current_dir(temp_dir.path())
+    .arg("run")
+    .arg("--unstable")
+    .arg("--reload")
+    .arg("-A")
+    .arg("main.ts")
+    .envs(env_vars())
+    .stdout(Stdio::piped())
+    .stderr(Stdio::piped())
+    .spawn()
+    .unwrap();
+  let output = deno.wait_with_output().unwrap();
+  assert_eq!(String::from_utf8(output.stderr).unwrap(), expected_output);
+  assert!(output.status.success());
+
+  // now run with local node modules
+  let deno = util::deno_cmd_with_deno_dir(&deno_dir)
+    .current_dir(temp_dir.path())
+    .arg("run")
+    .arg("--unstable")
+    .arg("--node-modules-dir")
+    .arg("-A")
+    .arg("main.ts")
+    .envs(env_vars())
+    .stdout(Stdio::piped())
+    .stderr(Stdio::piped())
+    .spawn()
+    .unwrap();
+  let output = deno.wait_with_output().unwrap();
+  assert_eq!(String::from_utf8(output.stderr).unwrap(), "1\n2\n");
+  assert!(output.status.success());
+
+  let deno_folder = temp_dir.path().join("node_modules").join(".deno");
+  assert!(deno_folder
+    .join("@denotest+peer-dep-test-grandchild@1.0.0")
+    .exists());
+  assert!(deno_folder
+    .join("@denotest+peer-dep-test-grandchild@1.0.0_1")
+    .exists()); // copy folder
+
+  // now again run with local node modules
+  let deno = util::deno_cmd_with_deno_dir(&deno_dir)
+    .current_dir(temp_dir.path())
+    .arg("run")
+    .arg("--unstable")
+    .arg("--node-modules-dir")
+    .arg("-A")
+    .arg("main.ts")
+    .envs(env_vars())
+    .stdout(Stdio::piped())
+    .stderr(Stdio::piped())
+    .spawn()
+    .unwrap();
+  let output = deno.wait_with_output().unwrap();
+  assert!(output.status.success());
+  assert_eq!(String::from_utf8(output.stderr).unwrap(), "1\n2\n");
+
+  // now ensure it works with reloading
+  let deno = util::deno_cmd_with_deno_dir(&deno_dir)
+    .current_dir(temp_dir.path())
+    .arg("run")
+    .arg("--unstable")
+    .arg("--node-modules-dir")
+    .arg("--reload")
+    .arg("-A")
+    .arg("main.ts")
+    .envs(env_vars())
+    .stdout(Stdio::piped())
+    .stderr(Stdio::piped())
+    .spawn()
+    .unwrap();
+  let output = deno.wait_with_output().unwrap();
+  assert!(output.status.success());
+  assert_eq!(String::from_utf8(output.stderr).unwrap(), expected_output);
+
+  // now ensure it works with reloading and no lockfile
+  let deno = util::deno_cmd_with_deno_dir(&deno_dir)
+    .current_dir(temp_dir.path())
+    .arg("run")
+    .arg("--unstable")
+    .arg("--node-modules-dir")
+    .arg("--no-lock")
+    .arg("--reload")
+    .arg("-A")
+    .arg("main.ts")
+    .envs(env_vars())
+    .stdout(Stdio::piped())
+    .stderr(Stdio::piped())
+    .spawn()
+    .unwrap();
+  let output = deno.wait_with_output().unwrap();
+  assert_eq!(String::from_utf8(output.stderr).unwrap(), expected_output,);
+  assert!(output.status.success());
+}
+
+itest!(info_peer_deps {
+  args: "info --quiet --unstable npm/peer_deps_with_copied_folders/main.ts",
+  output: "npm/peer_deps_with_copied_folders/main_info.out",
+  exit_code: 0,
+  envs: env_vars(),
+  http_server: true,
+});
+
+itest!(info_peer_deps_json {
+  args:
+    "info --quiet --unstable --json npm/peer_deps_with_copied_folders/main.ts",
+  output: "npm/peer_deps_with_copied_folders/main_info_json.out",
+  exit_code: 0,
+  envs: env_vars(),
+  http_server: true,
+});
+
 fn env_vars_no_sync_download() -> Vec<(String, String)> {
   vec![
     ("DENO_NODE_COMPAT_URL".to_string(), util::std_file_url()),
cli/tests/testdata/npm/peer_deps_with_copied_folders/main.out (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-child
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-grandchild
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-peer
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-child/1.0.0.tgz
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-child/2.0.0.tgz
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0.tgz
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-peer/1.0.0.tgz
+Download http://localhost:4545/npm/registry/@denotest/peer-dep-test-peer/2.0.0.tgz
+1
+2

cli/tests/testdata/npm/peer_deps_with_copied_folders/main.ts (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
+import version1 from "npm:@denotest/peer-dep-test-child@1";
+import version2 from "npm:@denotest/peer-dep-test-child@2";
+
+console.error(version1);
+console.error(version2);

cli/tests/testdata/npm/peer_deps_with_copied_folders/main_info.out (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
+local: [WILDCARD]main.ts
+type: TypeScript
+dependencies: 6 unique
+size: [WILDCARD]
+
+file:///[WILDCARD]/testdata/npm/peer_deps_with_copied_folders/main.ts (171B)
+├─┬ npm:@denotest/peer-dep-test-child@1 - 1.0.0 ([WILDCARD])
+│ ├─┬ npm:@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@1.0.0 ([WILDCARD])
+│ │ └── npm:@denotest/peer-dep-test-peer@1.0.0 ([WILDCARD])
+│ └── npm:@denotest/peer-dep-test-peer@1.0.0 ([WILDCARD])
+└─┬ npm:@denotest/peer-dep-test-child@2 - 2.0.0 ([WILDCARD])
+  ├─┬ npm:@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@2.0.0 ([WILDCARD])
+  │ └── npm:@denotest/peer-dep-test-peer@2.0.0 ([WILDCARD])
+  └── npm:@denotest/peer-dep-test-peer@2.0.0 ([WILDCARD])

cli/tests/testdata/npm/peer_deps_with_copied_folders/main_info_json.out (vendored, new file, 95 lines)
@@ -0,0 +1,95 @@
+{
+  "roots": [
+    "[WILDCARD]/npm/peer_deps_with_copied_folders/main.ts"
+  ],
+  "modules": [
+    {
+      "dependencies": [
+        {
+          "specifier": "npm:@denotest/peer-dep-test-child@1",
+          "code": {
+            "specifier": "npm:@denotest/peer-dep-test-child@1",
+            "span": {
+              "start": {
+                "line": 0,
+                "character": 21
+              },
+              "end": {
+                "line": 0,
+                "character": 58
+              }
+            }
+          },
+          "npmPackage": "@denotest/peer-dep-test-child@1.0.0_@denotest+peer-dep-test-peer@1.0.0"
+        },
+        {
+          "specifier": "npm:@denotest/peer-dep-test-child@2",
+          "code": {
+            "specifier": "npm:@denotest/peer-dep-test-child@2",
+            "span": {
+              "start": {
+                "line": 1,
+                "character": 21
+              },
+              "end": {
+                "line": 1,
+                "character": 58
+              }
+            }
+          },
+          "npmPackage": "@denotest/peer-dep-test-child@2.0.0_@denotest+peer-dep-test-peer@2.0.0"
+        }
+      ],
+      "kind": "esm",
+      "local": "[WILDCARD]main.ts",
+      "emit": null,
+      "map": null,
+      "size": 171,
+      "mediaType": "TypeScript",
+      "specifier": "file://[WILDCARD]/main.ts"
+    }
+  ],
+  "redirects": {},
+  "npmPackages": {
+    "@denotest/peer-dep-test-child@1.0.0_@denotest+peer-dep-test-peer@1.0.0": {
+      "name": "@denotest/peer-dep-test-child",
+      "version": "1.0.0",
+      "dependencies": [
+        "@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@1.0.0",
+        "@denotest/peer-dep-test-peer@1.0.0"
+      ]
+    },
+    "@denotest/peer-dep-test-child@2.0.0_@denotest+peer-dep-test-peer@2.0.0": {
+      "name": "@denotest/peer-dep-test-child",
+      "version": "2.0.0",
+      "dependencies": [
+        "@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@2.0.0",
+        "@denotest/peer-dep-test-peer@2.0.0"
+      ]
+    },
+    "@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@1.0.0": {
+      "name": "@denotest/peer-dep-test-grandchild",
+      "version": "1.0.0",
+      "dependencies": [
+        "@denotest/peer-dep-test-peer@1.0.0"
+      ]
+    },
+    "@denotest/peer-dep-test-grandchild@1.0.0_@denotest+peer-dep-test-peer@2.0.0": {
+      "name": "@denotest/peer-dep-test-grandchild",
+      "version": "1.0.0",
+      "dependencies": [
+        "@denotest/peer-dep-test-peer@2.0.0"
+      ]
+    },
+    "@denotest/peer-dep-test-peer@1.0.0": {
+      "name": "@denotest/peer-dep-test-peer",
+      "version": "1.0.0",
+      "dependencies": []
+    },
+    "@denotest/peer-dep-test-peer@2.0.0": {
+      "name": "@denotest/peer-dep-test-peer",
+      "version": "2.0.0",
+      "dependencies": []
+    }
+  }
+}

cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/1.0.0/index.js (vendored, new file, 1 line)
@@ -0,0 +1 @@
+module.exports = require("@denotest/peer-dep-test-grandchild");

cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/1.0.0/package.json (vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
+{
+  "name": "@denotest/peer-dep-test-child",
+  "version": "1.0.0",
+  "dependencies": {
+    "@denotest/peer-dep-test-grandchild": "*",
+    "@denotest/peer-dep-test-peer": "^1"
+  }
+}

cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/2.0.0/index.js (vendored, new file, 1 line)
@@ -0,0 +1 @@
+module.exports = require("@denotest/peer-dep-test-grandchild");

cli/tests/testdata/npm/registry/@denotest/peer-dep-test-child/2.0.0/package.json (vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
+{
+  "name": "@denotest/peer-dep-test-child",
+  "version": "2.0.0",
+  "dependencies": {
+    "@denotest/peer-dep-test-grandchild": "*",
+    "@denotest/peer-dep-test-peer": "^2"
+  }
+}

cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/dist/index.js (vendored, new file, 1 line)
@@ -0,0 +1 @@
+module.exports = require("@denotest/peer-dep-test-peer");

cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/index.js (vendored, new file, 1 line)
@@ -0,0 +1 @@
+module.exports = require("./dist/index");

cli/tests/testdata/npm/registry/@denotest/peer-dep-test-grandchild/1.0.0/package.json (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
+{
+  "name": "@denotest/peer-dep-test-child-2",
+  "version": "1.0.0",
+  "peerDependencies": {
+    "@denotest/peer-dep-test-peer": "*"
+  }
+}

cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/1.0.0/index.js (vendored, new file, 1 line)
@@ -0,0 +1 @@
+module.exports = 1;

cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/1.0.0/package.json (vendored, new file, 4 lines)
@@ -0,0 +1,4 @@
+{
+  "name": "@denotest/peer-dep-test-peer",
+  "version": "1.0.0"
+}

cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/2.0.0/index.js (vendored, new file, 1 line)
@@ -0,0 +1 @@
+module.exports = 2;

cli/tests/testdata/npm/registry/@denotest/peer-dep-test-peer/2.0.0/package.json (vendored, new file, 4 lines)
@@ -0,0 +1,4 @@
+{
+  "name": "@denotest/peer-dep-test-peer",
+  "version": "2.0.0"
+}
@@ -157,7 +157,8 @@ fn add_npm_packages_to_json(
     });
     if let Some(pkg) = maybe_package {
       if let Some(module) = module.as_object_mut() {
-        module.insert("npmPackage".to_string(), format!("{}", pkg.id).into());
+        module
+          .insert("npmPackage".to_string(), pkg.id.as_serialized().into());
         // change the "kind" to be "npm"
         module.insert("kind".to_string(), "npm".into());
       }

@@ -190,7 +191,7 @@ fn add_npm_packages_to_json(
         {
           dep.insert(
             "npmPackage".to_string(),
-            format!("{}", pkg.id).into(),
+            pkg.id.as_serialized().into(),
           );
         }
       }

@@ -212,11 +213,11 @@ fn add_npm_packages_to_json(
     deps.sort();
     let deps = deps
       .into_iter()
-      .map(|id| serde_json::Value::String(format!("{}", id)))
+      .map(|id| serde_json::Value::String(id.as_serialized()))
       .collect::<Vec<_>>();
     kv.insert("dependencies".to_string(), deps.into());
 
-    json_packages.insert(format!("{}", &pkg.id), kv.into());
+    json_packages.insert(pkg.id.as_serialized(), kv.into());
   }
 
   json.insert("npmPackages".to_string(), json_packages.into());

@@ -504,7 +505,7 @@ impl<'a> GraphDisplayContext<'a> {
       None => Specifier(module.specifier.clone()),
     };
     let was_seen = !self.seen.insert(match &package_or_specifier {
-      Package(package) => package.id.to_string(),
+      Package(package) => package.id.as_serialized(),
       Specifier(specifier) => specifier.to_string(),
     });
     let header_text = if was_seen {

@@ -572,11 +573,14 @@ impl<'a> GraphDisplayContext<'a> {
     for dep_id in deps.into_iter() {
       let maybe_size = self.npm_info.package_sizes.get(dep_id).cloned();
       let size_str = maybe_size_to_text(maybe_size);
-      let mut child =
-        TreeNode::from_text(format!("npm:{} {}", dep_id, size_str));
+      let mut child = TreeNode::from_text(format!(
+        "npm:{} {}",
+        dep_id.as_serialized(),
+        size_str
+      ));
       if let Some(package) = self.npm_info.packages.get(dep_id) {
         if !package.dependencies.is_empty() {
-          if self.seen.contains(&package.id.to_string()) {
+          if self.seen.contains(&package.id.as_serialized()) {
            child.text = format!("{} {}", child.text, colors::gray("*"));
           } else {
             let package = package.clone();