
fix(npm): improve peer dependency resolution (#17835)

This PR fixes peer dependency resolution to resolve peers based only on
the current graph traversal path. Previously, peers were resolved by
looking at a graph node's ancestors, which is not correct because
graph nodes are shared across different resolutions.

It also stores more information about peer dependency resolution in the
lockfile.
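
For illustration, here is a minimal, self-contained sketch (simplified types; not code from this change) of the identifier shape the resolver now works with: a resolved name and version ("nv") plus the peer dependency ids resolved along the current traversal path, serialized the same way the lockfile stores it (compare the serialize_npm_package_id test in cli/npm/resolution/mod.rs below).

// Sketch only: mirrors the shape of NpmPackageNv / NpmPackageId in this PR,
// using plain Strings instead of the real semver types.
struct NpmPackageNv {
  name: String,
  version: String,
}

struct NpmPackageId {
  nv: NpmPackageNv,
  peer_dependencies: Vec<NpmPackageId>,
}

impl NpmPackageId {
  // Lockfile form: "name@version" followed by each resolved peer,
  // prefixed with one '_' per nesting level.
  fn as_serialized(&self) -> String {
    self.serialize_at_level(0)
  }

  fn serialize_at_level(&self, level: usize) -> String {
    let name = if level == 0 {
      self.nv.name.clone()
    } else {
      self.nv.name.replace('/', "+") // nested scoped names escape '/'
    };
    let mut out = format!("{}@{}", name, self.nv.version);
    for peer in &self.peer_dependencies {
      out.push_str(&"_".repeat(level + 1));
      out.push_str(&peer.serialize_at_level(level + 1));
    }
    out
  }
}

fn main() {
  let id = NpmPackageId {
    nv: NpmPackageNv { name: "pkg-a".to_string(), version: "1.2.3".to_string() },
    peer_dependencies: vec![NpmPackageId {
      nv: NpmPackageNv { name: "pkg-b".to_string(), version: "3.2.1".to_string() },
      peer_dependencies: vec![],
    }],
  };
  assert_eq!(id.as_serialized(), "pkg-a@1.2.3_pkg-b@3.2.1");
}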
David Sherret 2023-02-21 12:03:48 -05:00 committed by GitHub
parent 608c855f11
commit 3479bc7661
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
27 changed files with 2743 additions and 1579 deletions

Cargo.lock generated

@ -794,6 +794,7 @@ checksum = "8d7439c3735f405729d52c3fbbe4de140eaf938a1fe47d227c27f8254d4302a5"
name = "deno"
version = "1.30.3"
dependencies = [
"async-trait",
"atty",
"base32",
"base64 0.13.1",
@ -1092,9 +1093,9 @@ dependencies = [
[[package]]
name = "deno_graph"
version = "0.43.2"
version = "0.43.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c8c63d016d4bbc7fcd0cecfb3903b92591ace5ba61229e2c5cf61337b21d165"
checksum = "3b654f093ae79ca93715d6df840f45890dc61fc93e7f63ab0a7442c2a494ecac"
dependencies = [
"anyhow",
"data-url",


@ -46,13 +46,14 @@ deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "dep_gra
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = "0.55.0"
deno_emit = "0.15.0"
deno_graph = "0.43.2"
deno_graph = "0.43.3"
deno_lint = { version = "0.38.0", features = ["docs"] }
deno_lockfile.workspace = true
deno_runtime = { workspace = true, features = ["dont_create_runtime_snapshot", "include_js_files_for_snapshotting"] }
deno_task_shell = "0.8.1"
napi_sym.workspace = true
async-trait.workspace = true
atty.workspace = true
base32 = "=0.4.0"
base64.workspace = true


@ -75,8 +75,8 @@ impl Into<NpmPackageLockfileInfo> for NpmResolutionPackage {
.collect();
NpmPackageLockfileInfo {
display_id: self.id.display(),
serialized_id: self.id.as_serialized(),
display_id: self.pkg_id.nv.to_string(),
serialized_id: self.pkg_id.as_serialized(),
integrity: self.dist.integrity().to_string(),
dependencies,
}

cli/cache/mod.rs vendored

@ -103,7 +103,8 @@ impl Loader for FetchCacher {
) -> LoadFuture {
if specifier.scheme() == "npm" {
return Box::pin(futures::future::ready(
match deno_graph::npm::NpmPackageReference::from_specifier(specifier) {
match deno_graph::npm::NpmPackageReqReference::from_specifier(specifier)
{
Ok(_) => Ok(Some(deno_graph::source::LoadResponse::External {
specifier: specifier.clone(),
})),


@ -26,7 +26,7 @@ use deno_core::serde::Deserialize;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::ModuleSpecifier;
use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::Resolution;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
@ -614,7 +614,7 @@ pub enum DenoDiagnostic {
/// A data module was not found in the cache.
NoCacheData(ModuleSpecifier),
/// A remote npm package reference was not found in the cache.
NoCacheNpm(NpmPackageReference, ModuleSpecifier),
NoCacheNpm(NpmPackageReqReference, ModuleSpecifier),
/// A local module was not found on the local file system.
NoLocal(ModuleSpecifier),
/// The specifier resolved to a remote specifier that was redirected to
@ -905,7 +905,8 @@ fn diagnose_resolution(
.push(DenoDiagnostic::NoAssertType.to_lsp_diagnostic(&range)),
}
}
} else if let Ok(pkg_ref) = NpmPackageReference::from_specifier(specifier)
} else if let Ok(pkg_ref) =
NpmPackageReqReference::from_specifier(specifier)
{
if let Some(npm_resolver) = &snapshot.maybe_npm_resolver {
// show diagnostics for npm package references that aren't cached
@ -929,7 +930,7 @@ fn diagnose_resolution(
} else if let Some(npm_resolver) = &snapshot.maybe_npm_resolver {
// check that a @types/node package exists in the resolver
let types_node_ref =
NpmPackageReference::from_str("npm:@types/node").unwrap();
NpmPackageReqReference::from_str("npm:@types/node").unwrap();
if npm_resolver
.resolve_package_folder_from_deno_module(&types_node_ref.req)
.is_err()


@ -30,8 +30,8 @@ use deno_core::futures::future;
use deno_core::parking_lot::Mutex;
use deno_core::url;
use deno_core::ModuleSpecifier;
use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReq;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::GraphImport;
use deno_graph::Resolution;
use deno_runtime::deno_node::NodeResolutionMode;
@ -1103,7 +1103,7 @@ impl Documents {
.and_then(|r| r.maybe_specifier())
{
results.push(self.resolve_dependency(specifier, maybe_npm_resolver));
} else if let Ok(npm_ref) = NpmPackageReference::from_str(&specifier) {
} else if let Ok(npm_ref) = NpmPackageReqReference::from_str(&specifier) {
results.push(maybe_npm_resolver.map(|npm_resolver| {
NodeResolution::into_specifier_and_media_type(
node_resolve_npm_reference(
@ -1243,7 +1243,7 @@ impl Documents {
// perf: ensure this is not added to unless this specifier has never
// been analyzed in order to not cause an extra file system lookup
self.pending_specifiers.push_back(dep.clone());
if let Ok(reference) = NpmPackageReference::from_specifier(dep) {
if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
self.npm_reqs.insert(reference.req);
}
}
@ -1321,7 +1321,7 @@ impl Documents {
specifier: &ModuleSpecifier,
maybe_npm_resolver: Option<&NpmPackageResolver>,
) -> Option<(ModuleSpecifier, MediaType)> {
if let Ok(npm_ref) = NpmPackageReference::from_specifier(specifier) {
if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(specifier) {
return maybe_npm_resolver.map(|npm_resolver| {
NodeResolution::into_specifier_and_media_type(
node_resolve_npm_reference(


@ -73,7 +73,7 @@ use crate::graph_util;
use crate::http_util::HttpClient;
use crate::npm::NpmCache;
use crate::npm::NpmPackageResolver;
use crate::npm::RealNpmRegistryApi;
use crate::npm::NpmRegistryApi;
use crate::proc_state::ProcState;
use crate::tools::fmt::format_file;
use crate::tools::fmt::format_parsed_source;
@ -304,7 +304,7 @@ fn create_lsp_npm_resolver(
dir: &DenoDir,
http_client: HttpClient,
) -> NpmPackageResolver {
let registry_url = RealNpmRegistryApi::default_url();
let registry_url = NpmRegistryApi::default_url();
let progress_bar = ProgressBar::new(ProgressBarStyle::TextOnly);
let npm_cache = NpmCache::from_deno_dir(
dir,
@ -316,8 +316,8 @@ fn create_lsp_npm_resolver(
http_client.clone(),
progress_bar.clone(),
);
let api = RealNpmRegistryApi::new(
registry_url,
let api = NpmRegistryApi::new(
registry_url.clone(),
npm_cache.clone(),
http_client,
progress_bar,


@ -15,8 +15,8 @@ use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::serde_json::Value;
use deno_core::url::Url;
use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReq;
use deno_graph::npm::NpmPackageReqReference;
use deno_runtime::deno_node;
use deno_runtime::deno_node::errors;
use deno_runtime::deno_node::find_builtin_node_module;
@ -241,7 +241,7 @@ pub fn node_resolve(
}
pub fn node_resolve_npm_reference(
reference: &NpmPackageReference,
reference: &NpmPackageReqReference,
mode: NodeResolutionMode,
npm_resolver: &NpmPackageResolver,
permissions: &mut dyn NodePermissions,


@ -13,6 +13,7 @@ use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url;
use deno_graph::npm::NpmPackageNv;
use deno_graph::semver::Version;
use crate::args::CacheSetting;
@ -107,8 +108,7 @@ pub fn with_folder_sync_lock(
}
pub struct NpmPackageCacheFolderId {
pub name: String,
pub version: Version,
pub nv: NpmPackageNv,
/// Peer dependency resolution may require us to have duplicate copies
/// of the same package.
pub copy_index: usize,
@ -117,8 +117,7 @@ pub struct NpmPackageCacheFolderId {
impl NpmPackageCacheFolderId {
pub fn with_no_count(&self) -> Self {
Self {
name: self.name.clone(),
version: self.version.clone(),
nv: self.nv.clone(),
copy_index: 0,
}
}
@ -126,7 +125,7 @@ impl NpmPackageCacheFolderId {
impl std::fmt::Display for NpmPackageCacheFolderId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}@{}", self.name, self.version)?;
write!(f, "{}", self.nv)?;
if self.copy_index > 0 {
write!(f, "_{}", self.copy_index)?;
}
@ -188,14 +187,14 @@ impl ReadonlyNpmCache {
) -> PathBuf {
if id.copy_index == 0 {
self.package_folder_for_name_and_version(
&id.name,
&id.version,
&id.nv.name,
&id.nv.version,
registry_url,
)
} else {
self
.package_name_folder(&id.name, registry_url)
.join(format!("{}_{}", id.version, id.copy_index))
.package_name_folder(&id.nv.name, registry_url)
.join(format!("{}_{}", id.nv.version, id.copy_index))
}
}
@ -304,8 +303,10 @@ impl ReadonlyNpmCache {
(version_part, 0)
};
Some(NpmPackageCacheFolderId {
name,
version: Version::parse_from_npm(version).ok()?,
nv: NpmPackageNv {
name,
version: Version::parse_from_npm(version).ok()?,
},
copy_index,
})
}
@ -440,16 +441,19 @@ impl NpmCache {
// if this file exists, then the package didn't successfully extract
// the first time, or another process is currently extracting the zip file
&& !package_folder.join(NPM_PACKAGE_SYNC_LOCK_FILENAME).exists()
&& self.cache_setting.should_use_for_npm_package(&id.name)
&& self.cache_setting.should_use_for_npm_package(&id.nv.name)
{
return Ok(());
}
let original_package_folder = self
.readonly
.package_folder_for_name_and_version(&id.name, &id.version, registry_url);
let original_package_folder =
self.readonly.package_folder_for_name_and_version(
&id.nv.name,
&id.nv.version,
registry_url,
);
with_folder_sync_lock(
(id.name.as_str(), &id.version),
(id.nv.name.as_str(), &id.nv.version),
&package_folder,
|| hard_link_dir_recursive(&original_package_folder, &package_folder),
)?;
@ -514,6 +518,7 @@ pub fn mixed_case_package_name_decode(name: &str) -> Option<String> {
#[cfg(test)]
mod test {
use deno_core::url::Url;
use deno_graph::npm::NpmPackageNv;
use deno_graph::semver::Version;
use super::ReadonlyNpmCache;
@ -529,8 +534,10 @@ mod test {
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
name: "json".to_string(),
version: Version::parse_from_npm("1.2.5").unwrap(),
nv: NpmPackageNv {
name: "json".to_string(),
version: Version::parse_from_npm("1.2.5").unwrap(),
},
copy_index: 0,
},
&registry_url,
@ -544,8 +551,10 @@ mod test {
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
name: "json".to_string(),
version: Version::parse_from_npm("1.2.5").unwrap(),
nv: NpmPackageNv {
name: "json".to_string(),
version: Version::parse_from_npm("1.2.5").unwrap(),
},
copy_index: 1,
},
&registry_url,
@ -559,8 +568,10 @@ mod test {
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
name: "JSON".to_string(),
version: Version::parse_from_npm("2.1.5").unwrap(),
nv: NpmPackageNv {
name: "JSON".to_string(),
version: Version::parse_from_npm("2.1.5").unwrap(),
},
copy_index: 0,
},
&registry_url,
@ -574,8 +585,10 @@ mod test {
assert_eq!(
cache.package_folder_for_id(
&NpmPackageCacheFolderId {
name: "@types/JSON".to_string(),
version: Version::parse_from_npm("2.1.5").unwrap(),
nv: NpmPackageNv {
name: "@types/JSON".to_string(),
version: Version::parse_from_npm("2.1.5").unwrap(),
},
copy_index: 0,
},
&registry_url,

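Roughly how the global cache folder layout above resolves for a given package copy, as a hedged standalone sketch (the real ReadonlyNpmCache code also prepends the registry URL folder and encodes mixed-case names; copy indexes greater than zero come from differing peer dependency resolutions):

// Simplified sketch of the naming implied by package_folder_for_id above:
// copy 0 lives under "name/version", later copies get a "_<index>" suffix.
fn cache_folder(name: &str, version: &str, copy_index: usize) -> String {
  if copy_index == 0 {
    format!("{name}/{version}")
  } else {
    format!("{name}/{version}_{copy_index}")
  }
}

fn main() {
  assert_eq!(cache_folder("json", "1.2.5", 0), "json/1.2.5");
  assert_eq!(cache_folder("json", "1.2.5", 1), "json/1.2.5_1");
}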

@ -10,9 +10,8 @@ pub use cache::NpmCache;
#[cfg(test)]
pub use registry::NpmPackageVersionDistInfo;
pub use registry::NpmRegistryApi;
pub use registry::RealNpmRegistryApi;
pub use resolution::resolve_graph_npm_info;
pub use resolution::NpmPackageNodeId;
pub use resolution::NpmPackageId;
pub use resolution::NpmResolutionPackage;
pub use resolution::NpmResolutionSnapshot;
pub use resolvers::NpmPackageResolver;


@ -9,19 +9,19 @@ use std::io::ErrorKind;
use std::path::PathBuf;
use std::sync::Arc;
use async_trait::async_trait;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::futures::future::BoxFuture;
use deno_core::futures::FutureExt;
use deno_core::futures;
use deno_core::parking_lot::Mutex;
use deno_core::serde::Deserialize;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_graph::semver::Version;
use deno_graph::npm::NpmPackageNv;
use deno_graph::semver::VersionReq;
use deno_runtime::colors;
use once_cell::sync::Lazy;
use serde::Serialize;
use crate::args::package_json::parse_dep_entry_name_and_raw_version;
@ -31,6 +31,7 @@ use crate::http_util::HttpClient;
use crate::util::fs::atomic_write_file;
use crate::util::progress_bar::ProgressBar;
use super::cache::should_sync_download;
use super::cache::NpmCache;
// npm registry docs: https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md
@ -43,7 +44,7 @@ pub struct NpmPackageInfo {
pub dist_tags: HashMap<String, String>,
}
#[derive(Debug, Eq, PartialEq)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum NpmDependencyEntryKind {
Dep,
Peer,
@ -56,7 +57,7 @@ impl NpmDependencyEntryKind {
}
}
#[derive(Debug, Eq, PartialEq)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct NpmDependencyEntry {
pub kind: NpmDependencyEntryKind,
pub bare_specifier: String,
@ -181,83 +182,30 @@ impl NpmPackageVersionDistInfo {
}
}
pub trait NpmRegistryApi: Clone + Sync + Send + 'static {
fn maybe_package_info(
&self,
name: &str,
) -> BoxFuture<'static, Result<Option<Arc<NpmPackageInfo>>, AnyError>>;
fn package_info(
&self,
name: &str,
) -> BoxFuture<'static, Result<Arc<NpmPackageInfo>, AnyError>> {
let api = self.clone();
let name = name.to_string();
async move {
let maybe_package_info = api.maybe_package_info(&name).await?;
match maybe_package_info {
Some(package_info) => Ok(package_info),
None => bail!("npm package '{}' does not exist", name),
static NPM_REGISTRY_DEFAULT_URL: Lazy<Url> = Lazy::new(|| {
let env_var_name = "NPM_CONFIG_REGISTRY";
if let Ok(registry_url) = std::env::var(env_var_name) {
// ensure there is a trailing slash for the directory
let registry_url = format!("{}/", registry_url.trim_end_matches('/'));
match Url::parse(&registry_url) {
Ok(url) => {
return url;
}
Err(err) => {
log::debug!("Invalid {} environment variable: {:#}", env_var_name, err,);
}
}
.boxed()
}
fn package_version_info(
&self,
name: &str,
version: &Version,
) -> BoxFuture<'static, Result<Option<NpmPackageVersionInfo>, AnyError>> {
let api = self.clone();
let name = name.to_string();
let version = version.to_string();
async move {
let package_info = api.package_info(&name).await?;
Ok(package_info.versions.get(&version).cloned())
}
.boxed()
}
Url::parse("https://registry.npmjs.org").unwrap()
});
/// Clears the internal memory cache.
fn clear_memory_cache(&self);
}
#[derive(Clone, Debug)]
pub struct NpmRegistryApi(Arc<dyn NpmRegistryApiInner>);
#[derive(Clone)]
pub struct RealNpmRegistryApi(Arc<RealNpmRegistryApiInner>);
impl RealNpmRegistryApi {
pub fn default_url() -> Url {
// todo(dsherret): remove DENO_NPM_REGISTRY in the future (maybe May 2023)
let env_var_names = ["NPM_CONFIG_REGISTRY", "DENO_NPM_REGISTRY"];
for env_var_name in env_var_names {
if let Ok(registry_url) = std::env::var(env_var_name) {
// ensure there is a trailing slash for the directory
let registry_url = format!("{}/", registry_url.trim_end_matches('/'));
match Url::parse(&registry_url) {
Ok(url) => {
if env_var_name == "DENO_NPM_REGISTRY" {
log::warn!(
"{}",
colors::yellow(concat!(
"DENO_NPM_REGISTRY was intended for internal testing purposes only. ",
"Please update to NPM_CONFIG_REGISTRY instead.",
)),
);
}
return url;
}
Err(err) => {
log::debug!(
"Invalid {} environment variable: {:#}",
env_var_name,
err,
);
}
}
}
}
Url::parse("https://registry.npmjs.org").unwrap()
impl NpmRegistryApi {
pub fn default_url() -> &'static Url {
&NPM_REGISTRY_DEFAULT_URL
}
pub fn new(
@ -276,26 +224,110 @@ impl RealNpmRegistryApi {
}))
}
#[cfg(test)]
pub fn new_for_test(api: TestNpmRegistryApiInner) -> NpmRegistryApi {
Self(Arc::new(api))
}
pub async fn package_info(
&self,
name: &str,
) -> Result<Arc<NpmPackageInfo>, AnyError> {
let maybe_package_info = self.0.maybe_package_info(name).await?;
match maybe_package_info {
Some(package_info) => Ok(package_info),
None => bail!("npm package '{}' does not exist", name),
}
}
pub async fn package_version_info(
&self,
nv: &NpmPackageNv,
) -> Result<Option<NpmPackageVersionInfo>, AnyError> {
let package_info = self.package_info(&nv.name).await?;
Ok(package_info.versions.get(&nv.version.to_string()).cloned())
}
/// Caches all the package information in memory in parallel.
pub async fn cache_in_parallel(
&self,
package_names: Vec<String>,
) -> Result<(), AnyError> {
let mut unresolved_tasks = Vec::with_capacity(package_names.len());
// cache the package info up front in parallel
if should_sync_download() {
// for deterministic test output
let mut ordered_names = package_names;
ordered_names.sort();
for name in ordered_names {
self.package_info(&name).await?;
}
} else {
for name in package_names {
let api = self.clone();
unresolved_tasks.push(tokio::task::spawn(async move {
// This is ok to call because api will internally cache
// the package information in memory.
api.package_info(&name).await
}));
}
};
for result in futures::future::join_all(unresolved_tasks).await {
result??; // surface the first error
}
Ok(())
}
/// Clears the internal memory cache.
pub fn clear_memory_cache(&self) {
self.0.clear_memory_cache();
}
pub fn base_url(&self) -> &Url {
&self.0.base_url
self.0.base_url()
}
}
impl NpmRegistryApi for RealNpmRegistryApi {
fn maybe_package_info(
#[async_trait]
trait NpmRegistryApiInner: std::fmt::Debug + Sync + Send + 'static {
async fn maybe_package_info(
&self,
name: &str,
) -> BoxFuture<'static, Result<Option<Arc<NpmPackageInfo>>, AnyError>> {
let api = self.clone();
let name = name.to_string();
async move { api.0.maybe_package_info(&name).await }.boxed()
) -> Result<Option<Arc<NpmPackageInfo>>, AnyError>;
fn clear_memory_cache(&self);
fn get_cached_package_info(&self, name: &str) -> Option<Arc<NpmPackageInfo>>;
fn base_url(&self) -> &Url;
}
#[async_trait]
impl NpmRegistryApiInner for RealNpmRegistryApiInner {
fn base_url(&self) -> &Url {
&self.base_url
}
async fn maybe_package_info(
&self,
name: &str,
) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
self.maybe_package_info(name).await
}
fn clear_memory_cache(&self) {
self.0.mem_cache.lock().clear();
self.mem_cache.lock().clear();
}
fn get_cached_package_info(&self, name: &str) -> Option<Arc<NpmPackageInfo>> {
self.mem_cache.lock().get(name).cloned().flatten()
}
}
#[derive(Debug)]
struct RealNpmRegistryApiInner {
base_url: Url,
cache: NpmCache,
@ -461,16 +493,44 @@ impl RealNpmRegistryApiInner {
}
}
#[derive(Debug)]
struct NullNpmRegistryApiInner;
#[async_trait]
impl NpmRegistryApiInner for NullNpmRegistryApiInner {
async fn maybe_package_info(
&self,
_name: &str,
) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
Err(deno_core::anyhow::anyhow!(
"Deno bug. Please report. Registry API was not initialized."
))
}
fn clear_memory_cache(&self) {}
fn get_cached_package_info(
&self,
_name: &str,
) -> Option<Arc<NpmPackageInfo>> {
None
}
fn base_url(&self) -> &Url {
NpmRegistryApi::default_url()
}
}
/// Note: This test struct is not thread safe for setup
/// purposes. Construct everything on the same thread.
#[cfg(test)]
#[derive(Clone, Default)]
pub struct TestNpmRegistryApi {
#[derive(Clone, Default, Debug)]
pub struct TestNpmRegistryApiInner {
package_infos: Arc<Mutex<HashMap<String, NpmPackageInfo>>>,
}
#[cfg(test)]
impl TestNpmRegistryApi {
impl TestNpmRegistryApiInner {
pub fn add_package_info(&self, name: &str, info: NpmPackageInfo) {
let previous = self.package_infos.lock().insert(name.to_string(), info);
assert!(previous.is_none());
@ -554,16 +614,28 @@ impl TestNpmRegistryApi {
}
#[cfg(test)]
impl NpmRegistryApi for TestNpmRegistryApi {
fn maybe_package_info(
#[async_trait]
impl NpmRegistryApiInner for TestNpmRegistryApiInner {
async fn maybe_package_info(
&self,
name: &str,
) -> BoxFuture<'static, Result<Option<Arc<NpmPackageInfo>>, AnyError>> {
) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
let result = self.package_infos.lock().get(name).cloned();
Box::pin(deno_core::futures::future::ready(Ok(result.map(Arc::new))))
Ok(result.map(Arc::new))
}
fn clear_memory_cache(&self) {
// do nothing for the test api
}
fn get_cached_package_info(
&self,
_name: &str,
) -> Option<Arc<NpmPackageInfo>> {
None
}
fn base_url(&self) -> &Url {
NpmRegistryApi::default_url()
}
}


@ -0,0 +1,241 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_graph::semver::Version;
use deno_graph::semver::VersionReq;
use once_cell::sync::Lazy;
use super::NpmPackageId;
use crate::npm::registry::NpmPackageInfo;
use crate::npm::registry::NpmPackageVersionInfo;
pub static LATEST_VERSION_REQ: Lazy<VersionReq> =
Lazy::new(|| VersionReq::parse_from_specifier("latest").unwrap());
pub fn resolve_best_package_version_and_info<'info, 'version>(
version_req: &VersionReq,
package_info: &'info NpmPackageInfo,
existing_versions: impl Iterator<Item = &'version Version>,
) -> Result<VersionAndInfo<'info>, AnyError> {
if let Some(version) = resolve_best_from_existing_versions(
version_req,
package_info,
existing_versions,
)? {
match package_info.versions.get(&version.to_string()) {
Some(version_info) => Ok(VersionAndInfo {
version,
info: version_info,
}),
None => {
bail!(
"could not find version '{}' for '{}'",
version,
&package_info.name
)
}
}
} else {
// get the information
get_resolved_package_version_and_info(version_req, package_info, None)
}
}
#[derive(Clone)]
pub struct VersionAndInfo<'a> {
pub version: Version,
pub info: &'a NpmPackageVersionInfo,
}
fn get_resolved_package_version_and_info<'a>(
version_req: &VersionReq,
info: &'a NpmPackageInfo,
parent: Option<&NpmPackageId>,
) -> Result<VersionAndInfo<'a>, AnyError> {
if let Some(tag) = version_req.tag() {
tag_to_version_info(info, tag, parent)
} else {
let mut maybe_best_version: Option<VersionAndInfo> = None;
for version_info in info.versions.values() {
let version = Version::parse_from_npm(&version_info.version)?;
if version_req.matches(&version) {
let is_best_version = maybe_best_version
.as_ref()
.map(|best_version| best_version.version.cmp(&version).is_lt())
.unwrap_or(true);
if is_best_version {
maybe_best_version = Some(VersionAndInfo {
version,
info: version_info,
});
}
}
}
match maybe_best_version {
Some(v) => Ok(v),
// If the package isn't found, it likely means that the user needs to use
// `--reload` to get the latest npm package information. Although it seems
// like we could make this smart by fetching the latest information for
// this package here, we really need a full restart. There could be very
// interesting bugs that occur if this package's version was resolved by
// something previous using the old information, then now being smart here
// causes a new fetch of the package information, meaning this time the
// previous resolution of this package's version resolved to an older
// version, but next time to a different version because it has new information.
None => bail!(
concat!(
"Could not find npm package '{}' matching {}{}. ",
"Try retrieving the latest npm package information by running with --reload",
),
info.name,
version_req.version_text(),
match parent {
Some(resolved_id) => format!(" as specified in {}", resolved_id.nv),
None => String::new(),
}
),
}
}
}
pub fn version_req_satisfies(
version_req: &VersionReq,
version: &Version,
package_info: &NpmPackageInfo,
parent: Option<&NpmPackageId>,
) -> Result<bool, AnyError> {
match version_req.tag() {
Some(tag) => {
let tag_version = tag_to_version_info(package_info, tag, parent)?.version;
Ok(tag_version == *version)
}
None => Ok(version_req.matches(version)),
}
}
fn resolve_best_from_existing_versions<'a>(
version_req: &VersionReq,
package_info: &NpmPackageInfo,
existing_versions: impl Iterator<Item = &'a Version>,
) -> Result<Option<Version>, AnyError> {
let mut maybe_best_version: Option<&Version> = None;
for version in existing_versions {
if version_req_satisfies(version_req, version, package_info, None)? {
let is_best_version = maybe_best_version
.as_ref()
.map(|best_version| (*best_version).cmp(version).is_lt())
.unwrap_or(true);
if is_best_version {
maybe_best_version = Some(version);
}
}
}
Ok(maybe_best_version.cloned())
}
fn tag_to_version_info<'a>(
info: &'a NpmPackageInfo,
tag: &str,
parent: Option<&NpmPackageId>,
) -> Result<VersionAndInfo<'a>, AnyError> {
// For when someone just specifies @types/node, we want to pull in a
// "known good" version of @types/node that works well with Deno and
// not necessarily the latest version. For example, we might only be
// compatible with Node vX, but then Node vY is published so we wouldn't
// want to pull that in.
// Note: If the user doesn't want this behavior, then they can specify an
// explicit version.
if tag == "latest" && info.name == "@types/node" {
return get_resolved_package_version_and_info(
&VersionReq::parse_from_npm("18.0.0 - 18.11.18").unwrap(),
info,
parent,
);
}
if let Some(version) = info.dist_tags.get(tag) {
match info.versions.get(version) {
Some(info) => Ok(VersionAndInfo {
version: Version::parse_from_npm(version)?,
info,
}),
None => {
bail!(
"Could not find version '{}' referenced in dist-tag '{}'.",
version,
tag,
)
}
}
} else {
bail!("Could not find dist-tag '{}'.", tag)
}
}
#[cfg(test)]
mod test {
use std::collections::HashMap;
use deno_graph::npm::NpmPackageReqReference;
use super::*;
#[test]
fn test_get_resolved_package_version_and_info() {
// dist tag where version doesn't exist
let package_ref = NpmPackageReqReference::from_str("npm:test").unwrap();
let package_info = NpmPackageInfo {
name: "test".to_string(),
versions: HashMap::new(),
dist_tags: HashMap::from([(
"latest".to_string(),
"1.0.0-alpha".to_string(),
)]),
};
let result = get_resolved_package_version_and_info(
package_ref
.req
.version_req
.as_ref()
.unwrap_or(&*LATEST_VERSION_REQ),
&package_info,
None,
);
assert_eq!(
result.err().unwrap().to_string(),
"Could not find version '1.0.0-alpha' referenced in dist-tag 'latest'."
);
// dist tag where version is a pre-release
let package_ref = NpmPackageReqReference::from_str("npm:test").unwrap();
let package_info = NpmPackageInfo {
name: "test".to_string(),
versions: HashMap::from([
("0.1.0".to_string(), NpmPackageVersionInfo::default()),
(
"1.0.0-alpha".to_string(),
NpmPackageVersionInfo {
version: "0.1.0-alpha".to_string(),
..Default::default()
},
),
]),
dist_tags: HashMap::from([(
"latest".to_string(),
"1.0.0-alpha".to_string(),
)]),
};
let result = get_resolved_package_version_and_info(
package_ref
.req
.version_req
.as_ref()
.unwrap_or(&*LATEST_VERSION_REQ),
&package_info,
None,
);
assert_eq!(result.unwrap().version.to_string(), "1.0.0-alpha");
}
}
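
For reference, a tiny standalone sketch (not from this change) of the selection rule used by get_resolved_package_version_and_info and resolve_best_from_existing_versions above: among the candidate versions that satisfy the requirement, the highest one wins.

// Versions are modeled as (major, minor, patch) tuples here, which compare
// lexicographically, matching semver ordering for plain release versions.
fn best_match<'a>(
  satisfies: impl Fn(&(u64, u64, u64)) -> bool,
  versions: &'a [(u64, u64, u64)],
) -> Option<&'a (u64, u64, u64)> {
  versions.iter().filter(|&v| satisfies(v)).max()
}

fn main() {
  let versions = [(1, 0, 0), (1, 2, 0), (2, 0, 0)];
  // pretend the requirement is "^1", i.e. only major version 1 matches
  let best = best_match(|v: &(u64, u64, u64)| v.0 == 1, &versions);
  assert_eq!(best, Some(&(1, 2, 0)));
}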

File diff suppressed because it is too large.


@ -1,11 +1,13 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use std::cmp::Ordering;
use std::collections::HashMap;
use std::collections::HashSet;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::parking_lot::RwLock;
use deno_graph::npm::NpmPackageNv;
use deno_graph::npm::NpmPackageReq;
use deno_graph::semver::Version;
use serde::Deserialize;
@ -17,12 +19,11 @@ use crate::args::Lockfile;
use self::graph::GraphDependencyResolver;
use self::snapshot::NpmPackagesPartitioned;
use super::cache::should_sync_download;
use super::cache::NpmPackageCacheFolderId;
use super::registry::NpmPackageVersionDistInfo;
use super::registry::RealNpmRegistryApi;
use super::NpmRegistryApi;
use super::registry::NpmRegistryApi;
mod common;
mod graph;
mod snapshot;
mod specifier;
@ -40,25 +41,20 @@ pub struct NpmPackageNodeIdDeserializationError {
/// A resolved unique identifier for an npm package. This contains
/// the resolved name, version, and peer dependency resolution identifiers.
#[derive(
Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Hash, Serialize, Deserialize,
)]
pub struct NpmPackageNodeId {
pub name: String,
pub version: Version,
pub peer_dependencies: Vec<NpmPackageNodeId>,
#[derive(Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct NpmPackageId {
pub nv: NpmPackageNv,
pub peer_dependencies: Vec<NpmPackageId>,
}
impl NpmPackageNodeId {
#[allow(unused)]
pub fn scope(&self) -> Option<&str> {
if self.name.starts_with('@') && self.name.contains('/') {
self.name.split('/').next()
} else {
None
}
// Custom debug implementation for more concise test output
impl std::fmt::Debug for NpmPackageId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.as_serialized())
}
}
impl NpmPackageId {
pub fn as_serialized(&self) -> String {
self.as_serialized_with_level(0)
}
@ -68,11 +64,11 @@ impl NpmPackageNodeId {
let mut result = format!(
"{}@{}",
if level == 0 {
self.name.to_string()
self.nv.name.to_string()
} else {
self.name.replace('/', "+")
self.nv.name.replace('/', "+")
},
self.version
self.nv.version
);
for peer in &self.peer_dependencies {
// unfortunately we can't do something like `_3` when
@ -136,7 +132,7 @@ impl NpmPackageNodeId {
fn parse_peers_at_level<'a>(
level: usize,
) -> impl Fn(&'a str) -> ParseResult<'a, Vec<NpmPackageNodeId>> {
) -> impl Fn(&'a str) -> ParseResult<'a, Vec<NpmPackageId>> {
move |mut input| {
let mut peers = Vec::new();
while let Ok((level_input, _)) = parse_level_at_level(level)(input) {
@ -151,7 +147,7 @@ impl NpmPackageNodeId {
fn parse_id_at_level<'a>(
level: usize,
) -> impl Fn(&'a str) -> ParseResult<'a, NpmPackageNodeId> {
) -> impl Fn(&'a str) -> ParseResult<'a, NpmPackageId> {
move |input| {
let (input, (name, version)) = parse_name_and_version(input)?;
let name = if level > 0 {
@ -163,9 +159,8 @@ impl NpmPackageNodeId {
parse_peers_at_level(level + 1)(input)?;
Ok((
input,
NpmPackageNodeId {
name,
version,
NpmPackageId {
nv: NpmPackageNv { name, version },
peer_dependencies,
},
))
@ -179,17 +174,26 @@ impl NpmPackageNodeId {
}
})
}
}
pub fn display(&self) -> String {
// Don't implement std::fmt::Display because we don't
// want this to be used by accident in certain scenarios.
format!("{}@{}", self.name, self.version)
impl Ord for NpmPackageId {
fn cmp(&self, other: &Self) -> Ordering {
match self.nv.cmp(&other.nv) {
Ordering::Equal => self.peer_dependencies.cmp(&other.peer_dependencies),
ordering => ordering,
}
}
}
impl PartialOrd for NpmPackageId {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct NpmResolutionPackage {
pub id: NpmPackageNodeId,
pub pkg_id: NpmPackageId,
/// The peer dependency resolution can differ for the same
/// package (name and version) depending on where it is in
/// the resolution tree. This copy index indicates which
@ -198,21 +202,20 @@ pub struct NpmResolutionPackage {
pub dist: NpmPackageVersionDistInfo,
/// Key is what the package refers to the other package as,
/// which could be different from the package name.
pub dependencies: HashMap<String, NpmPackageNodeId>,
pub dependencies: HashMap<String, NpmPackageId>,
}
impl NpmResolutionPackage {
pub fn get_package_cache_folder_id(&self) -> NpmPackageCacheFolderId {
NpmPackageCacheFolderId {
name: self.id.name.clone(),
version: self.id.version.clone(),
nv: self.pkg_id.nv.clone(),
copy_index: self.copy_index,
}
}
}
pub struct NpmResolution {
api: RealNpmRegistryApi,
api: NpmRegistryApi,
snapshot: RwLock<NpmResolutionSnapshot>,
update_semaphore: tokio::sync::Semaphore,
}
@ -228,7 +231,7 @@ impl std::fmt::Debug for NpmResolution {
impl NpmResolution {
pub fn new(
api: RealNpmRegistryApi,
api: NpmRegistryApi,
initial_snapshot: Option<NpmResolutionSnapshot>,
) -> Self {
Self {
@ -246,9 +249,8 @@ impl NpmResolution {
let _permit = self.update_semaphore.acquire().await?;
let snapshot = self.snapshot.read().clone();
let snapshot = self
.add_package_reqs_to_snapshot(package_reqs, snapshot)
.await?;
let snapshot =
add_package_reqs_to_snapshot(&self.api, package_reqs, snapshot).await?;
*self.snapshot.write() = snapshot;
Ok(())
@ -272,88 +274,28 @@ impl NpmResolution {
} else {
snapshot
};
let snapshot = self
.add_package_reqs_to_snapshot(
package_reqs.into_iter().collect(),
snapshot,
)
.await?;
let snapshot = add_package_reqs_to_snapshot(
&self.api,
package_reqs.into_iter().collect(),
snapshot,
)
.await?;
*self.snapshot.write() = snapshot;
Ok(())
}
async fn add_package_reqs_to_snapshot(
&self,
package_reqs: Vec<NpmPackageReq>,
snapshot: NpmResolutionSnapshot,
) -> Result<NpmResolutionSnapshot, AnyError> {
// convert the snapshot to a traversable graph
let mut graph = Graph::from_snapshot(snapshot);
// go over the top level package names first, then down the
// tree one level at a time through all the branches
let mut unresolved_tasks = Vec::with_capacity(package_reqs.len());
let mut resolving_package_names =
HashSet::with_capacity(package_reqs.len());
for package_req in &package_reqs {
if graph.has_package_req(package_req) {
// skip analyzing this package, as there's already a matching top level package
continue;
}
if !resolving_package_names.insert(package_req.name.clone()) {
continue; // already resolving
}
// cache the package info up front in parallel
if should_sync_download() {
// for deterministic test output
self.api.package_info(&package_req.name).await?;
} else {
let api = self.api.clone();
let package_name = package_req.name.clone();
unresolved_tasks.push(tokio::task::spawn(async move {
// This is ok to call because api will internally cache
// the package information in memory.
api.package_info(&package_name).await
}));
};
}
for result in futures::future::join_all(unresolved_tasks).await {
result??; // surface the first error
}
let mut resolver = GraphDependencyResolver::new(&mut graph, &self.api);
// These package_reqs should already be sorted in the order they should
// be resolved in.
for package_req in package_reqs {
// avoid loading the info if this is already in the graph
if !resolver.has_package_req(&package_req) {
let info = self.api.package_info(&package_req.name).await?;
resolver.add_package_req(&package_req, &info)?;
}
}
resolver.resolve_pending().await?;
let result = graph.into_snapshot(&self.api).await;
self.api.clear_memory_cache();
result
}
pub fn resolve_package_from_id(
&self,
id: &NpmPackageNodeId,
id: &NpmPackageId,
) -> Option<NpmResolutionPackage> {
self.snapshot.read().package_from_id(id).cloned()
}
pub fn resolve_package_cache_folder_id_from_id(
&self,
id: &NpmPackageNodeId,
id: &NpmPackageId,
) -> Option<NpmPackageCacheFolderId> {
self
.snapshot
@ -400,7 +342,8 @@ impl NpmResolution {
pub fn lock(&self, lockfile: &mut Lockfile) -> Result<(), AnyError> {
let snapshot = self.snapshot.read();
for (package_req, package_id) in snapshot.package_reqs.iter() {
for (package_req, nv) in snapshot.package_reqs.iter() {
let package_id = snapshot.root_packages.get(nv).unwrap();
lockfile.insert_npm_specifier(
package_req.to_string(),
package_id.as_serialized(),
@ -413,40 +356,106 @@ impl NpmResolution {
}
}
async fn add_package_reqs_to_snapshot(
api: &NpmRegistryApi,
package_reqs: Vec<NpmPackageReq>,
snapshot: NpmResolutionSnapshot,
) -> Result<NpmResolutionSnapshot, AnyError> {
if package_reqs
.iter()
.all(|req| snapshot.package_reqs.contains_key(req))
{
return Ok(snapshot); // already up to date
}
// convert the snapshot to a traversable graph
let mut graph = Graph::from_snapshot(snapshot).with_context(|| {
deno_core::anyhow::anyhow!(
"Failed creating npm state. Try recreating your lockfile."
)
})?;
// avoid loading the info if this is already in the graph
let package_reqs = package_reqs
.into_iter()
.filter(|r| !graph.has_package_req(r))
.collect::<Vec<_>>();
// go over the top level package names first, then down the tree
// one level at a time through all the branches
api
.cache_in_parallel(
package_reqs
.iter()
.map(|r| r.name.clone())
.into_iter()
.collect::<Vec<_>>(),
)
.await?;
let mut resolver = GraphDependencyResolver::new(&mut graph, api);
// The package reqs should already be sorted
// in the order they should be resolved in.
for package_req in package_reqs {
let info = api.package_info(&package_req.name).await?;
resolver.add_package_req(&package_req, &info)?;
}
resolver.resolve_pending().await?;
let result = graph.into_snapshot(api).await;
api.clear_memory_cache();
result
}
#[cfg(test)]
mod test {
use deno_graph::npm::NpmPackageNv;
use deno_graph::semver::Version;
use super::NpmPackageNodeId;
use super::NpmPackageId;
#[test]
fn serialize_npm_package_id() {
let id = NpmPackageNodeId {
name: "pkg-a".to_string(),
version: Version::parse_from_npm("1.2.3").unwrap(),
let id = NpmPackageId {
nv: NpmPackageNv {
name: "pkg-a".to_string(),
version: Version::parse_from_npm("1.2.3").unwrap(),
},
peer_dependencies: vec![
NpmPackageNodeId {
name: "pkg-b".to_string(),
version: Version::parse_from_npm("3.2.1").unwrap(),
NpmPackageId {
nv: NpmPackageNv {
name: "pkg-b".to_string(),
version: Version::parse_from_npm("3.2.1").unwrap(),
},
peer_dependencies: vec![
NpmPackageNodeId {
name: "pkg-c".to_string(),
version: Version::parse_from_npm("1.3.2").unwrap(),
NpmPackageId {
nv: NpmPackageNv {
name: "pkg-c".to_string(),
version: Version::parse_from_npm("1.3.2").unwrap(),
},
peer_dependencies: vec![],
},
NpmPackageNodeId {
name: "pkg-d".to_string(),
version: Version::parse_from_npm("2.3.4").unwrap(),
NpmPackageId {
nv: NpmPackageNv {
name: "pkg-d".to_string(),
version: Version::parse_from_npm("2.3.4").unwrap(),
},
peer_dependencies: vec![],
},
],
},
NpmPackageNodeId {
name: "pkg-e".to_string(),
version: Version::parse_from_npm("2.3.1").unwrap(),
peer_dependencies: vec![NpmPackageNodeId {
name: "pkg-f".to_string(),
NpmPackageId {
nv: NpmPackageNv {
name: "pkg-e".to_string(),
version: Version::parse_from_npm("2.3.1").unwrap(),
},
peer_dependencies: vec![NpmPackageId {
nv: NpmPackageNv {
name: "pkg-f".to_string(),
version: Version::parse_from_npm("2.3.1").unwrap(),
},
peer_dependencies: vec![],
}],
},
@ -456,6 +465,6 @@ mod test {
// this shouldn't change because it's used in the lockfile
let serialized = id.as_serialized();
assert_eq!(serialized, "pkg-a@1.2.3_pkg-b@3.2.1__pkg-c@1.3.2__pkg-d@2.3.4_pkg-e@2.3.1__pkg-f@2.3.1");
assert_eq!(NpmPackageNodeId::from_serialized(&serialized).unwrap(), id);
assert_eq!(NpmPackageId::from_serialized(&serialized).unwrap(), id);
}
}


@ -8,21 +8,19 @@ use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::parking_lot::Mutex;
use deno_graph::npm::NpmPackageNv;
use deno_graph::npm::NpmPackageReq;
use deno_graph::semver::VersionReq;
use serde::Deserialize;
use serde::Serialize;
use crate::args::Lockfile;
use crate::npm::cache::should_sync_download;
use crate::npm::cache::NpmPackageCacheFolderId;
use crate::npm::registry::NpmPackageVersionDistInfo;
use crate::npm::registry::NpmRegistryApi;
use crate::npm::registry::RealNpmRegistryApi;
use super::NpmPackageNodeId;
use super::NpmPackageId;
use super::NpmResolutionPackage;
/// Packages partitioned by if they are "copy" packages or not.
@ -44,13 +42,17 @@ impl NpmPackagesPartitioned {
}
}
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct NpmResolutionSnapshot {
/// The unique package requirements map to a single npm package name and version.
#[serde(with = "map_to_vec")]
pub(super) package_reqs: HashMap<NpmPackageReq, NpmPackageNodeId>,
pub(super) packages_by_name: HashMap<String, Vec<NpmPackageNodeId>>,
pub(super) package_reqs: HashMap<NpmPackageReq, NpmPackageNv>,
// Each root level npm package name and version maps to an exact npm package node id.
#[serde(with = "map_to_vec")]
pub(super) packages: HashMap<NpmPackageNodeId, NpmResolutionPackage>,
pub(super) root_packages: HashMap<NpmPackageNv, NpmPackageId>,
pub(super) packages_by_name: HashMap<String, Vec<NpmPackageId>>,
#[serde(with = "map_to_vec")]
pub(super) packages: HashMap<NpmPackageId, NpmResolutionPackage>,
}
// This is done so the maps with non-string keys get serialized and deserialized as vectors.
@ -98,25 +100,24 @@ impl NpmResolutionSnapshot {
&self,
req: &NpmPackageReq,
) -> Result<&NpmResolutionPackage, AnyError> {
match self.package_reqs.get(req) {
Some(id) => Ok(self.packages.get(id).unwrap()),
match self
.package_reqs
.get(req)
.and_then(|nv| self.root_packages.get(nv))
.and_then(|id| self.packages.get(id))
{
Some(id) => Ok(id),
None => bail!("could not find npm package directory for '{}'", req),
}
}
pub fn top_level_packages(&self) -> Vec<NpmPackageNodeId> {
self
.package_reqs
.values()
.cloned()
.collect::<HashSet<_>>()
.into_iter()
.collect::<Vec<_>>()
pub fn top_level_packages(&self) -> Vec<NpmPackageId> {
self.root_packages.values().cloned().collect::<Vec<_>>()
}
pub fn package_from_id(
&self,
id: &NpmPackageNodeId,
id: &NpmPackageId,
) -> Option<&NpmResolutionPackage> {
self.packages.get(id)
}
@ -129,13 +130,13 @@ impl NpmResolutionSnapshot {
// todo(dsherret): do we need an additional hashmap to get this quickly?
let referrer_package = self
.packages_by_name
.get(&referrer.name)
.get(&referrer.nv.name)
.and_then(|packages| {
packages
.iter()
.filter(|p| p.version == referrer.version)
.filter_map(|id| {
let package = self.packages.get(id)?;
.filter(|p| p.nv.version == referrer.nv.version)
.filter_map(|node_id| {
let package = self.packages.get(node_id)?;
if package.copy_index == referrer.copy_index {
Some(package)
} else {
@ -153,7 +154,7 @@ impl NpmResolutionSnapshot {
return Ok(self.packages.get(id).unwrap());
}
if referrer_package.id.name == name {
if referrer_package.pkg_id.nv.name == name {
return Ok(referrer_package);
}
@ -198,19 +199,19 @@ impl NpmResolutionSnapshot {
&self,
name: &str,
version_req: &VersionReq,
) -> Option<NpmPackageNodeId> {
) -> Option<NpmPackageId> {
// todo(dsherret): this is not exactly correct because some ids
// will be better than others due to peer dependencies
let mut maybe_best_id: Option<&NpmPackageNodeId> = None;
if let Some(ids) = self.packages_by_name.get(name) {
for id in ids {
if version_req.matches(&id.version) {
let mut maybe_best_id: Option<&NpmPackageId> = None;
if let Some(node_ids) = self.packages_by_name.get(name) {
for node_id in node_ids.iter() {
if version_req.matches(&node_id.nv.version) {
let is_best_version = maybe_best_id
.as_ref()
.map(|best_id| best_id.version.cmp(&id.version).is_lt())
.map(|best_id| best_id.nv.version.cmp(&node_id.nv.version).is_lt())
.unwrap_or(true);
if is_best_version {
maybe_best_id = Some(id);
maybe_best_id = Some(node_id);
}
}
}
@ -220,11 +221,12 @@ impl NpmResolutionSnapshot {
pub async fn from_lockfile(
lockfile: Arc<Mutex<Lockfile>>,
api: &RealNpmRegistryApi,
api: &NpmRegistryApi,
) -> Result<Self, AnyError> {
let mut package_reqs: HashMap<NpmPackageReq, NpmPackageNodeId>;
let mut packages_by_name: HashMap<String, Vec<NpmPackageNodeId>>;
let mut packages: HashMap<NpmPackageNodeId, NpmResolutionPackage>;
let mut package_reqs: HashMap<NpmPackageReq, NpmPackageNv>;
let mut root_packages: HashMap<NpmPackageNv, NpmPackageId>;
let mut packages_by_name: HashMap<String, Vec<NpmPackageId>>;
let mut packages: HashMap<NpmPackageId, NpmResolutionPackage>;
let mut copy_index_resolver: SnapshotPackageCopyIndexResolver;
{
@ -233,6 +235,8 @@ impl NpmResolutionSnapshot {
// pre-allocate collections
package_reqs =
HashMap::with_capacity(lockfile.content.npm.specifiers.len());
root_packages =
HashMap::with_capacity(lockfile.content.npm.specifiers.len());
let packages_len = lockfile.content.npm.packages.len();
packages = HashMap::with_capacity(packages_len);
packages_by_name = HashMap::with_capacity(packages_len); // close enough
@ -244,31 +248,32 @@ impl NpmResolutionSnapshot {
for (key, value) in &lockfile.content.npm.specifiers {
let package_req = NpmPackageReq::from_str(key)
.with_context(|| format!("Unable to parse npm specifier: {key}"))?;
let package_id = NpmPackageNodeId::from_serialized(value)?;
package_reqs.insert(package_req, package_id.clone());
let package_id = NpmPackageId::from_serialized(value)?;
package_reqs.insert(package_req, package_id.nv.clone());
root_packages.insert(package_id.nv.clone(), package_id.clone());
verify_ids.insert(package_id.clone());
}
// then the packages
for (key, value) in &lockfile.content.npm.packages {
let package_id = NpmPackageNodeId::from_serialized(key)?;
let package_id = NpmPackageId::from_serialized(key)?;
// collect the dependencies
let mut dependencies = HashMap::default();
packages_by_name
.entry(package_id.name.to_string())
.entry(package_id.nv.name.to_string())
.or_default()
.push(package_id.clone());
for (name, specifier) in &value.dependencies {
let dep_id = NpmPackageNodeId::from_serialized(specifier)?;
let dep_id = NpmPackageId::from_serialized(specifier)?;
dependencies.insert(name.to_string(), dep_id.clone());
verify_ids.insert(dep_id);
}
let package = NpmResolutionPackage {
id: package_id.clone(),
pkg_id: package_id.clone(),
copy_index: copy_index_resolver.resolve(&package_id),
// temporary dummy value
dist: NpmPackageVersionDistInfo::default(),
@ -288,40 +293,20 @@ impl NpmResolutionSnapshot {
}
}
let mut unresolved_tasks = Vec::with_capacity(packages_by_name.len());
// cache the package names in parallel in the registry api
// unless synchronous download should occur
if should_sync_download() {
let mut package_names = packages_by_name.keys().collect::<Vec<_>>();
package_names.sort();
for package_name in package_names {
api.package_info(package_name).await?;
}
} else {
for package_name in packages_by_name.keys() {
let package_name = package_name.clone();
let api = api.clone();
unresolved_tasks.push(tokio::task::spawn(async move {
api.package_info(&package_name).await?;
Result::<_, AnyError>::Ok(())
}));
}
}
for result in futures::future::join_all(unresolved_tasks).await {
result??;
}
api
.cache_in_parallel(packages_by_name.keys().cloned().collect())
.await?;
// ensure the dist is set for each package
for package in packages.values_mut() {
// this will read from the memory cache now
let version_info = match api
.package_version_info(&package.id.name, &package.id.version)
.package_version_info(&package.pkg_id.nv)
.await?
{
Some(version_info) => version_info,
None => {
bail!("could not find '{}' specified in the lockfile. Maybe try again with --reload", package.id.display());
bail!("could not find '{}' specified in the lockfile. Maybe try again with --reload", package.pkg_id.nv);
}
};
package.dist = version_info.dist;
@ -329,6 +314,7 @@ impl NpmResolutionSnapshot {
Ok(Self {
package_reqs,
root_packages,
packages_by_name,
packages,
})
@ -336,8 +322,8 @@ impl NpmResolutionSnapshot {
}
pub struct SnapshotPackageCopyIndexResolver {
packages_to_copy_index: HashMap<NpmPackageNodeId, usize>,
package_name_version_to_copy_count: HashMap<(String, String), usize>,
packages_to_copy_index: HashMap<NpmPackageId, usize>,
package_name_version_to_copy_count: HashMap<NpmPackageNv, usize>,
}
impl SnapshotPackageCopyIndexResolver {
@ -349,7 +335,7 @@ impl SnapshotPackageCopyIndexResolver {
}
pub fn from_map_with_capacity(
mut packages_to_copy_index: HashMap<NpmPackageNodeId, usize>,
mut packages_to_copy_index: HashMap<NpmPackageId, usize>,
capacity: usize,
) -> Self {
let mut package_name_version_to_copy_count =
@ -358,9 +344,9 @@ impl SnapshotPackageCopyIndexResolver {
packages_to_copy_index.reserve(capacity - packages_to_copy_index.len());
}
for (id, index) in &packages_to_copy_index {
for (node_id, index) in &packages_to_copy_index {
let entry = package_name_version_to_copy_count
.entry((id.name.to_string(), id.version.to_string()))
.entry(node_id.nv.clone())
.or_insert(0);
if *entry < *index {
*entry = *index;
@ -372,18 +358,18 @@ impl SnapshotPackageCopyIndexResolver {
}
}
pub fn resolve(&mut self, id: &NpmPackageNodeId) -> usize {
if let Some(index) = self.packages_to_copy_index.get(id) {
pub fn resolve(&mut self, node_id: &NpmPackageId) -> usize {
if let Some(index) = self.packages_to_copy_index.get(node_id) {
*index
} else {
let index = *self
.package_name_version_to_copy_count
.entry((id.name.to_string(), id.version.to_string()))
.entry(node_id.nv.clone())
.and_modify(|count| {
*count += 1;
})
.or_insert(0);
self.packages_to_copy_index.insert(id.clone(), index);
self.packages_to_copy_index.insert(node_id.clone(), index);
index
}
}
@ -422,24 +408,24 @@ mod tests {
SnapshotPackageCopyIndexResolver::with_capacity(10);
assert_eq!(
copy_index_resolver
.resolve(&NpmPackageNodeId::from_serialized("package@1.0.0").unwrap()),
.resolve(&NpmPackageId::from_serialized("package@1.0.0").unwrap()),
0
);
assert_eq!(
copy_index_resolver
.resolve(&NpmPackageNodeId::from_serialized("package@1.0.0").unwrap()),
.resolve(&NpmPackageId::from_serialized("package@1.0.0").unwrap()),
0
);
assert_eq!(
copy_index_resolver.resolve(
&NpmPackageNodeId::from_serialized("package@1.0.0_package-b@1.0.0")
&NpmPackageId::from_serialized("package@1.0.0_package-b@1.0.0")
.unwrap()
),
1
);
assert_eq!(
copy_index_resolver.resolve(
&NpmPackageNodeId::from_serialized(
&NpmPackageId::from_serialized(
"package@1.0.0_package-b@1.0.0__package-c@2.0.0"
)
.unwrap()
@ -448,15 +434,14 @@ mod tests {
);
assert_eq!(
copy_index_resolver.resolve(
&NpmPackageNodeId::from_serialized("package@1.0.0_package-b@1.0.0")
&NpmPackageId::from_serialized("package@1.0.0_package-b@1.0.0")
.unwrap()
),
1
);
assert_eq!(
copy_index_resolver.resolve(
&NpmPackageNodeId::from_serialized("package-b@1.0.0").unwrap()
),
copy_index_resolver
.resolve(&NpmPackageId::from_serialized("package-b@1.0.0").unwrap()),
0
);
}


@ -6,8 +6,8 @@ use std::collections::HashSet;
use std::collections::VecDeque;
use deno_ast::ModuleSpecifier;
use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReq;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::ModuleGraph;
pub struct GraphNpmInfo {
@ -113,7 +113,7 @@ pub fn resolve_graph_npm_info(graph: &ModuleGraph) -> GraphNpmInfo {
// fill this leaf's information
for specifier in &specifiers {
if let Ok(npm_ref) = NpmPackageReference::from_specifier(specifier) {
if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(specifier) {
leaf.reqs.insert(npm_ref.req);
} else if !specifier.as_str().starts_with(parent_specifier.as_str()) {
leaf
@ -165,7 +165,7 @@ pub fn resolve_graph_npm_info(graph: &ModuleGraph) -> GraphNpmInfo {
let mut result = Vec::new();
for specifier in &root_specifiers {
match NpmPackageReference::from_specifier(specifier) {
match NpmPackageReqReference::from_specifier(specifier) {
Ok(npm_ref) => result.push(npm_ref.req),
Err(_) => {
pending_specifiers.push_back(get_folder_path_specifier(specifier))


@ -18,7 +18,7 @@ use crate::args::Lockfile;
use crate::npm::cache::should_sync_download;
use crate::npm::resolution::NpmResolutionSnapshot;
use crate::npm::NpmCache;
use crate::npm::NpmPackageNodeId;
use crate::npm::NpmPackageId;
use crate::npm::NpmResolutionPackage;
pub trait InnerNpmPackageResolver: Send + Sync {
@ -39,10 +39,7 @@ pub trait InnerNpmPackageResolver: Send + Sync {
specifier: &ModuleSpecifier,
) -> Result<PathBuf, AnyError>;
fn package_size(
&self,
package_id: &NpmPackageNodeId,
) -> Result<u64, AnyError>;
fn package_size(&self, package_id: &NpmPackageId) -> Result<u64, AnyError>;
fn has_packages(&self) -> bool;
@ -79,7 +76,7 @@ pub async fn cache_packages(
if sync_download {
// we're running the tests not with --quiet
// and we want the output to be deterministic
packages.sort_by(|a, b| a.id.cmp(&b.id));
packages.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));
}
let mut handles = Vec::with_capacity(packages.len());
@ -90,7 +87,7 @@ pub async fn cache_packages(
let handle = tokio::task::spawn(async move {
cache
.ensure_package(
(package.id.name.as_str(), &package.id.version),
(package.pkg_id.nv.name.as_str(), &package.pkg_id.nv.version),
&package.dist,
&registry_url,
)


@ -22,9 +22,9 @@ use crate::npm::resolution::NpmResolution;
use crate::npm::resolution::NpmResolutionSnapshot;
use crate::npm::resolvers::common::cache_packages;
use crate::npm::NpmCache;
use crate::npm::NpmPackageNodeId;
use crate::npm::NpmPackageId;
use crate::npm::NpmRegistryApi;
use crate::npm::NpmResolutionPackage;
use crate::npm::RealNpmRegistryApi;
use super::common::ensure_registry_read_permission;
use super::common::types_package_name;
@ -41,7 +41,7 @@ pub struct GlobalNpmPackageResolver {
impl GlobalNpmPackageResolver {
pub fn new(
cache: NpmCache,
api: RealNpmRegistryApi,
api: NpmRegistryApi,
initial_snapshot: Option<NpmResolutionSnapshot>,
) -> Self {
let registry_url = api.base_url().to_owned();
@ -54,7 +54,7 @@ impl GlobalNpmPackageResolver {
}
}
fn package_folder(&self, id: &NpmPackageNodeId) -> PathBuf {
fn package_folder(&self, id: &NpmPackageId) -> PathBuf {
let folder_id = self
.resolution
.resolve_package_cache_folder_id_from_id(id)
@ -82,7 +82,7 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver {
pkg_req: &NpmPackageReq,
) -> Result<PathBuf, AnyError> {
let pkg = self.resolution.resolve_package_from_deno_module(pkg_req)?;
Ok(self.package_folder(&pkg.id))
Ok(self.package_folder(&pkg.pkg_id))
}
fn resolve_package_folder_from_package(
@ -107,7 +107,7 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver {
.resolution
.resolve_package_from_package(name, &referrer_pkg_id)?
};
Ok(self.package_folder(&pkg.id))
Ok(self.package_folder(&pkg.pkg_id))
}
fn resolve_package_folder_from_specifier(
@ -125,10 +125,7 @@ impl InnerNpmPackageResolver for GlobalNpmPackageResolver {
)
}
fn package_size(
&self,
package_id: &NpmPackageNodeId,
) -> Result<u64, AnyError> {
fn package_size(&self, package_id: &NpmPackageId) -> Result<u64, AnyError> {
let package_folder = self.package_folder(package_id);
Ok(crate::util::fs::dir_size(&package_folder)?)
}


@ -32,9 +32,9 @@ use crate::npm::cache::NpmPackageCacheFolderId;
use crate::npm::resolution::NpmResolution;
use crate::npm::resolution::NpmResolutionSnapshot;
use crate::npm::NpmCache;
use crate::npm::NpmPackageNodeId;
use crate::npm::NpmPackageId;
use crate::npm::NpmRegistryApi;
use crate::npm::NpmResolutionPackage;
use crate::npm::RealNpmRegistryApi;
use crate::util::fs::copy_dir_recursive;
use crate::util::fs::hard_link_dir_recursive;
@ -56,7 +56,7 @@ pub struct LocalNpmPackageResolver {
impl LocalNpmPackageResolver {
pub fn new(
cache: NpmCache,
api: RealNpmRegistryApi,
api: NpmRegistryApi,
node_modules_folder: PathBuf,
initial_snapshot: Option<NpmResolutionSnapshot>,
) -> Self {
@ -112,7 +112,7 @@ impl LocalNpmPackageResolver {
fn get_package_id_folder(
&self,
package_id: &NpmPackageNodeId,
package_id: &NpmPackageId,
) -> Result<PathBuf, AnyError> {
match self.resolution.resolve_package_from_id(package_id) {
Some(package) => Ok(self.get_package_id_folder_from_package(&package)),
@ -136,7 +136,7 @@ impl LocalNpmPackageResolver {
&package.get_package_cache_folder_id(),
))
.join("node_modules")
.join(&package.id.name)
.join(&package.pkg_id.nv.name)
}
}
@ -203,10 +203,7 @@ impl InnerNpmPackageResolver for LocalNpmPackageResolver {
Ok(package_root_path)
}
fn package_size(
&self,
package_id: &NpmPackageNodeId,
) -> Result<u64, AnyError> {
fn package_size(&self, package_id: &NpmPackageId) -> Result<u64, AnyError> {
let package_folder_path = self.get_package_id_folder(package_id)?;
Ok(crate::util::fs::dir_size(&package_folder_path)?)
@ -303,7 +300,9 @@ async fn sync_resolution_with_fs(
if sync_download {
// we're running the tests not with --quiet
// and we want the output to be deterministic
package_partitions.packages.sort_by(|a, b| a.id.cmp(&b.id));
package_partitions
.packages
.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));
}
let mut handles: Vec<JoinHandle<Result<(), AnyError>>> =
Vec::with_capacity(package_partitions.packages.len());
@ -314,7 +313,7 @@ async fn sync_resolution_with_fs(
let initialized_file = folder_path.join(".initialized");
if !cache
.cache_setting()
.should_use_for_npm_package(&package.id.name)
.should_use_for_npm_package(&package.pkg_id.nv.name)
|| !initialized_file.exists()
{
let cache = cache.clone();
@ -323,19 +322,19 @@ async fn sync_resolution_with_fs(
let handle = tokio::task::spawn(async move {
cache
.ensure_package(
(&package.id.name, &package.id.version),
(&package.pkg_id.nv.name, &package.pkg_id.nv.version),
&package.dist,
&registry_url,
)
.await?;
let sub_node_modules = folder_path.join("node_modules");
let package_path =
join_package_name(&sub_node_modules, &package.id.name);
join_package_name(&sub_node_modules, &package.pkg_id.nv.name);
fs::create_dir_all(&package_path)
.with_context(|| format!("Creating '{}'", folder_path.display()))?;
let cache_folder = cache.package_folder_for_name_and_version(
&package.id.name,
&package.id.version,
&package.pkg_id.nv.name,
&package.pkg_id.nv.version,
&registry_url,
);
// for now copy, but in the future consider hard linking
@ -365,7 +364,8 @@ async fn sync_resolution_with_fs(
let initialized_file = destination_path.join(".initialized");
if !initialized_file.exists() {
let sub_node_modules = destination_path.join("node_modules");
let package_path = join_package_name(&sub_node_modules, &package.id.name);
let package_path =
join_package_name(&sub_node_modules, &package.pkg_id.nv.name);
fs::create_dir_all(&package_path).with_context(|| {
format!("Creating '{}'", destination_path.display())
})?;
@ -375,7 +375,7 @@ async fn sync_resolution_with_fs(
&package_cache_folder_id.with_no_count(),
))
.join("node_modules"),
&package.id.name,
&package.pkg_id.nv.name,
);
hard_link_dir_recursive(&source_path, &package_path)?;
// write out a file that indicates this folder has been initialized
@ -406,7 +406,7 @@ async fn sync_resolution_with_fs(
&deno_local_registry_dir
.join(dep_folder_name)
.join("node_modules"),
&dep_id.name,
&dep_id.nv.name,
);
symlink_package_dir(
&dep_folder_path,
@ -428,10 +428,10 @@ async fn sync_resolution_with_fs(
.map(|id| (id, true)),
);
while let Some((package_id, is_top_level)) = pending_packages.pop_front() {
let root_folder_name = if found_names.insert(package_id.name.clone()) {
package_id.name.clone()
let root_folder_name = if found_names.insert(package_id.nv.name.clone()) {
package_id.nv.name.clone()
} else if is_top_level {
package_id.display()
format!("{}@{}", package_id.nv.name, package_id.nv.version)
} else {
continue; // skip, already handled
};
@ -442,7 +442,7 @@ async fn sync_resolution_with_fs(
&package.get_package_cache_folder_id(),
))
.join("node_modules"),
&package_id.name,
&package_id.nv.name,
);
symlink_package_dir(
@ -457,18 +457,21 @@ async fn sync_resolution_with_fs(
Ok(())
}
fn get_package_folder_id_folder_name(id: &NpmPackageCacheFolderId) -> String {
let copy_str = if id.copy_index == 0 {
fn get_package_folder_id_folder_name(
folder_id: &NpmPackageCacheFolderId,
) -> String {
let copy_str = if folder_id.copy_index == 0 {
"".to_string()
} else {
format!("_{}", id.copy_index)
format!("_{}", folder_id.copy_index)
};
let name = if id.name.to_lowercase() == id.name {
Cow::Borrowed(&id.name)
let nv = &folder_id.nv;
let name = if nv.name.to_lowercase() == nv.name {
Cow::Borrowed(&nv.name)
} else {
Cow::Owned(format!("_{}", mixed_case_package_name_encode(&id.name)))
Cow::Owned(format!("_{}", mixed_case_package_name_encode(&nv.name)))
};
format!("{}@{}{}", name, id.version, copy_str).replace('/', "+")
format!("{}@{}{}", name, nv.version, copy_str).replace('/', "+")
}
fn symlink_package_dir(
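
Note: the renamed helper above derives the on-disk cache folder name from the (possibly case-encoded) package name, the version, and an optional copy-index suffix, with any '/' replaced by '+'. The following standalone sketch mirrors that logic for illustration only; CacheFolderId and encode_mixed_case are hypothetical stand-ins for NpmPackageCacheFolderId and mixed_case_package_name_encode.

// Illustrative approximation of get_package_folder_id_folder_name.
struct CacheFolderId {
  name: String,
  version: String,
  copy_index: usize,
}

fn folder_name(id: &CacheFolderId) -> String {
  // A copy index of 0 adds no suffix; later copies get "_1", "_2", ...
  let copy = if id.copy_index == 0 {
    String::new()
  } else {
    format!("_{}", id.copy_index)
  };
  // Mixed-case names are replaced by an encoded, underscore-prefixed form.
  let name = if id.name.to_lowercase() == id.name {
    id.name.clone()
  } else {
    format!("_{}", encode_mixed_case(&id.name))
  };
  // Scoped names like "@scope/pkg" become "@scope+pkg" on disk.
  format!("{}@{}{}", name, id.version, copy).replace('/', "+")
}

fn encode_mixed_case(name: &str) -> String {
  // Placeholder only; the real encoding is mixed_case_package_name_encode.
  name.to_lowercase()
}

fn main() {
  let id = CacheFolderId {
    name: "@denotest/peer-dep".to_string(),
    version: "1.0.0".to_string(),
    copy_index: 1,
  };
  // Prints: @denotest+peer-dep@1.0.0_1
  println!("{}", folder_name(&id));
}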

View file

@ -30,9 +30,9 @@ use crate::util::fs::canonicalize_path_maybe_not_exists;
use self::common::InnerNpmPackageResolver;
use self::local::LocalNpmPackageResolver;
use super::NpmCache;
use super::NpmPackageNodeId;
use super::NpmPackageId;
use super::NpmRegistryApi;
use super::NpmResolutionSnapshot;
use super::RealNpmRegistryApi;
/// State provided to the process via an environment variable.
#[derive(Clone, Debug, Serialize, Deserialize)]
@ -46,7 +46,7 @@ pub struct NpmPackageResolver {
no_npm: bool,
inner: Arc<dyn InnerNpmPackageResolver>,
local_node_modules_path: Option<PathBuf>,
api: RealNpmRegistryApi,
api: NpmRegistryApi,
cache: NpmCache,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
}
@ -62,13 +62,13 @@ impl std::fmt::Debug for NpmPackageResolver {
}
impl NpmPackageResolver {
pub fn new(cache: NpmCache, api: RealNpmRegistryApi) -> Self {
pub fn new(cache: NpmCache, api: NpmRegistryApi) -> Self {
Self::new_inner(cache, api, false, None, None, None)
}
pub async fn new_with_maybe_lockfile(
cache: NpmCache,
api: RealNpmRegistryApi,
api: NpmRegistryApi,
no_npm: bool,
local_node_modules_path: Option<PathBuf>,
initial_snapshot: Option<NpmResolutionSnapshot>,
@ -105,7 +105,7 @@ impl NpmPackageResolver {
fn new_inner(
cache: NpmCache,
api: RealNpmRegistryApi,
api: NpmRegistryApi,
no_npm: bool,
local_node_modules_path: Option<PathBuf>,
maybe_snapshot: Option<NpmResolutionSnapshot>,
@ -187,7 +187,7 @@ impl NpmPackageResolver {
/// Attempts to get the package size in bytes.
pub fn package_size(
&self,
package_id: &NpmPackageNodeId,
package_id: &NpmPackageId,
) -> Result<u64, AnyError> {
self.inner.package_size(package_id)
}

View file

@ -25,7 +25,7 @@ use crate::node::NodeResolution;
use crate::npm::resolve_graph_npm_info;
use crate::npm::NpmCache;
use crate::npm::NpmPackageResolver;
use crate::npm::RealNpmRegistryApi;
use crate::npm::NpmRegistryApi;
use crate::resolver::CliGraphResolver;
use crate::tools::check;
use crate::util::progress_bar::ProgressBar;
@ -43,8 +43,8 @@ use deno_core::resolve_url_or_path;
use deno_core::CompiledWasmModuleStore;
use deno_core::ModuleSpecifier;
use deno_core::SharedArrayBufferStore;
use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReq;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::source::Loader;
use deno_graph::source::Resolver;
use deno_graph::ModuleGraph;
@ -244,15 +244,15 @@ impl ProcState {
let emit_cache = EmitCache::new(dir.gen_cache.clone());
let parsed_source_cache =
ParsedSourceCache::new(Some(dir.dep_analysis_db_file_path()));
let registry_url = RealNpmRegistryApi::default_url();
let registry_url = NpmRegistryApi::default_url();
let npm_cache = NpmCache::from_deno_dir(
&dir,
cli_options.cache_setting(),
http_client.clone(),
progress_bar.clone(),
);
let api = RealNpmRegistryApi::new(
registry_url,
let api = NpmRegistryApi::new(
registry_url.clone(),
npm_cache.clone(),
http_client.clone(),
progress_bar.clone(),
@ -516,7 +516,8 @@ impl ProcState {
return node::resolve_builtin_node_module(specifier.path());
}
if let Ok(reference) = NpmPackageReference::from_specifier(specifier)
if let Ok(reference) =
NpmPackageReqReference::from_specifier(specifier)
{
if !self.options.unstable()
&& matches!(found_referrer.scheme(), "http" | "https")
@ -575,7 +576,9 @@ impl ProcState {
.map(Cow::Borrowed)
.or_else(|| ModuleSpecifier::parse(specifier).ok().map(Cow::Owned));
if let Some(specifier) = specifier {
if let Ok(reference) = NpmPackageReference::from_specifier(&specifier) {
if let Ok(reference) =
NpmPackageReqReference::from_specifier(&specifier)
{
return self
.handle_node_resolve_result(node::node_resolve_npm_reference(
&reference,
@ -747,7 +750,7 @@ impl GraphData {
}
"npm" => {
if !has_npm_specifier_in_graph
&& NpmPackageReference::from_specifier(specifier).is_ok()
&& NpmPackageReqReference::from_specifier(specifier).is_ok()
{
has_npm_specifier_in_graph = true;
}

View file

@ -10,8 +10,8 @@ use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReq;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::Dependency;
use deno_graph::Module;
use deno_graph::ModuleGraph;
@ -22,7 +22,7 @@ use deno_runtime::colors;
use crate::args::Flags;
use crate::args::InfoFlags;
use crate::display;
use crate::npm::NpmPackageNodeId;
use crate::npm::NpmPackageId;
use crate::npm::NpmPackageResolver;
use crate::npm::NpmResolutionPackage;
use crate::npm::NpmResolutionSnapshot;
@ -150,7 +150,7 @@ fn add_npm_packages_to_json(
let maybe_package = module
.get("specifier")
.and_then(|k| k.as_str())
.and_then(|specifier| NpmPackageReference::from_str(specifier).ok())
.and_then(|specifier| NpmPackageReqReference::from_str(specifier).ok())
.and_then(|package_ref| {
snapshot
.resolve_package_from_deno_module(&package_ref.req)
@ -158,8 +158,10 @@ fn add_npm_packages_to_json(
});
if let Some(pkg) = maybe_package {
if let Some(module) = module.as_object_mut() {
module
.insert("npmPackage".to_string(), pkg.id.as_serialized().into());
module.insert(
"npmPackage".to_string(),
pkg.pkg_id.as_serialized().into(),
);
// change the "kind" to be "npm"
module.insert("kind".to_string(), "npm".into());
}
@ -186,13 +188,13 @@ fn add_npm_packages_to_json(
if let serde_json::Value::Object(dep) = dep {
let specifier = dep.get("specifier").and_then(|s| s.as_str());
if let Some(specifier) = specifier {
if let Ok(npm_ref) = NpmPackageReference::from_str(specifier) {
if let Ok(npm_ref) = NpmPackageReqReference::from_str(specifier) {
if let Ok(pkg) =
snapshot.resolve_package_from_deno_module(&npm_ref.req)
{
dep.insert(
"npmPackage".to_string(),
pkg.id.as_serialized().into(),
pkg.pkg_id.as_serialized().into(),
);
}
}
@ -204,12 +206,15 @@ fn add_npm_packages_to_json(
}
let mut sorted_packages = snapshot.all_packages();
sorted_packages.sort_by(|a, b| a.id.cmp(&b.id));
sorted_packages.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));
let mut json_packages = serde_json::Map::with_capacity(sorted_packages.len());
for pkg in sorted_packages {
let mut kv = serde_json::Map::new();
kv.insert("name".to_string(), pkg.id.name.to_string().into());
kv.insert("version".to_string(), pkg.id.version.to_string().into());
kv.insert("name".to_string(), pkg.pkg_id.nv.name.to_string().into());
kv.insert(
"version".to_string(),
pkg.pkg_id.nv.version.to_string().into(),
);
let mut deps = pkg.dependencies.values().collect::<Vec<_>>();
deps.sort();
let deps = deps
@ -218,7 +223,7 @@ fn add_npm_packages_to_json(
.collect::<Vec<_>>();
kv.insert("dependencies".to_string(), deps.into());
json_packages.insert(pkg.id.as_serialized(), kv.into());
json_packages.insert(pkg.pkg_id.as_serialized(), kv.into());
}
json.insert("npmPackages".to_string(), json_packages.into());
@ -297,9 +302,9 @@ fn print_tree_node<TWrite: Write>(
/// Precached information about npm packages that are used in deno info.
#[derive(Default)]
struct NpmInfo {
package_sizes: HashMap<NpmPackageNodeId, u64>,
resolved_reqs: HashMap<NpmPackageReq, NpmPackageNodeId>,
packages: HashMap<NpmPackageNodeId, NpmResolutionPackage>,
package_sizes: HashMap<NpmPackageId, u64>,
resolved_reqs: HashMap<NpmPackageReq, NpmPackageId>,
packages: HashMap<NpmPackageId, NpmResolutionPackage>,
specifiers: HashMap<ModuleSpecifier, NpmPackageReq>,
}
@ -315,15 +320,17 @@ impl NpmInfo {
}
for (specifier, _) in graph.specifiers() {
if let Ok(reference) = NpmPackageReference::from_specifier(specifier) {
if let Ok(reference) = NpmPackageReqReference::from_specifier(specifier) {
info
.specifiers
.insert(specifier.clone(), reference.req.clone());
if let Ok(package) =
npm_snapshot.resolve_package_from_deno_module(&reference.req)
{
info.resolved_reqs.insert(reference.req, package.id.clone());
if !info.packages.contains_key(&package.id) {
info
.resolved_reqs
.insert(reference.req, package.pkg_id.clone());
if !info.packages.contains_key(&package.pkg_id) {
info.fill_package_info(package, npm_resolver, npm_snapshot);
}
}
@ -339,9 +346,11 @@ impl NpmInfo {
npm_resolver: &'a NpmPackageResolver,
npm_snapshot: &'a NpmResolutionSnapshot,
) {
self.packages.insert(package.id.clone(), package.clone());
if let Ok(size) = npm_resolver.package_size(&package.id) {
self.package_sizes.insert(package.id.clone(), size);
self
.packages
.insert(package.pkg_id.clone(), package.clone());
if let Ok(size) = npm_resolver.package_size(&package.pkg_id) {
self.package_sizes.insert(package.pkg_id.clone(), size);
}
for id in package.dependencies.values() {
if !self.packages.contains_key(id) {
@ -504,7 +513,7 @@ impl<'a> GraphDisplayContext<'a> {
None => Specifier(module.specifier.clone()),
};
let was_seen = !self.seen.insert(match &package_or_specifier {
Package(package) => package.id.as_serialized(),
Package(package) => package.pkg_id.as_serialized(),
Specifier(specifier) => specifier.to_string(),
});
let header_text = if was_seen {
@ -522,13 +531,13 @@ impl<'a> GraphDisplayContext<'a> {
};
let header_text = match &package_or_specifier {
Package(package) => {
format!("{} - {}", specifier_str, package.id.version)
format!("{} - {}", specifier_str, package.pkg_id.nv.version)
}
Specifier(_) => specifier_str,
};
let maybe_size = match &package_or_specifier {
Package(package) => {
self.npm_info.package_sizes.get(&package.id).copied()
self.npm_info.package_sizes.get(&package.pkg_id).copied()
}
Specifier(_) => module
.maybe_source
@ -579,7 +588,7 @@ impl<'a> GraphDisplayContext<'a> {
));
if let Some(package) = self.npm_info.packages.get(dep_id) {
if !package.dependencies.is_empty() {
let was_seen = !self.seen.insert(package.id.as_serialized());
let was_seen = !self.seen.insert(package.pkg_id.as_serialized());
if was_seen {
child.text = format!("{} {}", child.text, colors::gray("*"));
} else {

View file

@ -15,7 +15,7 @@ use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::url::Url;
use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReqReference;
use log::Level;
use once_cell::sync::Lazy;
use regex::Regex;
@ -139,7 +139,7 @@ pub async fn infer_name_from_url(url: &Url) -> Option<String> {
}
}
if let Ok(npm_ref) = NpmPackageReference::from_specifier(&url) {
if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(&url) {
if let Some(sub_path) = npm_ref.sub_path {
if !sub_path.contains('/') {
return Some(sub_path);
@ -430,7 +430,7 @@ async fn resolve_shim_data(
executable_args.push("--no-lock".to_string());
} else if flags.lock.is_some()
// always use a lockfile for an npm entrypoint unless --no-lock
|| NpmPackageReference::from_specifier(&module_url).is_ok()
|| NpmPackageReqReference::from_specifier(&module_url).is_ok()
{
let copy_path = get_hidden_file_with_ext(&file_path, "lock.json");
executable_args.push("--lock".to_string());

View file

@ -18,7 +18,7 @@ use deno_core::futures::StreamExt;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use deno_core::LocalInspectorSession;
use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::source::Resolver;
use deno_runtime::deno_node;
use deno_runtime::worker::MainWorker;
@ -454,7 +454,7 @@ impl ReplSession {
let npm_imports = resolved_imports
.iter()
.flat_map(|url| NpmPackageReference::from_specifier(url).ok())
.flat_map(|url| NpmPackageReqReference::from_specifier(url).ok())
.map(|r| r.req)
.collect::<Vec<_>>();
let has_node_specifier =

View file

@ -7,7 +7,7 @@ use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReqReference;
use deno_runtime::permissions::Permissions;
use deno_runtime::permissions::PermissionsContainer;
@ -50,12 +50,12 @@ To grant permissions, set them before the script argument. For example:
ps.dir.upgrade_check_file_path(),
);
let main_module = if NpmPackageReference::from_str(&run_flags.script).is_ok()
{
ModuleSpecifier::parse(&run_flags.script)?
} else {
resolve_url_or_path(&run_flags.script)?
};
let main_module =
if NpmPackageReqReference::from_str(&run_flags.script).is_ok() {
ModuleSpecifier::parse(&run_flags.script)?
} else {
resolve_url_or_path(&run_flags.script)?
};
let permissions = PermissionsContainer::new(Permissions::from_options(
&ps.options.permissions_options(),
)?);

View file

@ -28,7 +28,7 @@ use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_core::RuntimeOptions;
use deno_core::Snapshot;
use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::ModuleGraph;
use deno_graph::ModuleKind;
use deno_graph::ResolutionResolved;
@ -654,7 +654,7 @@ fn op_resolve(
if module.kind == ModuleKind::External {
// handle npm:<package> urls
if let Ok(npm_ref) =
NpmPackageReference::from_specifier(&module.specifier)
NpmPackageReqReference::from_specifier(&module.specifier)
{
if let Some(npm_resolver) = &state.maybe_npm_resolver {
Some(resolve_npm_package_reference_types(
@ -689,7 +689,8 @@ fn op_resolve(
.ok()
.flatten(),
))
} else if let Ok(npm_ref) = NpmPackageReference::from_str(&specifier)
} else if let Ok(npm_ref) =
NpmPackageReqReference::from_str(&specifier)
{
// this could occur when resolving npm:@types/node when it is
// injected and not part of the graph
@ -740,7 +741,7 @@ fn op_resolve(
}
pub fn resolve_npm_package_reference_types(
npm_ref: &NpmPackageReference,
npm_ref: &NpmPackageReqReference,
npm_resolver: &NpmPackageResolver,
) -> Result<(ModuleSpecifier, MediaType), AnyError> {
let maybe_resolution = node_resolve_npm_reference(

View file

@ -14,7 +14,7 @@ use deno_core::serde_v8;
use deno_core::v8;
use deno_core::Extension;
use deno_core::ModuleId;
use deno_graph::npm::NpmPackageReference;
use deno_graph::npm::NpmPackageReqReference;
use deno_runtime::colors;
use deno_runtime::deno_node;
use deno_runtime::fmt_errors::format_js_error;
@ -308,7 +308,7 @@ impl CliMainWorker {
)
.await?;
if let DenoSubcommand::Run(flags) = self.ps.options.sub_command() {
if let Ok(pkg_ref) = NpmPackageReference::from_str(&flags.script) {
if let Ok(pkg_ref) = NpmPackageReqReference::from_str(&flags.script) {
// if the user ran a binary command, we'll need to set process.argv[0]
// to be the name of the binary command instead of deno
let binary_name = pkg_ref
@ -443,7 +443,7 @@ async fn create_main_worker_internal(
bench_or_test: bool,
) -> Result<CliMainWorker, AnyError> {
let (main_module, is_main_cjs) = if let Ok(package_ref) =
NpmPackageReference::from_specifier(&main_module)
NpmPackageReqReference::from_specifier(&main_module)
{
ps.npm_resolver
.add_package_reqs(vec![package_ref.req.clone()])