Mirror of https://github.com/denoland/deno.git (synced 2024-12-23 15:49:44 -05:00)
refactor: upgrade to deno_npm 0.6 (#19244)

commit 91ca9904b5 (parent 114ec3c1f7)

9 changed files with 46 additions and 64 deletions
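This upgrade tracks a rename in deno_npm: the package identifier field that 0.5.x exposed as `pkg_id` is exposed as `id` in 0.6.0, so nearly every hunk below is a mechanical change from `pkg.pkg_id` / `package.pkg_id` to `pkg.id` / `package.id`. A minimal sketch of the shape involved, inferred only from the call sites in this diff (the real deno_npm types carry more fields than shown):

// Hypothetical, simplified model; field types are guesses for illustration.
struct NpmPackageNv { name: String, version: String }
struct NpmPackageId { nv: NpmPackageNv }

// deno_npm 0.5.x: `package.pkg_id.nv.name`
// deno_npm 0.6.0: `package.id.nv.name`
struct PackageModel { id: NpmPackageId }

fn main() {
    let package = PackageModel {
        id: NpmPackageId {
            nv: NpmPackageNv { name: "chalk".into(), version: "5.0.1".into() },
        },
    };
    println!("{}@{}", package.id.nv.name, package.id.nv.version);
}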
Cargo.lock (generated): 4 changes
@@ -1217,9 +1217,9 @@ dependencies = [
 [[package]]
 name = "deno_npm"
-version = "0.5.1"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e9885542e422eb2500f279c2cf9d048a2b306db60df83ec4c34cefe71ae6cc2a"
+checksum = "54607b69689ab1e778e5e00545456e6f0c2310205e1bdae01af601c2dace0121"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -53,7 +53,7 @@ deno_bench_util = { version = "0.99.0", path = "./bench_util" }
 test_util = { path = "./test_util" }
 deno_lockfile = "0.14.1"
 deno_media_type = { version = "0.1.0", features = ["module_specifier"] }
-deno_npm = "0.5.1"
+deno_npm = "0.6.0"
 deno_semver = "0.2.1"

 # exts
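As context (not recorded in the commit itself), a workspace bump like this is usually followed by refreshing the lockfile, for example with a command along these lines, which is what produces the version and checksum changes in the Cargo.lock hunk above:

cargo update -p deno_npm --precise 0.6.0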
@@ -82,7 +82,7 @@ pub async fn snapshot_from_lockfile(
 // now fill the packages except for the dist information
 let mut packages = Vec::with_capacity(lockfile.content.npm.packages.len());
 for (key, package) in &lockfile.content.npm.packages {
-let pkg_id = NpmPackageId::from_serialized(key)?;
+let id = NpmPackageId::from_serialized(key)?;

 // collect the dependencies
 let mut dependencies = HashMap::with_capacity(package.dependencies.len());
@@ -92,7 +92,7 @@ pub async fn snapshot_from_lockfile(
 }

 packages.push(SerializedNpmResolutionSnapshotPackage {
-pkg_id,
+id,
 dependencies,
 // temporarily empty
 os: Default::default(),
@@ -105,10 +105,7 @@ pub async fn snapshot_from_lockfile(
 };

 // now that the lockfile is dropped, fetch the package version information
-let pkg_nvs = packages
-.iter()
-.map(|p| p.pkg_id.nv.clone())
-.collect::<Vec<_>>();
+let pkg_nvs = packages.iter().map(|p| p.id.nv.clone()).collect::<Vec<_>>();
 let get_version_infos = || {
 FuturesOrdered::from_iter(pkg_nvs.iter().map(|nv| async move {
 let package_info = api.package_info(&nv.name).await?;
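The snapshot code above queues one `package_info` fetch per entry in `pkg_nvs` through a `FuturesOrdered`, which yields results in the order the futures were queued, so the responses stay aligned with `pkg_nvs` by index even though the requests run concurrently. A standalone sketch of that ordering property (not code from this commit; assumes the futures and tokio crates):

use futures::stream::{FuturesOrdered, StreamExt};

#[tokio::main]
async fn main() {
    let names = ["a", "b", "c"];
    // One future per name; FuturesOrdered preserves submission order even if
    // later futures happen to finish first.
    let mut infos = FuturesOrdered::from_iter(
        names.iter().map(|name| async move { format!("package info for {name}") }),
    );
    while let Some(info) = infos.next().await {
        println!("{info}"); // prints in the order a, b, c
    }
}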
@@ -198,7 +198,7 @@ impl NpmResolution {
 .snapshot
 .read()
 .resolve_pkg_from_pkg_req(req)
-.map(|pkg| pkg.pkg_id.clone())
+.map(|pkg| pkg.id.clone())
 }

 pub fn resolve_pkg_id_from_deno_module(
@@ -209,7 +209,7 @@ impl NpmResolution {
 .snapshot
 .read()
 .resolve_package_from_deno_module(id)
-.map(|pkg| pkg.pkg_id.clone())
+.map(|pkg| pkg.id.clone())
 }

 /// Resolves a package requirement for deno graph. This should only be
@@ -326,7 +326,7 @@ fn populate_lockfile_from_snapshot(
 snapshot
 .resolve_package_from_deno_module(nv)
 .unwrap()
-.pkg_id
+.id
 .as_serialized(),
 );
 }
@@ -350,8 +350,8 @@ fn npm_package_to_lockfile_info(
 .collect();

 NpmPackageLockfileInfo {
-display_id: pkg.pkg_id.nv.to_string(),
-serialized_id: pkg.pkg_id.as_serialized(),
+display_id: pkg.id.nv.to_string(),
+serialized_id: pkg.id.as_serialized(),
 integrity: pkg.dist.integrity().to_string(),
 dependencies,
 }
@@ -64,7 +64,7 @@ pub async fn cache_packages(
 if sync_download {
 // we're running the tests not with --quiet
 // and we want the output to be deterministic
-packages.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));
+packages.sort_by(|a, b| a.id.cmp(&b.id));
 }

 let mut handles = Vec::with_capacity(packages.len());
@@ -73,7 +73,7 @@ pub async fn cache_packages(
 let registry_url = registry_url.clone();
 let handle = spawn(async move {
 cache
-.ensure_package(&package.pkg_id.nv, &package.dist, &registry_url)
+.ensure_package(&package.id.nv, &package.dist, &registry_url)
 .await
 });
 if sync_download {
@@ -110,7 +110,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
 .resolution
 .resolve_package_from_package(name, &referrer_pkg_id)?
 };
-self.package_folder(&pkg.pkg_id)
+self.package_folder(&pkg.id)
 }

 fn resolve_package_folder_from_specifier(
@@ -274,9 +274,7 @@ async fn sync_resolution_with_fs(
 if sync_download {
 // we're running the tests not with --quiet
 // and we want the output to be deterministic
-package_partitions
-.packages
-.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));
+package_partitions.packages.sort_by(|a, b| a.id.cmp(&b.id));
 }
 let mut handles: Vec<JoinHandle<Result<(), AnyError>>> =
 Vec::with_capacity(package_partitions.packages.len());
@@ -284,13 +282,13 @@ async fn sync_resolution_with_fs(
 HashMap::with_capacity(package_partitions.packages.len());
 for package in &package_partitions.packages {
 if let Some(current_pkg) =
-newest_packages_by_name.get_mut(&package.pkg_id.nv.name)
+newest_packages_by_name.get_mut(&package.id.nv.name)
 {
-if current_pkg.pkg_id.nv.cmp(&package.pkg_id.nv) == Ordering::Less {
+if current_pkg.id.nv.cmp(&package.id.nv) == Ordering::Less {
 *current_pkg = package;
 }
 } else {
-newest_packages_by_name.insert(&package.pkg_id.nv.name, package);
+newest_packages_by_name.insert(&package.id.nv.name, package);
 };

 let folder_name =
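The loop in this hunk keeps, for each package name, the entry whose name/version ordering compares greatest. A self-contained sketch of the same pattern with deliberately simplified types (an integer stands in for the real version type):

use std::collections::HashMap;

struct Pkg { name: String, version: u32 }

fn newest_by_name(packages: &[Pkg]) -> HashMap<&str, &Pkg> {
    let mut newest: HashMap<&str, &Pkg> = HashMap::new();
    for package in packages {
        if let Some(current) = newest.get_mut(package.name.as_str()) {
            // Keep whichever package compares greater.
            if current.version < package.version {
                *current = package;
            }
        } else {
            newest.insert(&package.name, package);
        }
    }
    newest
}

fn main() {
    let pkgs = vec![
        Pkg { name: "chalk".into(), version: 4 },
        Pkg { name: "chalk".into(), version: 5 },
        Pkg { name: "ms".into(), version: 2 },
    ];
    let newest = newest_by_name(&pkgs);
    assert_eq!(newest["chalk"].version, 5);
    assert_eq!(newest.len(), 2);
}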
@@ -299,7 +297,7 @@ async fn sync_resolution_with_fs(
 let initialized_file = folder_path.join(".initialized");
 if !cache
 .cache_setting()
-.should_use_for_npm_package(&package.pkg_id.nv.name)
+.should_use_for_npm_package(&package.id.nv.name)
 || !initialized_file.exists()
 {
 let pb = progress_bar.clone();
@@ -308,21 +306,19 @@ async fn sync_resolution_with_fs(
 let package = package.clone();
 let handle = spawn(async move {
 cache
-.ensure_package(&package.pkg_id.nv, &package.dist, &registry_url)
+.ensure_package(&package.id.nv, &package.dist, &registry_url)
 .await?;
 let pb_guard = pb.update_with_prompt(
 ProgressMessagePrompt::Initialize,
-&package.pkg_id.nv.to_string(),
+&package.id.nv.to_string(),
 );
 let sub_node_modules = folder_path.join("node_modules");
 let package_path =
-join_package_name(&sub_node_modules, &package.pkg_id.nv.name);
+join_package_name(&sub_node_modules, &package.id.nv.name);
 fs::create_dir_all(&package_path)
 .with_context(|| format!("Creating '{}'", folder_path.display()))?;
-let cache_folder = cache.package_folder_for_name_and_version(
-&package.pkg_id.nv,
-&registry_url,
-);
+let cache_folder = cache
+.package_folder_for_name_and_version(&package.id.nv, &registry_url);
 // for now copy, but in the future consider hard linking
 copy_dir_recursive(&cache_folder, &package_path)?;
 // write out a file that indicates this folder has been initialized
@@ -353,7 +349,7 @@ async fn sync_resolution_with_fs(
 if !initialized_file.exists() {
 let sub_node_modules = destination_path.join("node_modules");
 let package_path =
-join_package_name(&sub_node_modules, &package.pkg_id.nv.name);
+join_package_name(&sub_node_modules, &package.id.nv.name);
 fs::create_dir_all(&package_path).with_context(|| {
 format!("Creating '{}'", destination_path.display())
 })?;
@@ -363,7 +359,7 @@ async fn sync_resolution_with_fs(
 &package_cache_folder_id.with_no_count(),
 ))
 .join("node_modules"),
-&package.pkg_id.nv.name,
+&package.id.nv.name,
 );
 hard_link_dir_recursive(&source_path, &package_path)?;
 // write out a file that indicates this folder has been initialized
@@ -375,11 +371,7 @@ async fn sync_resolution_with_fs(
 //
 // Symlink node_modules/.deno/<package_id>/node_modules/<dep_name> to
 // node_modules/.deno/<dep_id>/node_modules/<dep_package_name>
-for package in package_partitions
-.packages
-.iter()
-.chain(package_partitions.copy_packages.iter())
-{
+for package in package_partitions.iter_all() {
 let sub_node_modules = deno_local_registry_dir
 .join(get_package_folder_id_folder_name(
 &package.get_package_cache_folder_id(),
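`iter_all()` replaces the explicit chain over `packages` and `copy_packages` that the removed lines spelled out, so it presumably yields both partitions in one pass. A simplified sketch of such a helper (hypothetical types, not the actual deno_npm definition):

struct PackagePartitions {
    packages: Vec<String>,
    copy_packages: Vec<String>,
}

impl PackagePartitions {
    // Equivalent to the removed `.packages.iter().chain(copy_packages.iter())`.
    fn iter_all(&self) -> impl Iterator<Item = &String> {
        self.packages.iter().chain(self.copy_packages.iter())
    }
}

fn main() {
    let partitions = PackagePartitions {
        packages: vec!["chalk@5.0.1".into()],
        copy_packages: vec!["chalk@5.0.1_copy".into()],
    };
    assert_eq!(partitions.iter_all().count(), 2);
}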
@@ -435,7 +427,7 @@ async fn sync_resolution_with_fs(
 // 5. Create a node_modules/.deno/node_modules/<package-name> directory with
 // the remaining packages
 for package in newest_packages_by_name.values() {
-if !found_names.insert(&package.pkg_id.nv.name) {
+if !found_names.insert(&package.id.nv.name) {
 continue; // skip, already handled
 }

@@ -445,12 +437,12 @@ async fn sync_resolution_with_fs(
 &package.get_package_cache_folder_id(),
 ))
 .join("node_modules"),
-&package.pkg_id.nv.name,
+&package.id.nv.name,
 );

 symlink_package_dir(
 &local_registry_package_path,
-&join_package_name(&deno_node_modules_dir, &package.pkg_id.nv.name),
+&join_package_name(&deno_node_modules_dir, &package.id.nv.name),
 )?;
 }

@@ -551,7 +551,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
 {
 let folder = self
 .npm_resolver
-.resolve_pkg_folder_from_pkg_id(&package.pkg_id)?;
+.resolve_pkg_folder_from_pkg_id(&package.id)?;
 builder.add_dir_recursive(&folder)?;
 }
 // overwrite the root directory's name to obscure the user's registry url
@@ -170,10 +170,8 @@ fn add_npm_packages_to_json(
 });
 if let Some(pkg) = maybe_package {
 if let Some(module) = module.as_object_mut() {
-module.insert(
-"npmPackage".to_string(),
-pkg.pkg_id.as_serialized().into(),
-);
+module
+.insert("npmPackage".to_string(), pkg.id.as_serialized().into());
 }
 }
 } else {
@@ -206,7 +204,7 @@ fn add_npm_packages_to_json(
 {
 dep.insert(
 "npmPackage".to_string(),
-pkg.pkg_id.as_serialized().into(),
+pkg.id.as_serialized().into(),
 );
 }
 }
@@ -219,15 +217,12 @@ fn add_npm_packages_to_json(

 let mut sorted_packages =
 snapshot.all_packages_for_every_system().collect::<Vec<_>>();
-sorted_packages.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));
+sorted_packages.sort_by(|a, b| a.id.cmp(&b.id));
 let mut json_packages = serde_json::Map::with_capacity(sorted_packages.len());
 for pkg in sorted_packages {
 let mut kv = serde_json::Map::new();
-kv.insert("name".to_string(), pkg.pkg_id.nv.name.to_string().into());
-kv.insert(
-"version".to_string(),
-pkg.pkg_id.nv.version.to_string().into(),
-);
+kv.insert("name".to_string(), pkg.id.nv.name.to_string().into());
+kv.insert("version".to_string(), pkg.id.nv.version.to_string().into());
 let mut deps = pkg.dependencies.values().collect::<Vec<_>>();
 deps.sort();
 let deps = deps
@@ -236,7 +231,7 @@ fn add_npm_packages_to_json(
 .collect::<Vec<_>>();
 kv.insert("dependencies".to_string(), deps.into());

-json_packages.insert(pkg.pkg_id.as_serialized(), kv.into());
+json_packages.insert(pkg.id.as_serialized(), kv.into());
 }

 json.insert("npmPackages".to_string(), json_packages.into());
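These last two hunks build an `npmPackages` map keyed by the serialized package id, with `name`, `version`, and `dependencies` entries per package, for what appears to be the JSON output of `deno info`. A rough sketch of the resulting shape using serde_json (illustrative values; real keys come from `NpmPackageId::as_serialized()`):

use serde_json::json;

fn main() {
    let npm_packages = json!({
        "chalk@5.0.1": {
            "name": "chalk",
            "version": "5.0.1",
            "dependencies": []
        }
    });
    println!("{}", serde_json::to_string_pretty(&npm_packages).unwrap());
}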
@@ -335,8 +330,8 @@ impl NpmInfo {
 if let Module::Npm(module) = module {
 let nv = &module.nv_reference.nv;
 if let Ok(package) = npm_snapshot.resolve_package_from_deno_module(nv) {
-info.resolved_ids.insert(nv.clone(), package.pkg_id.clone());
-if !info.packages.contains_key(&package.pkg_id) {
+info.resolved_ids.insert(nv.clone(), package.id.clone());
+if !info.packages.contains_key(&package.id) {
 info.fill_package_info(package, npm_resolver, npm_snapshot);
 }
 }
@@ -352,11 +347,9 @@ impl NpmInfo {
 npm_resolver: &'a CliNpmResolver,
 npm_snapshot: &'a NpmResolutionSnapshot,
 ) {
-self
-.packages
-.insert(package.pkg_id.clone(), package.clone());
-if let Ok(size) = npm_resolver.package_size(&package.pkg_id) {
-self.package_sizes.insert(package.pkg_id.clone(), size);
+self.packages.insert(package.id.clone(), package.clone());
+if let Ok(size) = npm_resolver.package_size(&package.id) {
+self.package_sizes.insert(package.id.clone(), size);
 }
 for id in package.dependencies.values() {
 if !self.packages.contains_key(id) {
@@ -536,7 +529,7 @@ impl<'a> GraphDisplayContext<'a> {
 None => Specifier(module.specifier().clone()),
 };
 let was_seen = !self.seen.insert(match &package_or_specifier {
-Package(package) => package.pkg_id.as_serialized(),
+Package(package) => package.id.as_serialized(),
 Specifier(specifier) => specifier.to_string(),
 });
 let header_text = if was_seen {
@@ -554,7 +547,7 @@ impl<'a> GraphDisplayContext<'a> {
 };
 let maybe_size = match &package_or_specifier {
 Package(package) => {
-self.npm_info.package_sizes.get(&package.pkg_id).copied()
+self.npm_info.package_sizes.get(&package.id).copied()
 }
 Specifier(_) => match module {
 Module::Esm(module) => Some(module.size() as u64),
@@ -608,7 +601,7 @@ impl<'a> GraphDisplayContext<'a> {
 ));
 if let Some(package) = self.npm_info.packages.get(dep_id) {
 if !package.dependencies.is_empty() {
-let was_seen = !self.seen.insert(package.pkg_id.as_serialized());
+let was_seen = !self.seen.insert(package.id.as_serialized());
 if was_seen {
 child.text = format!("{} {}", child.text, colors::gray("*"));
 } else {
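The `was_seen` logic in these hunks relies on `HashSet::insert` returning `false` when the value is already present: the first time a package id is seen its subtree is expanded, and repeat occurrences are only marked with a gray `*`. A minimal illustration of that idiom:

use std::collections::HashSet;

fn main() {
    let mut seen: HashSet<String> = HashSet::new();
    for id in ["chalk@5.0.1", "ms@2.1.2", "chalk@5.0.1"] {
        let was_seen = !seen.insert(id.to_string());
        if was_seen {
            println!("{id} *"); // already printed earlier, just mark it
        } else {
            println!("{id}");
        }
    }
}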