mirror of
https://github.com/denoland/deno.git
synced 2024-12-22 07:14:47 -05:00
fix(npm): reload an npm package's dependency's information when version not found (#18622)
This reloads an npm package's dependency's information when a version, version requirement, or tag is not found. This PR applies only to dependencies of npm packages. It does NOT yet cause npm specifiers to have their dependency information cache-busted; that requires a different solution, but this should help cache-bust in more scenarios. Part of #16901, but doesn't close it yet.
This commit is contained in:
parent
0dca0c5196
commit
5c7f76c570
15 changed files with 257 additions and 57 deletions
4
Cargo.lock
generated
4
Cargo.lock
generated
|
@ -1139,9 +1139,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_npm"
|
||||
version = "0.1.0"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b498f30dd6d0149e98b210ef7a01e54b2a8ba703b08a30fe4d87b3f36b93737c"
|
||||
checksum = "dfe101b42a94cd47522f91f490a647cd88a034996eff6806a14a59e672d80bbc"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
|
|
|
@ -47,7 +47,7 @@ deno_emit = "0.18.0"
|
|||
deno_graph = "=0.46.0"
|
||||
deno_lint = { version = "0.43.0", features = ["docs"] }
|
||||
deno_lockfile.workspace = true
|
||||
deno_npm = "0.1.0"
|
||||
deno_npm = "0.2.0"
|
||||
deno_runtime = { workspace = true, features = ["dont_create_runtime_snapshot", "include_js_files_for_snapshotting"] }
|
||||
deno_semver = "0.2.0"
|
||||
deno_task_shell = "0.11.0"
|
||||
|
|
|
@ -117,10 +117,11 @@ pub async fn snapshot_from_lockfile(
|
|||
let mut version_infos =
|
||||
FuturesOrdered::from_iter(packages.iter().map(|p| p.pkg_id.nv.clone()).map(
|
||||
|nv| async move {
|
||||
match api.package_version_info(&nv).await? {
|
||||
Some(version_info) => Ok(version_info),
|
||||
None => {
|
||||
bail!("could not find '{}' specified in the lockfile. Maybe try again with --reload", nv);
|
||||
let package_info = api.package_info(&nv.name).await?;
|
||||
match package_info.version_info(&nv) {
|
||||
Ok(version_info) => Ok(version_info),
|
||||
Err(err) => {
|
||||
bail!("Could not find '{}' specified in the lockfile. Maybe try again with --reload", err.0);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
|
@ -5,6 +5,8 @@ use std::collections::HashSet;
|
|||
use std::fs;
|
||||
use std::io::ErrorKind;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::sync::Arc;
|
||||
|
||||
use async_trait::async_trait;
|
||||
|
@ -21,6 +23,7 @@ use deno_core::url::Url;
|
|||
use deno_core::TaskQueue;
|
||||
use deno_npm::registry::NpmPackageInfo;
|
||||
use deno_npm::registry::NpmRegistryApi;
|
||||
use deno_npm::registry::NpmRegistryPackageInfoLoadError;
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
use crate::args::CacheSetting;
|
||||
|
@ -67,6 +70,7 @@ impl NpmRegistry {
|
|||
Self(Some(Arc::new(NpmRegistryApiInner {
|
||||
base_url,
|
||||
cache,
|
||||
force_reload: AtomicBool::new(false),
|
||||
mem_cache: Default::default(),
|
||||
previously_reloaded_packages: Default::default(),
|
||||
http_client,
|
||||
|
@ -96,6 +100,18 @@ impl NpmRegistry {
|
|||
&self.inner().base_url
|
||||
}
|
||||
|
||||
/// Marks that new requests for package information should retrieve it
|
||||
/// from the npm registry
|
||||
///
|
||||
/// Returns true if it was successfully set for the first time.
|
||||
pub fn mark_force_reload(&self) -> bool {
|
||||
// never force reload the registry information if reloading is disabled
|
||||
if matches!(self.inner().cache.cache_setting(), CacheSetting::Only) {
|
||||
return false;
|
||||
}
|
||||
!self.inner().force_reload.swap(true, Ordering::SeqCst)
|
||||
}
|
||||
|
||||
fn inner(&self) -> &Arc<NpmRegistryApiInner> {
|
||||
// this panicking indicates a bug in the code where this
|
||||
// wasn't initialized
|
||||
|
@ -108,17 +124,26 @@ static SYNC_DOWNLOAD_TASK_QUEUE: Lazy<TaskQueue> =
|
|||
|
||||
#[async_trait]
|
||||
impl NpmRegistryApi for NpmRegistry {
|
||||
async fn maybe_package_info(
|
||||
async fn package_info(
|
||||
&self,
|
||||
name: &str,
|
||||
) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
|
||||
if should_sync_download() {
|
||||
) -> Result<Arc<NpmPackageInfo>, NpmRegistryPackageInfoLoadError> {
|
||||
let result = if should_sync_download() {
|
||||
let inner = self.inner().clone();
|
||||
SYNC_DOWNLOAD_TASK_QUEUE
|
||||
.queue(async move { inner.maybe_package_info(name).await })
|
||||
.await
|
||||
} else {
|
||||
self.inner().maybe_package_info(name).await
|
||||
};
|
||||
match result {
|
||||
Ok(Some(info)) => Ok(info),
|
||||
Ok(None) => Err(NpmRegistryPackageInfoLoadError::PackageNotExists {
|
||||
package_name: name.to_string(),
|
||||
}),
|
||||
Err(err) => {
|
||||
Err(NpmRegistryPackageInfoLoadError::LoadError(Arc::new(err)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -135,6 +160,7 @@ enum CacheItem {
|
|||
struct NpmRegistryApiInner {
|
||||
base_url: Url,
|
||||
cache: NpmCache,
|
||||
force_reload: AtomicBool,
|
||||
mem_cache: Mutex<HashMap<String, CacheItem>>,
|
||||
previously_reloaded_packages: Mutex<HashSet<String>>,
|
||||
http_client: HttpClient,
|
||||
|
@ -154,10 +180,10 @@ impl NpmRegistryApiInner {
|
|||
}
|
||||
Some(CacheItem::Pending(future)) => (false, future.clone()),
|
||||
None => {
|
||||
if self.cache.cache_setting().should_use_for_npm_package(name)
|
||||
// if this has been previously reloaded, then try loading from the
|
||||
// file system cache
|
||||
|| !self.previously_reloaded_packages.lock().insert(name.to_string())
|
||||
if (self.cache.cache_setting().should_use_for_npm_package(name) && !self.force_reload())
|
||||
// if this has been previously reloaded, then try loading from the
|
||||
// file system cache
|
||||
|| !self.previously_reloaded_packages.lock().insert(name.to_string())
|
||||
{
|
||||
// attempt to load from the file cache
|
||||
if let Some(info) = self.load_file_cached_package_info(name) {
|
||||
|
@ -203,6 +229,10 @@ impl NpmRegistryApiInner {
|
|||
}
|
||||
}
|
||||
|
||||
fn force_reload(&self) -> bool {
|
||||
self.force_reload.load(Ordering::SeqCst)
|
||||
}
|
||||
|
||||
fn load_file_cached_package_info(
|
||||
&self,
|
||||
name: &str,
|
||||
|
|
|
@ -10,8 +10,13 @@ use deno_core::TaskQueue;
|
|||
use deno_lockfile::NpmPackageDependencyLockfileInfo;
|
||||
use deno_lockfile::NpmPackageLockfileInfo;
|
||||
use deno_npm::registry::NpmPackageInfo;
|
||||
use deno_npm::resolution::NpmPackageVersionResolutionError;
|
||||
use deno_npm::resolution::NpmPackagesPartitioned;
|
||||
use deno_npm::resolution::NpmResolutionError;
|
||||
use deno_npm::resolution::NpmResolutionSnapshot;
|
||||
use deno_npm::resolution::PackageNotFoundFromReferrerError;
|
||||
use deno_npm::resolution::PackageNvNotFoundError;
|
||||
use deno_npm::resolution::PackageReqNotFoundError;
|
||||
use deno_npm::NpmPackageCacheFolderId;
|
||||
use deno_npm::NpmPackageId;
|
||||
use deno_npm::NpmResolutionPackage;
|
||||
|
@ -70,13 +75,11 @@ impl NpmResolution {
|
|||
|
||||
// only allow one thread in here at a time
|
||||
let _permit = inner.update_queue.acquire().await;
|
||||
let snapshot = inner.snapshot.read().clone();
|
||||
|
||||
let snapshot = add_package_reqs_to_snapshot(
|
||||
&inner.api,
|
||||
package_reqs,
|
||||
snapshot,
|
||||
self.0.maybe_lockfile.clone(),
|
||||
|| inner.snapshot.read().clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
@ -91,24 +94,25 @@ impl NpmResolution {
|
|||
let inner = &self.0;
|
||||
// only allow one thread in here at a time
|
||||
let _permit = inner.update_queue.acquire().await;
|
||||
let snapshot = inner.snapshot.read().clone();
|
||||
|
||||
let reqs_set = package_reqs.iter().collect::<HashSet<_>>();
|
||||
let has_removed_package = !snapshot
|
||||
.package_reqs()
|
||||
.keys()
|
||||
.all(|req| reqs_set.contains(req));
|
||||
// if any packages were removed, we need to completely recreate the npm resolution snapshot
|
||||
let snapshot = if has_removed_package {
|
||||
NpmResolutionSnapshot::default()
|
||||
} else {
|
||||
snapshot
|
||||
};
|
||||
let reqs_set = package_reqs.iter().cloned().collect::<HashSet<_>>();
|
||||
let snapshot = add_package_reqs_to_snapshot(
|
||||
&inner.api,
|
||||
package_reqs,
|
||||
snapshot,
|
||||
self.0.maybe_lockfile.clone(),
|
||||
|| {
|
||||
let snapshot = inner.snapshot.read().clone();
|
||||
let has_removed_package = !snapshot
|
||||
.package_reqs()
|
||||
.keys()
|
||||
.all(|req| reqs_set.contains(req));
|
||||
// if any packages were removed, we need to completely recreate the npm resolution snapshot
|
||||
if has_removed_package {
|
||||
NpmResolutionSnapshot::default()
|
||||
} else {
|
||||
snapshot
|
||||
}
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
@ -121,13 +125,12 @@ impl NpmResolution {
|
|||
let inner = &self.0;
|
||||
// only allow one thread in here at a time
|
||||
let _permit = inner.update_queue.acquire().await;
|
||||
let snapshot = inner.snapshot.read().clone();
|
||||
|
||||
let snapshot = add_package_reqs_to_snapshot(
|
||||
&inner.api,
|
||||
Vec::new(),
|
||||
snapshot,
|
||||
self.0.maybe_lockfile.clone(),
|
||||
|| inner.snapshot.read().clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
@ -139,7 +142,7 @@ impl NpmResolution {
|
|||
pub fn pkg_req_ref_to_nv_ref(
|
||||
&self,
|
||||
req_ref: NpmPackageReqReference,
|
||||
) -> Result<NpmPackageNvReference, AnyError> {
|
||||
) -> Result<NpmPackageNvReference, PackageReqNotFoundError> {
|
||||
let node_id = self.resolve_pkg_id_from_pkg_req(&req_ref.req)?;
|
||||
Ok(NpmPackageNvReference {
|
||||
nv: node_id.nv,
|
||||
|
@ -163,7 +166,7 @@ impl NpmResolution {
|
|||
&self,
|
||||
name: &str,
|
||||
referrer: &NpmPackageCacheFolderId,
|
||||
) -> Result<NpmResolutionPackage, AnyError> {
|
||||
) -> Result<NpmResolutionPackage, Box<PackageNotFoundFromReferrerError>> {
|
||||
self
|
||||
.0
|
||||
.snapshot
|
||||
|
@ -176,7 +179,7 @@ impl NpmResolution {
|
|||
pub fn resolve_pkg_id_from_pkg_req(
|
||||
&self,
|
||||
req: &NpmPackageReq,
|
||||
) -> Result<NpmPackageId, AnyError> {
|
||||
) -> Result<NpmPackageId, PackageReqNotFoundError> {
|
||||
self
|
||||
.0
|
||||
.snapshot
|
||||
|
@ -188,7 +191,7 @@ impl NpmResolution {
|
|||
pub fn resolve_pkg_id_from_deno_module(
|
||||
&self,
|
||||
id: &NpmPackageNv,
|
||||
) -> Result<NpmPackageId, AnyError> {
|
||||
) -> Result<NpmPackageId, PackageNvNotFoundError> {
|
||||
self
|
||||
.0
|
||||
.snapshot
|
||||
|
@ -203,7 +206,7 @@ impl NpmResolution {
|
|||
pub fn resolve_package_req_as_pending(
|
||||
&self,
|
||||
pkg_req: &NpmPackageReq,
|
||||
) -> Result<NpmPackageNv, AnyError> {
|
||||
) -> Result<NpmPackageNv, NpmPackageVersionResolutionError> {
|
||||
// we should always have this because it should have been cached before here
|
||||
let package_info =
|
||||
self.0.api.get_cached_package_info(&pkg_req.name).unwrap();
|
||||
|
@ -217,7 +220,7 @@ impl NpmResolution {
|
|||
&self,
|
||||
pkg_req: &NpmPackageReq,
|
||||
package_info: &NpmPackageInfo,
|
||||
) -> Result<NpmPackageNv, AnyError> {
|
||||
) -> Result<NpmPackageNv, NpmPackageVersionResolutionError> {
|
||||
debug_assert_eq!(pkg_req.name, package_info.name);
|
||||
let inner = &self.0;
|
||||
let mut snapshot = inner.snapshot.write();
|
||||
|
@ -245,10 +248,14 @@ impl NpmResolution {
|
|||
|
||||
async fn add_package_reqs_to_snapshot(
|
||||
api: &NpmRegistry,
|
||||
// todo(18079): it should be possible to pass &[NpmPackageReq] in here
|
||||
// and avoid all these clones, but the LSP complains because of its
|
||||
// `Send` requirement
|
||||
package_reqs: Vec<NpmPackageReq>,
|
||||
snapshot: NpmResolutionSnapshot,
|
||||
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
get_new_snapshot: impl Fn() -> NpmResolutionSnapshot,
|
||||
) -> Result<NpmResolutionSnapshot, AnyError> {
|
||||
let snapshot = get_new_snapshot();
|
||||
if !snapshot.has_pending()
|
||||
&& package_reqs
|
||||
.iter()
|
||||
|
@ -257,9 +264,23 @@ async fn add_package_reqs_to_snapshot(
|
|||
return Ok(snapshot); // already up to date
|
||||
}
|
||||
|
||||
let result = snapshot.resolve_pending(package_reqs, api).await;
|
||||
let result = snapshot.resolve_pending(package_reqs.clone(), api).await;
|
||||
api.clear_memory_cache();
|
||||
let snapshot = result?; // propagate the error after clearing the memory cache
|
||||
let snapshot = match result {
|
||||
Ok(snapshot) => snapshot,
|
||||
Err(NpmResolutionError::Resolution(err)) if api.mark_force_reload() => {
|
||||
log::debug!("{err:#}");
|
||||
log::debug!("npm resolution failed. Trying again...");
|
||||
|
||||
// try again
|
||||
let snapshot = get_new_snapshot();
|
||||
let result = snapshot.resolve_pending(package_reqs, api).await;
|
||||
api.clear_memory_cache();
|
||||
// now surface the result after clearing the cache
|
||||
result?
|
||||
}
|
||||
Err(err) => return Err(err.into()),
|
||||
};
|
||||
|
||||
if let Some(lockfile_mutex) = maybe_lockfile {
|
||||
let mut lockfile = lockfile_mutex.lock();
|
||||
|
|
|
@ -9,6 +9,7 @@ use async_trait::async_trait;
|
|||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::url::Url;
|
||||
use deno_npm::resolution::PackageNotFoundFromReferrerError;
|
||||
use deno_npm::NpmPackageCacheFolderId;
|
||||
use deno_npm::NpmPackageId;
|
||||
use deno_npm::NpmResolutionPackage;
|
||||
|
@ -58,7 +59,7 @@ impl GlobalNpmPackageResolver {
|
|||
&self,
|
||||
package_name: &str,
|
||||
referrer_pkg_id: &NpmPackageCacheFolderId,
|
||||
) -> Result<NpmResolutionPackage, AnyError> {
|
||||
) -> Result<NpmResolutionPackage, Box<PackageNotFoundFromReferrerError>> {
|
||||
let types_name = types_package_name(package_name);
|
||||
self
|
||||
.resolution
|
||||
|
|
|
@ -15,6 +15,7 @@ use deno_core::parking_lot::Mutex;
|
|||
use deno_core::serde_json;
|
||||
use deno_core::url::Url;
|
||||
use deno_npm::resolution::NpmResolutionSnapshot;
|
||||
use deno_npm::resolution::PackageReqNotFoundError;
|
||||
use deno_npm::NpmPackageId;
|
||||
use deno_runtime::deno_node::NodePermissions;
|
||||
use deno_runtime::deno_node::NodeResolutionMode;
|
||||
|
@ -82,14 +83,14 @@ impl NpmPackageResolver {
|
|||
pub fn resolve_pkg_id_from_pkg_req(
|
||||
&self,
|
||||
req: &NpmPackageReq,
|
||||
) -> Result<NpmPackageId, AnyError> {
|
||||
) -> Result<NpmPackageId, PackageReqNotFoundError> {
|
||||
self.resolution.resolve_pkg_id_from_pkg_req(req)
|
||||
}
|
||||
|
||||
pub fn pkg_req_ref_to_nv_ref(
|
||||
&self,
|
||||
req_ref: NpmPackageReqReference,
|
||||
) -> Result<NpmPackageNvReference, AnyError> {
|
||||
) -> Result<NpmPackageNvReference, PackageReqNotFoundError> {
|
||||
self.resolution.pkg_req_ref_to_nv_ref(req_ref)
|
||||
}
|
||||
|
||||
|
@ -225,10 +226,8 @@ impl NpmPackageResolver {
|
|||
&self,
|
||||
) -> Result<(), AnyError> {
|
||||
// add and ensure this isn't added to the lockfile
|
||||
self
|
||||
.resolution
|
||||
.add_package_reqs(vec![NpmPackageReq::from_str("@types/node").unwrap()])
|
||||
.await?;
|
||||
let package_reqs = vec![NpmPackageReq::from_str("@types/node").unwrap()];
|
||||
self.resolution.add_package_reqs(package_reqs).await?;
|
||||
self.fs_resolver.cache_packages().await?;
|
||||
|
||||
Ok(())
|
||||
|
|
|
@ -237,9 +237,15 @@ impl NpmResolver for CliGraphResolver {
|
|||
if self.no_npm {
|
||||
bail!("npm specifiers were requested; but --no-npm is specified")
|
||||
}
|
||||
self
|
||||
match self
|
||||
.npm_resolution
|
||||
.resolve_package_req_as_pending(package_req)
|
||||
{
|
||||
Ok(nv) => Ok(nv),
|
||||
Err(err) => {
|
||||
bail!("{:#} Try retrieving the latest npm package information by running with --reload", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::Value;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::process::Stdio;
|
||||
use test_util as util;
|
||||
|
@ -1546,3 +1548,124 @@ itest!(node_modules_import_check {
|
|||
copy_temp_dir: Some("npm/node_modules_import/"),
|
||||
exit_code: 1,
|
||||
});
|
||||
|
||||
itest!(non_existent_dep {
|
||||
args: "cache npm:@denotest/non-existent-dep",
|
||||
envs: env_vars_for_npm_tests(),
|
||||
http_server: true,
|
||||
exit_code: 1,
|
||||
output_str: Some(concat!(
|
||||
"Download http://localhost:4545/npm/registry/@denotest/non-existent-dep\n",
|
||||
"Download http://localhost:4545/npm/registry/@denotest/non-existent\n",
|
||||
"error: npm package '@denotest/non-existent' does not exist.\n"
|
||||
)),
|
||||
});
|
||||
|
||||
itest!(non_existent_dep_version {
|
||||
args: "cache npm:@denotest/non-existent-dep-version",
|
||||
envs: env_vars_for_npm_tests(),
|
||||
http_server: true,
|
||||
exit_code: 1,
|
||||
output_str: Some(concat!(
|
||||
"Download http://localhost:4545/npm/registry/@denotest/non-existent-dep-version\n",
|
||||
"Download http://localhost:4545/npm/registry/@denotest/esm-basic\n",
|
||||
// does two downloads because when failing once it max tries to
|
||||
// get the latest version a second time
|
||||
"Download http://localhost:4545/npm/registry/@denotest/non-existent-dep-version\n",
|
||||
"Download http://localhost:4545/npm/registry/@denotest/esm-basic\n",
|
||||
"error: Could not find npm package '@denotest/esm-basic' matching '=99.99.99'.\n"
|
||||
)),
|
||||
});
|
||||
|
||||
#[test]
|
||||
fn reload_info_not_found_cache_but_exists_remote() {
|
||||
fn remove_version(registry_json: &mut Value, version: &str) {
|
||||
registry_json
|
||||
.as_object_mut()
|
||||
.unwrap()
|
||||
.get_mut("versions")
|
||||
.unwrap()
|
||||
.as_object_mut()
|
||||
.unwrap()
|
||||
.remove(version);
|
||||
}
|
||||
|
||||
// This tests that when a local machine doesn't have a version
|
||||
// specified in a dependency that exists in the npm registry
|
||||
let test_context = TestContextBuilder::for_npm()
|
||||
.use_sync_npm_download()
|
||||
.use_temp_cwd()
|
||||
.build();
|
||||
let deno_dir = test_context.deno_dir();
|
||||
let temp_dir = test_context.temp_dir();
|
||||
temp_dir.write(
|
||||
"main.ts",
|
||||
"import 'npm:@denotest/esm-import-cjs-default@1.0.0';",
|
||||
);
|
||||
|
||||
// cache successfully to the deno_dir
|
||||
let output = test_context.new_command().args("cache main.ts").run();
|
||||
output.assert_matches_text(concat!(
|
||||
"Download http://localhost:4545/npm/registry/@denotest/esm-import-cjs-default\n",
|
||||
"Download http://localhost:4545/npm/registry/@denotest/cjs-default-export\n",
|
||||
"Download http://localhost:4545/npm/registry/@denotest/cjs-default-export/1.0.0.tgz\n",
|
||||
"Download http://localhost:4545/npm/registry/@denotest/esm-import-cjs-default/1.0.0.tgz\n",
|
||||
));
|
||||
|
||||
// modify the package information in the cache to remove the latest version
|
||||
let registry_json_path = "npm/localhost_4545/npm/registry/@denotest/cjs-default-export/registry.json";
|
||||
let mut registry_json: Value =
|
||||
serde_json::from_str(&deno_dir.read_to_string(registry_json_path)).unwrap();
|
||||
remove_version(&mut registry_json, "1.0.0");
|
||||
deno_dir.write(
|
||||
registry_json_path,
|
||||
serde_json::to_string(®istry_json).unwrap(),
|
||||
);
|
||||
|
||||
// should error when `--cache-only` is used now because the version is not in the cache
|
||||
let output = test_context
|
||||
.new_command()
|
||||
.args("run --cached-only main.ts")
|
||||
.run();
|
||||
output.assert_exit_code(1);
|
||||
output.assert_matches_text("error: Could not find npm package '@denotest/cjs-default-export' matching '^1.0.0'.\n");
|
||||
|
||||
// now try running without it, it should download the package now
|
||||
let output = test_context.new_command().args("run main.ts").run();
|
||||
output.assert_matches_text(concat!(
|
||||
"Download http://localhost:4545/npm/registry/@denotest/esm-import-cjs-default\n",
|
||||
"Download http://localhost:4545/npm/registry/@denotest/cjs-default-export\n",
|
||||
"Node esm importing node cjs\n[WILDCARD]",
|
||||
));
|
||||
output.assert_exit_code(0);
|
||||
|
||||
// now remove the information for the top level package
|
||||
let registry_json_path = "npm/localhost_4545/npm/registry/@denotest/esm-import-cjs-default/registry.json";
|
||||
let mut registry_json: Value =
|
||||
serde_json::from_str(&deno_dir.read_to_string(registry_json_path)).unwrap();
|
||||
remove_version(&mut registry_json, "1.0.0");
|
||||
deno_dir.write(
|
||||
registry_json_path,
|
||||
serde_json::to_string(®istry_json).unwrap(),
|
||||
);
|
||||
|
||||
// should error again for --cached-only
|
||||
let output = test_context
|
||||
.new_command()
|
||||
.args("run --cached-only main.ts")
|
||||
.run();
|
||||
output.assert_exit_code(1);
|
||||
output.assert_matches_text(concat!(
|
||||
"error: Could not find npm package '@denotest/esm-import-cjs-default' matching '1.0.0'. Try retrieving the latest npm package information by running with --reload\n",
|
||||
" at file:///[WILDCARD]/main.ts:1:8\n",
|
||||
));
|
||||
|
||||
// now try running without it, it currently will error, but this should work in the future
|
||||
// todo(https://github.com/denoland/deno/issues/16901): fix this
|
||||
let output = test_context.new_command().args("run main.ts").run();
|
||||
output.assert_exit_code(1);
|
||||
output.assert_matches_text(concat!(
|
||||
"error: Could not find npm package '@denotest/esm-import-cjs-default' matching '1.0.0'. Try retrieving the latest npm package information by running with --reload\n",
|
||||
" at file:///[WILDCARD]/main.ts:1:8\n",
|
||||
));
|
||||
}
|
||||
|
|
|
@ -1,2 +1,3 @@
|
|||
Download http://localhost:4545/npm/registry/mkdirp
|
||||
error: Could not find npm package 'mkdirp' matching '0.5.125'. Try retrieving the latest npm package information by running with --reload
|
||||
Download http://localhost:4545/npm/registry/mkdirp
|
||||
error: Could not find npm package 'mkdirp' matching '0.5.125'.
|
||||
|
|
1
cli/tests/testdata/npm/registry/@denotest/non-existent-dep-version/1.0.0/index.js
vendored
Normal file
1
cli/tests/testdata/npm/registry/@denotest/non-existent-dep-version/1.0.0/index.js
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
module.exports = 5;
|
7
cli/tests/testdata/npm/registry/@denotest/non-existent-dep-version/1.0.0/package.json
vendored
Normal file
7
cli/tests/testdata/npm/registry/@denotest/non-existent-dep-version/1.0.0/package.json
vendored
Normal file
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"name": "@denotest/non-existent-dep-version",
|
||||
"version": "1.0.0",
|
||||
"dependencies": {
|
||||
"@denotest/esm-basic": "=99.99.99"
|
||||
}
|
||||
}
|
1
cli/tests/testdata/npm/registry/@denotest/non-existent-dep/1.0.0/index.js
vendored
Normal file
1
cli/tests/testdata/npm/registry/@denotest/non-existent-dep/1.0.0/index.js
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
module.exports = 5;
|
7
cli/tests/testdata/npm/registry/@denotest/non-existent-dep/1.0.0/package.json
vendored
Normal file
7
cli/tests/testdata/npm/registry/@denotest/non-existent-dep/1.0.0/package.json
vendored
Normal file
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"name": "@denotest/non-existent-dep",
|
||||
"version": "1.0.0",
|
||||
"dependencies": {
|
||||
"@denotest/non-existent": "1.0"
|
||||
}
|
||||
}
|
|
@ -455,6 +455,7 @@ pub struct TestCommandOutput {
|
|||
}
|
||||
|
||||
impl Drop for TestCommandOutput {
|
||||
// assert the output and exit code was asserted
|
||||
fn drop(&mut self) {
|
||||
fn panic_unasserted_output(text: &str) {
|
||||
println!("OUTPUT\n{text}\nOUTPUT");
|
||||
|
@ -467,13 +468,6 @@ impl Drop for TestCommandOutput {
|
|||
if std::thread::panicking() {
|
||||
return;
|
||||
}
|
||||
// force the caller to assert these
|
||||
if !*self.asserted_exit_code.borrow() && self.exit_code != Some(0) {
|
||||
panic!(
|
||||
"The non-zero exit code of the command was not asserted: {:?}",
|
||||
self.exit_code,
|
||||
)
|
||||
}
|
||||
|
||||
// either the combined output needs to be asserted or both stdout and stderr
|
||||
if let Some(combined) = &self.combined {
|
||||
|
@ -489,6 +483,14 @@ impl Drop for TestCommandOutput {
|
|||
panic_unasserted_output(stderr);
|
||||
}
|
||||
}
|
||||
|
||||
// now ensure the exit code was asserted
|
||||
if !*self.asserted_exit_code.borrow() && self.exit_code != Some(0) {
|
||||
panic!(
|
||||
"The non-zero exit code of the command was not asserted: {:?}",
|
||||
self.exit_code,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
Loading…
Reference in a new issue