mirror of https://github.com/denoland/deno.git synced 2024-12-24 08:09:08 -05:00

fix: lazily surface errors in package.json deps parsing (#17974)

Closes #17941
David Sherret, 2023-03-03 18:27:05 -04:00, committed by GitHub
parent 5c43e2a665
commit 84bafd11d5
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
17 changed files with 288 additions and 119 deletions
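
In short: package.json dependency parsing now records a Result per entry instead of failing the whole map up front, and each error is surfaced only when the corresponding dependency is actually used (resolution, install, or a `deno task` warning). Below is a minimal, self-contained sketch of that pattern; the types are simplified stand-ins for the real `PackageJsonDeps`, `NpmPackageReq`, and `PackageJsonDepValueParseError` in cli/args/package_json.rs, not the actual implementation.

use std::collections::BTreeMap;

// Simplified stand-ins for the cli/args/package_json.rs types.
#[derive(Debug)]
struct NpmPackageReq {
    name: String,
    version_req: Option<String>,
}

#[derive(Debug)]
struct DepValueParseError(String);

// Mirrors `PackageJsonDeps`: every entry keeps its own parse outcome.
type PackageJsonDeps = BTreeMap<String, Result<NpmPackageReq, DepValueParseError>>;

fn parse_entry(key: &str, value: &str) -> Result<NpmPackageReq, DepValueParseError> {
    // Stand-in for the real version requirement parsing.
    if value.contains(' ') {
        return Err(DepValueParseError(format!("invalid version requirement '{value}'")));
    }
    Ok(NpmPackageReq {
        name: key.to_string(),
        version_req: Some(value.to_string()),
    })
}

fn main() {
    let raw = [("good", "^1.2.3"), ("bad", "invalid stuff that won't parse")];
    let deps: PackageJsonDeps = raw
        .iter()
        .map(|(key, value)| (key.to_string(), parse_entry(key, value)))
        .collect();

    // Install paths ignore broken entries instead of erroring eagerly...
    let installable: Vec<&NpmPackageReq> =
        deps.values().filter_map(|r| r.as_ref().ok()).collect();
    println!("installable: {installable:?}");

    // ...and a broken entry only surfaces when that dependency is referenced.
    if let Err(err) = &deps["bad"] {
        eprintln!("Warning Ignoring dependency 'bad' in package.json: {}", err.0);
    }
}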

Cargo.lock (generated)
View file

@ -1117,9 +1117,9 @@ dependencies = [
[[package]]
name = "deno_graph"
version = "0.44.2"
version = "0.44.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a2cb8c414852294d5fb23d265725bbf1fab9608296e04301440dba14fef71e5"
checksum = "500e2ba1f77f6baf0722a84290ab238ff47c46e48d0f94e3f3cd5632a012ee0c"
dependencies = [
"anyhow",
"data-url",
@ -2939,9 +2939,9 @@ dependencies = [
[[package]]
name = "monch"
version = "0.4.0"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f13de1c3edc9a5b9dc3a1029f56e9ab3eba34640010aff4fc01044c42ef67afa"
checksum = "1120c1ab92ab8cdacb3b89ac9a214f512d2e78e90e3b57c00d9551ced19f646f"
[[package]]
name = "naga"

View file

@ -46,7 +46,7 @@ deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "dep_gra
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = "0.57.0"
deno_emit = "0.16.0"
deno_graph = "=0.44.2"
deno_graph = "=0.44.3"
deno_lint = { version = "0.41.0", features = ["docs"] }
deno_lockfile.workspace = true
deno_runtime = { workspace = true, features = ["dont_create_runtime_snapshot", "include_js_files_for_snapshotting"] }
@ -82,7 +82,7 @@ log = { workspace = true, features = ["serde"] }
lsp-types = "=0.93.2" # used by tower-lsp and "proposed" feature is unstable in patch releases
lzzzz = '1.0'
mitata = "=0.0.7"
monch = "=0.4.0"
monch = "=0.4.1"
notify.workspace = true
once_cell.workspace = true
os_pipe.workspace = true

View file

@ -8,6 +8,7 @@ mod lockfile;
pub mod package_json;
pub use self::import_map::resolve_import_map_from_specifier;
use self::package_json::PackageJsonDeps;
use ::import_map::ImportMap;
use indexmap::IndexMap;
@ -38,7 +39,6 @@ use deno_core::normalize_path;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_graph::npm::NpmPackageReq;
use deno_runtime::colors;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_tls::rustls;
@ -49,7 +49,6 @@ use deno_runtime::deno_tls::webpki_roots;
use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::permissions::PermissionsOptions;
use once_cell::sync::Lazy;
use std::collections::BTreeMap;
use std::env;
use std::io::BufReader;
use std::io::Cursor;
@ -803,19 +802,18 @@ impl CliOptions {
&self.maybe_package_json
}
pub fn maybe_package_json_deps(
&self,
) -> Result<Option<BTreeMap<String, NpmPackageReq>>, AnyError> {
pub fn maybe_package_json_deps(&self) -> Option<PackageJsonDeps> {
if matches!(
self.flags.subcommand,
DenoSubcommand::Task(TaskFlags { task: None, .. })
) {
// don't have any package json dependencies for deno task with no args
Ok(None)
} else if let Some(package_json) = self.maybe_package_json() {
package_json::get_local_package_json_version_reqs(package_json).map(Some)
None
} else {
Ok(None)
self
.maybe_package_json()
.as_ref()
.map(package_json::get_local_package_json_version_reqs)
}
}

View file

@ -5,30 +5,52 @@ use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_graph::npm::NpmPackageReq;
use deno_graph::semver::NpmVersionReqSpecifierParseError;
use deno_graph::semver::VersionReq;
use deno_runtime::deno_node::PackageJson;
use thiserror::Error;
#[derive(Debug, Clone, Error, PartialEq, Eq, Hash)]
#[error("Could not find @ symbol in npm url '{value}'")]
pub struct PackageJsonDepNpmSchemeValueParseError {
pub value: String,
}
/// Gets the name and raw version constraint taking into account npm
/// package aliases.
pub fn parse_dep_entry_name_and_raw_version<'a>(
key: &'a str,
value: &'a str,
) -> Result<(&'a str, &'a str), AnyError> {
) -> Result<(&'a str, &'a str), PackageJsonDepNpmSchemeValueParseError> {
if let Some(package_and_version) = value.strip_prefix("npm:") {
if let Some((name, version)) = package_and_version.rsplit_once('@') {
Ok((name, version))
} else {
bail!("could not find @ symbol in npm url '{}'", value);
Err(PackageJsonDepNpmSchemeValueParseError {
value: value.to_string(),
})
}
} else {
Ok((key, value))
}
}
#[derive(Debug, Error, Clone, Hash)]
pub enum PackageJsonDepValueParseError {
#[error(transparent)]
SchemeValue(#[from] PackageJsonDepNpmSchemeValueParseError),
#[error(transparent)]
Specifier(#[from] NpmVersionReqSpecifierParseError),
#[error("Not implemented scheme: {scheme}")]
Unsupported { scheme: String },
}
pub type PackageJsonDeps =
BTreeMap<String, Result<NpmPackageReq, PackageJsonDepValueParseError>>;
/// Gets an application level package.json's npm package requirements.
///
/// Note that this function is not general purpose. It is specifically for
@ -37,48 +59,43 @@ pub fn parse_dep_entry_name_and_raw_version<'a>(
/// entries to npm specifiers which can then be used in the resolver.
pub fn get_local_package_json_version_reqs(
package_json: &PackageJson,
) -> Result<BTreeMap<String, NpmPackageReq>, AnyError> {
) -> PackageJsonDeps {
fn parse_entry(
key: &str,
value: &str,
) -> Result<NpmPackageReq, PackageJsonDepValueParseError> {
if value.starts_with("workspace:")
|| value.starts_with("file:")
|| value.starts_with("git:")
|| value.starts_with("http:")
|| value.starts_with("https:")
{
return Err(PackageJsonDepValueParseError::Unsupported {
scheme: key.split(':').next().unwrap().to_string(),
});
}
let (name, version_req) = parse_dep_entry_name_and_raw_version(key, value)
.map_err(PackageJsonDepValueParseError::SchemeValue)?;
let result = VersionReq::parse_from_specifier(version_req);
match result {
Ok(version_req) => Ok(NpmPackageReq {
name: name.to_string(),
version_req: Some(version_req),
}),
Err(err) => Err(PackageJsonDepValueParseError::Specifier(err)),
}
}
fn insert_deps(
deps: Option<&HashMap<String, String>>,
result: &mut BTreeMap<String, NpmPackageReq>,
) -> Result<(), AnyError> {
result: &mut PackageJsonDeps,
) {
if let Some(deps) = deps {
for (key, value) in deps {
if value.starts_with("workspace:")
|| value.starts_with("file:")
|| value.starts_with("git:")
|| value.starts_with("http:")
|| value.starts_with("https:")
{
// skip these specifiers for now
continue;
}
let (name, version_req) =
parse_dep_entry_name_and_raw_version(key, value)?;
let version_req = {
let result = VersionReq::parse_from_specifier(version_req);
match result {
Ok(version_req) => version_req,
Err(e) => {
let err = anyhow!("{:#}", e).context(concat!(
"Parsing version constraints in the application-level ",
"package.json is more strict at the moment"
));
return Err(err);
}
}
};
result.insert(
key.to_string(),
NpmPackageReq {
name: name.to_string(),
version_req: Some(version_req),
},
);
result.insert(key.to_string(), parse_entry(key, value));
}
}
Ok(())
}
let deps = package_json.dependencies.as_ref();
@ -87,10 +104,10 @@ pub fn get_local_package_json_version_reqs(
// insert the dev dependencies first so the dependencies will
// take priority and overwrite any collisions
insert_deps(dev_deps, &mut result)?;
insert_deps(deps, &mut result)?;
insert_deps(dev_deps, &mut result);
insert_deps(deps, &mut result);
Ok(result)
result
}
/// Attempts to discover the package.json file, maybe stopping when it
@ -147,7 +164,7 @@ mod test {
(
"test",
"npm:package",
Err("could not find @ symbol in npm url 'npm:package'"),
Err("Could not find @ symbol in npm url 'npm:package'"),
),
];
for (key, value, expected_result) in cases {
@ -159,6 +176,23 @@ mod test {
}
}
fn get_local_package_json_version_reqs_for_tests(
package_json: &PackageJson,
) -> BTreeMap<String, Result<NpmPackageReq, String>> {
get_local_package_json_version_reqs(package_json)
.into_iter()
.map(|(k, v)| {
(
k,
match v {
Ok(v) => Ok(v),
Err(err) => Err(err.to_string()),
},
)
})
.collect::<BTreeMap<_, _>>()
}
#[test]
fn test_get_local_package_json_version_reqs() {
let mut package_json = PackageJson::empty(PathBuf::from("/package.json"));
@ -171,21 +205,21 @@ mod test {
// should be ignored
("other".to_string(), "^3.2".to_string()),
]));
let result = get_local_package_json_version_reqs(&package_json).unwrap();
let deps = get_local_package_json_version_reqs_for_tests(&package_json);
assert_eq!(
result,
deps,
BTreeMap::from([
(
"test".to_string(),
NpmPackageReq::from_str("test@^1.2").unwrap()
Ok(NpmPackageReq::from_str("test@^1.2").unwrap())
),
(
"other".to_string(),
NpmPackageReq::from_str("package@~1.3").unwrap()
Ok(NpmPackageReq::from_str("package@~1.3").unwrap())
),
(
"package_b".to_string(),
NpmPackageReq::from_str("package_b@~2.2").unwrap()
Ok(NpmPackageReq::from_str("package_b@~2.2").unwrap())
)
])
);
@ -198,18 +232,20 @@ mod test {
"test".to_string(),
"1.x - 1.3".to_string(),
)]));
let err = get_local_package_json_version_reqs(&package_json)
.err()
.unwrap();
let map = get_local_package_json_version_reqs_for_tests(&package_json);
assert_eq!(
format!("{err:#}"),
concat!(
"Parsing version constraints in the application-level ",
"package.json is more strict at the moment: Invalid npm specifier ",
"version requirement. Unexpected character.\n",
" - 1.3\n",
" ~"
)
map,
BTreeMap::from([(
"test".to_string(),
Err(
concat!(
"Invalid npm specifier version requirement. Unexpected character.\n",
" - 1.3\n",
" ~"
)
.to_string()
)
)])
);
}
@ -224,13 +260,35 @@ mod test {
("http".to_string(), "http://something".to_string()),
("https".to_string(), "https://something".to_string()),
]));
let result = get_local_package_json_version_reqs(&package_json).unwrap();
let result = get_local_package_json_version_reqs_for_tests(&package_json);
assert_eq!(
result,
BTreeMap::from([(
"test".to_string(),
NpmPackageReq::from_str("test@1").unwrap()
)])
BTreeMap::from([
(
"file".to_string(),
Err("Not implemented scheme: file".to_string()),
),
(
"git".to_string(),
Err("Not implemented scheme: git".to_string()),
),
(
"http".to_string(),
Err("Not implemented scheme: http".to_string()),
),
(
"https".to_string(),
Err("Not implemented scheme: https".to_string()),
),
(
"test".to_string(),
Ok(NpmPackageReq::from_str("test@1").unwrap())
),
(
"work".to_string(),
Err("Not implemented scheme: work".to_string()),
)
])
);
}
}
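
As a standalone illustration of the npm-alias handling in parse_dep_entry_name_and_raw_version above, here is a sketch that uses a plain String error instead of PackageJsonDepNpmSchemeValueParseError:

// A "key": "npm:name@version" entry aliases `key` to the npm package `name`.
fn name_and_raw_version<'a>(
    key: &'a str,
    value: &'a str,
) -> Result<(&'a str, &'a str), String> {
    if let Some(package_and_version) = value.strip_prefix("npm:") {
        // "alias": "npm:@scope/pkg@^1.0" -> ("@scope/pkg", "^1.0")
        package_and_version
            .rsplit_once('@')
            .ok_or_else(|| format!("Could not find @ symbol in npm url '{value}'"))
    } else {
        // "pkg": "^1.0" -> ("pkg", "^1.0")
        Ok((key, value))
    }
}

fn main() {
    assert_eq!(name_and_raw_version("pkg", "^1.2"), Ok(("pkg", "^1.2")));
    assert_eq!(
        name_and_raw_version("alias", "npm:@scope/real@~0.3"),
        Ok(("@scope/real", "~0.3"))
    );
    assert!(name_and_raw_version("broken", "npm:package").is_err());
}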

View file

@ -6,6 +6,7 @@ use super::tsc;
use super::tsc::AssetDocument;
use crate::args::package_json;
use crate::args::package_json::PackageJsonDeps;
use crate::args::ConfigFile;
use crate::args::JsxImportSourceConfig;
use crate::cache::CachedUrlMetadata;
@ -14,7 +15,6 @@ use crate::cache::HttpCache;
use crate::file_fetcher::get_source_from_bytes;
use crate::file_fetcher::map_content_type;
use crate::file_fetcher::SUPPORTED_SCHEMES;
use crate::lsp::logging::lsp_log;
use crate::node;
use crate::node::node_resolve_npm_reference;
use crate::node::NodeResolution;
@ -44,7 +44,6 @@ use deno_runtime::deno_node::PackageJson;
use deno_runtime::permissions::PermissionsContainer;
use indexmap::IndexMap;
use once_cell::sync::Lazy;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::VecDeque;
@ -1171,7 +1170,7 @@ impl Documents {
fn calculate_resolver_config_hash(
maybe_import_map: Option<&import_map::ImportMap>,
maybe_jsx_config: Option<&JsxImportSourceConfig>,
maybe_package_json_deps: Option<&BTreeMap<String, NpmPackageReq>>,
maybe_package_json_deps: Option<&PackageJsonDeps>,
) -> u64 {
let mut hasher = FastInsecureHasher::default();
if let Some(import_map) = maybe_import_map {
@ -1187,14 +1186,8 @@ impl Documents {
hasher.finish()
}
let maybe_package_json_deps = maybe_package_json.and_then(|package_json| {
match package_json::get_local_package_json_version_reqs(package_json) {
Ok(deps) => Some(deps),
Err(err) => {
lsp_log!("Error parsing package.json deps: {err:#}");
None
}
}
let maybe_package_json_deps = maybe_package_json.map(|package_json| {
package_json::get_local_package_json_version_reqs(package_json)
});
let maybe_jsx_config =
maybe_config_file.and_then(|cf| cf.to_maybe_jsx_import_source_config());
@ -1206,7 +1199,11 @@ impl Documents {
self.npm_package_json_reqs = Arc::new({
match &maybe_package_json_deps {
Some(deps) => {
let mut reqs = deps.values().cloned().collect::<Vec<_>>();
let mut reqs = deps
.values()
.filter_map(|r| r.as_ref().ok())
.cloned()
.collect::<Vec<_>>();
reqs.sort();
reqs
}

View file

@ -1,11 +1,11 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use std::collections::BTreeMap;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use deno_core::error::AnyError;
use deno_graph::npm::NpmPackageReq;
use crate::args::package_json::PackageJsonDeps;
use super::NpmRegistryApi;
use super::NpmResolution;
@ -15,7 +15,7 @@ struct PackageJsonDepsInstallerInner {
has_installed: AtomicBool,
npm_registry_api: NpmRegistryApi,
npm_resolution: NpmResolution,
package_deps: BTreeMap<String, NpmPackageReq>,
package_deps: PackageJsonDeps,
}
/// Holds and controls installing dependencies from package.json.
@ -26,7 +26,7 @@ impl PackageJsonDepsInstaller {
pub fn new(
npm_registry_api: NpmRegistryApi,
npm_resolution: NpmResolution,
deps: Option<BTreeMap<String, NpmPackageReq>>,
deps: Option<PackageJsonDeps>,
) -> Self {
Self(deps.map(|package_deps| {
Arc::new(PackageJsonDepsInstallerInner {
@ -38,7 +38,7 @@ impl PackageJsonDepsInstaller {
}))
}
pub fn package_deps(&self) -> Option<&BTreeMap<String, NpmPackageReq>> {
pub fn package_deps(&self) -> Option<&PackageJsonDeps> {
self.0.as_ref().map(|inner| &inner.package_deps)
}
@ -47,7 +47,10 @@ impl PackageJsonDepsInstaller {
if let Some(package_deps) = self.package_deps() {
// ensure this looks at the package name and not the
// bare specifiers (do not look at the keys!)
package_deps.values().any(|v| v.name == name)
package_deps
.values()
.filter_map(|r| r.as_ref().ok())
.any(|v| v.name == name)
} else {
false
}
@ -66,8 +69,11 @@ impl PackageJsonDepsInstaller {
return Ok(()); // already installed by something else
}
let mut package_reqs =
inner.package_deps.values().cloned().collect::<Vec<_>>();
let mut package_reqs = inner
.package_deps
.values()
.filter_map(|r| r.as_ref().ok())
.collect::<Vec<_>>();
package_reqs.sort(); // deterministic resolution
inner
@ -80,7 +86,7 @@ impl PackageJsonDepsInstaller {
for package_req in package_reqs {
inner
.npm_resolution
.resolve_package_req_as_pending(&package_req)?;
.resolve_package_req_as_pending(package_req)?;
}
Ok(())

View file

@ -234,7 +234,7 @@ impl ProcState {
let package_json_deps_installer = PackageJsonDepsInstaller::new(
npm_resolver.api().clone(),
npm_resolver.resolution().clone(),
cli_options.maybe_package_json_deps()?,
cli_options.maybe_package_json_deps(),
);
let maybe_import_map = cli_options
.resolve_import_map(&file_fetcher)

View file

@ -1,5 +1,6 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::futures::future;
@ -14,9 +15,9 @@ use deno_graph::source::UnknownBuiltInNodeModuleError;
use deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE;
use deno_runtime::deno_node::is_builtin_node_module;
use import_map::ImportMap;
use std::collections::BTreeMap;
use std::sync::Arc;
use crate::args::package_json::PackageJsonDeps;
use crate::args::JsxImportSourceConfig;
use crate::npm::NpmRegistryApi;
use crate::npm::NpmResolution;
@ -144,16 +145,19 @@ impl Resolver for CliGraphResolver {
fn resolve_package_json_dep(
specifier: &str,
deps: &BTreeMap<String, NpmPackageReq>,
) -> Result<Option<ModuleSpecifier>, deno_core::url::ParseError> {
for (bare_specifier, req) in deps {
deps: &PackageJsonDeps,
) -> Result<Option<ModuleSpecifier>, AnyError> {
for (bare_specifier, req_result) in deps {
if specifier.starts_with(bare_specifier) {
if specifier.len() == bare_specifier.len() {
return ModuleSpecifier::parse(&format!("npm:{req}")).map(Some);
}
let path = &specifier[bare_specifier.len()..];
if path.starts_with('/') {
return ModuleSpecifier::parse(&format!("npm:/{req}{path}")).map(Some);
if path.is_empty() || path.starts_with('/') {
let req = req_result.as_ref().map_err(|err| {
anyhow!(
"Parsing version constraints in the application-level package.json is more strict at the moment.\n\n{:#}",
err.clone()
)
})?;
return Ok(Some(ModuleSpecifier::parse(&format!("npm:{req}{path}"))?));
}
}
}
@ -239,6 +243,8 @@ impl NpmResolver for CliGraphResolver {
#[cfg(test)]
mod test {
use std::collections::BTreeMap;
use super::*;
#[test]
@ -247,7 +253,11 @@ mod test {
specifier: &str,
deps: &BTreeMap<String, NpmPackageReq>,
) -> Result<Option<String>, String> {
resolve_package_json_dep(specifier, deps)
let deps = deps
.iter()
.map(|(key, value)| (key.to_string(), Ok(value.clone())))
.collect();
resolve_package_json_dep(specifier, &deps)
.map(|s| s.map(|s| s.to_string()))
.map_err(|err| err.to_string())
}
@ -273,7 +283,7 @@ mod test {
);
assert_eq!(
resolve("package/some_path.ts", &deps).unwrap(),
Some("npm:/package@1.0/some_path.ts".to_string()),
Some("npm:package@1.0/some_path.ts".to_string()),
);
assert_eq!(
@ -282,7 +292,7 @@ mod test {
);
assert_eq!(
resolve("@deno/test/some_path.ts", &deps).unwrap(),
Some("npm:/@deno/test@~0.2/some_path.ts".to_string()),
Some("npm:@deno/test@~0.2/some_path.ts".to_string()),
);
// matches the start, but doesn't have the same length or a path
assert_eq!(resolve("@deno/testing", &deps).unwrap(), None,);
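
Two behaviors of resolve_package_json_dep are worth calling out: the npm specifier no longer carries a leading slash (npm:package@1.0/some_path.ts rather than npm:/package@1.0/some_path.ts), and a stored parse error becomes a user-facing error only when the matching bare specifier is actually imported. A simplified, string-only sketch of the matching logic (not the real CliGraphResolver code):

// A bare specifier either equals a package.json key exactly or extends it with "/path".
fn resolve_bare(specifier: &str, deps: &[(&str, &str)]) -> Option<String> {
    for (bare_specifier, req) in deps {
        if let Some(path) = specifier.strip_prefix(*bare_specifier) {
            if path.is_empty() || path.starts_with('/') {
                // e.g. "package/some_path.ts" -> "npm:package@1.0/some_path.ts"
                return Some(format!("npm:{req}{path}"));
            }
        }
    }
    None
}

fn main() {
    let deps = [("package", "package@1.0"), ("@deno/test", "@deno/test@~0.2")];
    assert_eq!(resolve_bare("package", &deps).as_deref(), Some("npm:package@1.0"));
    assert_eq!(
        resolve_bare("package/some_path.ts", &deps).as_deref(),
        Some("npm:package@1.0/some_path.ts")
    );
    // Matches the start of a key, but is neither an exact match nor followed by "/".
    assert_eq!(resolve_bare("@deno/testing", &deps), None);
}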

View file

@ -13,6 +13,7 @@ use trust_dns_client::serialize::txt::Lexer;
use trust_dns_client::serialize::txt::Parser;
use util::assert_contains;
use util::env_vars_for_npm_tests_no_sync_download;
use util::TestContextBuilder;
itest!(stdout_write_all {
args: "run --quiet run/stdout_write_all.ts",
@ -2836,6 +2837,36 @@ itest!(package_json_with_deno_json {
http_server: true,
});
#[test]
fn package_json_error_dep_value_test() {
let context = TestContextBuilder::for_npm()
.use_copy_temp_dir("package_json/invalid_value")
.cwd("package_json/invalid_value")
.build();
// should run fine when not referencing a failing dep entry
context
.new_command()
.args("run ok.ts")
.run()
.assert_matches_file("package_json/invalid_value/ok.ts.out");
// should fail when referencing a failing dep entry
context
.new_command()
.args("run error.ts")
.run()
.assert_exit_code(1)
.assert_matches_file("package_json/invalid_value/error.ts.out");
// should output a warning about the failing dep entry
context
.new_command()
.args("task test")
.run()
.assert_matches_file("package_json/invalid_value/task.out");
}
itest!(wasm_streaming_panic_test {
args: "run run/wasm_streaming_panic_test.js",
output: "run/wasm_streaming_panic_test.js.out",

View file

@ -0,0 +1,4 @@
// the package.json will error when importing
import * as test from "@denotest/cjs-default-export";
console.log(test);

View file

@ -0,0 +1,6 @@
error: Parsing version constraints in the application-level package.json is more strict at the moment.
Invalid npm specifier version requirement. Unexpected character.
invalid stuff that won't parse
~
at file:///[WILDCARD]/error.ts:2:23

View file

@ -0,0 +1,4 @@
import * as test from "@denotest/esm-basic";
test.setValue(2);
console.log(test.getValue());

View file

@ -0,0 +1,3 @@
Download http://localhost:4545/npm/registry/@denotest/esm-basic
Download http://localhost:4545/npm/registry/@denotest/esm-basic/1.0.0.tgz
2

View file

@ -0,0 +1,9 @@
{
"scripts": {
"test": "echo 1"
},
"dependencies": {
"@denotest/esm-basic": "*",
"@denotest/cjs-default-export": "invalid stuff that won't parse"
}
}

View file

@ -0,0 +1,6 @@
Warning Ignoring dependency '@denotest/cjs-default-export' in package.json because its version requirement failed to parse: Invalid npm specifier version requirement. Unexpected character.
invalid stuff that won't parse
~
Warning Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release.
Task test echo 1
1

View file

@ -60,11 +60,28 @@ pub async fn execute_script(
.await;
Ok(exit_code)
} else if let Some(script) = package_json_scripts.get(task_name) {
if let Some(package_deps) = ps.package_json_deps_installer.package_deps() {
for (key, value) in package_deps {
if let Err(err) = value {
log::info!(
"{} Ignoring dependency '{}' in package.json because its version requirement failed to parse: {:#}",
colors::yellow("Warning"),
key,
err,
);
}
}
}
ps.package_json_deps_installer
.ensure_top_level_install()
.await?;
ps.npm_resolver.resolve_pending().await?;
log::info!(
"{} Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release.",
colors::yellow("Warning"),
);
let cwd = match task_flags.cwd {
Some(path) => canonicalize_path(&PathBuf::from(path))?,
None => maybe_package_json
@ -76,10 +93,6 @@ pub async fn execute_script(
.to_owned(),
};
let script = get_script_with_args(script, &ps);
log::info!(
"{} Currently only basic package.json `scripts` are supported. Programs like `rimraf` or `cross-env` will not work correctly. This will be fixed in the upcoming release.",
colors::yellow("Warning"),
);
output_task(task_name, &script);
let seq_list = deno_task_shell::parser::parse(&script)
.with_context(|| format!("Error parsing script '{task_name}'."))?;

View file

@ -16,6 +16,7 @@ use pretty_assertions::assert_eq;
use crate::copy_dir_recursive;
use crate::deno_exe_path;
use crate::env_vars_for_npm_tests_no_sync_download;
use crate::http_server;
use crate::new_deno_dir;
use crate::strip_ansi_codes;
@ -41,6 +42,12 @@ impl TestContextBuilder {
Self::default()
}
pub fn for_npm() -> Self {
let mut builder = Self::new();
builder.use_http_server().add_npm_env_vars();
builder
}
pub fn use_http_server(&mut self) -> &mut Self {
self.use_http_server = true;
self
@ -54,11 +61,12 @@ impl TestContextBuilder {
/// Copies the files at the specified directory in the "testdata" directory
/// to the temp folder and runs the test from there. This is useful when
/// the test creates files in the testdata directory (ex. a node_modules folder)
pub fn use_copy_temp_dir(&mut self, dir: impl AsRef<str>) {
pub fn use_copy_temp_dir(&mut self, dir: impl AsRef<str>) -> &mut Self {
self.copy_temp_dir = Some(dir.as_ref().to_string());
self
}
pub fn set_cwd(&mut self, cwd: impl AsRef<str>) -> &mut Self {
pub fn cwd(&mut self, cwd: impl AsRef<str>) -> &mut Self {
self.cwd = Some(cwd.as_ref().to_string());
self
}
@ -74,6 +82,22 @@ impl TestContextBuilder {
self
}
pub fn add_npm_env_vars(&mut self) -> &mut Self {
for (key, value) in env_vars_for_npm_tests_no_sync_download() {
self.env(key, value);
}
self
}
pub fn use_sync_npm_download(&mut self) -> &mut Self {
self.env(
// make downloads determinstic
"DENO_UNSTABLE_NPM_SYNC_DOWNLOAD",
"1",
);
self
}
pub fn build(&self) -> TestContext {
let deno_dir = new_deno_dir(); // keep this alive for the test
let testdata_dir = if let Some(temp_copy_dir) = &self.copy_temp_dir {