Mirror of https://github.com/denoland/deno.git
synced 2024-11-21 15:04:11 -05:00
feat(node): Support executing npm package lifecycle scripts (preinstall/install/postinstall) (#24487)
Adds support for running npm package lifecycle scripts, opted into via a new `--allow-scripts` flag. With this PR, when running `deno cache` (or `DENO_FUTURE=1 deno install`) you can specify the `--allow-scripts=pkg1,pkg2` flag to run lifecycle scripts attached to the given packages. Note that at the moment this only works when `nodeModulesDir` is true (using the local resolver).

When a package with un-run lifecycle scripts is encountered, we emit a warning suggesting things may not work and to try running the lifecycle scripts. Additionally, if a package script implicitly requires `node-gyp` and it's not found on the system, we emit a warning.

Extra things in this PR:
- Extracted out bits of `task.rs` into a separate module for reuse
- Added a couple of fields to `process.config` in order to support `node-gyp` (it relies on a few variables being there)
- Drive-by fix to downloading new npm packages to the test registry

---

TODO:
- [x] validation for allow-scripts args (make sure it looks like an npm package)
- [x] make allow-scripts matching smarter
- [ ] figure out what issues this closes

---

Review notes:
- This adds a bunch of deps to our test registry due to using `node-gyp`, so it's pretty noisy
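For quick reference, a minimal invocation sketch of the flag described above (package names and the entrypoint are placeholders; per the validator added in this PR, `npm:` specifiers are required):

```
# run lifecycle scripts for specific packages while caching dependencies
# (scripts only execute when a local node_modules dir is used)
deno cache --node-modules-dir --allow-scripts=npm:pkg1,npm:pkg2 main.ts

# bare flag allows scripts for all packages
deno cache --node-modules-dir --allow-scripts main.ts

# with DENO_FUTURE=1, the same flag works with `deno install`
DENO_FUTURE=1 deno install --allow-scripts=npm:pkg1,npm:pkg2
```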
This commit is contained in:
parent eb46296e97, commit ce7dc2be92
207 changed files with 1409 additions and 473 deletions
Cargo.lock (generated): 1 change
|
@ -1196,6 +1196,7 @@ dependencies = [
|
|||
"typed-arena",
|
||||
"uuid",
|
||||
"walkdir",
|
||||
"which 4.4.2",
|
||||
"winapi",
|
||||
"winres",
|
||||
"zeromq",
|
||||
|
|
|
@ -180,6 +180,7 @@ twox-hash = "=1.6.3"
|
|||
url = { version = "< 2.5.0", features = ["serde", "expose_internals"] }
|
||||
uuid = { version = "1.3.0", features = ["v4"] }
|
||||
webpki-roots = "0.26"
|
||||
which = "4.2.5"
|
||||
zeromq = { version = "=0.3.4", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
|
||||
zstd = "=0.12.4"
|
||||
|
||||
|
|
|
@ -148,6 +148,7 @@ tower-lsp.workspace = true
|
|||
twox-hash.workspace = true
|
||||
typed-arena = "=2.0.1"
|
||||
uuid = { workspace = true, features = ["serde"] }
|
||||
which.workspace = true
|
||||
zeromq.workspace = true
|
||||
zstd.workspace = true
|
||||
|
||||
|
|
|
@@ -507,6 +507,30 @@ pub enum CaData {
  Bytes(Vec<u8>),
}

// Info needed to run NPM lifecycle scripts
#[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct LifecycleScriptsConfig {
  pub allowed: PackagesAllowedScripts,
  pub initial_cwd: Option<PathBuf>,
}

#[derive(Debug, Clone, Eq, PartialEq, Default)]
/// The set of npm packages that are allowed to run lifecycle scripts.
pub enum PackagesAllowedScripts {
  All,
  Some(Vec<String>),
  #[default]
  None,
}

fn parse_packages_allowed_scripts(s: &str) -> Result<String, AnyError> {
  if !s.starts_with("npm:") {
    bail!("Invalid package for --allow-scripts: '{}'. An 'npm:' specifier is required", s);
  } else {
    Ok(s.into())
  }
}

#[derive(
  Clone, Default, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize,
)]
|
||||
|
@ -562,6 +586,7 @@ pub struct Flags {
|
|||
pub v8_flags: Vec<String>,
|
||||
pub code_cache_enabled: bool,
|
||||
pub permissions: PermissionFlags,
|
||||
pub allow_scripts: PackagesAllowedScripts,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Default, Serialize, Deserialize)]
|
||||
|
@ -1502,6 +1527,7 @@ Future runs of this module will trigger no downloads or compilation unless
|
|||
.value_hint(ValueHint::FilePath),
|
||||
)
|
||||
.arg(frozen_lockfile_arg())
|
||||
.arg(allow_scripts_arg())
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -2213,7 +2239,7 @@ The installation root is determined, in order of precedence:
|
|||
|
||||
These must be added to the path manually if required.")
|
||||
.defer(|cmd| {
|
||||
let cmd = runtime_args(cmd, true, true).arg(check_arg(true));
|
||||
let cmd = runtime_args(cmd, true, true).arg(check_arg(true)).arg(allow_scripts_arg());
|
||||
install_args(cmd, true)
|
||||
})
|
||||
}
|
||||
|
@@ -3728,6 +3754,28 @@ fn unsafely_ignore_certificate_errors_arg() -> Arg {
    .value_parser(flags_net::validator)
}

fn allow_scripts_arg() -> Arg {
  Arg::new("allow-scripts")
    .long("allow-scripts")
    .num_args(0..)
    .use_value_delimiter(true)
    .require_equals(true)
    .value_name("PACKAGE")
    .value_parser(parse_packages_allowed_scripts)
    .help("Allow running npm lifecycle scripts for the given packages. Note: Scripts will only be executed when using a node_modules directory (`--node-modules-dir`)")
}

fn allow_scripts_arg_parse(flags: &mut Flags, matches: &mut ArgMatches) {
  let Some(parts) = matches.remove_many::<String>("allow-scripts") else {
    return;
  };
  if parts.len() == 0 {
    flags.allow_scripts = PackagesAllowedScripts::All;
  } else {
    flags.allow_scripts = PackagesAllowedScripts::Some(parts.collect());
  }
}

fn add_parse(flags: &mut Flags, matches: &mut ArgMatches) {
  flags.subcommand = DenoSubcommand::Add(add_parse_inner(matches, None));
}
|
||||
|
@ -3810,6 +3858,7 @@ fn bundle_parse(flags: &mut Flags, matches: &mut ArgMatches) {
|
|||
fn cache_parse(flags: &mut Flags, matches: &mut ArgMatches) {
|
||||
compile_args_parse(flags, matches);
|
||||
frozen_lockfile_arg_parse(flags, matches);
|
||||
allow_scripts_arg_parse(flags, matches);
|
||||
let files = matches.remove_many::<String>("file").unwrap().collect();
|
||||
flags.subcommand = DenoSubcommand::Cache(CacheFlags { files });
|
||||
}
|
||||
|
@ -4096,6 +4145,7 @@ fn install_parse(flags: &mut Flags, matches: &mut ArgMatches) {
|
|||
let local_flags = matches
|
||||
.remove_many("cmd")
|
||||
.map(|packages| add_parse_inner(matches, Some(packages)));
|
||||
allow_scripts_arg_parse(flags, matches);
|
||||
flags.subcommand = DenoSubcommand::Install(InstallFlags {
|
||||
global,
|
||||
kind: InstallKind::Local(local_flags),
|
||||
|
@ -9969,4 +10019,50 @@ mod tests {
|
|||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn allow_scripts() {
|
||||
let cases = [
|
||||
(Some("--allow-scripts"), Ok(PackagesAllowedScripts::All)),
|
||||
(None, Ok(PackagesAllowedScripts::None)),
|
||||
(
|
||||
Some("--allow-scripts=npm:foo"),
|
||||
Ok(PackagesAllowedScripts::Some(svec!["npm:foo"])),
|
||||
),
|
||||
(
|
||||
Some("--allow-scripts=npm:foo,npm:bar"),
|
||||
Ok(PackagesAllowedScripts::Some(svec!["npm:foo", "npm:bar"])),
|
||||
),
|
||||
(Some("--allow-scripts=foo"), Err("Invalid package")),
|
||||
];
|
||||
for (flag, value) in cases {
|
||||
let mut args = svec!["deno", "cache"];
|
||||
if let Some(flag) = flag {
|
||||
args.push(flag.into());
|
||||
}
|
||||
args.push("script.ts".into());
|
||||
let r = flags_from_vec(args);
|
||||
match value {
|
||||
Ok(value) => {
|
||||
assert_eq!(
|
||||
r.unwrap(),
|
||||
Flags {
|
||||
subcommand: DenoSubcommand::Cache(CacheFlags {
|
||||
files: svec!["script.ts"],
|
||||
}),
|
||||
allow_scripts: value,
|
||||
..Flags::default()
|
||||
}
|
||||
);
|
||||
}
|
||||
Err(e) => {
|
||||
let err = r.unwrap_err();
|
||||
assert!(
|
||||
err.to_string().contains(e),
|
||||
"expected to contain '{e}' got '{err}'"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1720,6 +1720,20 @@ impl CliOptions {
|
|||
}
|
||||
full_paths
|
||||
}
|
||||
|
||||
pub fn lifecycle_scripts_config(&self) -> LifecycleScriptsConfig {
|
||||
LifecycleScriptsConfig {
|
||||
allowed: self.flags.allow_scripts.clone(),
|
||||
initial_cwd: if matches!(
|
||||
self.flags.allow_scripts,
|
||||
PackagesAllowedScripts::None
|
||||
) {
|
||||
None
|
||||
} else {
|
||||
Some(self.initial_cwd.clone())
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolves the path to use for a local node_modules folder.
|
||||
|
|
|
@ -443,7 +443,8 @@ impl CliFactory {
|
|||
&self.options.workspace,
|
||||
)),
|
||||
npm_system_info: self.options.npm_system_info(),
|
||||
npmrc: self.options.npmrc().clone()
|
||||
npmrc: self.options.npmrc().clone(),
|
||||
lifecycle_scripts: self.options.lifecycle_scripts_config(),
|
||||
})
|
||||
}).await
|
||||
}.boxed_local())
|
||||
|
|
|
@ -469,6 +469,7 @@ async fn create_npm_resolver(
|
|||
.and_then(|d| d.npmrc.clone())
|
||||
.unwrap_or_else(create_default_npmrc),
|
||||
npm_system_info: NpmSystemInfo::default(),
|
||||
lifecycle_scripts: Default::default(),
|
||||
})
|
||||
};
|
||||
Some(create_cli_npm_resolver_for_lsp(options).await)
|
||||
|
|
|
@ -21,6 +21,7 @@ mod npm;
|
|||
mod ops;
|
||||
mod resolver;
|
||||
mod standalone;
|
||||
mod task_runner;
|
||||
mod tools;
|
||||
mod tsc;
|
||||
mod util;
|
||||
|
|
|
@ -18,6 +18,7 @@ mod js;
|
|||
mod node;
|
||||
mod npm;
|
||||
mod resolver;
|
||||
mod task_runner;
|
||||
mod util;
|
||||
mod version;
|
||||
mod worker;
|
||||
|
|
|
@ -29,6 +29,7 @@ use deno_semver::package::PackageReq;
|
|||
use resolution::AddPkgReqsResult;
|
||||
|
||||
use crate::args::CliLockfile;
|
||||
use crate::args::LifecycleScriptsConfig;
|
||||
use crate::args::NpmProcessState;
|
||||
use crate::args::NpmProcessStateKind;
|
||||
use crate::args::PackageJsonInstallDepsProvider;
|
||||
|
@ -70,6 +71,7 @@ pub struct CliNpmResolverManagedCreateOptions {
|
|||
pub npm_system_info: NpmSystemInfo,
|
||||
pub package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
|
||||
pub npmrc: Arc<ResolvedNpmRc>,
|
||||
pub lifecycle_scripts: LifecycleScriptsConfig,
|
||||
}
|
||||
|
||||
pub async fn create_managed_npm_resolver_for_lsp(
|
||||
|
@ -98,6 +100,7 @@ pub async fn create_managed_npm_resolver_for_lsp(
|
|||
options.maybe_node_modules_path,
|
||||
options.npm_system_info,
|
||||
snapshot,
|
||||
options.lifecycle_scripts,
|
||||
)
|
||||
})
|
||||
.await
|
||||
|
@ -122,6 +125,7 @@ pub async fn create_managed_npm_resolver(
|
|||
options.maybe_node_modules_path,
|
||||
options.npm_system_info,
|
||||
snapshot,
|
||||
options.lifecycle_scripts,
|
||||
))
|
||||
}
|
||||
|
||||
|
@ -138,6 +142,7 @@ fn create_inner(
|
|||
node_modules_dir_path: Option<PathBuf>,
|
||||
npm_system_info: NpmSystemInfo,
|
||||
snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
|
||||
lifecycle_scripts: LifecycleScriptsConfig,
|
||||
) -> Arc<dyn CliNpmResolver> {
|
||||
let resolution = Arc::new(NpmResolution::from_serialized(
|
||||
npm_api.clone(),
|
||||
|
@ -160,6 +165,7 @@ fn create_inner(
|
|||
tarball_cache.clone(),
|
||||
node_modules_dir_path,
|
||||
npm_system_info.clone(),
|
||||
lifecycle_scripts.clone(),
|
||||
);
|
||||
Arc::new(ManagedCliNpmResolver::new(
|
||||
fs,
|
||||
|
@ -172,6 +178,7 @@ fn create_inner(
|
|||
tarball_cache,
|
||||
text_only_progress_bar,
|
||||
npm_system_info,
|
||||
lifecycle_scripts,
|
||||
))
|
||||
}
|
||||
|
||||
|
@ -258,6 +265,7 @@ pub struct ManagedCliNpmResolver {
|
|||
text_only_progress_bar: ProgressBar,
|
||||
npm_system_info: NpmSystemInfo,
|
||||
top_level_install_flag: AtomicFlag,
|
||||
lifecycle_scripts: LifecycleScriptsConfig,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for ManagedCliNpmResolver {
|
||||
|
@ -281,6 +289,7 @@ impl ManagedCliNpmResolver {
|
|||
tarball_cache: Arc<TarballCache>,
|
||||
text_only_progress_bar: ProgressBar,
|
||||
npm_system_info: NpmSystemInfo,
|
||||
lifecycle_scripts: LifecycleScriptsConfig,
|
||||
) -> Self {
|
||||
Self {
|
||||
fs,
|
||||
|
@ -294,6 +303,7 @@ impl ManagedCliNpmResolver {
|
|||
tarball_cache,
|
||||
npm_system_info,
|
||||
top_level_install_flag: Default::default(),
|
||||
lifecycle_scripts,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -578,6 +588,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
|
|||
self.tarball_cache.clone(),
|
||||
self.root_node_modules_path().map(ToOwned::to_owned),
|
||||
self.npm_system_info.clone(),
|
||||
self.lifecycle_scripts.clone(),
|
||||
),
|
||||
self.maybe_lockfile.clone(),
|
||||
self.npm_api.clone(),
|
||||
|
@ -587,6 +598,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
|
|||
self.tarball_cache.clone(),
|
||||
self.text_only_progress_bar.clone(),
|
||||
self.npm_system_info.clone(),
|
||||
self.lifecycle_scripts.clone(),
|
||||
))
|
||||
}
|
||||
|
||||
|
|
|
@ -16,8 +16,11 @@ use std::path::PathBuf;
|
|||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::args::LifecycleScriptsConfig;
|
||||
use crate::args::PackagesAllowedScripts;
|
||||
use async_trait::async_trait;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::stream::FuturesUnordered;
|
||||
|
@ -68,6 +71,7 @@ pub struct LocalNpmPackageResolver {
|
|||
root_node_modules_url: Url,
|
||||
system_info: NpmSystemInfo,
|
||||
registry_read_permission_checker: RegistryReadPermissionChecker,
|
||||
lifecycle_scripts: LifecycleScriptsConfig,
|
||||
}
|
||||
|
||||
impl LocalNpmPackageResolver {
|
||||
|
@ -81,6 +85,7 @@ impl LocalNpmPackageResolver {
|
|||
tarball_cache: Arc<TarballCache>,
|
||||
node_modules_folder: PathBuf,
|
||||
system_info: NpmSystemInfo,
|
||||
lifecycle_scripts: LifecycleScriptsConfig,
|
||||
) -> Self {
|
||||
Self {
|
||||
cache,
|
||||
|
@ -97,6 +102,7 @@ impl LocalNpmPackageResolver {
|
|||
.unwrap(),
|
||||
root_node_modules_path: node_modules_folder,
|
||||
system_info,
|
||||
lifecycle_scripts,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -245,6 +251,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
&self.tarball_cache,
|
||||
&self.root_node_modules_path,
|
||||
&self.system_info,
|
||||
&self.lifecycle_scripts,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
@ -260,7 +267,131 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
}
|
||||
}
|
||||
|
||||
// take in all (non copy) packages from snapshot,
|
||||
// and resolve the set of available binaries to create
|
||||
// custom commands available to the task runner
|
||||
fn resolve_baseline_custom_commands(
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
packages: &[NpmResolutionPackage],
|
||||
local_registry_dir: &Path,
|
||||
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
|
||||
let mut custom_commands = crate::task_runner::TaskCustomCommands::new();
|
||||
custom_commands
|
||||
.insert("npx".to_string(), Rc::new(crate::task_runner::NpxCommand));
|
||||
|
||||
custom_commands
|
||||
.insert("npm".to_string(), Rc::new(crate::task_runner::NpmCommand));
|
||||
|
||||
custom_commands
|
||||
.insert("node".to_string(), Rc::new(crate::task_runner::NodeCommand));
|
||||
|
||||
custom_commands.insert(
|
||||
"node-gyp".to_string(),
|
||||
Rc::new(crate::task_runner::NodeGypCommand),
|
||||
);
|
||||
|
||||
// TODO: this recreates the bin entries which could be redoing some work, but the ones
|
||||
// we compute earlier in `sync_resolution_with_fs` may not be exhaustive (because we skip
|
||||
// doing it for packages that are set up already.
|
||||
// realistically, scripts won't be run very often so it probably isn't too big of an issue.
|
||||
resolve_custom_commands_from_packages(
|
||||
custom_commands,
|
||||
snapshot,
|
||||
packages,
|
||||
local_registry_dir,
|
||||
)
|
||||
}
|
||||
|
||||
// resolves the custom commands from an iterator of packages
|
||||
// and adds them to the existing custom commands.
|
||||
// note that this will overwrite any existing custom commands
|
||||
fn resolve_custom_commands_from_packages<
|
||||
'a,
|
||||
P: IntoIterator<Item = &'a NpmResolutionPackage>,
|
||||
>(
|
||||
mut commands: crate::task_runner::TaskCustomCommands,
|
||||
snapshot: &'a NpmResolutionSnapshot,
|
||||
packages: P,
|
||||
local_registry_dir: &Path,
|
||||
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
|
||||
let mut bin_entries = bin_entries::BinEntries::new();
|
||||
for package in packages {
|
||||
let package_path =
|
||||
local_node_modules_package_path(local_registry_dir, package);
|
||||
|
||||
if package.bin.is_some() {
|
||||
bin_entries.add(package.clone(), package_path);
|
||||
}
|
||||
}
|
||||
let bins = bin_entries.into_bin_files(snapshot);
|
||||
for (bin_name, script_path) in bins {
|
||||
commands.insert(
|
||||
bin_name.clone(),
|
||||
Rc::new(crate::task_runner::NodeModulesFileRunCommand {
|
||||
command_name: bin_name,
|
||||
path: script_path,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(commands)
|
||||
}
|
||||
|
||||
fn local_node_modules_package_path(
|
||||
local_registry_dir: &Path,
|
||||
package: &NpmResolutionPackage,
|
||||
) -> PathBuf {
|
||||
local_registry_dir
|
||||
.join(get_package_folder_id_folder_name(
|
||||
&package.get_package_cache_folder_id(),
|
||||
))
|
||||
.join("node_modules")
|
||||
.join(&package.id.nv.name)
|
||||
}
|
||||
|
||||
// resolves the custom commands from the dependencies of a package
|
||||
// and adds them to the existing custom commands.
|
||||
// note that this will overwrite any existing custom commands.
|
||||
fn resolve_custom_commands_from_deps(
|
||||
baseline: crate::task_runner::TaskCustomCommands,
|
||||
package: &NpmResolutionPackage,
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
local_registry_dir: &Path,
|
||||
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
|
||||
resolve_custom_commands_from_packages(
|
||||
baseline,
|
||||
snapshot,
|
||||
package
|
||||
.dependencies
|
||||
.values()
|
||||
.map(|id| snapshot.package_from_id(id).unwrap()),
|
||||
local_registry_dir,
|
||||
)
|
||||
}
|
||||
|
||||
fn can_run_scripts(
  allow_scripts: &PackagesAllowedScripts,
  package_nv: &PackageNv,
) -> bool {
  match allow_scripts {
    PackagesAllowedScripts::All => true,
    // TODO: make this more correct
    PackagesAllowedScripts::Some(allow_list) => allow_list.iter().any(|s| {
      let s = s.strip_prefix("npm:").unwrap_or(s);
      s == package_nv.name || s == package_nv.to_string()
    }),
    PackagesAllowedScripts::None => false,
  }
}

fn has_lifecycle_scripts(package: &NpmResolutionPackage) -> bool {
  package.scripts.contains_key("preinstall")
    || package.scripts.contains_key("install")
    || package.scripts.contains_key("postinstall")
}
|
||||
|
||||
/// Creates a pnpm style folder structure.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn sync_resolution_with_fs(
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
cache: &Arc<NpmCache>,
|
||||
|
@ -269,6 +400,7 @@ async fn sync_resolution_with_fs(
|
|||
tarball_cache: &Arc<TarballCache>,
|
||||
root_node_modules_dir_path: &Path,
|
||||
system_info: &NpmSystemInfo,
|
||||
lifecycle_scripts: &LifecycleScriptsConfig,
|
||||
) -> Result<(), AnyError> {
|
||||
if snapshot.is_empty() && pkg_json_deps_provider.workspace_pkgs().is_empty() {
|
||||
return Ok(()); // don't create the directory
|
||||
|
@ -307,6 +439,8 @@ async fn sync_resolution_with_fs(
|
|||
let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> =
|
||||
HashMap::with_capacity(package_partitions.packages.len());
|
||||
let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new()));
|
||||
let mut packages_with_scripts = Vec::with_capacity(2);
|
||||
let mut packages_with_scripts_not_run = Vec::new();
|
||||
for package in &package_partitions.packages {
|
||||
if let Some(current_pkg) =
|
||||
newest_packages_by_name.get_mut(&package.id.nv.name)
|
||||
|
@ -331,6 +465,7 @@ async fn sync_resolution_with_fs(
|
|||
// are forced to be recreated
|
||||
setup_cache.remove_dep(&package_folder_name);
|
||||
|
||||
let folder_path = folder_path.clone();
|
||||
let bin_entries_to_setup = bin_entries.clone();
|
||||
cache_futures.push(async move {
|
||||
tarball_cache
|
||||
|
@ -368,6 +503,24 @@ async fn sync_resolution_with_fs(
|
|||
Ok::<_, AnyError>(())
|
||||
});
|
||||
}
|
||||
|
||||
if has_lifecycle_scripts(package) {
|
||||
let scripts_run = folder_path.join(".scripts-run");
|
||||
if can_run_scripts(&lifecycle_scripts.allowed, &package.id.nv) {
|
||||
if !scripts_run.exists() {
|
||||
let sub_node_modules = folder_path.join("node_modules");
|
||||
let package_path =
|
||||
join_package_name(&sub_node_modules, &package.id.nv.name);
|
||||
packages_with_scripts.push((
|
||||
package.clone(),
|
||||
package_path,
|
||||
scripts_run,
|
||||
));
|
||||
}
|
||||
} else if !scripts_run.exists() {
|
||||
packages_with_scripts_not_run.push(package.id.nv.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
while let Some(result) = cache_futures.next().await {
|
||||
|
@ -509,6 +662,73 @@ async fn sync_resolution_with_fs(
|
|||
}
|
||||
}
|
||||
|
||||
  if !packages_with_scripts.is_empty() {
    // get custom commands for each bin available in the node_modules dir (essentially
    // the scripts that are in `node_modules/.bin`)
    let base = resolve_baseline_custom_commands(
      snapshot,
      &package_partitions.packages,
      &deno_local_registry_dir,
    )?;
    let init_cwd = lifecycle_scripts.initial_cwd.as_deref().unwrap();

    for (package, package_path, scripts_run_path) in packages_with_scripts {
      // add custom commands for binaries from the package's dependencies. this will take precedence over the
      // baseline commands, so if the package relies on a bin that conflicts with one higher in the dependency tree, the
      // correct bin will be used.
      let custom_commands = resolve_custom_commands_from_deps(
        base.clone(),
        &package,
        snapshot,
        &deno_local_registry_dir,
      )?;
      for script_name in ["preinstall", "install", "postinstall"] {
        if let Some(script) = package.scripts.get(script_name) {
          let exit_code =
            crate::task_runner::run_task(crate::task_runner::RunTaskOptions {
              task_name: script_name,
              script,
              cwd: &package_path,
              env_vars: crate::task_runner::real_env_vars(),
              custom_commands: custom_commands.clone(),
              init_cwd,
              argv: &[],
              root_node_modules_dir: Some(root_node_modules_dir_path),
            })
            .await?;
          if exit_code != 0 {
            anyhow::bail!(
              "script '{}' in '{}' failed with exit code {}",
              script_name,
              package.id.nv,
              exit_code,
            );
          }
        }
      }
      fs::write(scripts_run_path, "")?;
    }
  }

  if !packages_with_scripts_not_run.is_empty() {
    let (maybe_install, maybe_install_example) = if *crate::args::DENO_FUTURE {
      (
        " or `deno install`",
        " or `deno install --allow-scripts=pkg1,pkg2`",
      )
    } else {
      ("", "")
    };
    let packages = packages_with_scripts_not_run
      .iter()
      .map(|p| p.to_string())
      .collect::<Vec<_>>()
      .join(", ");
    log::warn!("{}: Packages contained npm lifecycle scripts (preinstall/install/postinstall) that were not executed.
This may cause the packages to not work correctly. To run them, use the `--allow-scripts` flag with `deno cache`{maybe_install}
(e.g. `deno cache --allow-scripts=pkg1,pkg2 <entrypoint>`{maybe_install_example}):\n {packages}", crate::colors::yellow("warning"));
  }
|
||||
|
||||
setup_cache.save();
|
||||
drop(single_process_lock);
|
||||
drop(pb_clear_guard);
|
||||
|
|
|
@ -71,19 +71,16 @@ impl BinEntries {
|
|||
self.entries.push((package, package_path));
|
||||
}
|
||||
|
||||
/// Finish setting up the bin entries, writing the necessary files
|
||||
/// to disk.
|
||||
pub(super) fn finish(
|
||||
mut self,
|
||||
fn for_each_entry(
|
||||
&mut self,
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
bin_node_modules_dir_path: &Path,
|
||||
mut f: impl FnMut(
|
||||
&NpmResolutionPackage,
|
||||
&Path,
|
||||
&str, // bin name
|
||||
&str, // bin script
|
||||
) -> Result<(), AnyError>,
|
||||
) -> Result<(), AnyError> {
|
||||
if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() {
|
||||
std::fs::create_dir_all(bin_node_modules_dir_path).with_context(
|
||||
|| format!("Creating '{}'", bin_node_modules_dir_path.display()),
|
||||
)?;
|
||||
}
|
||||
|
||||
if !self.collisions.is_empty() {
|
||||
// walking the dependency tree to find out the depth of each package
|
||||
// is sort of expensive, so we only do it if there's a collision
|
||||
|
@ -101,13 +98,7 @@ impl BinEntries {
|
|||
// we already set up a bin entry with this name
|
||||
continue;
|
||||
}
|
||||
set_up_bin_entry(
|
||||
package,
|
||||
name,
|
||||
script,
|
||||
package_path,
|
||||
bin_node_modules_dir_path,
|
||||
)?;
|
||||
f(package, package_path, name, script)?;
|
||||
}
|
||||
deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => {
|
||||
for (name, script) in entries {
|
||||
|
@ -115,13 +106,7 @@ impl BinEntries {
|
|||
// we already set up a bin entry with this name
|
||||
continue;
|
||||
}
|
||||
set_up_bin_entry(
|
||||
package,
|
||||
name,
|
||||
script,
|
||||
package_path,
|
||||
bin_node_modules_dir_path,
|
||||
)?;
|
||||
f(package, package_path, name, script)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -130,6 +115,47 @@ impl BinEntries {
|
|||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Collect the bin entries into a vec of (name, script path)
|
||||
pub(super) fn into_bin_files(
|
||||
mut self,
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
) -> Vec<(String, PathBuf)> {
|
||||
let mut bins = Vec::new();
|
||||
self
|
||||
.for_each_entry(snapshot, |_, package_path, name, script| {
|
||||
bins.push((name.to_string(), package_path.join(script)));
|
||||
Ok(())
|
||||
})
|
||||
.unwrap();
|
||||
bins
|
||||
}
|
||||
|
||||
/// Finish setting up the bin entries, writing the necessary files
|
||||
/// to disk.
|
||||
pub(super) fn finish(
|
||||
mut self,
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
bin_node_modules_dir_path: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() {
|
||||
std::fs::create_dir_all(bin_node_modules_dir_path).with_context(
|
||||
|| format!("Creating '{}'", bin_node_modules_dir_path.display()),
|
||||
)?;
|
||||
}
|
||||
|
||||
self.for_each_entry(snapshot, |package, package_path, name, script| {
|
||||
set_up_bin_entry(
|
||||
package,
|
||||
name,
|
||||
script,
|
||||
package_path,
|
||||
bin_node_modules_dir_path,
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// walk the dependency tree to find out the depth of each package
|
||||
|
|
|
@ -10,6 +10,7 @@ use std::sync::Arc;
|
|||
use deno_npm::NpmSystemInfo;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
|
||||
use crate::args::LifecycleScriptsConfig;
|
||||
use crate::args::PackageJsonInstallDepsProvider;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
|
||||
|
@ -32,6 +33,7 @@ pub fn create_npm_fs_resolver(
|
|||
tarball_cache: Arc<TarballCache>,
|
||||
maybe_node_modules_path: Option<PathBuf>,
|
||||
system_info: NpmSystemInfo,
|
||||
lifecycle_scripts: LifecycleScriptsConfig,
|
||||
) -> Arc<dyn NpmPackageFsResolver> {
|
||||
match maybe_node_modules_path {
|
||||
Some(node_modules_folder) => Arc::new(LocalNpmPackageResolver::new(
|
||||
|
@ -43,6 +45,7 @@ pub fn create_npm_fs_resolver(
|
|||
tarball_cache,
|
||||
node_modules_folder,
|
||||
system_info,
|
||||
lifecycle_scripts,
|
||||
)),
|
||||
None => Arc::new(GlobalNpmPackageResolver::new(
|
||||
npm_cache,
|
||||
|
|
|
@ -478,6 +478,7 @@ pub async fn run(
|
|||
scopes: Default::default(),
|
||||
registry_configs: Default::default(),
|
||||
}),
|
||||
lifecycle_scripts: Default::default(),
|
||||
},
|
||||
))
|
||||
.await?;
|
||||
|
@ -522,6 +523,7 @@ pub async fn run(
|
|||
// Packages from different registries are already inlined in the ESZip,
|
||||
// so no need to create actual `.npmrc` configuration.
|
||||
npmrc: create_default_npmrc(),
|
||||
lifecycle_scripts: Default::default(),
|
||||
},
|
||||
))
|
||||
.await?;
|
||||
|
|
cli/task_runner.rs (new file): 506 additions
|
@ -0,0 +1,506 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures;
|
||||
use deno_core::futures::future::LocalBoxFuture;
|
||||
use deno_runtime::deno_node::NodeResolver;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_task_shell::ExecutableCommand;
|
||||
use deno_task_shell::ExecuteResult;
|
||||
use deno_task_shell::ShellCommand;
|
||||
use deno_task_shell::ShellCommandContext;
|
||||
use lazy_regex::Lazy;
|
||||
use regex::Regex;
|
||||
use tokio::task::LocalSet;
|
||||
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::npm::InnerCliNpmResolverRef;
|
||||
use crate::npm::ManagedCliNpmResolver;
|
||||
|
||||
pub fn get_script_with_args(script: &str, argv: &[String]) -> String {
|
||||
let additional_args = argv
|
||||
.iter()
|
||||
// surround all the additional arguments in double quotes
|
||||
// and sanitize any command substitution
|
||||
.map(|a| format!("\"{}\"", a.replace('"', "\\\"").replace('$', "\\$")))
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ");
|
||||
let script = format!("{script} {additional_args}");
|
||||
script.trim().to_owned()
|
||||
}
|
||||
|
||||
pub struct RunTaskOptions<'a> {
|
||||
pub task_name: &'a str,
|
||||
pub script: &'a str,
|
||||
pub cwd: &'a Path,
|
||||
pub init_cwd: &'a Path,
|
||||
pub env_vars: HashMap<String, String>,
|
||||
pub argv: &'a [String],
|
||||
pub custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
|
||||
pub root_node_modules_dir: Option<&'a Path>,
|
||||
}
|
||||
|
||||
pub type TaskCustomCommands = HashMap<String, Rc<dyn ShellCommand>>;
|
||||
|
||||
pub async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> {
|
||||
let script = get_script_with_args(opts.script, opts.argv);
|
||||
let seq_list = deno_task_shell::parser::parse(&script)
|
||||
.with_context(|| format!("Error parsing script '{}'.", opts.task_name))?;
|
||||
let env_vars =
|
||||
prepare_env_vars(opts.env_vars, opts.init_cwd, opts.root_node_modules_dir);
|
||||
let local = LocalSet::new();
|
||||
let future = deno_task_shell::execute(
|
||||
seq_list,
|
||||
env_vars,
|
||||
opts.cwd,
|
||||
opts.custom_commands,
|
||||
);
|
||||
Ok(local.run_until(future).await)
|
||||
}
|
||||
|
||||
fn prepare_env_vars(
|
||||
mut env_vars: HashMap<String, String>,
|
||||
initial_cwd: &Path,
|
||||
node_modules_dir: Option<&Path>,
|
||||
) -> HashMap<String, String> {
|
||||
const INIT_CWD_NAME: &str = "INIT_CWD";
|
||||
if !env_vars.contains_key(INIT_CWD_NAME) {
|
||||
// if not set, set an INIT_CWD env var that has the cwd
|
||||
env_vars.insert(
|
||||
INIT_CWD_NAME.to_string(),
|
||||
initial_cwd.to_string_lossy().to_string(),
|
||||
);
|
||||
}
|
||||
if let Some(node_modules_dir) = node_modules_dir {
|
||||
prepend_to_path(
|
||||
&mut env_vars,
|
||||
node_modules_dir.join(".bin").to_string_lossy().to_string(),
|
||||
);
|
||||
}
|
||||
env_vars
|
||||
}
|
||||
|
||||
fn prepend_to_path(env_vars: &mut HashMap<String, String>, value: String) {
|
||||
match env_vars.get_mut("PATH") {
|
||||
Some(path) => {
|
||||
if path.is_empty() {
|
||||
*path = value;
|
||||
} else {
|
||||
*path =
|
||||
format!("{}{}{}", value, if cfg!(windows) { ";" } else { ":" }, path);
|
||||
}
|
||||
}
|
||||
None => {
|
||||
env_vars.insert("PATH".to_string(), value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn real_env_vars() -> HashMap<String, String> {
|
||||
std::env::vars()
|
||||
.map(|(k, v)| {
|
||||
if cfg!(windows) {
|
||||
(k.to_uppercase(), v)
|
||||
} else {
|
||||
(k, v)
|
||||
}
|
||||
})
|
||||
.collect::<HashMap<String, String>>()
|
||||
}
|
||||
|
||||
// WARNING: Do not depend on this env var in user code. It's not stable API.
|
||||
pub(crate) const USE_PKG_JSON_HIDDEN_ENV_VAR_NAME: &str =
|
||||
"DENO_INTERNAL_TASK_USE_PKG_JSON";
|
||||
|
||||
pub struct NpmCommand;
|
||||
|
||||
impl ShellCommand for NpmCommand {
|
||||
fn execute(
|
||||
&self,
|
||||
mut context: ShellCommandContext,
|
||||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
if context.args.first().map(|s| s.as_str()) == Some("run")
|
||||
&& context.args.len() > 2
|
||||
// for now, don't run any npm scripts that have a flag because
|
||||
// we don't handle stuff like `--workspaces` properly
|
||||
&& !context.args.iter().any(|s| s.starts_with('-'))
|
||||
{
|
||||
// run with deno task instead
|
||||
let mut args = Vec::with_capacity(context.args.len());
|
||||
args.push("task".to_string());
|
||||
args.extend(context.args.iter().skip(1).cloned());
|
||||
|
||||
let mut state = context.state;
|
||||
state.apply_env_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME, "1");
|
||||
return ExecutableCommand::new(
|
||||
"deno".to_string(),
|
||||
std::env::current_exe().unwrap(),
|
||||
)
|
||||
.execute(ShellCommandContext {
|
||||
args,
|
||||
state,
|
||||
..context
|
||||
});
|
||||
}
|
||||
|
||||
// fallback to running the real npm command
|
||||
let npm_path = match context.state.resolve_command_path("npm") {
|
||||
Ok(path) => path,
|
||||
Err(err) => {
|
||||
let _ = context.stderr.write_line(&format!("{}", err));
|
||||
return Box::pin(futures::future::ready(
|
||||
ExecuteResult::from_exit_code(err.exit_code()),
|
||||
));
|
||||
}
|
||||
};
|
||||
ExecutableCommand::new("npm".to_string(), npm_path).execute(context)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct NodeCommand;
|
||||
|
||||
impl ShellCommand for NodeCommand {
|
||||
fn execute(
|
||||
&self,
|
||||
context: ShellCommandContext,
|
||||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
// run with deno if it's a simple invocation, fall back to node
|
||||
// if there are extra flags
|
||||
let mut args = Vec::with_capacity(context.args.len());
|
||||
if context.args.len() > 1
|
||||
&& (
|
||||
context.args[0].starts_with('-') // has a flag
|
||||
|| !matches!(
|
||||
MediaType::from_str(&context.args[0]),
|
||||
MediaType::Cjs | MediaType::Mjs | MediaType::JavaScript
|
||||
)
|
||||
// not a script file
|
||||
)
|
||||
{
|
||||
return ExecutableCommand::new(
|
||||
"node".to_string(),
|
||||
"node".to_string().into(),
|
||||
)
|
||||
.execute(context);
|
||||
}
|
||||
args.extend(["run", "-A"].into_iter().map(|s| s.to_string()));
|
||||
args.extend(context.args.iter().cloned());
|
||||
|
||||
let mut state = context.state;
|
||||
state.apply_env_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME, "1");
|
||||
ExecutableCommand::new("deno".to_string(), std::env::current_exe().unwrap())
|
||||
.execute(ShellCommandContext {
|
||||
args,
|
||||
state,
|
||||
..context
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct NodeGypCommand;
|
||||
|
||||
impl ShellCommand for NodeGypCommand {
|
||||
fn execute(
|
||||
&self,
|
||||
context: ShellCommandContext,
|
||||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
// at the moment this shell command is just to give a warning if node-gyp is not found
|
||||
// in the future, we could try to run/install node-gyp for the user with deno
|
||||
if which::which("node-gyp").is_err() {
|
||||
log::warn!("{}: node-gyp was used in a script, but was not listed as a dependency. Either add it as a dependency or install it globally (e.g. `npm install -g node-gyp`)", crate::colors::yellow("warning"));
|
||||
}
|
||||
ExecutableCommand::new(
|
||||
"node-gyp".to_string(),
|
||||
"node-gyp".to_string().into(),
|
||||
)
|
||||
.execute(context)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct NpxCommand;
|
||||
|
||||
impl ShellCommand for NpxCommand {
|
||||
fn execute(
|
||||
&self,
|
||||
mut context: ShellCommandContext,
|
||||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
if let Some(first_arg) = context.args.first().cloned() {
|
||||
if let Some(command) = context.state.resolve_custom_command(&first_arg) {
|
||||
let context = ShellCommandContext {
|
||||
args: context.args.iter().skip(1).cloned().collect::<Vec<_>>(),
|
||||
..context
|
||||
};
|
||||
command.execute(context)
|
||||
} else {
|
||||
// can't find the command, so fallback to running the real npx command
|
||||
let npx_path = match context.state.resolve_command_path("npx") {
|
||||
Ok(npx) => npx,
|
||||
Err(err) => {
|
||||
let _ = context.stderr.write_line(&format!("{}", err));
|
||||
return Box::pin(futures::future::ready(
|
||||
ExecuteResult::from_exit_code(err.exit_code()),
|
||||
));
|
||||
}
|
||||
};
|
||||
ExecutableCommand::new("npx".to_string(), npx_path).execute(context)
|
||||
}
|
||||
} else {
|
||||
let _ = context.stderr.write_line("npx: missing command");
|
||||
Box::pin(futures::future::ready(ExecuteResult::from_exit_code(1)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct NpmPackageBinCommand {
|
||||
name: String,
|
||||
npm_package: PackageNv,
|
||||
}
|
||||
|
||||
impl ShellCommand for NpmPackageBinCommand {
|
||||
fn execute(
|
||||
&self,
|
||||
context: ShellCommandContext,
|
||||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
let mut args = vec![
|
||||
"run".to_string(),
|
||||
"-A".to_string(),
|
||||
if self.npm_package.name == self.name {
|
||||
format!("npm:{}", self.npm_package)
|
||||
} else {
|
||||
format!("npm:{}/{}", self.npm_package, self.name)
|
||||
},
|
||||
];
|
||||
|
||||
args.extend(context.args);
|
||||
let executable_command = deno_task_shell::ExecutableCommand::new(
|
||||
"deno".to_string(),
|
||||
std::env::current_exe().unwrap(),
|
||||
);
|
||||
executable_command.execute(ShellCommandContext { args, ..context })
|
||||
}
|
||||
}
|
||||
|
||||
/// Runs a module in the node_modules folder.
|
||||
#[derive(Clone)]
|
||||
pub struct NodeModulesFileRunCommand {
|
||||
pub command_name: String,
|
||||
pub path: PathBuf,
|
||||
}
|
||||
|
||||
impl ShellCommand for NodeModulesFileRunCommand {
|
||||
fn execute(
|
||||
&self,
|
||||
mut context: ShellCommandContext,
|
||||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
let mut args = vec![
|
||||
"run".to_string(),
|
||||
"--ext=js".to_string(),
|
||||
"-A".to_string(),
|
||||
self.path.to_string_lossy().to_string(),
|
||||
];
|
||||
args.extend(context.args);
|
||||
let executable_command = deno_task_shell::ExecutableCommand::new(
|
||||
"deno".to_string(),
|
||||
std::env::current_exe().unwrap(),
|
||||
);
|
||||
// set this environment variable so that the launched process knows the npm command name
|
||||
context
|
||||
.state
|
||||
.apply_env_var("DENO_INTERNAL_NPM_CMD_NAME", &self.command_name);
|
||||
executable_command.execute(ShellCommandContext { args, ..context })
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_custom_commands(
|
||||
npm_resolver: &dyn CliNpmResolver,
|
||||
node_resolver: &NodeResolver,
|
||||
) -> Result<HashMap<String, Rc<dyn ShellCommand>>, AnyError> {
|
||||
let mut commands = match npm_resolver.as_inner() {
|
||||
InnerCliNpmResolverRef::Byonm(npm_resolver) => {
|
||||
let node_modules_dir = npm_resolver.root_node_modules_path().unwrap();
|
||||
resolve_npm_commands_from_bin_dir(node_modules_dir)
|
||||
}
|
||||
InnerCliNpmResolverRef::Managed(npm_resolver) => {
|
||||
resolve_managed_npm_commands(npm_resolver, node_resolver)?
|
||||
}
|
||||
};
|
||||
commands.insert("npm".to_string(), Rc::new(NpmCommand));
|
||||
Ok(commands)
|
||||
}
|
||||
|
||||
pub fn resolve_npm_commands_from_bin_dir(
|
||||
node_modules_dir: &Path,
|
||||
) -> HashMap<String, Rc<dyn ShellCommand>> {
|
||||
let mut result = HashMap::<String, Rc<dyn ShellCommand>>::new();
|
||||
let bin_dir = node_modules_dir.join(".bin");
|
||||
log::debug!("Resolving commands in '{}'.", bin_dir.display());
|
||||
match std::fs::read_dir(&bin_dir) {
|
||||
Ok(entries) => {
|
||||
for entry in entries {
|
||||
let Ok(entry) = entry else {
|
||||
continue;
|
||||
};
|
||||
if let Some(command) = resolve_bin_dir_entry_command(entry) {
|
||||
result.insert(command.command_name.clone(), Rc::new(command));
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
log::debug!("Failed read_dir for '{}': {:#}", bin_dir.display(), err);
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
fn resolve_bin_dir_entry_command(
|
||||
entry: std::fs::DirEntry,
|
||||
) -> Option<NodeModulesFileRunCommand> {
|
||||
if entry.path().extension().is_some() {
|
||||
return None; // only look at files without extensions (even on Windows)
|
||||
}
|
||||
let file_type = entry.file_type().ok()?;
|
||||
let path = if file_type.is_file() {
|
||||
entry.path()
|
||||
} else if file_type.is_symlink() {
|
||||
entry.path().canonicalize().ok()?
|
||||
} else {
|
||||
return None;
|
||||
};
|
||||
let text = std::fs::read_to_string(&path).ok()?;
|
||||
let command_name = entry.file_name().to_string_lossy().to_string();
|
||||
if let Some(path) = resolve_execution_path_from_npx_shim(path, &text) {
|
||||
log::debug!(
|
||||
"Resolved npx command '{}' to '{}'.",
|
||||
command_name,
|
||||
path.display()
|
||||
);
|
||||
Some(NodeModulesFileRunCommand { command_name, path })
|
||||
} else {
|
||||
log::debug!("Failed resolving npx command '{}'.", command_name);
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// This is not ideal, but it works ok because it allows us to bypass
|
||||
/// the shebang and execute the script directly with Deno.
|
||||
fn resolve_execution_path_from_npx_shim(
|
||||
file_path: PathBuf,
|
||||
text: &str,
|
||||
) -> Option<PathBuf> {
|
||||
static SCRIPT_PATH_RE: Lazy<Regex> =
|
||||
lazy_regex::lazy_regex!(r#""\$basedir\/([^"]+)" "\$@""#);
|
||||
|
||||
if text.starts_with("#!/usr/bin/env node") {
|
||||
// launch this file itself because it's a JS file
|
||||
Some(file_path)
|
||||
} else {
|
||||
// Search for...
|
||||
// > "$basedir/../next/dist/bin/next" "$@"
|
||||
// ...which is what it will look like on Windows
|
||||
SCRIPT_PATH_RE
|
||||
.captures(text)
|
||||
.and_then(|c| c.get(1))
|
||||
.map(|relative_path| {
|
||||
file_path.parent().unwrap().join(relative_path.as_str())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_managed_npm_commands(
|
||||
npm_resolver: &ManagedCliNpmResolver,
|
||||
node_resolver: &NodeResolver,
|
||||
) -> Result<HashMap<String, Rc<dyn ShellCommand>>, AnyError> {
|
||||
let mut result = HashMap::new();
|
||||
let snapshot = npm_resolver.snapshot();
|
||||
for id in snapshot.top_level_packages() {
|
||||
let package_folder = npm_resolver.resolve_pkg_folder_from_pkg_id(id)?;
|
||||
let bin_commands =
|
||||
node_resolver.resolve_binary_commands(&package_folder)?;
|
||||
for bin_command in bin_commands {
|
||||
result.insert(
|
||||
bin_command.to_string(),
|
||||
Rc::new(NpmPackageBinCommand {
|
||||
name: bin_command,
|
||||
npm_package: id.nv.clone(),
|
||||
}) as Rc<dyn ShellCommand>,
|
||||
);
|
||||
}
|
||||
}
|
||||
if !result.contains_key("npx") {
|
||||
result.insert("npx".to_string(), Rc::new(NpxCommand));
|
||||
}
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_prepend_to_path() {
|
||||
let mut env_vars = HashMap::new();
|
||||
|
||||
prepend_to_path(&mut env_vars, "/example".to_string());
|
||||
assert_eq!(
|
||||
env_vars,
|
||||
HashMap::from([("PATH".to_string(), "/example".to_string())])
|
||||
);
|
||||
|
||||
prepend_to_path(&mut env_vars, "/example2".to_string());
|
||||
let separator = if cfg!(windows) { ";" } else { ":" };
|
||||
assert_eq!(
|
||||
env_vars,
|
||||
HashMap::from([(
|
||||
"PATH".to_string(),
|
||||
format!("/example2{}/example", separator)
|
||||
)])
|
||||
);
|
||||
|
||||
env_vars.get_mut("PATH").unwrap().clear();
|
||||
prepend_to_path(&mut env_vars, "/example".to_string());
|
||||
assert_eq!(
|
||||
env_vars,
|
||||
HashMap::from([("PATH".to_string(), "/example".to_string())])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_resolve_execution_path_from_npx_shim() {
|
||||
// example shim on unix
|
||||
let unix_shim = r#"#!/usr/bin/env node
|
||||
"use strict";
|
||||
console.log('Hi!');
|
||||
"#;
|
||||
let path = PathBuf::from("/node_modules/.bin/example");
|
||||
assert_eq!(
|
||||
resolve_execution_path_from_npx_shim(path.clone(), unix_shim).unwrap(),
|
||||
path
|
||||
);
|
||||
// example shim on windows
|
||||
let windows_shim = r#"#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../example/bin/example" "$@"
|
||||
else
|
||||
exec node "$basedir/../example/bin/example" "$@"
|
||||
fi"#;
|
||||
assert_eq!(
|
||||
resolve_execution_path_from_npx_shim(path.clone(), windows_shim).unwrap(),
|
||||
path.parent().unwrap().join("../example/bin/example")
|
||||
);
|
||||
}
|
||||
}
|
|
@ -1,12 +1,12 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::args::CliOptions;
|
||||
use crate::args::Flags;
|
||||
use crate::args::TaskFlags;
|
||||
use crate::colors;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::npm::InnerCliNpmResolverRef;
|
||||
use crate::npm::ManagedCliNpmResolver;
|
||||
use crate::task_runner;
|
||||
use crate::util::fs::canonicalize_path;
|
||||
use deno_config::workspace::TaskOrScript;
|
||||
use deno_config::workspace::Workspace;
|
||||
|
@ -14,17 +14,8 @@ use deno_config::workspace::WorkspaceTasksConfig;
|
|||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures;
|
||||
use deno_core::futures::future::LocalBoxFuture;
|
||||
use deno_core::normalize_path;
|
||||
use deno_runtime::deno_node::NodeResolver;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_task_shell::ExecutableCommand;
|
||||
use deno_task_shell::ExecuteResult;
|
||||
use deno_task_shell::ShellCommand;
|
||||
use deno_task_shell::ShellCommandContext;
|
||||
use lazy_regex::Lazy;
|
||||
use regex::Regex;
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
|
@ -32,11 +23,6 @@ use std::path::Path;
|
|||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use tokio::task::LocalSet;
|
||||
|
||||
// WARNING: Do not depend on this env var in user code. It's not stable API.
|
||||
const USE_PKG_JSON_HIDDEN_ENV_VAR_NAME: &str =
|
||||
"DENO_INTERNAL_TASK_USE_PKG_JSON";
|
||||
|
||||
pub async fn execute_script(
|
||||
flags: Flags,
|
||||
|
@ -48,13 +34,16 @@ pub async fn execute_script(
|
|||
if !start_ctx.has_deno_or_pkg_json() {
|
||||
bail!("deno task couldn't find deno.json(c). See https://deno.land/manual@v{}/getting_started/configuration_file", env!("CARGO_PKG_VERSION"))
|
||||
}
|
||||
let force_use_pkg_json = std::env::var_os(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME)
|
||||
.map(|v| {
|
||||
// always remove so sub processes don't inherit this env var
|
||||
std::env::remove_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME);
|
||||
v == "1"
|
||||
})
|
||||
.unwrap_or(false);
|
||||
let force_use_pkg_json =
|
||||
std::env::var_os(crate::task_runner::USE_PKG_JSON_HIDDEN_ENV_VAR_NAME)
|
||||
.map(|v| {
|
||||
// always remove so sub processes don't inherit this env var
|
||||
std::env::remove_var(
|
||||
crate::task_runner::USE_PKG_JSON_HIDDEN_ENV_VAR_NAME,
|
||||
);
|
||||
v == "1"
|
||||
})
|
||||
.unwrap_or(false);
|
||||
let tasks_config = start_ctx.to_tasks_config()?;
|
||||
let tasks_config = if force_use_pkg_json {
|
||||
tasks_config.with_only_pkg_json()
|
||||
|
@ -76,7 +65,7 @@ pub async fn execute_script(
|
|||
|
||||
let npm_resolver = factory.npm_resolver().await?;
|
||||
let node_resolver = factory.node_resolver().await?;
|
||||
let env_vars = real_env_vars();
|
||||
let env_vars = task_runner::real_env_vars();
|
||||
|
||||
match tasks_config.task(task_name) {
|
||||
Some((dir_url, task_or_script)) => match task_or_script {
|
||||
|
@ -87,19 +76,18 @@ pub async fn execute_script(
|
|||
None => normalize_path(dir_url.to_file_path().unwrap()),
|
||||
};
|
||||
|
||||
let custom_commands =
|
||||
resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?;
|
||||
let custom_commands = task_runner::resolve_custom_commands(
|
||||
npm_resolver.as_ref(),
|
||||
node_resolver,
|
||||
)?;
|
||||
run_task(RunTaskOptions {
|
||||
task_name,
|
||||
script,
|
||||
cwd: &cwd,
|
||||
init_cwd: cli_options.initial_cwd(),
|
||||
env_vars,
|
||||
argv: cli_options.argv(),
|
||||
custom_commands,
|
||||
root_node_modules_dir: npm_resolver
|
||||
.root_node_modules_path()
|
||||
.map(|p| p.as_path()),
|
||||
npm_resolver: npm_resolver.as_ref(),
|
||||
cli_options,
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
@ -125,21 +113,20 @@ pub async fn execute_script(
|
|||
task_name.clone(),
|
||||
format!("post{}", task_name),
|
||||
];
|
||||
let custom_commands =
|
||||
resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?;
|
||||
let custom_commands = task_runner::resolve_custom_commands(
|
||||
npm_resolver.as_ref(),
|
||||
node_resolver,
|
||||
)?;
|
||||
for task_name in &task_names {
|
||||
if let Some(script) = scripts.get(task_name) {
|
||||
let exit_code = run_task(RunTaskOptions {
|
||||
task_name,
|
||||
script,
|
||||
cwd: &cwd,
|
||||
init_cwd: cli_options.initial_cwd(),
|
||||
env_vars: env_vars.clone(),
|
||||
argv: cli_options.argv(),
|
||||
custom_commands: custom_commands.clone(),
|
||||
root_node_modules_dir: npm_resolver
|
||||
.root_node_modules_path()
|
||||
.map(|p| p.as_path()),
|
||||
npm_resolver: npm_resolver.as_ref(),
|
||||
cli_options,
|
||||
})
|
||||
.await?;
|
||||
if exit_code > 0 {
|
||||
|
@ -169,40 +156,41 @@ struct RunTaskOptions<'a> {
|
|||
task_name: &'a str,
|
||||
script: &'a str,
|
||||
cwd: &'a Path,
|
||||
init_cwd: &'a Path,
|
||||
env_vars: HashMap<String, String>,
|
||||
argv: &'a [String],
|
||||
custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
|
||||
root_node_modules_dir: Option<&'a Path>,
|
||||
npm_resolver: &'a dyn CliNpmResolver,
|
||||
cli_options: &'a CliOptions,
|
||||
}
|
||||
|
||||
async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> {
|
||||
let script = get_script_with_args(opts.script, opts.argv);
|
||||
output_task(opts.task_name, &script);
|
||||
let seq_list = deno_task_shell::parser::parse(&script)
|
||||
.with_context(|| format!("Error parsing script '{}'.", opts.task_name))?;
|
||||
let env_vars =
|
||||
prepare_env_vars(opts.env_vars, opts.init_cwd, opts.root_node_modules_dir);
|
||||
let local = LocalSet::new();
|
||||
let future = deno_task_shell::execute(
|
||||
seq_list,
|
||||
let RunTaskOptions {
|
||||
task_name,
|
||||
script,
|
||||
cwd,
|
||||
env_vars,
|
||||
opts.cwd,
|
||||
opts.custom_commands,
|
||||
);
|
||||
Ok(local.run_until(future).await)
|
||||
}
|
||||
custom_commands,
|
||||
npm_resolver,
|
||||
cli_options,
|
||||
} = opts;
|
||||
|
||||
fn get_script_with_args(script: &str, argv: &[String]) -> String {
|
||||
let additional_args = argv
|
||||
.iter()
|
||||
// surround all the additional arguments in double quotes
|
||||
// and sanitize any command substitution
|
||||
.map(|a| format!("\"{}\"", a.replace('"', "\\\"").replace('$', "\\$")))
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ");
|
||||
let script = format!("{script} {additional_args}");
|
||||
script.trim().to_owned()
|
||||
output_task(
|
||||
opts.task_name,
|
||||
&task_runner::get_script_with_args(script, cli_options.argv()),
|
||||
);
|
||||
|
||||
task_runner::run_task(task_runner::RunTaskOptions {
|
||||
task_name,
|
||||
script,
|
||||
cwd,
|
||||
env_vars,
|
||||
custom_commands,
|
||||
init_cwd: opts.cli_options.initial_cwd(),
|
||||
argv: cli_options.argv(),
|
||||
root_node_modules_dir: npm_resolver
|
||||
.root_node_modules_path()
|
||||
.map(|p| p.as_path()),
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
fn output_task(task_name: &str, script: &str) {
|
||||
|
@ -214,56 +202,6 @@ fn output_task(task_name: &str, script: &str) {
|
|||
);
|
||||
}
|
||||
|
||||
fn prepare_env_vars(
|
||||
mut env_vars: HashMap<String, String>,
|
||||
initial_cwd: &Path,
|
||||
node_modules_dir: Option<&Path>,
|
||||
) -> HashMap<String, String> {
|
||||
const INIT_CWD_NAME: &str = "INIT_CWD";
|
||||
if !env_vars.contains_key(INIT_CWD_NAME) {
|
||||
// if not set, set an INIT_CWD env var that has the cwd
|
||||
env_vars.insert(
|
||||
INIT_CWD_NAME.to_string(),
|
||||
initial_cwd.to_string_lossy().to_string(),
|
||||
);
|
||||
}
|
||||
if let Some(node_modules_dir) = node_modules_dir {
|
||||
prepend_to_path(
|
||||
&mut env_vars,
|
||||
node_modules_dir.join(".bin").to_string_lossy().to_string(),
|
||||
);
|
||||
}
|
||||
env_vars
|
||||
}
|
||||
|
||||
fn prepend_to_path(env_vars: &mut HashMap<String, String>, value: String) {
|
||||
match env_vars.get_mut("PATH") {
|
||||
Some(path) => {
|
||||
if path.is_empty() {
|
||||
*path = value;
|
||||
} else {
|
||||
*path =
|
||||
format!("{}{}{}", value, if cfg!(windows) { ";" } else { ":" }, path);
|
||||
}
|
||||
}
|
||||
None => {
|
||||
env_vars.insert("PATH".to_string(), value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn real_env_vars() -> HashMap<String, String> {
|
||||
std::env::vars()
|
||||
.map(|(k, v)| {
|
||||
if cfg!(windows) {
|
||||
(k.to_uppercase(), v)
|
||||
} else {
|
||||
(k, v)
|
||||
}
|
||||
})
|
||||
.collect::<HashMap<String, String>>()
|
||||
}
|
||||
|
||||
fn print_available_tasks(
|
||||
writer: &mut dyn std::io::Write,
|
||||
workspace: &Arc<Workspace>,
|
||||
|
@ -357,327 +295,3 @@ fn print_available_tasks(
|
|||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct NpmCommand;
|
||||
|
||||
impl ShellCommand for NpmCommand {
|
||||
fn execute(
|
||||
&self,
|
||||
mut context: ShellCommandContext,
|
||||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
if context.args.first().map(|s| s.as_str()) == Some("run")
|
||||
&& context.args.len() > 2
|
||||
// for now, don't run any npm scripts that have a flag because
|
||||
// we don't handle stuff like `--workspaces` properly
|
||||
&& !context.args.iter().any(|s| s.starts_with('-'))
|
||||
{
|
||||
// run with deno task instead
|
||||
let mut args = Vec::with_capacity(context.args.len());
|
||||
args.push("task".to_string());
|
||||
args.extend(context.args.iter().skip(1).cloned());
|
||||
|
||||
let mut state = context.state;
|
||||
state.apply_env_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME, "1");
|
||||
return ExecutableCommand::new(
|
||||
"deno".to_string(),
|
||||
std::env::current_exe().unwrap(),
|
||||
)
|
||||
.execute(ShellCommandContext {
|
||||
args,
|
||||
state,
|
||||
..context
|
||||
});
|
||||
}
|
||||
|
||||
// fallback to running the real npm command
|
||||
let npm_path = match context.state.resolve_command_path("npm") {
|
||||
Ok(path) => path,
|
||||
Err(err) => {
|
||||
let _ = context.stderr.write_line(&format!("{}", err));
|
||||
return Box::pin(futures::future::ready(
|
||||
ExecuteResult::from_exit_code(err.exit_code()),
|
||||
));
|
||||
}
|
||||
};
|
||||
ExecutableCommand::new("npm".to_string(), npm_path).execute(context)
|
||||
}
|
||||
}
|
||||
|
||||
struct NpxCommand;
|
||||
|
||||
impl ShellCommand for NpxCommand {
|
||||
fn execute(
|
||||
&self,
|
||||
mut context: ShellCommandContext,
|
||||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
if let Some(first_arg) = context.args.first().cloned() {
|
||||
if let Some(command) = context.state.resolve_custom_command(&first_arg) {
|
||||
let context = ShellCommandContext {
|
||||
args: context.args.iter().skip(1).cloned().collect::<Vec<_>>(),
|
||||
..context
|
||||
};
|
||||
command.execute(context)
|
||||
} else {
|
||||
// can't find the command, so fallback to running the real npx command
|
||||
let npx_path = match context.state.resolve_command_path("npx") {
|
||||
Ok(npx) => npx,
|
||||
Err(err) => {
|
||||
let _ = context.stderr.write_line(&format!("{}", err));
|
||||
return Box::pin(futures::future::ready(
|
||||
ExecuteResult::from_exit_code(err.exit_code()),
|
||||
));
|
||||
}
|
||||
};
|
||||
ExecutableCommand::new("npx".to_string(), npx_path).execute(context)
|
||||
}
|
||||
} else {
|
||||
let _ = context.stderr.write_line("npx: missing command");
|
||||
Box::pin(futures::future::ready(ExecuteResult::from_exit_code(1)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
struct NpmPackageBinCommand {
  name: String,
  npm_package: PackageNv,
}

impl ShellCommand for NpmPackageBinCommand {
  fn execute(
    &self,
    context: ShellCommandContext,
  ) -> LocalBoxFuture<'static, ExecuteResult> {
    let mut args = vec![
      "run".to_string(),
      "-A".to_string(),
      if self.npm_package.name == self.name {
        format!("npm:{}", self.npm_package)
      } else {
        format!("npm:{}/{}", self.npm_package, self.name)
      },
    ];
    args.extend(context.args);
    let executable_command = deno_task_shell::ExecutableCommand::new(
      "deno".to_string(),
      std::env::current_exe().unwrap(),
    );
    executable_command.execute(ShellCommandContext { args, ..context })
  }
}

/// Runs a module in the node_modules folder.
#[derive(Clone)]
struct NodeModulesFileRunCommand {
  command_name: String,
  path: PathBuf,
}

impl ShellCommand for NodeModulesFileRunCommand {
  fn execute(
    &self,
    mut context: ShellCommandContext,
  ) -> LocalBoxFuture<'static, ExecuteResult> {
    let mut args = vec![
      "run".to_string(),
      "--ext=js".to_string(),
      "-A".to_string(),
      self.path.to_string_lossy().to_string(),
    ];
    args.extend(context.args);
    let executable_command = deno_task_shell::ExecutableCommand::new(
      "deno".to_string(),
      std::env::current_exe().unwrap(),
    );
    // set this environment variable so that the launched process knows the npm command name
    context
      .state
      .apply_env_var("DENO_INTERNAL_NPM_CMD_NAME", &self.command_name);
    executable_command.execute(ShellCommandContext { args, ..context })
  }
}

fn resolve_custom_commands(
  npm_resolver: &dyn CliNpmResolver,
  node_resolver: &NodeResolver,
) -> Result<HashMap<String, Rc<dyn ShellCommand>>, AnyError> {
  let mut commands = match npm_resolver.as_inner() {
    InnerCliNpmResolverRef::Byonm(npm_resolver) => {
      let node_modules_dir = npm_resolver.root_node_modules_path().unwrap();
      resolve_npm_commands_from_bin_dir(node_modules_dir)
    }
    InnerCliNpmResolverRef::Managed(npm_resolver) => {
      resolve_managed_npm_commands(npm_resolver, node_resolver)?
    }
  };
  commands.insert("npm".to_string(), Rc::new(NpmCommand));
  Ok(commands)
}

fn resolve_npm_commands_from_bin_dir(
  node_modules_dir: &Path,
) -> HashMap<String, Rc<dyn ShellCommand>> {
  let mut result = HashMap::<String, Rc<dyn ShellCommand>>::new();
  let bin_dir = node_modules_dir.join(".bin");
  log::debug!("Resolving commands in '{}'.", bin_dir.display());
  match std::fs::read_dir(&bin_dir) {
    Ok(entries) => {
      for entry in entries {
        let Ok(entry) = entry else {
          continue;
        };
        if let Some(command) = resolve_bin_dir_entry_command(entry) {
          result.insert(command.command_name.clone(), Rc::new(command));
        }
      }
    }
    Err(err) => {
      log::debug!("Failed read_dir for '{}': {:#}", bin_dir.display(), err);
    }
  }
  result
}

fn resolve_bin_dir_entry_command(
  entry: std::fs::DirEntry,
) -> Option<NodeModulesFileRunCommand> {
  if entry.path().extension().is_some() {
    return None; // only look at files without extensions (even on Windows)
  }
  let file_type = entry.file_type().ok()?;
  let path = if file_type.is_file() {
    entry.path()
  } else if file_type.is_symlink() {
    entry.path().canonicalize().ok()?
  } else {
    return None;
  };
  let text = std::fs::read_to_string(&path).ok()?;
  let command_name = entry.file_name().to_string_lossy().to_string();
  if let Some(path) = resolve_execution_path_from_npx_shim(path, &text) {
    log::debug!(
      "Resolved npx command '{}' to '{}'.",
      command_name,
      path.display()
    );
    Some(NodeModulesFileRunCommand { command_name, path })
  } else {
    log::debug!("Failed resolving npx command '{}'.", command_name);
    None
  }
}

/// This is not ideal, but it works ok because it allows us to bypass
/// the shebang and execute the script directly with Deno.
fn resolve_execution_path_from_npx_shim(
  file_path: PathBuf,
  text: &str,
) -> Option<PathBuf> {
  static SCRIPT_PATH_RE: Lazy<Regex> =
    lazy_regex::lazy_regex!(r#""\$basedir\/([^"]+)" "\$@""#);

  if text.starts_with("#!/usr/bin/env node") {
    // launch this file itself because it's a JS file
    Some(file_path)
  } else {
    // Search for...
    // > "$basedir/../next/dist/bin/next" "$@"
    // ...which is what it will look like on Windows
    SCRIPT_PATH_RE
      .captures(text)
      .and_then(|c| c.get(1))
      .map(|relative_path| {
        file_path.parent().unwrap().join(relative_path.as_str())
      })
  }
}

fn resolve_managed_npm_commands(
  npm_resolver: &ManagedCliNpmResolver,
  node_resolver: &NodeResolver,
) -> Result<HashMap<String, Rc<dyn ShellCommand>>, AnyError> {
  let mut result = HashMap::new();
  let snapshot = npm_resolver.snapshot();
  for id in snapshot.top_level_packages() {
    let package_folder = npm_resolver.resolve_pkg_folder_from_pkg_id(id)?;
    let bin_commands =
      node_resolver.resolve_binary_commands(&package_folder)?;
    for bin_command in bin_commands {
      result.insert(
        bin_command.to_string(),
        Rc::new(NpmPackageBinCommand {
          name: bin_command,
          npm_package: id.nv.clone(),
        }) as Rc<dyn ShellCommand>,
      );
    }
  }
  if !result.contains_key("npx") {
    result.insert("npx".to_string(), Rc::new(NpxCommand));
  }
  Ok(result)
}

#[cfg(test)]
mod test {
  use super::*;

  #[test]
  fn test_prepend_to_path() {
    let mut env_vars = HashMap::new();

    prepend_to_path(&mut env_vars, "/example".to_string());
    assert_eq!(
      env_vars,
      HashMap::from([("PATH".to_string(), "/example".to_string())])
    );

    prepend_to_path(&mut env_vars, "/example2".to_string());
    let separator = if cfg!(windows) { ";" } else { ":" };
    assert_eq!(
      env_vars,
      HashMap::from([(
        "PATH".to_string(),
        format!("/example2{}/example", separator)
      )])
    );

    env_vars.get_mut("PATH").unwrap().clear();
    prepend_to_path(&mut env_vars, "/example".to_string());
    assert_eq!(
      env_vars,
      HashMap::from([("PATH".to_string(), "/example".to_string())])
    );
  }

  #[test]
  fn test_resolve_execution_path_from_npx_shim() {
    // example shim on unix
    let unix_shim = r#"#!/usr/bin/env node
"use strict";
console.log('Hi!');
"#;
    let path = PathBuf::from("/node_modules/.bin/example");
    assert_eq!(
      resolve_execution_path_from_npx_shim(path.clone(), unix_shim).unwrap(),
      path
    );
    // example shim on windows
    let windows_shim = r#"#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
exec "$basedir/node" "$basedir/../example/bin/example" "$@"
else
exec node "$basedir/../example/bin/example" "$@"
fi"#;
    assert_eq!(
      resolve_execution_path_from_npx_shim(path.clone(), windows_shim).unwrap(),
      path.parent().unwrap().join("../example/bin/example")
    );
  }
}

@@ -408,7 +408,10 @@ Process.prototype.config = {
   target_defaults: {
     default_configuration: "Release",
   },
-  variables: {},
+  variables: {
+    llvm_version: "0.0",
+    enable_lto: "false",
+  },
 };

 /** https://nodejs.org/api/process.html#process_process_cwd */

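The hunk above seeds `process.config.variables` with concrete values. A quick way to see what build tooling that inspects `process.config` will observe is to print the fields directly; this is a minimal sketch, with the file name and the `as any` cast being illustrative assumptions rather than part of the change:

```ts
// probe_config.ts (illustrative name): print the fields this hunk adds.
// The cast is only there because type definitions may not declare `config`.
import process from "node:process";

const config = (process as any).config ?? {};
console.log("default_configuration:", config.target_defaults?.default_configuration);
console.log("llvm_version:", config.variables?.llvm_version); // "0.0" after this change
console.log("enable_lto:", config.variables?.enable_lto); // "false" after this change
```
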
@@ -1459,7 +1459,13 @@ fn resolve_bin_entry_value<'a>(
   };
   let bin_entry = match bin {
     Value::String(_) => {
-      if bin_name.is_some() && bin_name != package_json.name.as_deref() {
+      if bin_name.is_some()
+        && bin_name
+          != package_json
+            .name
+            .as_deref()
+            .map(|name| name.rsplit_once('/').map_or(name, |(_, name)| name))
+      {
         None
       } else {
         Some(bin)

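The rewritten condition compares the requested bin name against the package name with any npm scope stripped, so a plain-string `"bin"` entry in a scoped package such as `@denotest/say-hello` still resolves as `say-hello`. A small TypeScript sketch of the same matching rule, for illustration only (the function name is made up):

```ts
// Mirrors the new condition: a string "bin" matches when no bin name was
// requested, or when the requested name equals the package name minus its scope.
function stringBinMatches(packageName: string | undefined, binName?: string): boolean {
  if (binName === undefined) return true;
  if (packageName === undefined) return false;
  const slashIndex = packageName.lastIndexOf("/");
  const unscoped = slashIndex === -1 ? packageName : packageName.slice(slashIndex + 1);
  return binName === unscoped;
}

console.log(stringBinMatches("@denotest/say-hello", "say-hello")); // true
console.log(stringBinMatches("say-hello", "say-hello")); // true
console.log(stringBinMatches("@denotest/say-hello", "other")); // false
```
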
@@ -118,7 +118,7 @@ tokio.workspace = true
 tokio-metrics.workspace = true
 twox-hash.workspace = true
 uuid.workspace = true
-which = "4.2.5"
+which.workspace = true

 [target.'cfg(windows)'.dependencies]
 winapi = { workspace = true, features = ["commapi", "knownfolders", "mswsock", "objbase", "psapi", "shlobj", "tlhelp32", "winbase", "winerror", "winuser", "winsock2"] }

@@ -21,7 +21,7 @@ libc.workspace = true
 log.workspace = true
 once_cell.workspace = true
 serde.workspace = true
-which = "4.2.5"
+which.workspace = true

 [target.'cfg(windows)'.dependencies]
 winapi = { workspace = true, features = ["commapi", "knownfolders", "mswsock", "objbase", "psapi", "shlobj", "tlhelp32", "winbase", "winerror", "winuser", "winsock2", "processenv", "wincon", "wincontypes"] }

@@ -950,9 +950,13 @@ fn ensure_registry_files_local() {
       let registry_json_path = registry_dir_path
         .join(entry.file_name())
         .join("registry.json");
+
       if registry_json_path.exists() {
         let file_text = std::fs::read_to_string(&registry_json_path).unwrap();
-        if file_text.contains("https://registry.npmjs.org/") {
+        if file_text.contains(&format!(
+          "https://registry.npmjs.org/{}/-/",
+          entry.file_name().to_string_lossy()
+        )) {
           panic!(
             "file {} contained a reference to the npm registry",
             registry_json_path

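This tightens the registry hygiene check: a registry.json is only flagged when it still points at registry.npmjs.org for its own package's tarballs, rather than any occurrence of the registry host. A rough TypeScript equivalent of the new check, with the directory layout and function name assumed for illustration:

```ts
// Rough equivalent of the stricter check above (paths are assumptions).
async function assertRegistryFileIsLocal(pkgName: string): Promise<void> {
  const registryJsonPath = `tests/registry/npm/${pkgName}/registry.json`;
  const fileText = await Deno.readTextFile(registryJsonPath);
  if (fileText.includes(`https://registry.npmjs.org/${pkgName}/-/`)) {
    throw new Error(
      `file ${registryJsonPath} contained a reference to the npm registry`,
    );
  }
}

await assertRegistryFileIsLocal("abbrev");
```
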
@@ -0,0 +1,3 @@
export function sayBetterHello() {
  return '@denotest/better-say-hello says hello (but better)!';
}

@@ -0,0 +1,7 @@
{
  "name": "@denotest/better-say-hello",
  "version": "1.0.0",
  "bin": {
    "say-hello": "./say-hello.js"
  }
}

@@ -0,0 +1,2 @@
import { sayBetterHello } from "./index.js";
sayBetterHello();

@@ -0,0 +1,8 @@
{
  'targets': [
    {
      'target_name': 'node_addon',
      'sources': [ 'src/binding.cc' ]
    }
  ]
}

@@ -0,0 +1 @@
module.exports.hello = require('./build/Release/node_addon').hello;

@@ -0,0 +1,7 @@
{
  "name": "@denotest/node-addon-implicit-node-gyp",
  "version": "1.0.0",
  "scripts": {
    "install": "node-gyp configure build"
  }
}

@@ -0,0 +1,29 @@
// hello.cc using Node-API
#include <node_api.h>

namespace demo {

napi_value Method(napi_env env, napi_callback_info args) {
  napi_value greeting;
  napi_status status;

  status = napi_create_string_utf8(env, "world", NAPI_AUTO_LENGTH, &greeting);
  if (status != napi_ok) return nullptr;
  return greeting;
}

napi_value init(napi_env env, napi_value exports) {
  napi_status status;
  napi_value fn;

  status = napi_create_function(env, nullptr, 0, Method, nullptr, &fn);
  if (status != napi_ok) return nullptr;

  status = napi_set_named_property(env, exports, "hello", fn);
  if (status != napi_ok) return nullptr;
  return exports;
}

NAPI_MODULE(NODE_GYP_MODULE_NAME, init)

} // namespace demo

@@ -0,0 +1,8 @@
{
  'targets': [
    {
      'target_name': 'node_addon',
      'sources': [ 'src/binding.cc' ]
    }
  ]
}

1 tests/registry/npm/@denotest/node-addon/1.0.0/index.js Normal file
@@ -0,0 +1 @@
module.exports.hello = require('./build/Release/node_addon').hello;

10 tests/registry/npm/@denotest/node-addon/1.0.0/package.json Normal file
@@ -0,0 +1,10 @@
{
  "name": "@denotest/node-addon",
  "version": "1.0.0",
  "scripts": {
    "install": "node-gyp configure build"
  },
  "dependencies": {
    "node-gyp": "10.1.0"
  }
}

29 tests/registry/npm/@denotest/node-addon/1.0.0/src/binding.cc Normal file
@@ -0,0 +1,29 @@
// hello.cc using Node-API
#include <node_api.h>

namespace demo {

napi_value Method(napi_env env, napi_callback_info args) {
  napi_value greeting;
  napi_status status;

  status = napi_create_string_utf8(env, "world", NAPI_AUTO_LENGTH, &greeting);
  if (status != napi_ok) return nullptr;
  return greeting;
}

napi_value init(napi_env env, napi_value exports) {
  napi_status status;
  napi_value fn;

  status = napi_create_function(env, nullptr, 0, Method, nullptr, &fn);
  if (status != napi_ok) return nullptr;

  status = napi_set_named_property(env, exports, "hello", fn);
  if (status != napi_ok) return nullptr;
  return exports;
}

NAPI_MODULE(NODE_GYP_MODULE_NAME, init)

} // namespace demo

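Once the `install` script above has run `node-gyp configure build`, the compiled addon is loaded through the package's CommonJS entry point. A hedged usage sketch follows; whether the consuming runtime can actually load the compiled `.node` binary, and the exact specifier, are assumptions here:

```ts
// Load @denotest/node-addon after its install script has built the addon.
// createRequire is used because the package entry point is CommonJS.
import { createRequire } from "node:module";

const require = createRequire(import.meta.url);
const addon = require("@denotest/node-addon");
console.log(addon.hello()); // "world", per binding.cc above
```
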
@@ -0,0 +1,5 @@
module.exports = {
  value: 42
};

console.log('index.js', module.exports.value);

@@ -0,0 +1,5 @@
module.exports = {
  sayHi: () => 'Hi from node-lifecycle-scripts!'
};

console.log('install.js', module.exports.sayHi());

@@ -0,0 +1,12 @@
{
  "name": "@denotest/node-lifecycle-scripts",
  "version": "1.0.0",
  "scripts": {
    "preinstall": "echo preinstall && node preinstall.js && node --require ./helper.js preinstall.js",
    "install": "echo install && cli-esm 'hello from install script'",
    "postinstall": "echo postinstall && npx cowsay postinstall"
  },
  "dependencies": {
    "@denotest/bin": "1.0.0"
  }
}

@@ -0,0 +1,5 @@
if ("Deno" in globalThis && typeof globalThis.Deno === 'object') {
  console.log('deno preinstall.js');
} else {
  console.log('node preinstall.js');
}

@@ -0,0 +1,3 @@
export function sayHelloOnInstall() {
  return '@denotest/say-hello-on-install';
}

@@ -0,0 +1,10 @@
{
  "name": "@denotest/say-hello-on-install",
  "version": "1.0.0",
  "scripts": {
    "install": "echo 'install script' && say-hello"
  },
  "dependencies": {
    "@denotest/say-hello": "1.0.0"
  }
}

3 tests/registry/npm/@denotest/say-hello/1.0.0/index.js Normal file
@@ -0,0 +1,3 @@
export function sayHello() {
  return '@denotest/say-hello says hello!';
}

@@ -0,0 +1,6 @@
{
  "name": "@denotest/say-hello",
  "version": "1.0.0",
  "bin": "./say-hello.js",
  "type": "module"
}

@@ -0,0 +1,2 @@
import { sayHello } from "./index.js";
console.log(sayHello());

The remaining changed files are new test registry entries; binary tarballs are not shown and the registry.json diffs are suppressed because one or more lines are too long.

BIN tests/registry/npm/@isaacs/cliui/cliui-8.0.2.tgz Normal file
  1 tests/registry/npm/@isaacs/cliui/registry.json Normal file
BIN tests/registry/npm/@npmcli/agent/agent-2.2.2.tgz Normal file
  1 tests/registry/npm/@npmcli/agent/registry.json Normal file
BIN tests/registry/npm/@npmcli/fs/fs-3.1.1.tgz Normal file
  1 tests/registry/npm/@npmcli/fs/registry.json Normal file
BIN tests/registry/npm/@pkgjs/parseargs/parseargs-0.11.0.tgz Normal file
  1 tests/registry/npm/@pkgjs/parseargs/registry.json Normal file
BIN tests/registry/npm/abbrev/abbrev-2.0.0.tgz Normal file
  1 tests/registry/npm/abbrev/registry.json Normal file
BIN tests/registry/npm/agent-base/agent-base-7.1.1.tgz Normal file
  1 tests/registry/npm/agent-base/registry.json Normal file
BIN tests/registry/npm/aggregate-error/aggregate-error-3.1.0.tgz Normal file
  1 tests/registry/npm/aggregate-error/registry.json Normal file
BIN tests/registry/npm/ansi-regex/ansi-regex-6.0.1.tgz Normal file
BIN tests/registry/npm/ansi-styles/ansi-styles-6.1.0.tgz Normal file
BIN tests/registry/npm/balanced-match/balanced-match-1.0.2.tgz Normal file
  1 tests/registry/npm/balanced-match/registry.json Normal file
BIN tests/registry/npm/brace-expansion/brace-expansion-2.0.1.tgz Normal file
  1 tests/registry/npm/brace-expansion/registry.json Normal file
BIN tests/registry/npm/cacache/cacache-18.0.3.tgz Normal file
  1 tests/registry/npm/cacache/registry.json Normal file
BIN tests/registry/npm/chownr/chownr-2.0.0.tgz Normal file
  1 tests/registry/npm/chownr/registry.json Normal file
BIN tests/registry/npm/clean-stack/clean-stack-2.2.0.tgz Normal file
  1 tests/registry/npm/clean-stack/registry.json Normal file
BIN tests/registry/npm/cross-spawn/cross-spawn-7.0.3.tgz Normal file
  1 tests/registry/npm/cross-spawn/registry.json Normal file
BIN tests/registry/npm/debug/debug-4.3.5.tgz Normal file
  1 tests/registry/npm/debug/registry.json Normal file
BIN tests/registry/npm/eastasianwidth/eastasianwidth-0.2.0.tgz Normal file
  1 tests/registry/npm/eastasianwidth/registry.json Normal file
BIN tests/registry/npm/emoji-regex/emoji-regex-9.2.2.tgz Normal file
BIN tests/registry/npm/encoding/encoding-0.1.13.tgz Normal file
  1 tests/registry/npm/encoding/registry.json Normal file
BIN tests/registry/npm/env-paths/env-paths-2.2.1.tgz Normal file
  1 tests/registry/npm/env-paths/registry.json Normal file
BIN tests/registry/npm/err-code/err-code-2.0.3.tgz Normal file
  1 tests/registry/npm/err-code/registry.json Normal file
Binary file not shown.
  1 tests/registry/npm/exponential-backoff/registry.json Normal file
BIN tests/registry/npm/foreground-child/foreground-child-3.2.1.tgz Normal file
  1 tests/registry/npm/foreground-child/registry.json Normal file
BIN tests/registry/npm/fs-minipass/fs-minipass-2.1.0.tgz Normal file
BIN tests/registry/npm/fs-minipass/fs-minipass-3.0.3.tgz Normal file
  1 tests/registry/npm/fs-minipass/registry.json Normal file
BIN tests/registry/npm/glob/glob-10.4.4.tgz Normal file
  1 tests/registry/npm/glob/registry.json Normal file
Binary file not shown.
  1 tests/registry/npm/http-cache-semantics/registry.json Normal file
BIN tests/registry/npm/http-proxy-agent/http-proxy-agent-7.0.2.tgz Normal file
  1 tests/registry/npm/http-proxy-agent/registry.json Normal file
BIN tests/registry/npm/https-proxy-agent/https-proxy-agent-7.0.5.tgz Normal file
  1 tests/registry/npm/https-proxy-agent/registry.json Normal file
BIN tests/registry/npm/iconv-lite/iconv-lite-0.6.3.tgz Normal file
  1 tests/registry/npm/iconv-lite/registry.json Normal file
BIN tests/registry/npm/imurmurhash/imurmurhash-0.1.4.tgz Normal file
  1 tests/registry/npm/imurmurhash/registry.json Normal file
Some files were not shown because too many files have changed in this diff.