Mirror of https://github.com/denoland/deno.git, synced 2024-11-21 15:04:11 -05:00
feat: npm workspace and better Deno workspace support (#24334)
Adds much better support for the unstable Deno workspaces, as well as support for npm workspaces. npm workspace support is still limited in that packages are only installed into the root node_modules folder; we'll make it smarter over time so that it can figure out when to add node_modules folders within packages.

This includes a breaking change in config file resolution: discovery now stops at the first package.json found, unless that package.json is part of a workspace. To get the previous behavior, update the root deno.json to be a workspace by adding `"workspace": ["./path-to-pkg-json-folder-goes-here"]`. See details in https://github.com/denoland/deno_config/pull/66

Closes #24340
Closes #24159
Closes #24161
Closes #22020
Closes #18546
Closes #16106
Closes #24160
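As an illustration of the migration note above, a minimal sketch of a root deno.json that opts into the new workspace behavior (the member path is the placeholder from the message, not a real folder):

{
  "workspace": ["./path-to-pkg-json-folder-goes-here"]
}

Each entry points at a folder containing a member deno.json or package.json; with this in place, config discovery keeps applying the root deno.json to the listed package.json folder, as it did before this change.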
This commit is contained in:
parent
dd6d19e120
commit
147411e64b
235 changed files with 4446 additions and 2451 deletions
53
.dprint.json
@@ -18,43 +18,46 @@
".cargo_home",
".git",
"cli/bench/testdata/express-router.js",
"cli/bench/testdata/npm/",
"cli/bench/testdata/lsp_benchdata/",
"cli/bench/testdata/npm/",
"cli/tsc/*typescript.js",
"cli/tsc/dts/lib.d.ts",
"cli/tsc/dts/lib.scripthost.d.ts",
"cli/tsc/dts/lib.decorators*.d.ts",
"cli/tsc/dts/lib.webworker*.d.ts",
"cli/tsc/dts/lib.dom*.d.ts",
"cli/tsc/dts/lib.es*.d.ts",
"cli/tsc/dts/lib.scripthost.d.ts",
"cli/tsc/dts/lib.webworker*.d.ts",
"cli/tsc/dts/typescript.d.ts",
"tests/node_compat/test",
"tests/registry/",
"tests/testdata/file_extensions/ts_with_js_extension.js",
"tests/testdata/fmt/badly_formatted.json",
"tests/testdata/fmt/badly_formatted.md",
"tests/testdata/fmt/badly_formatted.ipynb",
"tests/testdata/byte_order_mark.ts",
"tests/testdata/encoding",
"tests/testdata/fmt/",
"tests/testdata/lint/glob/",
"tests/testdata/test/glob/",
"tests/testdata/import_attributes/json_with_shebang.json",
"tests/testdata/run/error_syntax_empty_trailing_line.mjs",
"tests/testdata/run/inline_js_source_map*",
"tests/testdata/malformed_config/",
"tests/testdata/test/markdown_windows.md",
"cli/tsc/*typescript.js",
"ext/websocket/autobahn/reports",
"gh-pages",
"target",
"tests/ffi/tests/test.js",
"tests/util/std",
"tests/wpt/suite",
"third_party",
"tests/node_compat/runner/TODO.md",
"tests/node_compat/runner/suite",
"tests/node_compat/runner/TODO.md",
"tests/node_compat/test",
"tests/registry/",
"tests/specs/fmt",
"tests/specs/lint/bom",
"tests/testdata/byte_order_mark.ts",
"tests/testdata/encoding",
"tests/testdata/file_extensions/ts_with_js_extension.js",
"tests/testdata/fmt/",
"tests/testdata/fmt/badly_formatted.ipynb",
"tests/testdata/fmt/badly_formatted.json",
"tests/testdata/fmt/badly_formatted.md",
"tests/testdata/import_attributes/json_with_shebang.json",
"tests/testdata/lint/glob/",
"tests/testdata/malformed_config/",
"tests/testdata/run/byte_order_mark.ts",
"tests/testdata/run/error_syntax_empty_trailing_line.mjs",
"tests/testdata/run/inline_js_source_map*",
"tests/testdata/test/glob/",
"tests/testdata/test/markdown_windows.md",
"tests/util/std",
"tests/wpt/runner/expectation.json",
"tests/wpt/runner/manifest.json",
"ext/websocket/autobahn/reports"
"tests/wpt/suite",
"third_party"
],
"plugins": [
"https://plugins.dprint.dev/typescript-0.91.1.wasm",
2
.github/workflows/ci.generate.ts
vendored
@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
// Bump this number when you want to purge the cache.
// Note: the tools/release/01_bump_crate_versions.ts script will update this version
// automatically via regex, so ensure that this line maintains this format.
const cacheVersion = 99;
const cacheVersion = 1;

const ubuntuX86Runner = "ubuntu-22.04";
const ubuntuX86XlRunner = "ubuntu-22.04-xl";
8
.github/workflows/ci.yml
vendored
@@ -367,8 +367,8 @@ jobs:
path: |-
~/.cargo/registry/index
~/.cargo/registry/cache
key: '99-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '99-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
key: '1-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '1-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
if: '!(matrix.skip)'
- name: Restore cache build output (PR)
uses: actions/cache/restore@v4

@@ -380,7 +380,7 @@
!./target/*/*.zip
!./target/*/*.tar.gz
key: never_saved
restore-keys: '99-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
restore-keys: '1-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
- name: Apply and update mtime cache
if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
uses: ./.github/mtime_cache

@@ -669,7 +669,7 @@
!./target/*/gn_out
!./target/*/*.zip
!./target/*/*.tar.gz
key: '99-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
key: '1-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
publish-canary:
name: publish canary
runs-on: ubuntu-22.04
8
Cargo.lock
generated
@@ -1308,9 +1308,9 @@ dependencies = [

[[package]]
name = "deno_config"
version = "0.17.0"
version = "0.19.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "01b0852c0dd8594926d51a5dae80cd1679f87f79a7c02415e60625d6ee2a99ba"
checksum = "ddc80f97cffe52c9a430201f288111fc89d33491b1675c0e01feb3a497ce76b3"
dependencies = [
"anyhow",
"deno_semver",

@@ -1947,9 +1947,9 @@ dependencies = [

[[package]]
name = "deno_unsync"
version = "0.3.4"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7557a5e9278b9a5cc8056dc37062ea4344770bda4eeb5973c7cbb7ebf636b9a4"
checksum = "10eb3aaf83c3431d4215741140ec3a63b0c0edb972ee898c89bdf8462e9e136b"
dependencies = [
"tokio",
]
@@ -101,7 +101,7 @@ console_static_text = "=0.8.1"
data-encoding = "2.3.3"
data-url = "=0.3.0"
deno_cache_dir = "=0.10.0"
deno_config = { version = "=0.17.0", default-features = false }
deno_config = { version = "=0.19.1", default-features = false }
dlopen2 = "0.6.1"
ecb = "=0.1.2"
elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem"] }
@@ -65,7 +65,7 @@ winres.workspace = true
[dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = { workspace = true }
deno_config = { workspace = true, features = ["deno_json", "package_json"] }
deno_config = { workspace = true, features = ["workspace"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "=0.141.0", features = ["html", "syntect"] }
deno_emit = "=0.43.0"
@@ -9,11 +9,13 @@ use clap::ArgMatches;
use clap::ColorChoice;
use clap::Command;
use clap::ValueHint;
use deno_config::glob::FilePatterns;
use deno_config::glob::PathOrPatternSet;
use deno_config::ConfigFlag;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::normalize_path;
use deno_core::resolve_url_or_path;
use deno_core::url::Url;
use deno_graph::GraphKind;

@@ -34,6 +36,7 @@ use std::path::PathBuf;
use std::str::FromStr;

use crate::args::resolve_no_prompt;
use crate::util::collections::CheckedSet;
use crate::util::fs::canonicalize_path;

use super::flags_net;

@@ -45,6 +48,29 @@ pub struct FileFlags {
pub include: Vec<String>,
}

impl FileFlags {
pub fn as_file_patterns(
&self,
base: &Path,
) -> Result<FilePatterns, AnyError> {
Ok(FilePatterns {
include: if self.include.is_empty() {
None
} else {
Some(PathOrPatternSet::from_include_relative_path_or_patterns(
base,
&self.include,
)?)
},
exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns(
base,
&self.ignore,
)?,
base: base.to_path_buf(),
})
}
}

#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct AddFlags {
pub packages: Vec<String>,

@@ -156,7 +182,7 @@ pub struct EvalFlags {
pub code: String,
}

#[derive(Clone, Debug, Eq, PartialEq)]
#[derive(Clone, Default, Debug, Eq, PartialEq)]
pub struct FmtFlags {
pub check: bool,
pub files: FileFlags,

@@ -235,7 +261,7 @@ pub struct UninstallFlags {
pub kind: UninstallKind,
}

#[derive(Clone, Debug, Eq, PartialEq)]
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct LintFlags {
pub files: FileFlags,
pub rules: bool,

@@ -323,7 +349,7 @@ pub struct TaskFlags {
pub task: Option<String>,
}

#[derive(Clone, Debug, Default, Eq, PartialEq)]
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
pub enum TestReporterConfig {
#[default]
Pretty,

@@ -838,30 +864,54 @@ impl Flags {
args
}

/// Extract path arguments for config search paths.
/// If it returns Some(vec), the config should be discovered
/// from the passed `current_dir` after trying to discover from each entry in
/// the returned vector.
/// If it returns None, the config file shouldn't be discovered at all.
/// Extract the directory paths the config file should be discovered from.
///
/// Returns `None` if the config file should not be auto-discovered.
pub fn config_path_args(&self, current_dir: &Path) -> Option<Vec<PathBuf>> {
use DenoSubcommand::*;
fn resolve_multiple_files(
files: &[String],
current_dir: &Path,
) -> Vec<PathBuf> {
let mut seen = CheckedSet::with_capacity(files.len());
let result = files
.iter()
.filter_map(|p| {
let path = normalize_path(current_dir.join(p).parent()?);
if seen.insert(&path) {
Some(path)
} else {
None
}
})
.collect::<Vec<_>>();
if result.is_empty() {
vec![current_dir.to_path_buf()]
} else {
result
}
}

use DenoSubcommand::*;
match &self.subcommand {
Fmt(FmtFlags { files, .. }) => {
Some(files.include.iter().map(|p| current_dir.join(p)).collect())
Some(resolve_multiple_files(&files.include, current_dir))
}
Lint(LintFlags { files, .. }) => {
Some(files.include.iter().map(|p| current_dir.join(p)).collect())
Some(resolve_multiple_files(&files.include, current_dir))
}
Run(RunFlags { script, .. }) => {
Run(RunFlags { script, .. })
| Compile(CompileFlags {
source_file: script,
..
}) => {
if let Ok(module_specifier) = resolve_url_or_path(script, current_dir) {
if module_specifier.scheme() == "file"
|| module_specifier.scheme() == "npm"
{
if let Ok(p) = module_specifier.to_file_path() {
Some(vec![p])
Some(vec![p.parent().unwrap().to_path_buf()])
} else {
Some(vec![])
Some(vec![current_dir.to_path_buf()])
}
} else {
// When the entrypoint doesn't have file: scheme (it's the remote

@@ -869,7 +919,7 @@ impl Flags {
None
}
} else {
Some(vec![])
Some(vec![current_dir.to_path_buf()])
}
}
Task(TaskFlags {

@@ -880,57 +930,10 @@ impl Flags {
// `--cwd` when specified
match canonicalize_path(&PathBuf::from(path)) {
Ok(path) => Some(vec![path]),
Err(_) => Some(vec![]),
}
}
_ => Some(vec![]),
}
}

/// Extract path argument for `package.json` search paths.
/// If it returns Some(path), the `package.json` should be discovered
/// from the `path` dir.
/// If it returns None, the `package.json` file shouldn't be discovered at
/// all.
pub fn package_json_search_dir(&self, current_dir: &Path) -> Option<PathBuf> {
use DenoSubcommand::*;

match &self.subcommand {
Run(RunFlags { script, .. }) | Serve(ServeFlags { script, .. }) => {
let module_specifier = resolve_url_or_path(script, current_dir).ok()?;
if module_specifier.scheme() == "file" {
let p = module_specifier
.to_file_path()
.unwrap()
.parent()?
.to_owned();
Some(p)
} else if module_specifier.scheme() == "npm" {
Some(current_dir.to_path_buf())
} else {
None
}
}
Task(TaskFlags { cwd: Some(cwd), .. }) => {
resolve_url_or_path(cwd, current_dir)
.ok()?
.to_file_path()
.ok()
}
Task(_) | Check(_) | Coverage(_) | Cache(_) | Info(_) | Eval(_)
| Test(_) | Bench(_) | Repl(_) | Compile(_) | Publish(_) => {
Some(current_dir.to_path_buf())
}
Add(_) | Bundle(_) | Completions(_) | Doc(_) | Fmt(_) | Init(_)
| Uninstall(_) | Jupyter(_) | Lsp | Lint(_) | Types | Upgrade(_)
| Vendor(_) => None,
Install(_) => {
if *DENO_FUTURE {
Some(current_dir.to_path_buf())
} else {
None
Err(_) => Some(vec![current_dir.to_path_buf()]),
}
}
_ => Some(vec![current_dir.to_path_buf()]),
}
}

@@ -9271,7 +9274,15 @@ mod tests {
fn test_config_path_args() {
let flags = flags_from_vec(svec!["deno", "run", "foo.js"]).unwrap();
let cwd = std::env::current_dir().unwrap();
assert_eq!(flags.config_path_args(&cwd), Some(vec![cwd.join("foo.js")]));
assert_eq!(flags.config_path_args(&cwd), Some(vec![cwd.clone()]));

let flags = flags_from_vec(svec!["deno", "run", "sub_dir/foo.js"]).unwrap();
let cwd = std::env::current_dir().unwrap();
assert_eq!(
flags.config_path_args(&cwd),
Some(vec![cwd.join("sub_dir").clone()])
);

let flags =
flags_from_vec(svec!["deno", "run", "https://example.com/foo.js"])

@@ -9279,20 +9290,27 @@ mod tests {
assert_eq!(flags.config_path_args(&cwd), None);

let flags =
flags_from_vec(svec!["deno", "lint", "dir/a.js", "dir/b.js"]).unwrap();
flags_from_vec(svec!["deno", "lint", "dir/a/a.js", "dir/b/b.js"])
.unwrap();
assert_eq!(
flags.config_path_args(&cwd),
Some(vec![cwd.join("dir/a.js"), cwd.join("dir/b.js")])
Some(vec![cwd.join("dir/a/"), cwd.join("dir/b/")])
);

let flags = flags_from_vec(svec!["deno", "lint"]).unwrap();
assert!(flags.config_path_args(&cwd).unwrap().is_empty());
assert_eq!(flags.config_path_args(&cwd), Some(vec![cwd.clone()]));

let flags =
flags_from_vec(svec!["deno", "fmt", "dir/a.js", "dir/b.js"]).unwrap();
let flags = flags_from_vec(svec![
"deno",
"fmt",
"dir/a/a.js",
"dir/a/a2.js",
"dir/b.js"
])
.unwrap();
assert_eq!(
flags.config_path_args(&cwd),
Some(vec![cwd.join("dir/a.js"), cwd.join("dir/b.js")])
Some(vec![cwd.join("dir/a/"), cwd.join("dir/")])
);
}
@@ -1,127 +1,25 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_runtime::deno_permissions::PermissionsContainer;
use import_map::ImportMap;
use import_map::ImportMapDiagnostic;
use log::warn;

use super::ConfigFile;
use crate::file_fetcher::FileFetcher;

pub async fn resolve_import_map(
specified_specifier: Option<&Url>,
maybe_config_file: Option<&ConfigFile>,
pub async fn resolve_import_map_value_from_specifier(
specifier: &Url,
file_fetcher: &FileFetcher,
) -> Result<Option<ImportMap>, AnyError> {
if let Some(specifier) = specified_specifier {
resolve_import_map_from_specifier(specifier.clone(), file_fetcher)
.await
.with_context(|| format!("Unable to load '{}' import map", specifier))
.map(Some)
} else if let Some(config_file) = maybe_config_file {
let maybe_url_and_value = config_file
.to_import_map_value(|specifier| {
let specifier = specifier.clone();
async move {
let file = file_fetcher
.fetch(&specifier, &PermissionsContainer::allow_all())
.await?
.into_text_decoded()?;
Ok(file.source.to_string())
}
})
.await
.with_context(|| {
format!(
"Unable to resolve import map in '{}'",
config_file.specifier
)
})?;
match maybe_url_and_value {
Some((url, value)) => {
import_map_from_value(url.into_owned(), value).map(Some)
}
None => Ok(None),
}
} else {
Ok(None)
}
}

async fn resolve_import_map_from_specifier(
specifier: Url,
file_fetcher: &FileFetcher,
) -> Result<ImportMap, AnyError> {
let value: serde_json::Value = if specifier.scheme() == "data" {
) -> Result<serde_json::Value, AnyError> {
if specifier.scheme() == "data" {
let data_url_text =
deno_graph::source::RawDataUrl::parse(&specifier)?.decode()?;
serde_json::from_str(&data_url_text)?
deno_graph::source::RawDataUrl::parse(specifier)?.decode()?;
Ok(serde_json::from_str(&data_url_text)?)
} else {
let file = file_fetcher
.fetch(&specifier, &PermissionsContainer::allow_all())
.fetch(specifier, &PermissionsContainer::allow_all())
.await?
.into_text_decoded()?;
serde_json::from_str(&file.source)?
};
import_map_from_value(specifier, value)
}

pub fn import_map_from_value(
specifier: Url,
json_value: serde_json::Value,
) -> Result<ImportMap, AnyError> {
debug_assert!(
!specifier.as_str().contains("../"),
"Import map specifier incorrectly contained ../: {}",
specifier.as_str()
);
let result = import_map::parse_from_value(specifier, json_value)?;
print_import_map_diagnostics(&result.diagnostics);
Ok(result.import_map)
}

fn print_import_map_diagnostics(diagnostics: &[ImportMapDiagnostic]) {
if !diagnostics.is_empty() {
warn!(
"Import map diagnostics:\n{}",
diagnostics
.iter()
.map(|d| format!(" - {d}"))
.collect::<Vec<_>>()
.join("\n")
);
Ok(serde_json::from_str(&file.source)?)
}
}

pub fn enhance_import_map_value_with_workspace_members(
mut import_map_value: serde_json::Value,
workspace_members: &[deno_config::WorkspaceMemberConfig],
) -> serde_json::Value {
let mut imports =
if let Some(imports) = import_map_value.get("imports").as_ref() {
imports.as_object().unwrap().clone()
} else {
serde_json::Map::new()
};

for workspace_member in workspace_members {
let name = &workspace_member.package_name;
let version = &workspace_member.package_version;
// Don't override existings, explicit imports
if imports.contains_key(name) {
continue;
}

imports.insert(
name.to_string(),
serde_json::Value::String(format!("jsr:{}@^{}", name, version)),
);
}

import_map_value["imports"] = serde_json::Value::Object(imports);
::import_map::ext::expand_import_map_value(import_map_value)
}
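The enhance_import_map_value_with_workspace_members function above rewrites the import map so each workspace member resolves through JSR. As a hedged sketch (the package name and version are hypothetical), a member with package_name "@scope/foo" and package_version "1.0.0" that has no explicit entry yet would produce:

{
  "imports": {
    "@scope/foo": "jsr:@scope/foo@^1.0.0"
  }
}

Existing explicit entries are left untouched, per the contains_key check in the loop.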
1039
cli/args/mod.rs
File diff suppressed because it is too large
@@ -1,77 +1,87 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;

use deno_config::package_json::PackageJsonDeps;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_node::load_pkg_json;
use deno_runtime::deno_node::PackageJson;
use deno_config::package_json::PackageJsonDepValue;
use deno_config::workspace::Workspace;
use deno_semver::package::PackageReq;

#[derive(Debug, Default)]
pub struct PackageJsonDepsProvider(Option<PackageJsonDeps>);

impl PackageJsonDepsProvider {
pub fn new(deps: Option<PackageJsonDeps>) -> Self {
Self(deps)
}

pub fn deps(&self) -> Option<&PackageJsonDeps> {
self.0.as_ref()
}

pub fn reqs(&self) -> Option<Vec<&PackageReq>> {
match &self.0 {
Some(deps) => {
let mut package_reqs = deps
.values()
.filter_map(|r| r.as_ref().ok())
.collect::<Vec<_>>();
package_reqs.sort(); // deterministic resolution
Some(package_reqs)
}
None => None,
}
}
#[derive(Debug)]
pub struct InstallNpmWorkspacePkg {
pub alias: String,
pub pkg_dir: PathBuf,
}

/// Attempts to discover the package.json file, maybe stopping when it
/// reaches the specified `maybe_stop_at` directory.
pub fn discover_from(
start: &Path,
maybe_stop_at: Option<PathBuf>,
) -> Result<Option<Arc<PackageJson>>, AnyError> {
const PACKAGE_JSON_NAME: &str = "package.json";
// todo(#24419): this is not correct, but it's good enough for now.
// We need deno_npm to be able to understand workspace packages and
// then have a way to properly lay them out on the file system
#[derive(Debug, Default)]
pub struct PackageJsonInstallDepsProvider {
remote_pkg_reqs: Vec<PackageReq>,
workspace_pkgs: Vec<InstallNpmWorkspacePkg>,
}

// note: ancestors() includes the `start` path
for ancestor in start.ancestors() {
let path = ancestor.join(PACKAGE_JSON_NAME);
impl PackageJsonInstallDepsProvider {
pub fn empty() -> Self {
Self::default()
}

let package_json = match load_pkg_json(&RealFs, &path) {
Ok(Some(package_json)) => package_json,
Ok(None) => {
if let Some(stop_at) = maybe_stop_at.as_ref() {
if ancestor == stop_at {
break;
pub fn from_workspace(workspace: &Arc<Workspace>) -> Self {
let mut workspace_pkgs = Vec::new();
let mut remote_pkg_reqs = Vec::new();
let workspace_npm_pkgs = workspace.npm_packages();
for pkg_json in workspace.package_jsons() {
let deps = pkg_json.resolve_local_package_json_deps();
let mut pkg_reqs = Vec::with_capacity(deps.len());
for (alias, dep) in deps {
let Ok(dep) = dep else {
continue;
};
match dep {
PackageJsonDepValue::Req(pkg_req) => {
if let Some(pkg) = workspace_npm_pkgs
.iter()
.find(|pkg| pkg.matches_req(&pkg_req))
{
workspace_pkgs.push(InstallNpmWorkspacePkg {
alias,
pkg_dir: pkg.pkg_json.dir_path().to_path_buf(),
});
} else {
pkg_reqs.push(pkg_req)
}
}
PackageJsonDepValue::Workspace(version_req) => {
if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
pkg.matches_name_and_version_req(&alias, &version_req)
}) {
workspace_pkgs.push(InstallNpmWorkspacePkg {
alias,
pkg_dir: pkg.pkg_json.dir_path().to_path_buf(),
});
}
}
}
continue;
}
Err(err) => bail!(
"Error loading package.json at {}. {:#}",
path.display(),
err
),
};
// sort within each package
pkg_reqs.sort();

log::debug!("package.json file found at '{}'", path.display());
return Ok(Some(package_json));
remote_pkg_reqs.extend(pkg_reqs);
}
remote_pkg_reqs.shrink_to_fit();
workspace_pkgs.shrink_to_fit();
Self {
remote_pkg_reqs,
workspace_pkgs,
}
}

log::debug!("No package.json file found");
Ok(None)
pub fn remote_pkg_reqs(&self) -> &Vec<PackageReq> {
&self.remote_pkg_reqs
}

pub fn workspace_pkgs(&self) -> &Vec<InstallNpmWorkspacePkg> {
&self.workspace_pkgs
}
}
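The PackageJsonInstallDepsProvider::from_workspace logic above splits each member's dependencies into local workspace packages and remote registry requirements. As a hedged sketch (the names and versions are hypothetical), given a member package.json like:

{
  "name": "@scope/b",
  "dependencies": {
    "@scope/a": "workspace:*",
    "chalk": "^5.0.0"
  }
}

a dependency using the workspace: protocol is matched against sibling workspace packages and recorded in workspace_pkgs, while chalk becomes an entry in remote_pkg_reqs and is installed into the root node_modules folder.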
207
cli/factory.rs
@@ -1,11 +1,10 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use crate::args::deno_json::deno_json_deps;
use crate::args::CliLockfile;
use crate::args::CliOptions;
use crate::args::DenoSubcommand;
use crate::args::Flags;
use crate::args::PackageJsonDepsProvider;
use crate::args::PackageJsonInstallDepsProvider;
use crate::args::StorageKeyResolver;
use crate::args::TsConfigType;
use crate::cache::Caches;

@@ -52,8 +51,12 @@ use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use crate::worker::CliMainWorkerFactory;
use crate::worker::CliMainWorkerOptions;
use std::collections::BTreeSet;
use std::path::PathBuf;

use deno_config::package_json::PackageJsonDepValue;
use deno_config::workspace::WorkspaceResolver;
use deno_config::ConfigFile;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::FeatureChecker;

@@ -62,10 +65,10 @@ use deno_lockfile::WorkspaceMemberConfig;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::analyze::NodeCodeTranslator;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::deno_web::BlobStore;
use deno_runtime::inspector_server::InspectorServer;
use import_map::ImportMap;
use log::warn;
use std::future::Future;
use std::sync::Arc;

@@ -156,7 +159,6 @@ struct CliFactoryServices {
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
main_graph_container: Deferred<Arc<MainModuleGraphContainer>>,
lockfile: Deferred<Option<Arc<CliLockfile>>>,
maybe_import_map: Deferred<Option<Arc<ImportMap>>>,
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
blob_store: Deferred<Arc<BlobStore>>,

@@ -170,13 +172,13 @@ struct CliFactoryServices {
node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>,
node_resolver: Deferred<Arc<NodeResolver>>,
npm_resolver: Deferred<Arc<dyn CliNpmResolver>>,
package_json_deps_provider: Deferred<Arc<PackageJsonDepsProvider>>,
text_only_progress_bar: Deferred<ProgressBar>,
type_checker: Deferred<Arc<TypeChecker>>,
cjs_resolutions: Deferred<Arc<CjsResolutionStore>>,
cli_node_resolver: Deferred<Arc<CliNodeResolver>>,
feature_checker: Deferred<Arc<FeatureChecker>>,
code_cache: Deferred<Arc<CodeCache>>,
workspace_resolver: Deferred<Arc<WorkspaceResolver>>,
}

pub struct CliFactory {

@@ -304,19 +306,33 @@ impl CliFactory {
}

pub fn maybe_lockfile(&self) -> &Option<Arc<CliLockfile>> {
fn check_no_npm(lockfile: &CliLockfile, options: &CliOptions) -> bool {
if options.no_npm() {
return true;
}
// Deno doesn't yet understand npm workspaces and the package.json resolution
// may be in a different folder than the deno.json/lockfile. So for now, ignore
// any package.jsons that are in different folders
options
.maybe_package_json()
.map(|package_json| {
package_json.path.parent() != lockfile.filename.parent()
fn pkg_json_deps(maybe_pkg_json: Option<&PackageJson>) -> BTreeSet<String> {
let Some(pkg_json) = maybe_pkg_json else {
return Default::default();
};
pkg_json
.resolve_local_package_json_deps()
.values()
.filter_map(|dep| dep.as_ref().ok())
.filter_map(|dep| match dep {
PackageJsonDepValue::Req(req) => Some(req),
PackageJsonDepValue::Workspace(_) => None,
})
.unwrap_or(false)
.map(|r| format!("npm:{}", r))
.collect()
}

fn deno_json_deps(
maybe_deno_json: Option<&ConfigFile>,
) -> BTreeSet<String> {
maybe_deno_json
.map(|c| {
crate::args::deno_json::deno_json_deps(c)
.into_iter()
.map(|req| req.to_string())
.collect()
})
.unwrap_or_default()
}

self.services.lockfile.get_or_init(|| {

@@ -324,67 +340,52 @@ impl CliFactory {

// initialize the lockfile with the workspace's configuration
if let Some(lockfile) = &maybe_lockfile {
let no_npm = check_no_npm(lockfile, &self.options);
let package_json_deps = (!no_npm)
.then(|| {
self
.package_json_deps_provider()
.reqs()
.map(|reqs| {
reqs.into_iter().map(|s| format!("npm:{}", s)).collect()
})
.unwrap_or_default()
})
.unwrap_or_default();
let config = match self.options.maybe_workspace_config() {
Some(workspace_config) => deno_lockfile::WorkspaceConfig {
root: WorkspaceMemberConfig {
package_json_deps,
dependencies: deno_json_deps(
self.options.maybe_config_file().as_ref().unwrap(),
)
.into_iter()
.map(|req| req.to_string())
.collect(),
},
members: workspace_config
.members
.iter()
.map(|member| {
(
member.package_name.clone(),
WorkspaceMemberConfig {
package_json_deps: Default::default(),
dependencies: deno_json_deps(&member.config_file)
.into_iter()
.map(|req| req.to_string())
.collect(),
},
)
})
.collect(),
},
None => deno_lockfile::WorkspaceConfig {
root: WorkspaceMemberConfig {
package_json_deps,
dependencies: self
.options
.maybe_config_file()
.as_ref()
.map(|config| {
deno_json_deps(config)
.into_iter()
.map(|req| req.to_string())
.collect()
})
.unwrap_or_default(),
},
members: Default::default(),
let (root_url, root_folder) = self.options.workspace.root_folder();
let config = deno_lockfile::WorkspaceConfig {
root: WorkspaceMemberConfig {
package_json_deps: pkg_json_deps(root_folder.pkg_json.as_deref()),
dependencies: deno_json_deps(root_folder.deno_json.as_deref()),
},
members: self
.options
.workspace
.config_folders()
.iter()
.filter(|(folder_url, _)| *folder_url != root_url)
.filter_map(|(folder_url, folder)| {
Some((
{
// should never be None here, but just ignore members that
// do fail for this
let mut relative_path = root_url.make_relative(folder_url)?;
if relative_path.ends_with('/') {
// make it slightly cleaner by removing the trailing slash
relative_path.pop();
}
relative_path
},
{
let config = WorkspaceMemberConfig {
package_json_deps: pkg_json_deps(
folder.pkg_json.as_deref(),
),
dependencies: deno_json_deps(folder.deno_json.as_deref()),
};
if config.package_json_deps.is_empty()
&& config.dependencies.is_empty()
{
// exclude empty workspace members
return None;
}
config
},
))
})
.collect(),
};
lockfile.set_workspace_config(
deno_lockfile::SetWorkspaceConfigOptions {
no_npm,
no_npm: self.options.no_npm(),
no_config: self.options.no_config(),
config,
},

@@ -437,8 +438,9 @@ impl CliFactory {
cache_setting: self.options.cache_setting(),
text_only_progress_bar: self.text_only_progress_bar().clone(),
maybe_node_modules_path: self.options.node_modules_dir_path().cloned(),
package_json_deps_provider:
self.package_json_deps_provider().clone(),
package_json_deps_provider: Arc::new(PackageJsonInstallDepsProvider::from_workspace(
&self.options.workspace,
)),
npm_system_info: self.options.npm_system_info(),
npmrc: self.options.npmrc().clone()
})

@@ -447,28 +449,29 @@ impl CliFactory {
.await
}

pub fn package_json_deps_provider(&self) -> &Arc<PackageJsonDepsProvider> {
self.services.package_json_deps_provider.get_or_init(|| {
Arc::new(PackageJsonDepsProvider::new(
self.options.maybe_package_json_deps(),
))
})
}

pub async fn maybe_import_map(
pub async fn workspace_resolver(
&self,
) -> Result<&Option<Arc<ImportMap>>, AnyError> {
) -> Result<&Arc<WorkspaceResolver>, AnyError> {
self
.services
.maybe_import_map
.workspace_resolver
.get_or_try_init_async(async {
Ok(
self
.options
.resolve_import_map(self.file_fetcher()?)
.await?
.map(Arc::new),
)
let resolver = self
.options
.create_workspace_resolver(self.file_fetcher()?)
.await?;
if !resolver.diagnostics().is_empty() {
warn!(
"Import map diagnostics:\n{}",
resolver
.diagnostics()
.iter()
.map(|d| format!(" - {d}"))
.collect::<Vec<_>>()
.join("\n")
);
}
Ok(Arc::new(resolver))
})
.await
}

@@ -491,17 +494,15 @@ impl CliFactory {
} else {
Some(self.npm_resolver().await?.clone())
},
package_json_deps_provider: self
.package_json_deps_provider()
.clone(),
maybe_jsx_import_source_config: self
.options
.to_maybe_jsx_import_source_config()?,
maybe_import_map: self.maybe_import_map().await?.clone(),
maybe_vendor_dir: self.options.vendor_dir_path(),
workspace_resolver: self.workspace_resolver().await?.clone(),
bare_node_builtins_enabled: self
.options
.unstable_bare_node_builtins(),
maybe_jsx_import_source_config: self
.options
.workspace
.to_maybe_jsx_import_source_config()?,
maybe_vendor_dir: self.options.vendor_dir_path(),
})))
}
.boxed_local(),

@@ -759,7 +760,6 @@ impl CliFactory {
self.http_client_provider(),
self.npm_resolver().await?.as_ref(),
self.options.npm_system_info(),
self.package_json_deps_provider(),
))
}

@@ -885,7 +885,6 @@ impl CliFactory {
.unsafely_ignore_certificate_errors()
.clone(),
unstable: self.options.legacy_unstable_flag(),
maybe_root_package_json_deps: self.options.maybe_package_json_deps(),
create_hmr_runner,
create_coverage_collector,
})
@@ -98,7 +98,7 @@ impl MainModuleGraphContainer {
&self,
files: &[String],
) -> Result<Vec<ModuleSpecifier>, AnyError> {
let excludes = self.cli_options.resolve_config_excludes()?;
let excludes = self.cli_options.workspace.resolve_config_excludes()?;
Ok(
files
.iter()
@@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use crate::args::config_to_deno_graph_workspace_member;
use crate::args::jsr_url;
use crate::args::CliLockfile;
use crate::args::CliOptions;

@@ -18,12 +19,13 @@ use crate::tools::check;
use crate::tools::check::TypeChecker;
use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path;
use deno_config::workspace::JsrPackageConfig;
use deno_emit::LoaderChecksum;
use deno_graph::JsrLoadError;
use deno_graph::ModuleLoadError;
use deno_graph::WorkspaceFastCheckOption;
use deno_runtime::fs_util::specifier_to_file_path;

use deno_config::WorkspaceMemberConfig;
use deno_core::anyhow::bail;
use deno_core::error::custom_error;
use deno_core::error::AnyError;

@@ -240,12 +242,12 @@ impl ModuleGraphCreator {

pub async fn create_and_validate_publish_graph(
&self,
packages: &[WorkspaceMemberConfig],
package_configs: &[JsrPackageConfig],
build_fast_check_graph: bool,
) -> Result<ModuleGraph, AnyError> {
let mut roots = Vec::new();
for package in packages {
roots.extend(package.config_file.resolve_export_value_urls()?);
for package_config in package_configs {
roots.extend(package_config.config_file.resolve_export_value_urls()?);
}
let mut graph = self
.create_graph_with_options(CreateGraphOptions {

@@ -260,10 +262,16 @@ impl ModuleGraphCreator {
self.type_check_graph(graph.clone()).await?;
}
if build_fast_check_graph {
let fast_check_workspace_members = package_configs
.iter()
.map(|p| config_to_deno_graph_workspace_member(&p.config_file))
.collect::<Result<Vec<_>, _>>()?;
self.module_graph_builder.build_fast_check_graph(
&mut graph,
BuildFastCheckGraphOptions {
workspace_fast_check: true,
workspace_fast_check: WorkspaceFastCheckOption::Enabled(
&fast_check_workspace_members,
),
},
)?;
}

@@ -340,10 +348,10 @@ impl ModuleGraphCreator {
}
}

pub struct BuildFastCheckGraphOptions {
pub struct BuildFastCheckGraphOptions<'a> {
/// Whether to do fast check on workspace members. This
/// is mostly only useful when publishing.
pub workspace_fast_check: bool,
pub workspace_fast_check: deno_graph::WorkspaceFastCheckOption<'a>,
}

pub struct ModuleGraphBuilder {

@@ -622,7 +630,10 @@ impl ModuleGraphBuilder {
}

log::debug!("Building fast check graph");
let fast_check_cache = if !options.workspace_fast_check {
let fast_check_cache = if matches!(
options.workspace_fast_check,
deno_graph::WorkspaceFastCheckOption::Disabled
) {
Some(cache::FastCheckCache::new(self.caches.fast_check_db()))
} else {
None

@@ -631,11 +642,6 @@ impl ModuleGraphBuilder {
let cli_resolver = &self.resolver;
let graph_resolver = cli_resolver.as_graph_resolver();
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
let workspace_members = if options.workspace_fast_check {
Some(self.options.resolve_deno_graph_workspace_members()?)
} else {
None
};

graph.build_fast_check_type_graph(
deno_graph::BuildFastCheckTypeGraphOptions {

@@ -645,11 +651,7 @@ impl ModuleGraphBuilder {
module_parser: Some(&parser),
resolver: Some(graph_resolver),
npm_resolver: Some(&graph_npm_resolver),
workspace_fast_check: if let Some(members) = &workspace_members {
deno_graph::WorkspaceFastCheckOption::Enabled(members)
} else {
deno_graph::WorkspaceFastCheckOption::Disabled
},
workspace_fast_check: options.workspace_fast_check,
},
);
Ok(())
@@ -16,7 +16,6 @@ use crate::util::fs::canonicalize_path_maybe_not_exists;
use deno_ast::MediaType;
use deno_config::FmtOptionsConfig;
use deno_config::TsConfig;
use deno_core::anyhow::anyhow;
use deno_core::normalize_path;
use deno_core::serde::de::DeserializeOwned;
use deno_core::serde::Deserialize;

@@ -27,6 +26,8 @@ use deno_core::serde_json::Value;
use deno_core::ModuleSpecifier;
use deno_lint::linter::LintConfig;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_runtime::deno_fs::DenoConfigFsAdapter;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::fs_util::specifier_to_file_path;

@@ -935,7 +936,7 @@ impl Config {
pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> bool {
let config_file = self.tree.config_file_for_specifier(specifier);
if let Some(cf) = config_file {
if let Ok(files) = cf.to_files_config() {
if let Ok(files) = cf.to_exclude_files_config() {
if !files.matches_specifier(specifier) {
return false;
}

@@ -952,7 +953,7 @@ impl Config {
specifier: &ModuleSpecifier,
) -> bool {
if let Some(cf) = self.tree.config_file_for_specifier(specifier) {
if let Some(options) = cf.to_test_config().ok().flatten() {
if let Ok(options) = cf.to_test_config() {
if !options.files.matches_specifier(specifier) {
return false;
}

@@ -1135,8 +1136,9 @@ impl ConfigData {
) -> Self {
if let Some(specifier) = config_file_specifier {
match ConfigFile::from_specifier(
&DenoConfigFsAdapter::new(&RealFs),
specifier.clone(),
&deno_config::ParseOptions::default(),
&deno_config::ConfigParseOptions::default(),
) {
Ok(config_file) => {
lsp_log!(

@@ -1230,13 +1232,7 @@ impl ConfigData {
.and_then(|config_file| {
config_file
.to_fmt_config()
.and_then(|o| {
let base_path = config_file
.specifier
.to_file_path()
.map_err(|_| anyhow!("Invalid base path."))?;
FmtOptions::resolve(o, None, &base_path)
})
.and_then(|o| FmtOptions::resolve(o, &Default::default(), None))
.inspect_err(|err| {
lsp_warn!("  Couldn't read formatter configuration: {}", err)
})

@@ -1264,13 +1260,7 @@ impl ConfigData {
.and_then(|config_file| {
config_file
.to_lint_config()
.and_then(|o| {
let base_path = config_file
.specifier
.to_file_path()
.map_err(|_| anyhow!("Invalid base path."))?;
LintOptions::resolve(o, None, &base_path)
})
.and_then(|o| LintOptions::resolve(o, Default::default(), None))
.inspect_err(|err| {
lsp_warn!("  Couldn't read lint configuration: {}", err)
})

@@ -2115,7 +2105,7 @@ mod tests {
ConfigFile::new(
"{}",
root_uri.join("deno.json").unwrap(),
&deno_config::ParseOptions::default(),
&deno_config::ConfigParseOptions::default(),
)
.unwrap(),
)

@@ -2173,7 +2163,7 @@ mod tests {
})
.to_string(),
root_uri.join("deno.json").unwrap(),
&deno_config::ParseOptions::default(),
&deno_config::ConfigParseOptions::default(),
)
.unwrap(),
)

@@ -2199,7 +2189,7 @@ mod tests {
})
.to_string(),
root_uri.join("deno.json").unwrap(),
&deno_config::ParseOptions::default(),
&deno_config::ConfigParseOptions::default(),
)
.unwrap(),
)

@@ -2217,7 +2207,7 @@ mod tests {
})
.to_string(),
root_uri.join("deno.json").unwrap(),
&deno_config::ParseOptions::default(),
&deno_config::ConfigParseOptions::default(),
)
.unwrap(),
)
@@ -1655,7 +1655,7 @@ mod tests {
let config_file = ConfigFile::new(
json_string,
base_url,
&deno_config::ParseOptions::default(),
&deno_config::ConfigParseOptions::default(),
)
.unwrap();
config.tree.inject_config_file(config_file).await;

@@ -1751,7 +1751,7 @@ console.log(b, "hello deno");
})
.to_string(),
config.root_uri().unwrap().join("deno.json").unwrap(),
&deno_config::ParseOptions::default(),
&deno_config::ConfigParseOptions::default(),
)
.unwrap(),
)

@@ -1795,7 +1795,7 @@ console.log(b, "hello deno");
})
.to_string(),
config.root_uri().unwrap().join("deno.json").unwrap(),
&deno_config::ParseOptions::default(),
&deno_config::ConfigParseOptions::default(),
)
.unwrap(),
)
@@ -2,6 +2,8 @@

use base64::Engine;
use deno_ast::MediaType;
use deno_config::workspace::Workspace;
use deno_config::workspace::WorkspaceDiscoverOptions;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::resolve_url;

@@ -13,6 +15,7 @@ use deno_core::url;
use deno_core::ModuleSpecifier;
use deno_graph::GraphKind;
use deno_graph::Resolution;
use deno_runtime::deno_fs::DenoConfigFsAdapter;
use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_semver::jsr::JsrPackageReqReference;

@@ -3549,6 +3552,24 @@ impl Inner {
}

let workspace_settings = self.config.workspace_settings();
let initial_cwd = config_data
.and_then(|d| d.scope.to_file_path().ok())
.unwrap_or_else(|| self.initial_cwd.clone());
// todo: we need a way to convert config data to a Workspace
let workspace = Arc::new(Workspace::discover(
deno_config::workspace::WorkspaceDiscoverStart::Dirs(&[
initial_cwd.clone()
]),
&WorkspaceDiscoverOptions {
fs: &DenoConfigFsAdapter::new(&deno_runtime::deno_fs::RealFs),
pkg_json_cache: None,
config_parse_options: deno_config::ConfigParseOptions {
include_task_comments: false,
},
additional_config_file_names: &[],
discover_pkg_json: true,
},
)?);
let cli_options = CliOptions::new(
Flags {
cache_path: Some(self.cache.deno_dir().root.clone()),

@@ -3572,13 +3593,12 @@ impl Inner {
type_check_mode: crate::args::TypeCheckMode::Local,
..Default::default()
},
self.initial_cwd.clone(),
config_data.and_then(|d| d.config_file.as_deref().cloned()),
initial_cwd,
config_data.and_then(|d| d.lockfile.clone()),
config_data.and_then(|d| d.package_json.clone()),
config_data
.and_then(|d| d.npmrc.clone())
.unwrap_or_else(create_default_npmrc),
workspace,
force_global_cache,
)?;
@@ -1,9 +1,9 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use crate::args::create_default_npmrc;
use crate::args::package_json;
use crate::args::CacheSetting;
use crate::args::CliLockfile;
use crate::args::PackageJsonInstallDepsProvider;
use crate::graph_util::CliJsrUrlProvider;
use crate::http_util::HttpClientProvider;
use crate::lsp::config::Config;

@@ -26,6 +26,8 @@ use crate::util::progress_bar::ProgressBarStyle;
use dashmap::DashMap;
use deno_ast::MediaType;
use deno_cache_dir::HttpCache;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_graph::source::Resolver;

@@ -43,7 +45,6 @@ use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use indexmap::IndexMap;
use package_json::PackageJsonDepsProvider;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::BTreeSet;

@@ -460,13 +461,10 @@ async fn create_npm_resolver(
text_only_progress_bar: ProgressBar::new(ProgressBarStyle::TextOnly),
maybe_node_modules_path: config_data
.and_then(|d| d.node_modules_dir.clone()),
package_json_deps_provider: Arc::new(PackageJsonDepsProvider::new(
config_data
.and_then(|d| d.package_json.as_ref())
.map(|package_json| {
package_json.resolve_local_package_json_version_reqs()
}),
)),
// only used for top level install, so we can ignore this
package_json_deps_provider: Arc::new(
PackageJsonInstallDepsProvider::empty(),
),
npmrc: config_data
.and_then(|d| d.npmrc.clone())
.unwrap_or_else(create_default_npmrc),

@@ -504,16 +502,22 @@ fn create_graph_resolver(
Arc::new(CliGraphResolver::new(CliGraphResolverOptions {
node_resolver: node_resolver.cloned(),
npm_resolver: npm_resolver.cloned(),
package_json_deps_provider: Arc::new(PackageJsonDepsProvider::new(
workspace_resolver: Arc::new(WorkspaceResolver::new_raw(
config_data.and_then(|d| d.import_map.as_ref().map(|i| (**i).clone())),
config_data
.and_then(|d| d.package_json.as_ref())
.map(|package_json| {
package_json.resolve_local_package_json_version_reqs()
}),
.and_then(|d| d.package_json.clone())
.into_iter()
.collect(),
if config_data.map(|d| d.byonm).unwrap_or(false) {
PackageJsonDepResolution::Disabled
} else {
// todo(dsherret): this should also be disabled for when using
// auto-install with a node_modules directory
PackageJsonDepResolution::Enabled
},
)),
maybe_jsx_import_source_config: config_file
.and_then(|cf| cf.to_maybe_jsx_import_source_config().ok().flatten()),
maybe_import_map: config_data.and_then(|d| d.import_map.clone()),
maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()),
bare_node_builtins_enabled: config_file
.map(|cf| cf.has_unstable("bare-node-builtins"))
@@ -5405,7 +5405,7 @@ mod tests {
})
.to_string(),
resolve_url("file:///deno.json").unwrap(),
&deno_config::ParseOptions::default(),
&deno_config::ConfigParseOptions::default(),
)
.unwrap(),
)
@@ -81,7 +81,8 @@ pub async fn load_top_level_deps(factory: &CliFactory) -> Result<(), AnyError> {
}
}
// cache as many entries in the import map as we can
if let Some(import_map) = factory.maybe_import_map().await? {
let resolver = factory.workspace_resolver().await?;
if let Some(import_map) = resolver.maybe_import_map() {
let roots = import_map
.imports()
.entries()

@@ -510,7 +511,7 @@ impl<TGraphContainer: ModuleGraphContainer>
.as_managed()
.unwrap() // byonm won't create a Module::Npm
.resolve_pkg_folder_from_deno_module(module.nv_reference.nv())?;
let maybe_resolution = self
self
.shared
.node_resolver
.resolve_package_sub_path_from_deno_module(

@@ -521,11 +522,8 @@ impl<TGraphContainer: ModuleGraphContainer>
)
.with_context(|| {
format!("Could not resolve '{}'.", module.nv_reference)
})?;
match maybe_resolution {
Some(res) => res.into_url(),
None => return Err(generic_error("not found")),
}
})?
.into_url()
}
Some(Module::Node(module)) => module.specifier.clone(),
Some(Module::Js(module)) => module.specifier.clone(),
@@ -6,6 +6,7 @@ use std::path::PathBuf;
use std::sync::Arc;

use deno_ast::ModuleSpecifier;
use deno_config::package_json::PackageJsonDepValue;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::serde_json;

@@ -87,13 +88,22 @@ impl ByonmCliNpmResolver {
req: &PackageReq,
pkg_json: &PackageJson,
) -> Option<String> {
let deps = pkg_json.resolve_local_package_json_version_reqs();
let deps = pkg_json.resolve_local_package_json_deps();
for (key, value) in deps {
if let Ok(value) = value {
if value.name == req.name
&& value.version_req.intersects(&req.version_req)
{
return Some(key);
match value {
PackageJsonDepValue::Req(dep_req) => {
if dep_req.name == req.name
&& dep_req.version_req.intersects(&req.version_req)
{
return Some(key);
}
}
PackageJsonDepValue::Workspace(_workspace) => {
if key == req.name && req.version_req.tag() == Some("workspace") {
return Some(key);
}
}
}
}
}
@@ -29,7 +29,7 @@ use resolution::AddPkgReqsResult;
 use crate::args::CliLockfile;
 use crate::args::NpmProcessState;
 use crate::args::NpmProcessStateKind;
-use crate::args::PackageJsonDepsProvider;
+use crate::args::PackageJsonInstallDepsProvider;
 use crate::cache::FastInsecureHasher;
 use crate::http_util::HttpClientProvider;
 use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
@@ -66,7 +66,7 @@ pub struct CliNpmResolverManagedCreateOptions {
   pub text_only_progress_bar: crate::util::progress_bar::ProgressBar,
   pub maybe_node_modules_path: Option<PathBuf>,
   pub npm_system_info: NpmSystemInfo,
-  pub package_json_deps_provider: Arc<PackageJsonDepsProvider>,
+  pub package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
   pub npmrc: Arc<ResolvedNpmRc>,
 }
 
@@ -131,7 +131,7 @@ fn create_inner(
   npm_api: Arc<CliNpmRegistryApi>,
   npm_cache: Arc<NpmCache>,
   npm_rc: Arc<ResolvedNpmRc>,
-  package_json_deps_provider: Arc<PackageJsonDepsProvider>,
+  package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
   text_only_progress_bar: crate::util::progress_bar::ProgressBar,
   node_modules_dir_path: Option<PathBuf>,
   npm_system_info: NpmSystemInfo,
@@ -152,6 +152,7 @@ fn create_inner(
   let fs_resolver = create_npm_fs_resolver(
     fs.clone(),
     npm_cache.clone(),
+    &package_json_deps_provider,
     &text_only_progress_bar,
     resolution.clone(),
     tarball_cache.clone(),
@@ -249,7 +250,7 @@ pub struct ManagedCliNpmResolver {
   maybe_lockfile: Option<Arc<CliLockfile>>,
   npm_api: Arc<CliNpmRegistryApi>,
   npm_cache: Arc<NpmCache>,
-  package_json_deps_provider: Arc<PackageJsonDepsProvider>,
+  package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
   resolution: Arc<NpmResolution>,
   tarball_cache: Arc<TarballCache>,
   text_only_progress_bar: ProgressBar,
@@ -273,7 +274,7 @@ impl ManagedCliNpmResolver {
     maybe_lockfile: Option<Arc<CliLockfile>>,
     npm_api: Arc<CliNpmRegistryApi>,
     npm_cache: Arc<NpmCache>,
-    package_json_deps_provider: Arc<PackageJsonDepsProvider>,
+    package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
     resolution: Arc<NpmResolution>,
     tarball_cache: Arc<TarballCache>,
     text_only_progress_bar: ProgressBar,
@@ -459,12 +460,14 @@ impl ManagedCliNpmResolver {
   pub async fn ensure_top_level_package_json_install(
     &self,
   ) -> Result<bool, AnyError> {
-    let Some(reqs) = self.package_json_deps_provider.reqs() else {
-      return Ok(false);
-    };
     if !self.top_level_install_flag.raise() {
       return Ok(false); // already did this
     }
+    let reqs = self.package_json_deps_provider.remote_pkg_reqs();
+    if reqs.is_empty() {
+      return Ok(false);
+    }
 
     // check if something needs resolving before bothering to load all
     // the package information (which is slow)
     if reqs
@@ -477,8 +480,7 @@ impl ManagedCliNpmResolver {
       return Ok(false); // everything is already resolvable
     }
 
-    let reqs = reqs.into_iter().cloned().collect::<Vec<_>>();
-    self.add_package_reqs(&reqs).await.map(|_| true)
+    self.add_package_reqs(reqs).await.map(|_| true)
   }
 
   pub async fn cache_package_info(
@@ -563,6 +565,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
     create_npm_fs_resolver(
       self.fs.clone(),
       self.npm_cache.clone(),
+      &self.package_json_deps_provider,
       &self.text_only_progress_bar,
       npm_resolution.clone(),
       self.tarball_cache.clone(),

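The install path above is gated by a raise-once flag so that concurrent callers trigger at most one top-level package.json install. A minimal sketch of those semantics using a plain `AtomicBool` (the real flag type comes from the Deno codebase; this stand-in only shows the swap-once behavior being assumed):

use std::sync::atomic::{AtomicBool, Ordering};

struct OnceFlag(AtomicBool);

impl OnceFlag {
  const fn new() -> Self {
    Self(AtomicBool::new(false))
  }
  /// Returns true only for the first caller; later calls see `true`
  /// already stored and back off.
  fn raise(&self) -> bool {
    !self.0.swap(true, Ordering::SeqCst)
  }
}

fn main() {
  static FLAG: OnceFlag = OnceFlag::new();
  assert!(FLAG.raise());  // first call wins and performs the install
  assert!(!FLAG.raise()); // subsequent calls are no-ops
}
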
@@ -15,6 +15,7 @@ use std::path::PathBuf;
 use std::rc::Rc;
 use std::sync::Arc;
 
+use crate::args::PackageJsonInstallDepsProvider;
 use crate::cache::CACHE_PERM;
 use crate::npm::cache_dir::mixed_case_package_name_decode;
 use crate::util::fs::atomic_write_file_with_retries;
@@ -57,6 +58,7 @@ use super::common::RegistryReadPermissionChecker;
 pub struct LocalNpmPackageResolver {
   cache: Arc<NpmCache>,
   fs: Arc<dyn deno_fs::FileSystem>,
+  pkg_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
   progress_bar: ProgressBar,
   resolution: Arc<NpmResolution>,
   tarball_cache: Arc<TarballCache>,
@@ -67,9 +69,11 @@ pub struct LocalNpmPackageResolver {
 }
 
 impl LocalNpmPackageResolver {
+  #[allow(clippy::too_many_arguments)]
   pub fn new(
     cache: Arc<NpmCache>,
     fs: Arc<dyn deno_fs::FileSystem>,
+    pkg_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
     progress_bar: ProgressBar,
     resolution: Arc<NpmResolution>,
     tarball_cache: Arc<TarballCache>,
@@ -79,6 +83,7 @@ impl LocalNpmPackageResolver {
     Self {
       cache,
       fs: fs.clone(),
+      pkg_json_deps_provider,
      progress_bar,
       resolution,
       tarball_cache,
@@ -221,6 +226,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
     sync_resolution_with_fs(
       &self.resolution.snapshot(),
       &self.cache,
+      &self.pkg_json_deps_provider,
       &self.progress_bar,
       &self.tarball_cache,
       &self.root_node_modules_path,
@@ -244,12 +250,13 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
 async fn sync_resolution_with_fs(
   snapshot: &NpmResolutionSnapshot,
   cache: &Arc<NpmCache>,
+  pkg_json_deps_provider: &PackageJsonInstallDepsProvider,
   progress_bar: &ProgressBar,
   tarball_cache: &Arc<TarballCache>,
   root_node_modules_dir_path: &Path,
   system_info: &NpmSystemInfo,
 ) -> Result<(), AnyError> {
-  if snapshot.is_empty() {
+  if snapshot.is_empty() && pkg_json_deps_provider.workspace_pkgs().is_empty() {
     return Ok(()); // don't create the directory
   }
 
@@ -475,6 +482,19 @@ async fn sync_resolution_with_fs(
     bin_entries.finish(snapshot, &bin_node_modules_dir_path)?;
   }
 
+  // 7. Create symlinks for the workspace packages
+  {
+    // todo(#24419): this is not exactly correct because it should
+    // install correctly for a workspace (potentially in sub directories),
+    // but this is good enough for a first pass
+    for workspace in pkg_json_deps_provider.workspace_pkgs() {
+      symlink_package_dir(
+        &workspace.pkg_dir,
+        &root_node_modules_dir_path.join(&workspace.alias),
+      )?;
+    }
+  }
+
   setup_cache.save();
   drop(single_process_lock);
   drop(pb_clear_guard);

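Step 7 above links each workspace package into the root node_modules directory under its package.json alias. A minimal sketch of that loop, assuming plain OS symlinks (`WorkspacePkg` and `link_workspace_pkgs` are illustrative names; the real code goes through `symlink_package_dir`, which also handles platform fallbacks such as Windows junctions):

use std::io;
use std::path::{Path, PathBuf};

/// Illustrative stand-in for the diff's `workspace_pkgs()` entries: each
/// workspace package has a source directory and the alias it should appear
/// under inside node_modules.
struct WorkspacePkg {
  pkg_dir: PathBuf,
  alias: String,
}

fn link_workspace_pkgs(
  root_node_modules: &Path,
  pkgs: &[WorkspacePkg],
) -> io::Result<()> {
  for pkg in pkgs {
    let dest = root_node_modules.join(&pkg.alias);
    // symlink the package folder directly into node_modules/<alias>
    #[cfg(unix)]
    std::os::unix::fs::symlink(&pkg.pkg_dir, &dest)?;
    #[cfg(windows)]
    std::os::windows::fs::symlink_dir(&pkg.pkg_dir, &dest)?;
  }
  Ok(())
}

fn main() -> io::Result<()> {
  // illustrative only: would symlink ./packages/a as node_modules/pkg-a
  link_workspace_pkgs(
    Path::new("node_modules"),
    &[WorkspacePkg { pkg_dir: PathBuf::from("packages/a"), alias: "pkg-a".into() }],
  )
}
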
@@ -10,6 +10,7 @@ use std::sync::Arc;
 use deno_npm::NpmSystemInfo;
 use deno_runtime::deno_fs::FileSystem;
 
+use crate::args::PackageJsonInstallDepsProvider;
 use crate::util::progress_bar::ProgressBar;
 
 pub use self::common::NpmPackageFsResolver;
@@ -21,9 +22,11 @@ use super::cache::NpmCache;
 use super::cache::TarballCache;
 use super::resolution::NpmResolution;
 
+#[allow(clippy::too_many_arguments)]
 pub fn create_npm_fs_resolver(
   fs: Arc<dyn FileSystem>,
   npm_cache: Arc<NpmCache>,
+  pkg_json_deps_provider: &Arc<PackageJsonInstallDepsProvider>,
   progress_bar: &ProgressBar,
   resolution: Arc<NpmResolution>,
   tarball_cache: Arc<TarballCache>,
@@ -34,6 +37,7 @@ pub fn create_npm_fs_resolver(
     Some(node_modules_folder) => Arc::new(LocalNpmPackageResolver::new(
       npm_cache,
       fs,
+      pkg_json_deps_provider.clone(),
       progress_bar.clone(),
       resolution,
       tarball_cache,

343 cli/resolver.rs

@@ -4,7 +4,10 @@ use async_trait::async_trait;
 use dashmap::DashMap;
 use dashmap::DashSet;
 use deno_ast::MediaType;
-use deno_config::package_json::PackageJsonDeps;
+use deno_config::package_json::PackageJsonDepValue;
+use deno_config::workspace::MappedResolution;
+use deno_config::workspace::MappedResolutionError;
+use deno_config::workspace::WorkspaceResolver;
 use deno_core::anyhow::anyhow;
 use deno_core::anyhow::Context;
 use deno_core::error::AnyError;
@@ -30,14 +33,12 @@ use deno_runtime::deno_node::PackageJson;
 use deno_runtime::fs_util::specifier_to_file_path;
 use deno_semver::npm::NpmPackageReqReference;
 use deno_semver::package::PackageReq;
-use import_map::ImportMap;
 use std::borrow::Cow;
 use std::path::Path;
 use std::path::PathBuf;
 use std::sync::Arc;
 
 use crate::args::JsxImportSourceConfig;
-use crate::args::PackageJsonDepsProvider;
 use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
 use crate::colors;
 use crate::node::CliNodeCodeTranslator;
@@ -128,15 +129,31 @@ impl CliNodeResolver {
     referrer: &ModuleSpecifier,
     mode: NodeResolutionMode,
   ) -> Result<NodeResolution, AnyError> {
-    let package_folder = self
-      .npm_resolver
-      .resolve_pkg_folder_from_deno_module_req(req_ref.req(), referrer)?;
-    let maybe_resolution = self.resolve_package_sub_path_from_deno_module(
-      &package_folder,
+    self.resolve_req_with_sub_path(
+      req_ref.req(),
       req_ref.sub_path(),
       referrer,
       mode,
-    )?;
+    )
+  }
+
+  pub fn resolve_req_with_sub_path(
+    &self,
+    req: &PackageReq,
+    sub_path: Option<&str>,
+    referrer: &ModuleSpecifier,
+    mode: NodeResolutionMode,
+  ) -> Result<NodeResolution, AnyError> {
+    let package_folder = self
+      .npm_resolver
+      .resolve_pkg_folder_from_deno_module_req(req, referrer)?;
+    let maybe_resolution = self
+      .maybe_resolve_package_sub_path_from_deno_module(
+        &package_folder,
+        sub_path,
+        referrer,
+        mode,
+      )?;
     match maybe_resolution {
       Some(resolution) => Ok(resolution),
       None => {
@@ -150,8 +167,9 @@ impl CliNodeResolver {
         }
       }
       Err(anyhow!(
-        "Failed resolving package subpath for '{}' in '{}'.",
-        req_ref,
+        "Failed resolving '{}{}' in '{}'.",
+        req,
+        sub_path.map(|s| format!("/{}", s)).unwrap_or_default(),
         package_folder.display()
       ))
     }
@@ -164,6 +182,31 @@ impl CliNodeResolver {
     sub_path: Option<&str>,
     referrer: &ModuleSpecifier,
     mode: NodeResolutionMode,
   ) -> Result<NodeResolution, AnyError> {
+    self
+      .maybe_resolve_package_sub_path_from_deno_module(
+        package_folder,
+        sub_path,
+        referrer,
+        mode,
+      )?
+      .ok_or_else(|| {
+        anyhow!(
+          "Failed resolving '{}' in '{}'.",
+          sub_path
+            .map(|s| format!("/{}", s))
+            .unwrap_or_else(|| ".".to_string()),
+          package_folder.display(),
+        )
+      })
+  }
+
+  pub fn maybe_resolve_package_sub_path_from_deno_module(
+    &self,
+    package_folder: &Path,
+    sub_path: Option<&str>,
+    referrer: &ModuleSpecifier,
+    mode: NodeResolutionMode,
+  ) -> Result<Option<NodeResolution>, AnyError> {
     self.handle_node_resolve_result(
       self.node_resolver.resolve_package_subpath_from_deno_module(
@@ -350,120 +393,39 @@ impl CjsResolutionStore {
   }
 }
 
-/// Result of checking if a specifier is mapped via
-/// an import map or package.json.
-pub enum MappedResolution {
-  None,
-  PackageJson(ModuleSpecifier),
-  ImportMap(ModuleSpecifier),
-}
-
-impl MappedResolution {
-  pub fn into_specifier(self) -> Option<ModuleSpecifier> {
-    match self {
-      MappedResolution::None => Option::None,
-      MappedResolution::PackageJson(specifier) => Some(specifier),
-      MappedResolution::ImportMap(specifier) => Some(specifier),
-    }
-  }
-}
-
-/// Resolver for specifiers that could be mapped via an
-/// import map or package.json.
-#[derive(Debug)]
-pub struct MappedSpecifierResolver {
-  maybe_import_map: Option<Arc<ImportMap>>,
-  package_json_deps_provider: Arc<PackageJsonDepsProvider>,
-}
-
-impl MappedSpecifierResolver {
-  pub fn new(
-    maybe_import_map: Option<Arc<ImportMap>>,
-    package_json_deps_provider: Arc<PackageJsonDepsProvider>,
-  ) -> Self {
-    Self {
-      maybe_import_map,
-      package_json_deps_provider,
-    }
-  }
-
-  pub fn resolve(
-    &self,
-    specifier: &str,
-    referrer: &ModuleSpecifier,
-  ) -> Result<MappedResolution, AnyError> {
-    // attempt to resolve with the import map first
-    let maybe_import_map_err = match self
-      .maybe_import_map
-      .as_ref()
-      .map(|import_map| import_map.resolve(specifier, referrer))
-    {
-      Some(Ok(value)) => return Ok(MappedResolution::ImportMap(value)),
-      Some(Err(err)) => Some(err),
-      None => None,
-    };
-
-    // then with package.json
-    if let Some(deps) = self.package_json_deps_provider.deps() {
-      if let Some(specifier) = resolve_package_json_dep(specifier, deps)? {
-        return Ok(MappedResolution::PackageJson(specifier));
-      }
-    }
-
-    // otherwise, surface the import map error or try resolving when has no import map
-    if let Some(err) = maybe_import_map_err {
-      Err(err.into())
-    } else {
-      Ok(MappedResolution::None)
-    }
-  }
-}
-
 /// A resolver that takes care of resolution, taking into account loaded
 /// import map, JSX settings.
 #[derive(Debug)]
 pub struct CliGraphResolver {
+  node_resolver: Option<Arc<CliNodeResolver>>,
+  npm_resolver: Option<Arc<dyn CliNpmResolver>>,
   sloppy_imports_resolver: Option<SloppyImportsResolver>,
-  mapped_specifier_resolver: MappedSpecifierResolver,
+  workspace_resolver: Arc<WorkspaceResolver>,
   maybe_default_jsx_import_source: Option<String>,
   maybe_default_jsx_import_source_types: Option<String>,
   maybe_jsx_import_source_module: Option<String>,
   maybe_vendor_specifier: Option<ModuleSpecifier>,
-  node_resolver: Option<Arc<CliNodeResolver>>,
-  npm_resolver: Option<Arc<dyn CliNpmResolver>>,
   found_package_json_dep_flag: AtomicFlag,
   bare_node_builtins_enabled: bool,
 }
 
 pub struct CliGraphResolverOptions<'a> {
-  pub sloppy_imports_resolver: Option<SloppyImportsResolver>,
   pub node_resolver: Option<Arc<CliNodeResolver>>,
   pub npm_resolver: Option<Arc<dyn CliNpmResolver>>,
-  pub package_json_deps_provider: Arc<PackageJsonDepsProvider>,
-  pub maybe_jsx_import_source_config: Option<JsxImportSourceConfig>,
-  pub maybe_import_map: Option<Arc<ImportMap>>,
-  pub maybe_vendor_dir: Option<&'a PathBuf>,
+  pub sloppy_imports_resolver: Option<SloppyImportsResolver>,
+  pub workspace_resolver: Arc<WorkspaceResolver>,
   pub bare_node_builtins_enabled: bool,
+  pub maybe_jsx_import_source_config: Option<JsxImportSourceConfig>,
+  pub maybe_vendor_dir: Option<&'a PathBuf>,
 }
 
 impl CliGraphResolver {
   pub fn new(options: CliGraphResolverOptions) -> Self {
     let is_byonm = options
       .npm_resolver
       .as_ref()
       .map(|n| n.as_byonm().is_some())
       .unwrap_or(false);
     Self {
+      node_resolver: options.node_resolver,
+      npm_resolver: options.npm_resolver,
       sloppy_imports_resolver: options.sloppy_imports_resolver,
-      mapped_specifier_resolver: MappedSpecifierResolver::new(
-        options.maybe_import_map,
-        if is_byonm {
-          // don't resolve from the root package.json deps for byonm
-          Arc::new(PackageJsonDepsProvider::new(None))
-        } else {
-          options.package_json_deps_provider
-        },
-      ),
+      workspace_resolver: options.workspace_resolver,
       maybe_default_jsx_import_source: options
         .maybe_jsx_import_source_config
        .as_ref()
@@ -478,8 +440,6 @@ impl CliGraphResolver {
       maybe_vendor_specifier: options
         .maybe_vendor_dir
         .and_then(|v| ModuleSpecifier::from_directory_path(v).ok()),
-      node_resolver: options.node_resolver,
-      npm_resolver: options.npm_resolver,
       found_package_json_dep_flag: Default::default(),
       bare_node_builtins_enabled: options.bare_node_builtins_enabled,
     }
@@ -497,6 +457,7 @@ impl CliGraphResolver {
   }
 }
 
+  // todo(dsherret): if we returned structured errors from the NodeResolver we wouldn't need this
   fn check_surface_byonm_node_error(
     &self,
     specifier: &str,
@@ -561,22 +522,92 @@ impl Resolver for CliGraphResolver {
 
     let referrer = &referrer_range.specifier;
     let result: Result<_, ResolveError> = self
-      .mapped_specifier_resolver
+      .workspace_resolver
       .resolve(specifier, referrer)
-      .map_err(|err| err.into())
-      .and_then(|resolution| match resolution {
-        MappedResolution::ImportMap(specifier) => Ok(specifier),
-        MappedResolution::PackageJson(specifier) => {
-          // found a specifier in the package.json, so mark that
-          // we need to do an "npm install" later
-          self.found_package_json_dep_flag.raise();
-          Ok(specifier)
-        }
-        MappedResolution::None => {
-          deno_graph::resolve_import(specifier, &referrer_range.specifier)
-            .map_err(|err| err.into())
-        }
-      });
+      .map_err(|err| match err {
+        MappedResolutionError::Specifier(err) => ResolveError::Specifier(err),
+        MappedResolutionError::ImportMap(err) => {
+          ResolveError::Other(err.into())
+        }
+      });
+    let result = match result {
+      Ok(resolution) => match resolution {
+        MappedResolution::Normal(specifier)
+        | MappedResolution::ImportMap(specifier) => Ok(specifier),
+        // todo(dsherret): for byonm it should do resolution solely based on
+        // the referrer and not the package.json
+        MappedResolution::PackageJson {
+          dep_result,
+          alias,
+          sub_path,
+          ..
+        } => {
+          // found a specifier in the package.json, so mark that
+          // we need to do an "npm install" later
+          self.found_package_json_dep_flag.raise();
+
+          dep_result
+            .as_ref()
+            .map_err(|e| ResolveError::Other(e.clone().into()))
+            .and_then(|dep| match dep {
+              PackageJsonDepValue::Req(req) => {
+                ModuleSpecifier::parse(&format!(
+                  "npm:{}{}",
+                  req,
+                  sub_path.map(|s| format!("/{}", s)).unwrap_or_default()
+                ))
+                .map_err(|e| ResolveError::Other(e.into()))
+              }
+              PackageJsonDepValue::Workspace(version_req) => self
+                .workspace_resolver
+                .resolve_workspace_pkg_json_folder_for_pkg_json_dep(
+                  alias,
+                  version_req,
+                )
+                .map_err(|e| ResolveError::Other(e.into()))
+                .and_then(|pkg_folder| {
+                  Ok(
+                    self
+                      .node_resolver
+                      .as_ref()
+                      .unwrap()
+                      .resolve_package_sub_path_from_deno_module(
+                        pkg_folder,
+                        sub_path.as_deref(),
+                        referrer,
+                        to_node_mode(mode),
+                      )?
+                      .into_url(),
+                  )
+                }),
+            })
+        }
+      },
+      Err(err) => Err(err),
+    };
+
+    // check if it's an npm specifier that resolves to a workspace member
+    if let Some(node_resolver) = &self.node_resolver {
+      if let Ok(specifier) = &result {
+        if let Ok(req_ref) = NpmPackageReqReference::from_specifier(specifier) {
+          if let Some(pkg_folder) = self
+            .workspace_resolver
+            .resolve_workspace_pkg_json_folder_for_npm_specifier(req_ref.req())
+          {
+            return Ok(
+              node_resolver
+                .resolve_package_sub_path_from_deno_module(
+                  pkg_folder,
+                  req_ref.sub_path(),
+                  referrer,
+                  to_node_mode(mode),
+                )?
+                .into_url(),
+            );
+          }
+        }
+      }
+    }
 
     // do sloppy imports resolution if enabled
     let result =
@@ -733,28 +764,6 @@ fn sloppy_imports_resolve(
   resolution.into_specifier().into_owned()
 }
 
-fn resolve_package_json_dep(
-  specifier: &str,
-  deps: &PackageJsonDeps,
-) -> Result<Option<ModuleSpecifier>, AnyError> {
-  for (bare_specifier, req_result) in deps {
-    if specifier.starts_with(bare_specifier) {
-      let path = &specifier[bare_specifier.len()..];
-      if path.is_empty() || path.starts_with('/') {
-        let req = req_result.as_ref().map_err(|err| {
-          anyhow!(
-            "Parsing version constraints in the application-level package.json is more strict at the moment.\n\n{:#}",
-            err.clone()
-          )
-        })?;
-        return Ok(Some(ModuleSpecifier::parse(&format!("npm:{req}{path}"))?));
-      }
-    }
-  }
-
-  Ok(None)
-}
-
 #[derive(Debug)]
 pub struct WorkerCliNpmGraphResolver<'a> {
   npm_resolver: Option<&'a Arc<dyn CliNpmResolver>>,
@@ -1266,72 +1275,10 @@ impl SloppyImportsResolver {
 
 #[cfg(test)]
 mod test {
-  use std::collections::BTreeMap;
-
   use test_util::TestContext;
 
   use super::*;
 
   #[test]
-  fn test_resolve_package_json_dep() {
-    fn resolve(
-      specifier: &str,
-      deps: &BTreeMap<String, PackageReq>,
-    ) -> Result<Option<String>, String> {
-      let deps = deps
-        .iter()
-        .map(|(key, value)| (key.to_string(), Ok(value.clone())))
-        .collect();
-      resolve_package_json_dep(specifier, &deps)
-        .map(|s| s.map(|s| s.to_string()))
-        .map_err(|err| err.to_string())
-    }
-
-    let deps = BTreeMap::from([
-      (
-        "package".to_string(),
-        PackageReq::from_str("package@1.0").unwrap(),
-      ),
-      (
-        "package-alias".to_string(),
-        PackageReq::from_str("package@^1.2").unwrap(),
-      ),
-      (
-        "@deno/test".to_string(),
-        PackageReq::from_str("@deno/test@~0.2").unwrap(),
-      ),
-    ]);
-
-    assert_eq!(
-      resolve("package", &deps).unwrap(),
-      Some("npm:package@1.0".to_string()),
-    );
-    assert_eq!(
-      resolve("package/some_path.ts", &deps).unwrap(),
-      Some("npm:package@1.0/some_path.ts".to_string()),
-    );
-
-    assert_eq!(
-      resolve("@deno/test", &deps).unwrap(),
-      Some("npm:@deno/test@~0.2".to_string()),
-    );
-    assert_eq!(
-      resolve("@deno/test/some_path.ts", &deps).unwrap(),
-      Some("npm:@deno/test@~0.2/some_path.ts".to_string()),
-    );
-    // matches the start, but doesn't have the same length or a path
-    assert_eq!(resolve("@deno/testing", &deps).unwrap(), None,);
-
-    // alias
-    assert_eq!(
-      resolve("package-alias", &deps).unwrap(),
-      Some("npm:package@^1.2".to_string()),
-    );
-
-    // non-existent bare specifier
-    assert_eq!(resolve("non-existent", &deps).unwrap(), None);
-  }
-
-  #[test]
+  #[test]
   fn test_unstable_sloppy_imports() {
     fn resolve(specifier: &ModuleSpecifier) -> SloppyImportsResolution {

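In the rewritten resolver above, a bare specifier that matches a registry dependency in package.json is turned into an `npm:` specifier by joining the requirement with the optional sub path. A minimal sketch of exactly that string construction, mirroring the `format!` call in the diff (`req` here is just a display string such as "chalk@^5"):

fn npm_specifier(req: &str, sub_path: Option<&str>) -> String {
  format!(
    "npm:{}{}",
    req,
    sub_path.map(|s| format!("/{}", s)).unwrap_or_default()
  )
}

fn main() {
  assert_eq!(npm_specifier("chalk@^5", None), "npm:chalk@^5");
  assert_eq!(
    npm_specifier("chalk@^5", Some("source/index.js")),
    "npm:chalk@^5/source/index.js"
  );
}
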
@@ -604,7 +604,7 @@
         }
       ]
     },
-    "workspaces": {
+    "workspace": {
       "type": "array",
       "items": {
         "type": "string"

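With the schema change above, the config key is the singular "workspace", an array of member directory strings. An illustrative deno.json fragment (the member paths are made up):

{
  "workspace": ["./packages/pkg-a", "./packages/pkg-b"]
}
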
@@ -2,6 +2,7 @@
 
 use std::borrow::Cow;
 use std::collections::BTreeMap;
+use std::collections::VecDeque;
 use std::env::current_exe;
 use std::ffi::OsString;
 use std::fs;
@@ -15,8 +16,8 @@ use std::path::PathBuf;
 use std::process::Command;
 
 use deno_ast::ModuleSpecifier;
-use deno_config::package_json::PackageJsonDepValueParseError;
-use deno_config::package_json::PackageJsonDeps;
+use deno_config::workspace::PackageJsonDepResolution;
+use deno_config::workspace::Workspace;
 use deno_core::anyhow::bail;
 use deno_core::anyhow::Context;
 use deno_core::error::AnyError;
@@ -26,9 +27,12 @@ use deno_core::futures::AsyncSeekExt;
 use deno_core::serde_json;
 use deno_core::url::Url;
 use deno_npm::NpmSystemInfo;
 use deno_runtime::deno_node::PackageJson;
-use deno_semver::npm::NpmVersionReqParseError;
-use deno_semver::package::PackageReq;
-use deno_semver::VersionReqSpecifierParseError;
+use eszip::EszipRelativeFileBaseUrl;
+use indexmap::IndexMap;
 use log::Level;
 use serde::Deserialize;
 use serde::Serialize;
@@ -36,7 +40,7 @@ use serde::Serialize;
 use crate::args::CaData;
 use crate::args::CliOptions;
 use crate::args::CompileFlags;
-use crate::args::PackageJsonDepsProvider;
+use crate::args::PackageJsonInstallDepsProvider;
 use crate::args::PermissionFlags;
 use crate::args::UnstableConfig;
 use crate::cache::DenoDir;
@@ -44,6 +48,8 @@ use crate::file_fetcher::FileFetcher;
 use crate::http_util::HttpClientProvider;
 use crate::npm::CliNpmResolver;
 use crate::npm::InnerCliNpmResolverRef;
+use crate::standalone::virtual_fs::VfsEntry;
 use crate::util::fs::canonicalize_path_maybe_not_exists;
 use crate::util::progress_bar::ProgressBar;
 use crate::util::progress_bar::ProgressBarStyle;
@@ -54,81 +60,30 @@ use super::virtual_fs::VirtualDirectory;
 
 const MAGIC_TRAILER: &[u8; 8] = b"d3n0l4nd";
 
-#[derive(Serialize, Deserialize)]
-enum SerializablePackageJsonDepValueParseError {
-  VersionReq(String),
-  Unsupported { scheme: String },
-}
-
-impl SerializablePackageJsonDepValueParseError {
-  pub fn from_err(err: PackageJsonDepValueParseError) -> Self {
-    match err {
-      PackageJsonDepValueParseError::VersionReq(err) => {
-        Self::VersionReq(err.source.to_string())
-      }
-      PackageJsonDepValueParseError::Unsupported { scheme } => {
-        Self::Unsupported { scheme }
-      }
-    }
-  }
-
-  pub fn into_err(self) -> PackageJsonDepValueParseError {
-    match self {
-      SerializablePackageJsonDepValueParseError::VersionReq(source) => {
-        PackageJsonDepValueParseError::VersionReq(NpmVersionReqParseError {
-          source: monch::ParseErrorFailureError::new(source),
-        })
-      }
-      SerializablePackageJsonDepValueParseError::Unsupported { scheme } => {
-        PackageJsonDepValueParseError::Unsupported { scheme }
-      }
-    }
-  }
-}
-
-#[derive(Serialize, Deserialize)]
-pub struct SerializablePackageJsonDeps(
-  BTreeMap<
-    String,
-    Result<PackageReq, SerializablePackageJsonDepValueParseError>,
-  >,
-);
-
-impl SerializablePackageJsonDeps {
-  pub fn from_deps(deps: PackageJsonDeps) -> Self {
-    Self(
-      deps
-        .into_iter()
-        .map(|(name, req)| {
-          let res =
-            req.map_err(SerializablePackageJsonDepValueParseError::from_err);
-          (name, res)
-        })
-        .collect(),
-    )
-  }
-
-  pub fn into_deps(self) -> PackageJsonDeps {
-    self
-      .0
-      .into_iter()
-      .map(|(name, res)| (name, res.map_err(|err| err.into_err())))
-      .collect()
-  }
-}
-
 #[derive(Deserialize, Serialize)]
 pub enum NodeModules {
   Managed {
-    /// Whether this uses a node_modules directory (true) or the global cache (false).
-    node_modules_dir: bool,
-    package_json_deps: Option<SerializablePackageJsonDeps>,
+    /// Relative path for the node_modules directory in the vfs.
+    node_modules_dir: Option<String>,
   },
   Byonm {
-    package_json_deps: Option<SerializablePackageJsonDeps>,
+    root_node_modules_dir: String,
   },
 }
 
+#[derive(Deserialize, Serialize)]
+pub struct SerializedWorkspaceResolverImportMap {
+  pub specifier: String,
+  pub json: String,
+}
+
+#[derive(Deserialize, Serialize)]
+pub struct SerializedWorkspaceResolver {
+  pub import_map: Option<SerializedWorkspaceResolverImportMap>,
+  pub package_jsons: BTreeMap<String, serde_json::Value>,
+  pub pkg_json_resolution: PackageJsonDepResolution,
+}
+
 #[derive(Deserialize, Serialize)]
 pub struct Metadata {
   pub argv: Vec<String>,
@@ -140,8 +95,8 @@ pub struct Metadata {
   pub ca_stores: Option<Vec<String>>,
   pub ca_data: Option<Vec<u8>>,
   pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
-  pub maybe_import_map: Option<(Url, String)>,
-  pub entrypoint: ModuleSpecifier,
+  pub workspace_resolver: SerializedWorkspaceResolver,
+  pub entrypoint_key: String,
   pub node_modules: Option<NodeModules>,
   pub disable_deprecated_api_warning: bool,
   pub unstable_config: UnstableConfig,
@@ -415,13 +370,13 @@ pub fn unpack_into_dir(
   fs::remove_file(&archive_path)?;
   Ok(exe_path)
 }
 
 pub struct DenoCompileBinaryWriter<'a> {
   deno_dir: &'a DenoDir,
   file_fetcher: &'a FileFetcher,
   http_client_provider: &'a HttpClientProvider,
   npm_resolver: &'a dyn CliNpmResolver,
   npm_system_info: NpmSystemInfo,
-  package_json_deps_provider: &'a PackageJsonDepsProvider,
 }
 
 impl<'a> DenoCompileBinaryWriter<'a> {
@@ -432,7 +387,6 @@ impl<'a> DenoCompileBinaryWriter<'a> {
     http_client_provider: &'a HttpClientProvider,
     npm_resolver: &'a dyn CliNpmResolver,
     npm_system_info: NpmSystemInfo,
-    package_json_deps_provider: &'a PackageJsonDepsProvider,
   ) -> Self {
     Self {
       deno_dir,
@@ -440,7 +394,6 @@ impl<'a> DenoCompileBinaryWriter<'a> {
       http_client_provider,
       npm_resolver,
       npm_system_info,
-      package_json_deps_provider,
     }
   }
 
@@ -448,7 +401,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
     &self,
     writer: &mut impl Write,
     eszip: eszip::EszipV2,
-    module_specifier: &ModuleSpecifier,
+    root_dir_url: EszipRelativeFileBaseUrl<'_>,
+    entrypoint: &ModuleSpecifier,
     compile_flags: &CompileFlags,
     cli_options: &CliOptions,
   ) -> Result<(), AnyError> {
@@ -465,13 +419,13 @@ impl<'a> DenoCompileBinaryWriter<'a> {
     }
     set_windows_binary_to_gui(&mut original_binary)?;
   }
 
   self
     .write_standalone_binary(
       writer,
       original_binary,
       eszip,
-      module_specifier,
+      root_dir_url,
+      entrypoint,
       cli_options,
       compile_flags,
     )
@@ -557,11 +511,13 @@ impl<'a> DenoCompileBinaryWriter<'a> {
 
   /// This functions creates a standalone deno binary by appending a bundle
   /// and magic trailer to the currently executing binary.
+  #[allow(clippy::too_many_arguments)]
   async fn write_standalone_binary(
     &self,
     writer: &mut impl Write,
     original_bin: Vec<u8>,
     mut eszip: eszip::EszipV2,
+    root_dir_url: EszipRelativeFileBaseUrl<'_>,
     entrypoint: &ModuleSpecifier,
     cli_options: &CliOptions,
     compile_flags: &CompileFlags,
@@ -574,48 +530,60 @@ impl<'a> DenoCompileBinaryWriter<'a> {
       Some(CaData::Bytes(bytes)) => Some(bytes.clone()),
       None => None,
     };
-    let maybe_import_map = cli_options
-      .resolve_import_map(self.file_fetcher)
-      .await?
-      .map(|import_map| (import_map.base_url().clone(), import_map.to_json()));
-    let (npm_vfs, npm_files, node_modules) =
-      match self.npm_resolver.as_inner() {
-        InnerCliNpmResolverRef::Managed(managed) => {
-          let snapshot =
-            managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
-          if !snapshot.as_serialized().packages.is_empty() {
-            let (root_dir, files) = self.build_vfs()?.into_dir_and_files();
-            eszip.add_npm_snapshot(snapshot);
-            (
-              Some(root_dir),
-              files,
-              Some(NodeModules::Managed {
-                node_modules_dir: self
-                  .npm_resolver
-                  .root_node_modules_path()
-                  .is_some(),
-                package_json_deps: self.package_json_deps_provider.deps().map(
-                  |deps| SerializablePackageJsonDeps::from_deps(deps.clone()),
-                ),
-              }),
-            )
-          } else {
-            (None, Vec::new(), None)
-          }
-        }
-        InnerCliNpmResolverRef::Byonm(_) => {
-          let (root_dir, files) = self.build_vfs()?.into_dir_and_files();
-          (
-            Some(root_dir),
-            files,
-            Some(NodeModules::Byonm {
-              package_json_deps: self.package_json_deps_provider.deps().map(
-                |deps| SerializablePackageJsonDeps::from_deps(deps.clone()),
-              ),
-            }),
-          )
-        }
-      };
+    let workspace_resolver = cli_options
+      .create_workspace_resolver(self.file_fetcher)
+      .await?;
+    let root_path = root_dir_url.inner().to_file_path().unwrap();
+    let (npm_vfs, npm_files, node_modules) = match self.npm_resolver.as_inner()
+    {
+      InnerCliNpmResolverRef::Managed(managed) => {
+        let snapshot =
+          managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
+        if !snapshot.as_serialized().packages.is_empty() {
+          let (root_dir, files) = self
+            .build_vfs(&root_path, cli_options)?
+            .into_dir_and_files();
+          eszip.add_npm_snapshot(snapshot);
+          (
+            Some(root_dir),
+            files,
+            Some(NodeModules::Managed {
+              node_modules_dir: self.npm_resolver.root_node_modules_path().map(
+                |path| {
+                  root_dir_url
+                    .specifier_key(
+                      &ModuleSpecifier::from_directory_path(path).unwrap(),
+                    )
+                    .into_owned()
+                },
+              ),
+            }),
+          )
+        } else {
+          (None, Vec::new(), None)
+        }
+      }
+      InnerCliNpmResolverRef::Byonm(resolver) => {
+        let (root_dir, files) = self
+          .build_vfs(&root_path, cli_options)?
+          .into_dir_and_files();
+        (
+          Some(root_dir),
+          files,
+          Some(NodeModules::Byonm {
+            root_node_modules_dir: root_dir_url
+              .specifier_key(
+                &ModuleSpecifier::from_directory_path(
+                  // will always be set for byonm
+                  resolver.root_node_modules_path().unwrap(),
+                )
+                .unwrap(),
+              )
+              .into_owned(),
+          }),
+        )
+      }
+    };
 
     let metadata = Metadata {
       argv: compile_flags.args.clone(),
@@ -629,8 +597,32 @@ impl<'a> DenoCompileBinaryWriter<'a> {
       log_level: cli_options.log_level(),
       ca_stores: cli_options.ca_stores().clone(),
       ca_data,
-      entrypoint: entrypoint.clone(),
-      maybe_import_map,
+      entrypoint_key: root_dir_url.specifier_key(entrypoint).into_owned(),
+      workspace_resolver: SerializedWorkspaceResolver {
+        import_map: workspace_resolver.maybe_import_map().map(|i| {
+          SerializedWorkspaceResolverImportMap {
+            specifier: if i.base_url().scheme() == "file" {
+              root_dir_url.specifier_key(i.base_url()).into_owned()
+            } else {
+              // just make a remote url local
+              "deno.json".to_string()
+            },
+            json: i.to_json(),
+          }
+        }),
+        package_jsons: workspace_resolver
+          .package_jsons()
+          .map(|pkg_json| {
+            (
+              root_dir_url
                .specifier_key(&pkg_json.specifier())
                .into_owned(),
+              serde_json::to_value(pkg_json).unwrap(),
+            )
+          })
+          .collect(),
+        pkg_json_resolution: workspace_resolver.pkg_json_dep_resolution(),
+      },
       node_modules,
       disable_deprecated_api_warning: cli_options
         .disable_deprecated_api_warning,
@@ -653,7 +645,11 @@ impl<'a> DenoCompileBinaryWriter<'a> {
     )
   }
 
-  fn build_vfs(&self) -> Result<VfsBuilder, AnyError> {
+  fn build_vfs(
+    &self,
+    root_path: &Path,
+    cli_options: &CliOptions,
+  ) -> Result<VfsBuilder, AnyError> {
     fn maybe_warn_different_system(system_info: &NpmSystemInfo) {
       if system_info != &NpmSystemInfo::default() {
         log::warn!("{} The node_modules directory may be incompatible with the target system.", crate::colors::yellow("Warning"));
@@ -664,7 +660,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
       InnerCliNpmResolverRef::Managed(npm_resolver) => {
         if let Some(node_modules_path) = npm_resolver.root_node_modules_path() {
           maybe_warn_different_system(&self.npm_system_info);
-          let mut builder = VfsBuilder::new(node_modules_path.clone())?;
+          let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
           builder.add_dir_recursive(node_modules_path)?;
           Ok(builder)
         } else {
@@ -678,23 +674,82 @@ impl<'a> DenoCompileBinaryWriter<'a> {
             npm_resolver.resolve_pkg_folder_from_pkg_id(&package.id)?;
           builder.add_dir_recursive(&folder)?;
         }
-        // overwrite the root directory's name to obscure the user's registry url
-        builder.set_root_dir_name("node_modules".to_string());
+
+        // Flatten all the registries folders into a single "node_modules/localhost" folder
+        // that will be used by denort when loading the npm cache. This avoids us exposing
+        // the user's private registry information and means we don't have to bother
+        // serializing all the different registry config into the binary.
+        builder.with_root_dir(|root_dir| {
+          root_dir.name = "node_modules".to_string();
+          let mut new_entries = Vec::with_capacity(root_dir.entries.len());
+          let mut localhost_entries = IndexMap::new();
+          for entry in std::mem::take(&mut root_dir.entries) {
+            match entry {
+              VfsEntry::Dir(dir) => {
+                for entry in dir.entries {
+                  log::debug!(
+                    "Flattening {} into node_modules",
+                    entry.name()
+                  );
+                  if let Some(existing) =
+                    localhost_entries.insert(entry.name().to_string(), entry)
+                  {
+                    panic!(
+                      "Unhandled scenario where a duplicate entry was found: {:?}",
+                      existing
+                    );
+                  }
+                }
+              }
+              VfsEntry::File(_) | VfsEntry::Symlink(_) => {
+                new_entries.push(entry);
+              }
+            }
+          }
+          new_entries.push(VfsEntry::Dir(VirtualDirectory {
+            name: "localhost".to_string(),
+            entries: localhost_entries.into_iter().map(|(_, v)| v).collect(),
+          }));
+          // needs to be sorted by name
+          new_entries.sort_by(|a, b| a.name().cmp(b.name()));
+          root_dir.entries = new_entries;
+        });
+
        Ok(builder)
       }
     }
-    InnerCliNpmResolverRef::Byonm(npm_resolver) => {
+    InnerCliNpmResolverRef::Byonm(_) => {
       maybe_warn_different_system(&self.npm_system_info);
-      // the root_node_modules directory will always exist for byonm
-      let node_modules_path = npm_resolver.root_node_modules_path().unwrap();
-      let parent_path = node_modules_path.parent().unwrap();
-      let mut builder = VfsBuilder::new(parent_path.to_path_buf())?;
-      let package_json_path = parent_path.join("package.json");
-      if package_json_path.exists() {
-        builder.add_file_at_path(&package_json_path)?;
+      let mut builder = VfsBuilder::new(root_path.to_path_buf())?;
+      for pkg_json in cli_options.workspace.package_jsons() {
+        builder.add_file_at_path(&pkg_json.path)?;
       }
-      if node_modules_path.exists() {
-        builder.add_dir_recursive(node_modules_path)?;
+      // traverse and add all the node_modules directories in the workspace
+      let mut pending_dirs = VecDeque::new();
+      pending_dirs.push_back(
+        cli_options
+          .workspace
+          .root_folder()
+          .0
+          .to_file_path()
+          .unwrap(),
+      );
+      while let Some(pending_dir) = pending_dirs.pop_front() {
+        let entries = fs::read_dir(&pending_dir).with_context(|| {
+          format!("Failed reading: {}", pending_dir.display())
+        })?;
+        for entry in entries {
+          let entry = entry?;
+          let path = entry.path();
+          if !path.is_dir() {
+            continue;
+          }
+          if path.ends_with("node_modules") {
+            builder.add_dir_recursive(&path)?;
+          } else {
+            pending_dirs.push_back(path);
+          }
+        }
      }
       Ok(builder)
     }

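The flattening above merges every registry directory's children into one synthetic node_modules/localhost directory and re-sorts the final entry list by name so lookups stay deterministic. A toy sketch of the same transformation over a simplified entry type (not the real `VfsEntry`):

use std::collections::BTreeMap;

#[derive(Debug, Clone)]
enum Entry {
  Dir(String, Vec<Entry>),
  File(String),
}

fn entry_name(e: &Entry) -> &str {
  match e {
    Entry::Dir(n, _) | Entry::File(n) => n,
  }
}

fn flatten_registries(entries: Vec<Entry>) -> Vec<Entry> {
  let mut out = Vec::new();
  let mut localhost: BTreeMap<String, Entry> = BTreeMap::new();
  for entry in entries {
    match entry {
      // a registry dir: hoist its children into the shared "localhost" dir
      Entry::Dir(_registry, children) => {
        for child in children {
          if localhost
            .insert(entry_name(&child).to_string(), child)
            .is_some()
          {
            panic!("duplicate entry across registries");
          }
        }
      }
      // plain files at the root are kept as-is
      file @ Entry::File(_) => out.push(file),
    }
  }
  out.push(Entry::Dir(
    "localhost".to_string(),
    localhost.into_values().collect(),
  ));
  // the virtual directory format requires name-sorted entries
  out.sort_by(|a, b| entry_name(a).cmp(entry_name(b)));
  out
}

fn main() {
  let tree = vec![
    Entry::Dir("registry.npmjs.org".into(), vec![Entry::Dir("chalk".into(), vec![])]),
    Entry::Dir("registry.example.com".into(), vec![Entry::Dir("left-pad".into(), vec![])]),
  ];
  let flat = flatten_registries(tree);
  assert_eq!(entry_name(&flat[0]), "localhost");
}
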
@ -10,7 +10,7 @@ use crate::args::get_root_cert_store;
|
|||
use crate::args::npm_pkg_req_ref_to_binary_command;
|
||||
use crate::args::CaData;
|
||||
use crate::args::CacheSetting;
|
||||
use crate::args::PackageJsonDepsProvider;
|
||||
use crate::args::PackageJsonInstallDepsProvider;
|
||||
use crate::args::StorageKeyResolver;
|
||||
use crate::cache::Caches;
|
||||
use crate::cache::DenoDirProvider;
|
||||
|
@ -25,7 +25,6 @@ use crate::npm::CliNpmResolverManagedSnapshotOption;
|
|||
use crate::npm::NpmCacheDir;
|
||||
use crate::resolver::CjsResolutionStore;
|
||||
use crate::resolver::CliNodeResolver;
|
||||
use crate::resolver::MappedSpecifierResolver;
|
||||
use crate::resolver::NpmModuleLoader;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
use crate::util::progress_bar::ProgressBarStyle;
|
||||
|
@ -35,6 +34,10 @@ use crate::worker::CliMainWorkerOptions;
|
|||
use crate::worker::ModuleLoaderAndSourceMapGetter;
|
||||
use crate::worker::ModuleLoaderFactory;
|
||||
use deno_ast::MediaType;
|
||||
use deno_config::package_json::PackageJsonDepValue;
|
||||
use deno_config::workspace::MappedResolution;
|
||||
use deno_config::workspace::MappedResolutionError;
|
||||
use deno_config::workspace::WorkspaceResolver;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::type_error;
|
||||
|
@ -48,6 +51,7 @@ use deno_core::ModuleSpecifier;
|
|||
use deno_core::ModuleType;
|
||||
use deno_core::RequestedModuleType;
|
||||
use deno_core::ResolutionKind;
|
||||
use deno_npm::npm_rc::ResolvedNpmRc;
|
||||
use deno_runtime::deno_fs;
|
||||
use deno_runtime::deno_node::analyze::NodeCodeTranslator;
|
||||
use deno_runtime::deno_node::NodeResolutionMode;
|
||||
|
@ -59,7 +63,9 @@ use deno_runtime::deno_tls::RootCertStoreProvider;
|
|||
use deno_runtime::WorkerExecutionMode;
|
||||
use deno_runtime::WorkerLogLevel;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use eszip::EszipRelativeFileBaseUrl;
|
||||
use import_map::parse_from_json;
|
||||
use std::borrow::Cow;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
|
@ -75,9 +81,43 @@ use self::binary::load_npm_vfs;
|
|||
use self::binary::Metadata;
|
||||
use self::file_system::DenoCompileFileSystem;
|
||||
|
||||
struct SharedModuleLoaderState {
|
||||
struct WorkspaceEszipModule {
|
||||
specifier: ModuleSpecifier,
|
||||
inner: eszip::Module,
|
||||
}
|
||||
|
||||
struct WorkspaceEszip {
|
||||
eszip: eszip::EszipV2,
|
||||
mapped_specifier_resolver: MappedSpecifierResolver,
|
||||
root_dir_url: ModuleSpecifier,
|
||||
}
|
||||
|
||||
impl WorkspaceEszip {
|
||||
pub fn get_module(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Option<WorkspaceEszipModule> {
|
||||
if specifier.scheme() == "file" {
|
||||
let specifier_key = EszipRelativeFileBaseUrl::new(&self.root_dir_url)
|
||||
.specifier_key(specifier);
|
||||
let module = self.eszip.get_module(&specifier_key)?;
|
||||
let specifier = self.root_dir_url.join(&module.specifier).unwrap();
|
||||
Some(WorkspaceEszipModule {
|
||||
specifier,
|
||||
inner: module,
|
||||
})
|
||||
} else {
|
||||
let module = self.eszip.get_module(specifier.as_str())?;
|
||||
Some(WorkspaceEszipModule {
|
||||
specifier: ModuleSpecifier::parse(&module.specifier).unwrap(),
|
||||
inner: module,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct SharedModuleLoaderState {
|
||||
eszip: WorkspaceEszip,
|
||||
workspace_resolver: WorkspaceResolver,
|
||||
node_resolver: Arc<CliNodeResolver>,
|
||||
npm_module_loader: Arc<NpmModuleLoader>,
|
||||
}
|
||||
|
@ -122,44 +162,92 @@ impl ModuleLoader for EmbeddedModuleLoader {
|
|||
};
|
||||
}
|
||||
|
||||
let maybe_mapped = self
|
||||
.shared
|
||||
.mapped_specifier_resolver
|
||||
.resolve(specifier, &referrer)?
|
||||
.into_specifier();
|
||||
let mapped_resolution =
|
||||
self.shared.workspace_resolver.resolve(specifier, &referrer);
|
||||
|
||||
// npm specifier
|
||||
let specifier_text = maybe_mapped
|
||||
.as_ref()
|
||||
.map(|r| r.as_str())
|
||||
.unwrap_or(specifier);
|
||||
if let Ok(reference) = NpmPackageReqReference::from_str(specifier_text) {
|
||||
return self
|
||||
.shared
|
||||
.node_resolver
|
||||
.resolve_req_reference(
|
||||
&reference,
|
||||
match mapped_resolution {
|
||||
Ok(MappedResolution::PackageJson {
|
||||
dep_result,
|
||||
sub_path,
|
||||
alias,
|
||||
..
|
||||
}) => match dep_result.as_ref().map_err(|e| AnyError::from(e.clone()))? {
|
||||
PackageJsonDepValue::Req(req) => self
|
||||
.shared
|
||||
.node_resolver
|
||||
.resolve_req_with_sub_path(
|
||||
req,
|
||||
sub_path.as_deref(),
|
||||
&referrer,
|
||||
NodeResolutionMode::Execution,
|
||||
)
|
||||
.map(|res| res.into_url()),
|
||||
PackageJsonDepValue::Workspace(version_req) => {
|
||||
let pkg_folder = self
|
||||
.shared
|
||||
.workspace_resolver
|
||||
.resolve_workspace_pkg_json_folder_for_pkg_json_dep(
|
||||
alias,
|
||||
version_req,
|
||||
)?;
|
||||
Ok(
|
||||
self
|
||||
.shared
|
||||
.node_resolver
|
||||
.resolve_package_sub_path_from_deno_module(
|
||||
pkg_folder,
|
||||
sub_path.as_deref(),
|
||||
&referrer,
|
||||
NodeResolutionMode::Execution,
|
||||
)?
|
||||
.into_url(),
|
||||
)
|
||||
}
|
||||
},
|
||||
Ok(MappedResolution::Normal(specifier))
|
||||
| Ok(MappedResolution::ImportMap(specifier)) => {
|
||||
if let Ok(reference) =
|
||||
NpmPackageReqReference::from_specifier(&specifier)
|
||||
{
|
||||
return self
|
||||
.shared
|
||||
.node_resolver
|
||||
.resolve_req_reference(
|
||||
&reference,
|
||||
&referrer,
|
||||
NodeResolutionMode::Execution,
|
||||
)
|
||||
.map(|res| res.into_url());
|
||||
}
|
||||
|
||||
if specifier.scheme() == "jsr" {
|
||||
if let Some(module) = self.shared.eszip.get_module(&specifier) {
|
||||
return Ok(module.specifier);
|
||||
}
|
||||
}
|
||||
|
||||
self
|
||||
.shared
|
||||
.node_resolver
|
||||
.handle_if_in_node_modules(specifier)
|
||||
}
|
||||
Err(err)
|
||||
if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" =>
|
||||
{
|
||||
// todo(dsherret): return a better error from node resolution so that
|
||||
// we can more easily tell whether to surface it or not
|
||||
let node_result = self.shared.node_resolver.resolve(
|
||||
specifier,
|
||||
&referrer,
|
||||
NodeResolutionMode::Execution,
|
||||
)
|
||||
.map(|res| res.into_url());
|
||||
}
|
||||
|
||||
let specifier = match maybe_mapped {
|
||||
Some(resolved) => resolved,
|
||||
None => deno_core::resolve_import(specifier, referrer.as_str())?,
|
||||
};
|
||||
|
||||
if specifier.scheme() == "jsr" {
|
||||
if let Some(module) = self.shared.eszip.get_module(specifier.as_str()) {
|
||||
return Ok(ModuleSpecifier::parse(&module.specifier).unwrap());
|
||||
);
|
||||
if let Ok(Some(res)) = node_result {
|
||||
return Ok(res.into_url());
|
||||
}
|
||||
Err(err.into())
|
||||
}
|
||||
Err(err) => Err(err.into()),
|
||||
}
|
||||
|
||||
self
|
||||
.shared
|
||||
.node_resolver
|
||||
.handle_if_in_node_modules(specifier)
|
||||
}
|
||||
|
||||
fn load(
|
||||
|
@ -215,27 +303,23 @@ impl ModuleLoader for EmbeddedModuleLoader {
|
|||
);
|
||||
}
|
||||
|
||||
let Some(module) =
|
||||
self.shared.eszip.get_module(original_specifier.as_str())
|
||||
else {
|
||||
let Some(module) = self.shared.eszip.get_module(original_specifier) else {
|
||||
return deno_core::ModuleLoadResponse::Sync(Err(type_error(format!(
|
||||
"Module not found: {}",
|
||||
original_specifier
|
||||
))));
|
||||
};
|
||||
let original_specifier = original_specifier.clone();
|
||||
let found_specifier =
|
||||
ModuleSpecifier::parse(&module.specifier).expect("invalid url in eszip");
|
||||
|
||||
deno_core::ModuleLoadResponse::Async(
|
||||
async move {
|
||||
let code = module.source().await.ok_or_else(|| {
|
||||
let code = module.inner.source().await.ok_or_else(|| {
|
||||
type_error(format!("Module not found: {}", original_specifier))
|
||||
})?;
|
||||
let code = arc_u8_to_arc_str(code)
|
||||
.map_err(|_| type_error("Module source is not utf-8"))?;
|
||||
Ok(deno_core::ModuleSource::new_with_redirect(
|
||||
match module.kind {
|
||||
match module.inner.kind {
|
||||
eszip::ModuleKind::JavaScript => ModuleType::JavaScript,
|
||||
eszip::ModuleKind::Json => ModuleType::Json,
|
||||
eszip::ModuleKind::Jsonc => {
|
||||
|
@ -247,7 +331,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
|
|||
},
|
||||
ModuleSourceCode::String(code.into()),
|
||||
&original_specifier,
|
||||
&found_specifier,
|
||||
&module.specifier,
|
||||
None,
|
||||
))
|
||||
}
|
||||
|
@ -324,10 +408,10 @@ pub async fn run(
|
|||
mut eszip: eszip::EszipV2,
|
||||
metadata: Metadata,
|
||||
) -> Result<i32, AnyError> {
|
||||
let main_module = &metadata.entrypoint;
|
||||
let current_exe_path = std::env::current_exe().unwrap();
|
||||
let current_exe_name =
|
||||
current_exe_path.file_name().unwrap().to_string_lossy();
|
||||
let maybe_cwd = std::env::current_dir().ok();
|
||||
let deno_dir_provider = Arc::new(DenoDirProvider::new(None));
|
||||
let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider {
|
||||
ca_stores: metadata.ca_stores,
|
||||
|
@ -341,119 +425,109 @@ pub async fn run(
|
|||
));
|
||||
// use a dummy npm registry url
|
||||
let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap();
|
||||
let root_path = std::env::temp_dir()
|
||||
.join(format!("deno-compile-{}", current_exe_name))
|
||||
.join("node_modules");
|
||||
let npm_cache_dir =
|
||||
NpmCacheDir::new(root_path.clone(), vec![npm_registry_url.clone()]);
|
||||
let root_path =
|
||||
std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name));
|
||||
let root_dir_url = ModuleSpecifier::from_directory_path(&root_path).unwrap();
|
||||
let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap();
|
||||
let root_node_modules_path = root_path.join("node_modules");
|
||||
let npm_cache_dir = NpmCacheDir::new(
|
||||
root_node_modules_path.clone(),
|
||||
vec![npm_registry_url.clone()],
|
||||
);
|
||||
let npm_global_cache_dir = npm_cache_dir.get_cache_location();
|
||||
let cache_setting = CacheSetting::Only;
|
||||
let (package_json_deps_provider, fs, npm_resolver, maybe_vfs_root) =
|
||||
match metadata.node_modules {
|
||||
Some(binary::NodeModules::Managed {
|
||||
node_modules_dir,
|
||||
package_json_deps,
|
||||
}) => {
|
||||
// this will always have a snapshot
|
||||
let snapshot = eszip.take_npm_snapshot().unwrap();
|
||||
let vfs_root_dir_path = if node_modules_dir {
|
||||
root_path
|
||||
} else {
|
||||
npm_cache_dir.root_dir().to_owned()
|
||||
};
|
||||
let vfs = load_npm_vfs(vfs_root_dir_path.clone())
|
||||
.context("Failed to load npm vfs.")?;
|
||||
let maybe_node_modules_path = if node_modules_dir {
|
||||
Some(vfs.root().to_path_buf())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let package_json_deps_provider =
|
||||
Arc::new(PackageJsonDepsProvider::new(
|
||||
package_json_deps.map(|serialized| serialized.into_deps()),
|
||||
));
|
||||
let fs = Arc::new(DenoCompileFileSystem::new(vfs))
|
||||
as Arc<dyn deno_fs::FileSystem>;
|
||||
let npm_resolver =
|
||||
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
|
||||
CliNpmResolverManagedCreateOptions {
|
||||
snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some(
|
||||
snapshot,
|
||||
)),
|
||||
maybe_lockfile: None,
|
||||
fs: fs.clone(),
|
||||
http_client_provider: http_client_provider.clone(),
|
||||
npm_global_cache_dir,
|
||||
cache_setting,
|
||||
text_only_progress_bar: progress_bar,
|
||||
maybe_node_modules_path,
|
||||
package_json_deps_provider: package_json_deps_provider.clone(),
|
||||
npm_system_info: Default::default(),
|
||||
// Packages from different registries are already inlined in the ESZip,
|
||||
// so no need to create actual `.npmrc` configuration.
|
||||
npmrc: create_default_npmrc(),
|
||||
},
|
||||
))
|
||||
.await?;
|
||||
(
|
||||
package_json_deps_provider,
|
||||
fs,
|
||||
npm_resolver,
|
||||
Some(vfs_root_dir_path),
|
||||
)
|
||||
}
|
||||
Some(binary::NodeModules::Byonm { package_json_deps }) => {
|
||||
let vfs_root_dir_path = root_path;
|
||||
let vfs = load_npm_vfs(vfs_root_dir_path.clone())
|
||||
.context("Failed to load npm vfs.")?;
|
||||
let node_modules_path = vfs.root().join("node_modules");
|
||||
let package_json_deps_provider =
|
||||
Arc::new(PackageJsonDepsProvider::new(
|
||||
package_json_deps.map(|serialized| serialized.into_deps()),
|
||||
));
|
||||
let fs = Arc::new(DenoCompileFileSystem::new(vfs))
|
||||
as Arc<dyn deno_fs::FileSystem>;
|
||||
let npm_resolver =
|
||||
create_cli_npm_resolver(CliNpmResolverCreateOptions::Byonm(
|
||||
CliNpmResolverByonmCreateOptions {
|
||||
fs: fs.clone(),
|
||||
root_node_modules_dir: node_modules_path,
|
||||
},
|
||||
))
|
||||
.await?;
|
||||
(
|
||||
package_json_deps_provider,
|
||||
fs,
|
||||
npm_resolver,
|
||||
Some(vfs_root_dir_path),
|
||||
)
|
||||
}
|
||||
None => {
|
||||
let package_json_deps_provider =
|
||||
Arc::new(PackageJsonDepsProvider::new(None));
|
||||
let fs = Arc::new(deno_fs::RealFs) as Arc<dyn deno_fs::FileSystem>;
|
||||
let npm_resolver =
|
||||
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
|
||||
CliNpmResolverManagedCreateOptions {
|
||||
snapshot: CliNpmResolverManagedSnapshotOption::Specified(None),
|
||||
maybe_lockfile: None,
|
||||
fs: fs.clone(),
|
||||
http_client_provider: http_client_provider.clone(),
|
||||
npm_global_cache_dir,
|
||||
cache_setting,
|
||||
text_only_progress_bar: progress_bar,
|
||||
maybe_node_modules_path: None,
|
||||
package_json_deps_provider: package_json_deps_provider.clone(),
|
||||
npm_system_info: Default::default(),
|
||||
// Packages from different registries are already inlined in the ESZip,
|
||||
// so no need to create actual `.npmrc` configuration.
|
||||
npmrc: create_default_npmrc(),
|
||||
},
|
||||
))
|
||||
.await?;
|
||||
(package_json_deps_provider, fs, npm_resolver, None)
|
||||
}
|
||||
};
|
||||
let (fs, npm_resolver, maybe_vfs_root) = match metadata.node_modules {
|
||||
Some(binary::NodeModules::Managed { node_modules_dir }) => {
|
||||
// this will always have a snapshot
|
||||
let snapshot = eszip.take_npm_snapshot().unwrap();
|
||||
let vfs_root_dir_path = if node_modules_dir.is_some() {
|
||||
root_path.clone()
|
||||
} else {
|
||||
npm_cache_dir.root_dir().to_owned()
|
||||
};
|
||||
let vfs = load_npm_vfs(vfs_root_dir_path.clone())
|
||||
.context("Failed to load npm vfs.")?;
|
||||
let maybe_node_modules_path = node_modules_dir
|
||||
.map(|node_modules_dir| vfs_root_dir_path.join(node_modules_dir));
|
||||
let fs = Arc::new(DenoCompileFileSystem::new(vfs))
|
||||
as Arc<dyn deno_fs::FileSystem>;
|
||||
let npm_resolver =
|
||||
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
|
||||
CliNpmResolverManagedCreateOptions {
|
||||
snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some(
|
||||
snapshot,
|
||||
)),
|
||||
maybe_lockfile: None,
|
||||
fs: fs.clone(),
|
||||
http_client_provider: http_client_provider.clone(),
|
||||
npm_global_cache_dir,
|
||||
cache_setting,
|
||||
text_only_progress_bar: progress_bar,
|
||||
maybe_node_modules_path,
|
||||
npm_system_info: Default::default(),
|
||||
package_json_deps_provider: Arc::new(
|
||||
// this is only used for installing packages, which isn't necessary with deno compile
|
||||
PackageJsonInstallDepsProvider::empty(),
|
||||
),
|
||||
// create an npmrc that uses the fake npm_registry_url to resolve packages
|
||||
npmrc: Arc::new(ResolvedNpmRc {
|
||||
default_config: deno_npm::npm_rc::RegistryConfigWithUrl {
|
||||
registry_url: npm_registry_url.clone(),
|
||||
config: Default::default(),
|
||||
},
|
||||
scopes: Default::default(),
|
||||
registry_configs: Default::default(),
|
||||
}),
|
||||
},
|
||||
))
|
||||
.await?;
|
||||
(fs, npm_resolver, Some(vfs_root_dir_path))
|
||||
}
|
||||
Some(binary::NodeModules::Byonm {
|
||||
root_node_modules_dir,
|
||||
}) => {
|
||||
let vfs_root_dir_path = root_path.clone();
|
||||
let vfs = load_npm_vfs(vfs_root_dir_path.clone())
|
||||
.context("Failed to load vfs.")?;
|
||||
let root_node_modules_dir = vfs.root().join(root_node_modules_dir);
|
||||
let fs = Arc::new(DenoCompileFileSystem::new(vfs))
|
||||
as Arc<dyn deno_fs::FileSystem>;
|
||||
let npm_resolver = create_cli_npm_resolver(
|
||||
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions {
|
||||
fs: fs.clone(),
|
||||
root_node_modules_dir,
|
||||
}),
|
||||
)
|
||||
.await?;
|
||||
(fs, npm_resolver, Some(vfs_root_dir_path))
|
||||
}
|
||||
None => {
|
||||
let fs = Arc::new(deno_fs::RealFs) as Arc<dyn deno_fs::FileSystem>;
|
||||
let npm_resolver =
|
||||
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
|
||||
CliNpmResolverManagedCreateOptions {
|
||||
snapshot: CliNpmResolverManagedSnapshotOption::Specified(None),
|
||||
maybe_lockfile: None,
|
||||
fs: fs.clone(),
|
||||
http_client_provider: http_client_provider.clone(),
|
||||
npm_global_cache_dir,
|
||||
cache_setting,
|
||||
text_only_progress_bar: progress_bar,
|
||||
maybe_node_modules_path: None,
|
||||
npm_system_info: Default::default(),
|
||||
package_json_deps_provider: Arc::new(
|
||||
// this is only used for installing packages, which isn't necessary with deno compile
|
||||
PackageJsonInstallDepsProvider::empty(),
|
||||
),
|
||||
// Packages from different registries are already inlined in the ESZip,
|
||||
// so no need to create actual `.npmrc` configuration.
|
||||
npmrc: create_default_npmrc(),
|
||||
},
|
||||
))
|
||||
.await?;
|
||||
(fs, npm_resolver, None)
|
||||
}
|
||||
};
|
||||
|
||||
  let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some();
  let node_resolver = Arc::new(NodeResolver::new(

@@ -471,9 +545,42 @@ pub async fn run(
    node_resolver.clone(),
    npm_resolver.clone().into_npm_resolver(),
  ));
  let maybe_import_map = metadata.maybe_import_map.map(|(base, source)| {
    Arc::new(parse_from_json(base, &source).unwrap().import_map)
  });
  let workspace_resolver = {
    let import_map = match metadata.workspace_resolver.import_map {
      Some(import_map) => Some(
        import_map::parse_from_json_with_options(
          root_dir_url.join(&import_map.specifier).unwrap(),
          &import_map.json,
          import_map::ImportMapOptions {
            address_hook: None,
            expand_imports: true,
          },
        )?
        .import_map,
      ),
      None => None,
    };
    let pkg_jsons = metadata
      .workspace_resolver
      .package_jsons
      .into_iter()
      .map(|(relative_path, json)| {
        let path = root_dir_url
          .join(&relative_path)
          .unwrap()
          .to_file_path()
          .unwrap();
        let pkg_json =
          deno_config::package_json::PackageJson::load_from_value(path, json);
        Arc::new(pkg_json)
      })
      .collect();
    WorkspaceResolver::new_raw(
      import_map,
      pkg_jsons,
      metadata.workspace_resolver.pkg_json_resolution,
    )
  };
  let cli_node_resolver = Arc::new(CliNodeResolver::new(
    Some(cjs_resolutions.clone()),
    fs.clone(),

@@ -482,11 +589,11 @@ pub async fn run(
  ));
  let module_loader_factory = StandaloneModuleLoaderFactory {
    shared: Arc::new(SharedModuleLoaderState {
      eszip,
      mapped_specifier_resolver: MappedSpecifierResolver::new(
        maybe_import_map.clone(),
        package_json_deps_provider.clone(),
      ),
      eszip: WorkspaceEszip {
        eszip,
        root_dir_url,
      },
      workspace_resolver,
      node_resolver: cli_node_resolver.clone(),
      npm_module_loader: Arc::new(NpmModuleLoader::new(
        cjs_resolutions,

@@ -498,7 +605,6 @@ pub async fn run(
  };

  let permissions = {
    let maybe_cwd = std::env::current_dir().ok();
    let mut permissions =
      metadata.permissions.to_options(maybe_cwd.as_deref())?;
    // if running with an npm vfs, grant read access to it

@@ -561,7 +667,7 @@ pub async fn run(
    is_npm_main: main_module.scheme() == "npm",
    skip_op_registration: true,
    location: metadata.location,
    argv0: NpmPackageReqReference::from_specifier(main_module)
    argv0: NpmPackageReqReference::from_specifier(&main_module)
      .ok()
      .map(|req_ref| npm_pkg_req_ref_to_binary_command(&req_ref))
      .or(std::env::args().next()),

@@ -571,7 +677,6 @@ pub async fn run(
    unsafely_ignore_certificate_errors: metadata
      .unsafely_ignore_certificate_errors,
    unstable: metadata.unstable_config.legacy_flag_enabled,
    maybe_root_package_json_deps: package_json_deps_provider.deps().cloned(),
    create_hmr_runner: None,
    create_coverage_collector: None,
  },

@@ -592,11 +697,7 @@ pub async fn run(
  deno_core::JsRuntime::init_platform(None);

  let mut worker = worker_factory
    .create_main_worker(
      WorkerExecutionMode::Run,
      main_module.clone(),
      permissions,
    )
    .create_main_worker(WorkerExecutionMode::Run, main_module, permissions)
    .await?;

  let exit_code = worker.run().await?;
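A note on the path rebasing above: serialized package.json entries are stored relative to the binary's root dir URL and joined back to absolute file paths at startup. A minimal sketch of that step, assuming the `url` crate and a Unix-like target; error handling is simplified to `expect`:

```rust
use url::Url;

// Rebase a path that was serialized relative to the binary's root dir URL.
fn rebase(root_dir_url: &Url, relative_path: &str) -> std::path::PathBuf {
  root_dir_url
    .join(relative_path)
    .expect("valid relative path")
    .to_file_path()
    .expect("file:// URL")
}

fn main() {
  let root = Url::parse("file:///app/").unwrap();
  let pkg_json_path = rebase(&root, "packages/foo/package.json");
  // on a Unix-like target this is /app/packages/foo/package.json
  assert_eq!(
    pkg_json_path,
    std::path::PathBuf::from("/app/packages/foo/package.json")
  );
}
```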
@@ -12,6 +12,7 @@ use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;

use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;

@@ -55,9 +56,8 @@ impl VfsBuilder {
      root_dir: VirtualDirectory {
        name: root_path
          .file_stem()
          .unwrap()
          .to_string_lossy()
          .into_owned(),
          .map(|s| s.to_string_lossy().into_owned())
          .unwrap_or("root".to_string()),
        entries: Vec::new(),
      },
      root_path,

@@ -67,13 +67,19 @@ impl VfsBuilder {
    })
  }

  pub fn set_root_dir_name(&mut self, name: String) {
    self.root_dir.name = name;
  pub fn with_root_dir<R>(
    &mut self,
    with_root: impl FnOnce(&mut VirtualDirectory) -> R,
  ) -> R {
    with_root(&mut self.root_dir)
  }

  pub fn add_dir_recursive(&mut self, path: &Path) -> Result<(), AnyError> {
    let path = canonicalize_path(path)?;
    self.add_dir_recursive_internal(&path)
    let target_path = canonicalize_path(path)?;
    if path != target_path {
      self.add_symlink(path, &target_path)?;
    }
    self.add_dir_recursive_internal(&target_path)
  }

  fn add_dir_recursive_internal(

@@ -92,7 +98,7 @@ impl VfsBuilder {
      if file_type.is_dir() {
        self.add_dir_recursive_internal(&path)?;
      } else if file_type.is_file() {
        self.add_file_at_path(&path)?;
        self.add_file_at_path_not_symlink(&path)?;
      } else if file_type.is_symlink() {
        match util::fs::canonicalize_path(&path) {
          Ok(target) => {

@@ -175,6 +181,17 @@ impl VfsBuilder {
  }

  pub fn add_file_at_path(&mut self, path: &Path) -> Result<(), AnyError> {
    let target_path = canonicalize_path(path)?;
    if target_path != path {
      self.add_symlink(path, &target_path)?;
    }
    self.add_file_at_path_not_symlink(&target_path)
  }

  pub fn add_file_at_path_not_symlink(
    &mut self,
    path: &Path,
  ) -> Result<(), AnyError> {
    let file_bytes = std::fs::read(path)
      .with_context(|| format!("Reading {}", path.display()))?;
    self.add_file(path, file_bytes)

@@ -195,7 +212,9 @@ impl VfsBuilder {
    let name = path.file_name().unwrap().to_string_lossy();
    let data_len = data.len();
    match dir.entries.binary_search_by(|e| e.name().cmp(&name)) {
      Ok(_) => unreachable!(),
      Ok(_) => {
        // already added, just ignore
      }
      Err(insert_index) => {
        dir.entries.insert(
          insert_index,

@@ -228,6 +247,10 @@ impl VfsBuilder {
      target.display()
    );
    let dest = self.path_relative_root(target)?;
    if dest == self.path_relative_root(path)? {
      // it's the same, ignore
      return Ok(());
    }
    let dir = self.add_dir(path.parent().unwrap())?;
    let name = path.file_name().unwrap().to_string_lossy();
    match dir.entries.binary_search_by(|e| e.name().cmp(&name)) {
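The VfsBuilder changes above follow one pattern: canonicalize first, record a symlink entry when the requested path differs from its target, and store the real file exactly once at the resolved location. A minimal sketch under those assumptions, with a hypothetical flat map standing in for the builder's sorted directory entries:

```rust
use std::collections::BTreeMap;
use std::io;
use std::path::{Path, PathBuf};

/// Hypothetical stand-in for VfsBuilder: a flat map from virtual path to
/// entry; the BTreeMap keeps entries sorted, like the binary-searched Vec.
#[derive(Default, Debug)]
struct Vfs {
  entries: BTreeMap<PathBuf, Entry>,
}

#[derive(Debug)]
enum Entry {
  File,
  Symlink { target: PathBuf },
}

impl Vfs {
  fn add_file_at_path(&mut self, path: &Path) -> io::Result<()> {
    // canonicalize first: if the requested path goes through a symlink,
    // record a link entry at the original path...
    let target_path = std::fs::canonicalize(path)?;
    if target_path != path {
      self.entries.insert(
        path.to_path_buf(),
        Entry::Symlink { target: target_path.clone() },
      );
    }
    // ...and store the file itself once, at the resolved location.
    // "already added, just ignore": re-inserting is harmless here.
    self.entries.entry(target_path).or_insert(Entry::File);
    Ok(())
  }
}

fn main() -> io::Result<()> {
  let mut vfs = Vfs::default();
  let file = std::env::temp_dir().join("vfs_demo.txt");
  std::fs::write(&file, b"hi")?;
  vfs.add_file_at_path(&file)?;
  println!("{:?}", vfs.entries.keys().collect::<Vec<_>>());
  Ok(())
}
```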
@@ -407,7 +407,8 @@ pub async fn run_benchmarks(
  bench_flags: BenchFlags,
) -> Result<(), AnyError> {
  let cli_options = CliOptions::from_flags(flags)?;
  let bench_options = cli_options.resolve_bench_options(bench_flags)?;
  let workspace_bench_options =
    cli_options.resolve_workspace_bench_options(&bench_flags);
  let factory = CliFactory::from_cli_options(Arc::new(cli_options));
  let cli_options = factory.cli_options();
  // Various bench files should not share the same permissions in terms of

@@ -416,11 +417,21 @@ pub async fn run_benchmarks(
  let permissions =
    Permissions::from_options(&cli_options.permissions_options()?)?;

  let specifiers = collect_specifiers(
    bench_options.files,
    cli_options.vendor_dir_path().map(ToOwned::to_owned),
    is_supported_bench_path,
  )?;
  let members_with_bench_options =
    cli_options.resolve_bench_options_for_members(&bench_flags)?;
  let specifiers = members_with_bench_options
    .iter()
    .map(|(_, bench_options)| {
      collect_specifiers(
        bench_options.files.clone(),
        cli_options.vendor_dir_path().map(ToOwned::to_owned),
        is_supported_bench_path,
      )
    })
    .collect::<Result<Vec<_>, _>>()?
    .into_iter()
    .flatten()
    .collect::<Vec<_>>();

  if specifiers.is_empty() {
    return Err(generic_error("No bench modules found"));

@@ -429,7 +440,7 @@ pub async fn run_benchmarks(
  let main_graph_container = factory.main_module_graph_container().await?;
  main_graph_container.check_specifiers(&specifiers).await?;

  if bench_options.no_run {
  if workspace_bench_options.no_run {
    return Ok(());
  }

@@ -441,8 +452,8 @@ pub async fn run_benchmarks(
    &permissions,
    specifiers,
    BenchSpecifierOptions {
      filter: TestFilter::from_flag(&bench_options.filter),
      json: bench_options.json,
      filter: TestFilter::from_flag(&workspace_bench_options.filter),
      json: workspace_bench_options.json,
      log_level,
    },
  )

@@ -472,24 +483,40 @@ pub async fn run_benchmarks_with_watch(
  let factory = CliFactoryBuilder::new()
    .build_from_flags_for_watcher(flags, watcher_communicator.clone())?;
  let cli_options = factory.cli_options();
  let bench_options = cli_options.resolve_bench_options(bench_flags)?;
  let workspace_bench_options =
    cli_options.resolve_workspace_bench_options(&bench_flags);

  let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
  if let Some(set) = &bench_options.files.include {
    let watch_paths = set.base_paths();
    if !watch_paths.is_empty() {
      let _ = watcher_communicator.watch_paths(watch_paths);
    }
  }

  let graph_kind = cli_options.type_check_mode().as_graph_kind();
  let module_graph_creator = factory.module_graph_creator().await?;

  let bench_modules = collect_specifiers(
    bench_options.files.clone(),
    cli_options.vendor_dir_path().map(ToOwned::to_owned),
    is_supported_bench_path,
  )?;
  let members_with_bench_options =
    cli_options.resolve_bench_options_for_members(&bench_flags)?;
  let watch_paths = members_with_bench_options
    .iter()
    .filter_map(|(_, bench_options)| {
      bench_options
        .files
        .include
        .as_ref()
        .map(|set| set.base_paths())
    })
    .flatten()
    .collect::<Vec<_>>();
  let _ = watcher_communicator.watch_paths(watch_paths);
  let collected_bench_modules = members_with_bench_options
    .iter()
    .map(|(_, bench_options)| {
      collect_specifiers(
        bench_options.files.clone(),
        cli_options.vendor_dir_path().map(ToOwned::to_owned),
        is_supported_bench_path,
      )
    })
    .collect::<Result<Vec<_>, _>>()?
    .into_iter()
    .flatten()
    .collect::<Vec<_>>();

  // Various bench files should not share the same permissions in terms of
  // `PermissionsContainer` - otherwise granting/revoking permissions in one

@@ -498,7 +525,7 @@ pub async fn run_benchmarks_with_watch(
    Permissions::from_options(&cli_options.permissions_options()?)?;

  let graph = module_graph_creator
    .create_graph(graph_kind, bench_modules)
    .create_graph(graph_kind, collected_bench_modules.clone())
    .await?;
  module_graph_creator.graph_valid(&graph)?;
  let bench_modules = &graph.roots;

@@ -524,16 +551,10 @@ pub async fn run_benchmarks_with_watch(
  let worker_factory =
    Arc::new(factory.create_cli_main_worker_factory().await?);

  // todo(dsherret): why are we collecting specifiers twice in a row?
  // Seems like a perf bug.
  let specifiers = collect_specifiers(
    bench_options.files,
    cli_options.vendor_dir_path().map(ToOwned::to_owned),
    is_supported_bench_path,
  )?
  .into_iter()
  .filter(|specifier| bench_modules_to_reload.contains(specifier))
  .collect::<Vec<ModuleSpecifier>>();
  let specifiers = collected_bench_modules
    .into_iter()
    .filter(|specifier| bench_modules_to_reload.contains(specifier))
    .collect::<Vec<ModuleSpecifier>>();

  factory
    .main_module_graph_container()

@@ -541,7 +562,7 @@ pub async fn run_benchmarks_with_watch(
    .check_specifiers(&specifiers)
    .await?;

  if bench_options.no_run {
  if workspace_bench_options.no_run {
    return Ok(());
  }

@@ -551,8 +572,8 @@ pub async fn run_benchmarks_with_watch(
    &permissions,
    specifiers,
    BenchSpecifierOptions {
      filter: TestFilter::from_flag(&bench_options.filter),
      json: bench_options.json,
      filter: TestFilter::from_flag(&workspace_bench_options.filter),
      json: workspace_bench_options.json,
      log_level,
    },
  )
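The per-workspace-member collection pattern appears twice above: run a fallible collector for each member's options, fail fast on the first error, then flatten the per-member result sets into one list. A self-contained restatement of that shape, where `Member` and `collect_for_member` are hypothetical stand-ins for the CLI's types:

```rust
struct Member {
  files: Vec<String>,
}

// Stand-in for collect_specifiers(bench_options.files.clone(), ...).
fn collect_for_member(m: &Member) -> Result<Vec<String>, String> {
  Ok(m.files.clone())
}

fn collect_all(members: &[Member]) -> Result<Vec<String>, String> {
  Ok(
    members
      .iter()
      .map(collect_for_member)
      .collect::<Result<Vec<_>, _>>()? // the first Err aborts the whole collection
      .into_iter()
      .flatten()
      .collect(),
  )
}

fn main() {
  let members = vec![
    Member { files: vec!["a_bench.ts".into()] },
    Member { files: vec!["b_bench.ts".into()] },
  ];
  assert_eq!(collect_all(&members).unwrap().len(), 2);
}
```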
@@ -183,7 +183,7 @@ impl TypeChecker {
    self.module_graph_builder.build_fast_check_graph(
      &mut graph,
      BuildFastCheckGraphOptions {
        workspace_fast_check: false,
        workspace_fast_check: deno_graph::WorkspaceFastCheckOption::Disabled,
      },
    )?;
  }
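This one-line change swaps a bare bool for deno_graph's `WorkspaceFastCheckOption`, so the call site documents itself and a data-carrying variant stays possible. A hedged sketch of why that shape helps, loosely modeled on the enum; the `Enabled` payload here is hypothetical:

```rust
// Hypothetical placeholder for deno_graph's workspace member type.
struct WorkspaceMember;

enum WorkspaceFastCheckOption<'a> {
  Disabled,
  Enabled(&'a [WorkspaceMember]),
}

fn build_fast_check_graph(opt: WorkspaceFastCheckOption) {
  match opt {
    WorkspaceFastCheckOption::Disabled => {
      // fast check off; nothing to scope
    }
    WorkspaceFastCheckOption::Enabled(members) => {
      // fast check scoped to the given workspace members
      let _ = members.len();
    }
  }
}

fn main() {
  build_fast_check_graph(WorkspaceFastCheckOption::Disabled);
}
```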
@@ -5,6 +5,7 @@ use crate::args::Flags;
use crate::factory::CliFactory;
use crate::http_util::HttpClientProvider;
use crate::standalone::is_standalone_binary;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::generic_error;

@@ -12,6 +13,7 @@ use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_graph::GraphKind;
use deno_terminal::colors;
use eszip::EszipRelativeFileBaseUrl;
use rand::Rng;
use std::path::Path;
use std::path::PathBuf;

@@ -82,12 +84,24 @@ pub async fn compile(
    ts_config_for_emit.ts_config,
  )?;
  let parser = parsed_source_cache.as_capturing_parser();
  let root_dir_url = resolve_root_dir_from_specifiers(
    cli_options.workspace.root_folder().0,
    graph.specifiers().map(|(s, _)| s).chain(
      cli_options
        .node_modules_dir_path()
        .and_then(|p| ModuleSpecifier::from_directory_path(p).ok())
        .iter(),
    ),
  );
  log::debug!("Binary root dir: {}", root_dir_url);
  let root_dir_url = EszipRelativeFileBaseUrl::new(&root_dir_url);
  let eszip = eszip::EszipV2::from_graph(eszip::FromGraphOptions {
    graph,
    parser,
    transpile_options,
    emit_options,
    relative_file_base: None,
    // make all the modules relative to the root folder
    relative_file_base: Some(root_dir_url),
  })?;

  log::info!(

@@ -116,6 +130,7 @@ pub async fn compile(
    .write_bin(
      &mut file,
      eszip,
      root_dir_url,
      &module_specifier,
      &compile_flags,
      cli_options,

@@ -268,6 +283,68 @@ fn get_os_specific_filepath(
  }
}

fn resolve_root_dir_from_specifiers<'a>(
  starting_dir: &ModuleSpecifier,
  specifiers: impl Iterator<Item = &'a ModuleSpecifier>,
) -> ModuleSpecifier {
  fn select_common_root<'a>(a: &'a str, b: &'a str) -> &'a str {
    let min_length = a.len().min(b.len());

    let mut last_slash = 0;
    for i in 0..min_length {
      if a.as_bytes()[i] == b.as_bytes()[i] && a.as_bytes()[i] == b'/' {
        last_slash = i;
      } else if a.as_bytes()[i] != b.as_bytes()[i] {
        break;
      }
    }

    // Return the common root path up to the last common slash.
    // This returns a slice of the original string 'a', up to and including the last matching '/'.
    let common = &a[..=last_slash];
    if cfg!(windows) && common == "file:///" {
      a
    } else {
      common
    }
  }

  fn is_file_system_root(url: &str) -> bool {
    let Some(path) = url.strip_prefix("file:///") else {
      return false;
    };
    if cfg!(windows) {
      let Some((_drive, path)) = path.split_once('/') else {
        return true;
      };
      path.is_empty()
    } else {
      path.is_empty()
    }
  }

  let mut found_dir = starting_dir.as_str();
  if !is_file_system_root(found_dir) {
    for specifier in specifiers {
      if specifier.scheme() == "file" {
        found_dir = select_common_root(found_dir, specifier.as_str());
      }
    }
  }
  let found_dir = if is_file_system_root(found_dir) {
    found_dir
  } else {
    // include the parent dir name because it helps create some context
    found_dir
      .strip_suffix('/')
      .unwrap_or(found_dir)
      .rfind('/')
      .map(|i| &found_dir[..i + 1])
      .unwrap_or(found_dir)
  };
  ModuleSpecifier::parse(found_dir).unwrap()
}

#[cfg(test)]
mod test {
  pub use super::*;

@@ -342,4 +419,38 @@ mod test {
    run_test("C:\\my-exe.0.1.2", Some("windows"), "C:\\my-exe.0.1.2.exe");
    run_test("my-exe-0.1.2", Some("linux"), "my-exe-0.1.2");
  }

  #[test]
  fn test_resolve_root_dir_from_specifiers() {
    fn resolve(start: &str, specifiers: &[&str]) -> String {
      let specifiers = specifiers
        .iter()
        .map(|s| ModuleSpecifier::parse(s).unwrap())
        .collect::<Vec<_>>();
      resolve_root_dir_from_specifiers(
        &ModuleSpecifier::parse(start).unwrap(),
        specifiers.iter(),
      )
      .to_string()
    }

    assert_eq!(resolve("file:///a/b/c", &["file:///a/b/c/d"]), "file:///a/");
    assert_eq!(
      resolve("file:///a/b/c/", &["file:///a/b/c/d"]),
      "file:///a/b/"
    );
    assert_eq!(
      resolve("file:///a/b/c/", &["file:///a/b/c/d", "file:///a/b/c/e"]),
      "file:///a/b/"
    );
    assert_eq!(resolve("file:///", &["file:///a/b/c/d"]), "file:///");
    if cfg!(windows) {
      assert_eq!(resolve("file:///c:/", &["file:///c:/test"]), "file:///c:/");
      // this will ignore the other one because it's on a separate drive
      assert_eq!(
        resolve("file:///c:/a/b/c/", &["file:///v:/a/b/c/d"]),
        "file:///c:/a/b/"
      );
    }
  }
}
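The core of `resolve_root_dir_from_specifiers` is the byte-wise common-prefix walk in `select_common_root`: remember the last '/' seen while both strings still agree, then slice up to and including it. A standalone restatement, runnable without the CLI's types and omitting the Windows drive-root guard the real function carries:

```rust
fn select_common_root<'a>(a: &'a str, b: &'a str) -> &'a str {
  let min_length = a.len().min(b.len());
  let mut last_slash = 0;
  for i in 0..min_length {
    if a.as_bytes()[i] == b.as_bytes()[i] && a.as_bytes()[i] == b'/' {
      last_slash = i; // common '/' so far: a candidate root boundary
    } else if a.as_bytes()[i] != b.as_bytes()[i] {
      break; // first divergence ends the common prefix
    }
  }
  &a[..=last_slash]
}

fn main() {
  // folding over several specifiers narrows to the shared directory
  let urls = ["file:///a/b/c/d.ts", "file:///a/b/e/f.ts"];
  let root = urls.iter().fold(urls[0], |acc, u| select_common_root(acc, u));
  assert_eq!(root, "file:///a/b/");
}
```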
@@ -187,31 +187,32 @@ pub async fn doc(flags: Flags, doc_flags: DocFlags) -> Result<(), AnyError> {
    Default::default()
  };

  let rewrite_map =
    if let Some(config_file) = cli_options.maybe_config_file().clone() {
      let config = config_file.to_exports_config()?;
  let rewrite_map = if let Some(config_file) =
    cli_options.workspace.resolve_start_ctx().maybe_deno_json()
  {
    let config = config_file.to_exports_config()?;

      let rewrite_map = config
        .clone()
        .into_map()
        .into_keys()
        .map(|key| {
          Ok((
            config.get_resolved(&key)?.unwrap(),
            key
              .strip_prefix('.')
              .unwrap_or(&key)
              .strip_prefix('/')
              .unwrap_or(&key)
              .to_owned(),
          ))
        })
        .collect::<Result<IndexMap<_, _>, AnyError>>()?;
    let rewrite_map = config
      .clone()
      .into_map()
      .into_keys()
      .map(|key| {
        Ok((
          config.get_resolved(&key)?.unwrap(),
          key
            .strip_prefix('.')
            .unwrap_or(&key)
            .strip_prefix('/')
            .unwrap_or(&key)
            .to_owned(),
        ))
      })
      .collect::<Result<IndexMap<_, _>, AnyError>>()?;

      Some(rewrite_map)
    } else {
      None
    };
    Some(rewrite_map)
  } else {
    None
  };

  generate_docs_directory(
    doc_nodes_by_url,
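The export-key normalization inside that map closure is worth isolating. Note the subtle fallback: when the dot-stripped key has no leading '/', the chain falls back to the original key, so "." stays "." rather than becoming the empty string:

```rust
fn normalize_export_key(key: &str) -> &str {
  key
    .strip_prefix('.')   // "./mod.ts" -> "/mod.ts"
    .unwrap_or(key)
    .strip_prefix('/')   // "/mod.ts" -> "mod.ts"
    .unwrap_or(key)      // no '/': fall back to the *original* key
}

fn main() {
  assert_eq!(normalize_export_key("./mod.ts"), "mod.ts");
  assert_eq!(normalize_export_key("."), ".");
}
```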
cli/tools/fmt.rs (477 changed lines)

@@ -13,6 +13,7 @@ use crate::args::FmtFlags;
use crate::args::FmtOptions;
use crate::args::FmtOptionsConfig;
use crate::args::ProseWrap;
use crate::cache::Caches;
use crate::colors;
use crate::factory::CliFactory;
use crate::util::diff::diff;

@@ -20,6 +21,7 @@ use crate::util::file_watcher;
use crate::util::fs::canonicalize_path;
use crate::util::fs::FileCollector;
use crate::util::path::get_extension;
use async_trait::async_trait;
use deno_ast::ParsedSource;
use deno_config::glob::FilePatterns;
use deno_core::anyhow::anyhow;

@@ -50,8 +52,11 @@ use crate::cache::IncrementalCache;
pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
  if fmt_flags.is_stdin() {
    let cli_options = CliOptions::from_flags(flags)?;
    let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
    let start_ctx = cli_options.workspace.resolve_start_ctx();
    let fmt_options =
      cli_options.resolve_fmt_options(&fmt_flags, &start_ctx)?;
    return format_stdin(
      &fmt_flags,
      fmt_options,
      cli_options
        .ext_flag()

@@ -70,42 +75,42 @@ pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
    Ok(async move {
      let factory = CliFactory::from_flags(flags)?;
      let cli_options = factory.cli_options();
      let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
      let files = collect_fmt_files(cli_options, fmt_options.files.clone())
        .and_then(|files| {
          if files.is_empty() {
            Err(generic_error("No target files found."))
      let caches = factory.caches()?;
      let mut paths_with_options_batches =
        resolve_paths_with_options_batches(cli_options, &fmt_flags)?;

      for paths_with_options in &mut paths_with_options_batches {
        let _ = watcher_communicator
          .watch_paths(paths_with_options.paths.clone());
        let files = std::mem::take(&mut paths_with_options.paths);
        paths_with_options.paths = if let Some(paths) = &changed_paths {
          if fmt_flags.check {
            // check all files on any changed (https://github.com/denoland/deno/issues/12446)
            files
              .iter()
              .any(|path| {
                canonicalize_path(path)
                  .map(|path| paths.contains(&path))
                  .unwrap_or(false)
              })
              .then_some(files)
              .unwrap_or_else(|| [].to_vec())
          } else {
            Ok(files)
            files
              .into_iter()
              .filter(|path| {
                canonicalize_path(path)
                  .map(|path| paths.contains(&path))
                  .unwrap_or(false)
              })
              .collect::<Vec<_>>()
          }
        })?;
      let _ = watcher_communicator.watch_paths(files.clone());
      let refmt_files = if let Some(paths) = changed_paths {
        if fmt_options.check {
          // check all files on any changed (https://github.com/denoland/deno/issues/12446)
          files
            .iter()
            .any(|path| {
              canonicalize_path(path)
                .map(|path| paths.contains(&path))
                .unwrap_or(false)
            })
            .then_some(files)
            .unwrap_or_else(|| [].to_vec())
        } else {
          files
            .into_iter()
            .filter(|path| {
              canonicalize_path(path)
                .map(|path| paths.contains(&path))
                .unwrap_or(false)
            })
            .collect::<Vec<_>>()
        }
      } else {
        files
      };
      format_files(factory, fmt_options, refmt_files).await?;
        };
      }

      format_files(caches, &fmt_flags, paths_with_options_batches).await?;

      Ok(())
    })

@@ -114,43 +119,77 @@ pub async fn format(flags: Flags, fmt_flags: FmtFlags) -> Result<(), AnyError> {
    .await?;
  } else {
    let factory = CliFactory::from_flags(flags)?;
    let caches = factory.caches()?;
    let cli_options = factory.cli_options();
    let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
    let files = collect_fmt_files(cli_options, fmt_options.files.clone())
      .and_then(|files| {
        if files.is_empty() {
          Err(generic_error("No target files found."))
        } else {
          Ok(files)
        }
      })?;
    format_files(factory, fmt_options, files).await?;
    let paths_with_options_batches =
      resolve_paths_with_options_batches(cli_options, &fmt_flags)?;
    format_files(caches, &fmt_flags, paths_with_options_batches).await?;
  }

  Ok(())
}

async fn format_files(
  factory: CliFactory,
  fmt_options: FmtOptions,
struct PathsWithOptions {
  base: PathBuf,
  paths: Vec<PathBuf>,
) -> Result<(), AnyError> {
  let caches = factory.caches()?;
  let check = fmt_options.check;
  let incremental_cache = Arc::new(IncrementalCache::new(
    caches.fmt_incremental_cache_db(),
    &fmt_options.options,
    &paths,
  ));
  if check {
    check_source_files(paths, fmt_options.options, incremental_cache.clone())
      .await?;
  } else {
    format_source_files(paths, fmt_options.options, incremental_cache.clone())
      .await?;
  options: FmtOptions,
}

fn resolve_paths_with_options_batches(
  cli_options: &CliOptions,
  fmt_flags: &FmtFlags,
) -> Result<Vec<PathsWithOptions>, AnyError> {
  let members_fmt_options =
    cli_options.resolve_fmt_options_for_members(fmt_flags)?;
  let mut paths_with_options_batches =
    Vec::with_capacity(members_fmt_options.len());
  for member_fmt_options in members_fmt_options {
    let files =
      collect_fmt_files(cli_options, member_fmt_options.files.clone())?;
    if !files.is_empty() {
      paths_with_options_batches.push(PathsWithOptions {
        base: member_fmt_options.files.base.clone(),
        paths: files,
        options: member_fmt_options,
      });
    }
  }
  incremental_cache.wait_completion().await;
  Ok(())
  if paths_with_options_batches.is_empty() {
    return Err(generic_error("No target files found."));
  }
  Ok(paths_with_options_batches)
}

async fn format_files(
  caches: &Arc<Caches>,
  fmt_flags: &FmtFlags,
  paths_with_options_batches: Vec<PathsWithOptions>,
) -> Result<(), AnyError> {
  let formatter: Box<dyn Formatter> = if fmt_flags.check {
    Box::new(CheckFormatter::default())
  } else {
    Box::new(RealFormatter::default())
  };
  for paths_with_options in paths_with_options_batches {
    log::debug!(
      "Formatting {} file(s) in {}",
      paths_with_options.paths.len(),
      paths_with_options.base.display()
    );
    let fmt_options = paths_with_options.options;
    let paths = paths_with_options.paths;
    let incremental_cache = Arc::new(IncrementalCache::new(
      caches.fmt_incremental_cache_db(),
      &fmt_options.options,
      &paths,
    ));
    formatter
      .handle_files(paths, fmt_options.options, incremental_cache.clone())
      .await?;
    incremental_cache.wait_completion().await;
  }

  formatter.finish()
}

fn collect_fmt_files(

@@ -274,156 +313,190 @@ pub fn format_parsed_source(
  )
}

async fn check_source_files(
  paths: Vec<PathBuf>,
  fmt_options: FmtOptionsConfig,
  incremental_cache: Arc<IncrementalCache>,
) -> Result<(), AnyError> {
  let not_formatted_files_count = Arc::new(AtomicUsize::new(0));
  let checked_files_count = Arc::new(AtomicUsize::new(0));
#[async_trait]
trait Formatter {
  async fn handle_files(
    &self,
    paths: Vec<PathBuf>,
    fmt_options: FmtOptionsConfig,
    incremental_cache: Arc<IncrementalCache>,
  ) -> Result<(), AnyError>;

  // prevent threads outputting at the same time
  let output_lock = Arc::new(Mutex::new(0));
  fn finish(&self) -> Result<(), AnyError>;
}

  run_parallelized(paths, {
    let not_formatted_files_count = not_formatted_files_count.clone();
    let checked_files_count = checked_files_count.clone();
    move |file_path| {
      checked_files_count.fetch_add(1, Ordering::Relaxed);
      let file_text = read_file_contents(&file_path)?.text;
#[derive(Default)]
struct CheckFormatter {
  not_formatted_files_count: Arc<AtomicUsize>,
  checked_files_count: Arc<AtomicUsize>,
}

      // skip checking the file if we know it's formatted
      if incremental_cache.is_file_same(&file_path, &file_text) {
        return Ok(());
#[async_trait]
impl Formatter for CheckFormatter {
  async fn handle_files(
    &self,
    paths: Vec<PathBuf>,
    fmt_options: FmtOptionsConfig,
    incremental_cache: Arc<IncrementalCache>,
  ) -> Result<(), AnyError> {
    // prevent threads outputting at the same time
    let output_lock = Arc::new(Mutex::new(0));

    run_parallelized(paths, {
      let not_formatted_files_count = self.not_formatted_files_count.clone();
      let checked_files_count = self.checked_files_count.clone();
      move |file_path| {
        checked_files_count.fetch_add(1, Ordering::Relaxed);
        let file_text = read_file_contents(&file_path)?.text;

        // skip checking the file if we know it's formatted
        if incremental_cache.is_file_same(&file_path, &file_text) {
          return Ok(());
        }

        match format_file(&file_path, &file_text, &fmt_options) {
          Ok(Some(formatted_text)) => {
            not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
            let _g = output_lock.lock();
            let diff = diff(&file_text, &formatted_text);
            info!("");
            info!("{} {}:", colors::bold("from"), file_path.display());
            info!("{}", diff);
          }
          Ok(None) => {
            // When checking formatting, only update the incremental cache when
            // the file is the same since we don't bother checking for stable
            // formatting here. Additionally, ensure this is done during check
            // so that CIs that cache the DENO_DIR will get the benefit of
            // incremental formatting
            incremental_cache.update_file(&file_path, &file_text);
          }
          Err(e) => {
            not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
            let _g = output_lock.lock();
            warn!("Error checking: {}", file_path.to_string_lossy());
            warn!(
              "{}",
              format!("{e}")
                .split('\n')
                .map(|l| {
                  if l.trim().is_empty() {
                    String::new()
                  } else {
                    format!(" {l}")
                  }
                })
                .collect::<Vec<_>>()
                .join("\n")
            );
          }
        }
        Ok(())
      }
    })
    .await?;

      match format_file(&file_path, &file_text, &fmt_options) {
        Ok(Some(formatted_text)) => {
          not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
          let _g = output_lock.lock();
          let diff = diff(&file_text, &formatted_text);
          info!("");
          info!("{} {}:", colors::bold("from"), file_path.display());
          info!("{}", diff);
        }
        Ok(None) => {
          // When checking formatting, only update the incremental cache when
          // the file is the same since we don't bother checking for stable
          // formatting here. Additionally, ensure this is done during check
          // so that CIs that cache the DENO_DIR will get the benefit of
          // incremental formatting
          incremental_cache.update_file(&file_path, &file_text);
        }
        Err(e) => {
          not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
          let _g = output_lock.lock();
          warn!("Error checking: {}", file_path.to_string_lossy());
          warn!(
            "{}",
            format!("{e}")
              .split('\n')
              .map(|l| {
                if l.trim().is_empty() {
                  String::new()
                } else {
                  format!(" {l}")
                }
              })
              .collect::<Vec<_>>()
              .join("\n")
          );
        }
      }
      Ok(())
    }
  })
  .await?;

  let not_formatted_files_count =
    not_formatted_files_count.load(Ordering::Relaxed);
  let checked_files_count = checked_files_count.load(Ordering::Relaxed);
  let checked_files_str =
    format!("{} {}", checked_files_count, files_str(checked_files_count));
  if not_formatted_files_count == 0 {
    info!("Checked {}", checked_files_str);
    Ok(())
  } else {
    let not_formatted_files_str = files_str(not_formatted_files_count);
    Err(generic_error(format!(
      "Found {not_formatted_files_count} not formatted {not_formatted_files_str} in {checked_files_str}",
    )))
  }

  fn finish(&self) -> Result<(), AnyError> {
    let not_formatted_files_count =
      self.not_formatted_files_count.load(Ordering::Relaxed);
    let checked_files_count = self.checked_files_count.load(Ordering::Relaxed);
    let checked_files_str =
      format!("{} {}", checked_files_count, files_str(checked_files_count));
    if not_formatted_files_count == 0 {
      info!("Checked {}", checked_files_str);
      Ok(())
    } else {
      let not_formatted_files_str = files_str(not_formatted_files_count);
      Err(generic_error(format!(
        "Found {not_formatted_files_count} not formatted {not_formatted_files_str} in {checked_files_str}",
      )))
    }
  }
}

async fn format_source_files(
  paths: Vec<PathBuf>,
  fmt_options: FmtOptionsConfig,
  incremental_cache: Arc<IncrementalCache>,
) -> Result<(), AnyError> {
  let formatted_files_count = Arc::new(AtomicUsize::new(0));
  let checked_files_count = Arc::new(AtomicUsize::new(0));
  let output_lock = Arc::new(Mutex::new(0)); // prevent threads outputting at the same time
#[derive(Default)]
struct RealFormatter {
  formatted_files_count: Arc<AtomicUsize>,
  checked_files_count: Arc<AtomicUsize>,
}

  run_parallelized(paths, {
    let formatted_files_count = formatted_files_count.clone();
    let checked_files_count = checked_files_count.clone();
    move |file_path| {
      checked_files_count.fetch_add(1, Ordering::Relaxed);
      let file_contents = read_file_contents(&file_path)?;
#[async_trait]
impl Formatter for RealFormatter {
  async fn handle_files(
    &self,
    paths: Vec<PathBuf>,
    fmt_options: FmtOptionsConfig,
    incremental_cache: Arc<IncrementalCache>,
  ) -> Result<(), AnyError> {
    let output_lock = Arc::new(Mutex::new(0)); // prevent threads outputting at the same time

      // skip formatting the file if we know it's formatted
      if incremental_cache.is_file_same(&file_path, &file_contents.text) {
        return Ok(());
    run_parallelized(paths, {
      let formatted_files_count = self.formatted_files_count.clone();
      let checked_files_count = self.checked_files_count.clone();
      move |file_path| {
        checked_files_count.fetch_add(1, Ordering::Relaxed);
        let file_contents = read_file_contents(&file_path)?;

        // skip formatting the file if we know it's formatted
        if incremental_cache.is_file_same(&file_path, &file_contents.text) {
          return Ok(());
        }

        match format_ensure_stable(
          &file_path,
          &file_contents.text,
          &fmt_options,
          format_file,
        ) {
          Ok(Some(formatted_text)) => {
            incremental_cache.update_file(&file_path, &formatted_text);
            write_file_contents(
              &file_path,
              FileContents {
                had_bom: file_contents.had_bom,
                text: formatted_text,
              },
            )?;
            formatted_files_count.fetch_add(1, Ordering::Relaxed);
            let _g = output_lock.lock();
            info!("{}", file_path.to_string_lossy());
          }
          Ok(None) => {
            incremental_cache.update_file(&file_path, &file_contents.text);
          }
          Err(e) => {
            let _g = output_lock.lock();
            log::error!("Error formatting: {}", file_path.to_string_lossy());
            log::error!(" {e}");
          }
        }
        Ok(())
      }
    })
    .await?;
    Ok(())
  }

      match format_ensure_stable(
        &file_path,
        &file_contents.text,
        &fmt_options,
        format_file,
      ) {
        Ok(Some(formatted_text)) => {
          incremental_cache.update_file(&file_path, &formatted_text);
          write_file_contents(
            &file_path,
            FileContents {
              had_bom: file_contents.had_bom,
              text: formatted_text,
            },
          )?;
          formatted_files_count.fetch_add(1, Ordering::Relaxed);
          let _g = output_lock.lock();
          info!("{}", file_path.to_string_lossy());
        }
        Ok(None) => {
          incremental_cache.update_file(&file_path, &file_contents.text);
        }
        Err(e) => {
          let _g = output_lock.lock();
          log::error!("Error formatting: {}", file_path.to_string_lossy());
          log::error!(" {e}");
        }
      }
      Ok(())
    }
  })
  .await?;
  fn finish(&self) -> Result<(), AnyError> {
    let formatted_files_count =
      self.formatted_files_count.load(Ordering::Relaxed);
    debug!(
      "Formatted {} {}",
      formatted_files_count,
      files_str(formatted_files_count),
    );

  let formatted_files_count = formatted_files_count.load(Ordering::Relaxed);
  debug!(
    "Formatted {} {}",
    formatted_files_count,
    files_str(formatted_files_count),
  );

  let checked_files_count = checked_files_count.load(Ordering::Relaxed);
  info!(
    "Checked {} {}",
    checked_files_count,
    files_str(checked_files_count)
  );

  Ok(())
    let checked_files_count = self.checked_files_count.load(Ordering::Relaxed);
    info!(
      "Checked {} {}",
      checked_files_count,
      files_str(checked_files_count)
    );
    Ok(())
  }
}

/// When storing any formatted text in the incremental cache, we want

@@ -491,14 +564,18 @@ fn format_ensure_stable(
/// Format stdin and write result to stdout.
/// Treats input as set by `--ext` flag.
/// Compatible with `--check` flag.
fn format_stdin(fmt_options: FmtOptions, ext: &str) -> Result<(), AnyError> {
fn format_stdin(
  fmt_flags: &FmtFlags,
  fmt_options: FmtOptions,
  ext: &str,
) -> Result<(), AnyError> {
  let mut source = String::new();
  if stdin().read_to_string(&mut source).is_err() {
    bail!("Failed to read from stdin");
  }
  let file_path = PathBuf::from(format!("_stdin.{ext}"));
  let formatted_text = format_file(&file_path, &source, &fmt_options.options)?;
  if fmt_options.check {
  if fmt_flags.check {
    #[allow(clippy::print_stdout)]
    if formatted_text.is_some() {
      println!("Not formatted stdin");
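The watch-mode path selection repeated in both the fmt and lint changes follows one rule: in --check mode, a change to any watched file re-checks every file (see denoland/deno#12446), otherwise only the changed files are reprocessed. A self-contained restatement; the real code canonicalizes paths before comparing, which is elided here:

```rust
use std::collections::HashSet;
use std::path::PathBuf;

fn files_to_reprocess(
  files: Vec<PathBuf>,
  changed: Option<&HashSet<PathBuf>>,
  check: bool,
) -> Vec<PathBuf> {
  match changed {
    None => files, // first run: process everything
    Some(paths) => {
      if check {
        // all-or-nothing: any hit re-checks the whole batch
        let any_changed = files.iter().any(|p| paths.contains(p));
        if any_changed { files } else { Vec::new() }
      } else {
        // only reformat the files that actually changed
        files.into_iter().filter(|p| paths.contains(p)).collect()
      }
    }
  }
}

fn main() {
  let changed: HashSet<PathBuf> = [PathBuf::from("a.ts")].into_iter().collect();
  let files = vec![PathBuf::from("a.ts"), PathBuf::from("b.ts")];
  assert_eq!(files_to_reprocess(files, Some(&changed), true).len(), 2);
}
```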
@@ -42,19 +42,20 @@ pub async fn info(flags: Flags, info_flags: InfoFlags) -> Result<(), AnyError> {
  let module_graph_creator = factory.module_graph_creator().await?;
  let npm_resolver = factory.npm_resolver().await?;
  let maybe_lockfile = factory.maybe_lockfile();
  let maybe_imports_map = factory.maybe_import_map().await?;
  let resolver = factory.workspace_resolver().await?;

  let maybe_import_specifier = if let Some(imports_map) = maybe_imports_map {
    if let Ok(imports_specifier) =
      imports_map.resolve(&specifier, imports_map.base_url())
    {
      Some(imports_specifier)
  let maybe_import_specifier =
    if let Some(import_map) = resolver.maybe_import_map() {
      if let Ok(imports_specifier) =
        import_map.resolve(&specifier, import_map.base_url())
      {
        Some(imports_specifier)
      } else {
        None
      }
    } else {
      None
    }
  } else {
    None
  };
    };

  let specifier = match maybe_import_specifier {
    Some(specifier) => specifier,
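As a design note, the nested if-let/else ladder above has the shape of an Option chain; a hedged equivalent with hypothetical stand-in types (the real call is `import_map.resolve(&specifier, import_map.base_url())`):

```rust
struct ImportMapLike;

impl ImportMapLike {
  // stand-in for import_map.resolve(...); returns Err when unmapped
  fn resolve(&self, specifier: &str) -> Result<String, ()> {
    Ok(specifier.to_string())
  }
}

fn resolve_specifier(
  maybe_import_map: Option<&ImportMapLike>,
  specifier: &str,
) -> Option<String> {
  // None import map, or a failed resolution, both flatten to None
  maybe_import_map.and_then(|im| im.resolve(specifier).ok())
}

fn main() {
  let im = ImportMapLike;
  assert_eq!(resolve_specifier(Some(&im), "foo"), Some("foo".to_string()));
  assert_eq!(resolve_specifier(None, "foo"), None);
}
```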
@ -9,13 +9,21 @@ use deno_ast::ParsedSource;
|
|||
use deno_ast::SourceRange;
|
||||
use deno_ast::SourceTextInfo;
|
||||
use deno_config::glob::FilePatterns;
|
||||
use deno_config::workspace::Workspace;
|
||||
use deno_config::workspace::WorkspaceMemberContext;
|
||||
use deno_core::anyhow::anyhow;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::future::LocalBoxFuture;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::unsync::future::LocalFutureExt;
|
||||
use deno_core::unsync::future::SharedLocal;
|
||||
use deno_graph::FastCheckDiagnostic;
|
||||
use deno_graph::ModuleGraph;
|
||||
use deno_lint::diagnostic::LintDiagnostic;
|
||||
use deno_lint::linter::LintConfig;
|
||||
use deno_lint::linter::LintFileOptions;
|
||||
|
@ -33,6 +41,7 @@ use std::io::stdin;
|
|||
use std::io::Read;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::args::CliOptions;
|
||||
|
@ -41,9 +50,12 @@ use crate::args::LintFlags;
|
|||
use crate::args::LintOptions;
|
||||
use crate::args::LintReporterKind;
|
||||
use crate::args::LintRulesConfig;
|
||||
use crate::args::WorkspaceLintOptions;
|
||||
use crate::cache::Caches;
|
||||
use crate::cache::IncrementalCache;
|
||||
use crate::colors;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::graph_util::ModuleGraphCreator;
|
||||
use crate::tools::fmt::run_parallelized;
|
||||
use crate::util::file_watcher;
|
||||
use crate::util::fs::canonicalize_path;
|
||||
|
@ -79,35 +91,49 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
|
|||
Ok(async move {
|
||||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let lint_options = cli_options.resolve_lint_options(lint_flags)?;
|
||||
let lint_config = cli_options.resolve_lint_config()?;
|
||||
let files =
|
||||
collect_lint_files(cli_options, lint_options.files.clone())
|
||||
.and_then(|files| {
|
||||
if files.is_empty() {
|
||||
Err(generic_error("No target files found."))
|
||||
} else {
|
||||
Ok(files)
|
||||
}
|
||||
})?;
|
||||
_ = watcher_communicator.watch_paths(files.clone());
|
||||
let mut paths_with_options_batches =
|
||||
resolve_paths_with_options_batches(cli_options, &lint_flags)?;
|
||||
for paths_with_options in &mut paths_with_options_batches {
|
||||
_ = watcher_communicator
|
||||
.watch_paths(paths_with_options.paths.clone());
|
||||
|
||||
let lint_paths = if let Some(paths) = changed_paths {
|
||||
// lint all files on any changed (https://github.com/denoland/deno/issues/12446)
|
||||
files
|
||||
.iter()
|
||||
.any(|path| {
|
||||
canonicalize_path(path)
|
||||
.map(|p| paths.contains(&p))
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.then_some(files)
|
||||
.unwrap_or_else(|| [].to_vec())
|
||||
} else {
|
||||
files
|
||||
};
|
||||
let files = std::mem::take(&mut paths_with_options.paths);
|
||||
paths_with_options.paths = if let Some(paths) = &changed_paths {
|
||||
// lint all files on any changed (https://github.com/denoland/deno/issues/12446)
|
||||
files
|
||||
.iter()
|
||||
.any(|path| {
|
||||
canonicalize_path(path)
|
||||
.map(|p| paths.contains(&p))
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.then_some(files)
|
||||
.unwrap_or_else(|| [].to_vec())
|
||||
} else {
|
||||
files
|
||||
};
|
||||
}
|
||||
|
||||
let mut linter = WorkspaceLinter::new(
|
||||
factory.caches()?.clone(),
|
||||
factory.module_graph_creator().await?.clone(),
|
||||
cli_options.workspace.clone(),
|
||||
&cli_options.resolve_workspace_lint_options(&lint_flags)?,
|
||||
);
|
||||
for paths_with_options in paths_with_options_batches {
|
||||
linter
|
||||
.lint_files(
|
||||
paths_with_options.options,
|
||||
lint_config.clone(),
|
||||
paths_with_options.ctx,
|
||||
paths_with_options.paths,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
linter.finish();
|
||||
|
||||
lint_files(factory, lint_options, lint_config, lint_paths).await?;
|
||||
Ok(())
|
||||
})
|
||||
},
|
||||
|
@ -117,15 +143,19 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
|
|||
let factory = CliFactory::from_flags(flags)?;
|
||||
let cli_options = factory.cli_options();
|
||||
let is_stdin = lint_flags.is_stdin();
|
||||
let lint_options = cli_options.resolve_lint_options(lint_flags)?;
|
||||
let lint_config = cli_options.resolve_lint_config()?;
|
||||
let files = &lint_options.files;
|
||||
let workspace_lint_options =
|
||||
cli_options.resolve_workspace_lint_options(&lint_flags)?;
|
||||
let success = if is_stdin {
|
||||
let reporter_kind = lint_options.reporter_kind;
|
||||
let reporter_lock = Arc::new(Mutex::new(create_reporter(reporter_kind)));
|
||||
let start_ctx = cli_options.workspace.resolve_start_ctx();
|
||||
let reporter_lock = Arc::new(Mutex::new(create_reporter(
|
||||
workspace_lint_options.reporter_kind,
|
||||
)));
|
||||
let lint_options =
|
||||
cli_options.resolve_lint_options(lint_flags, &start_ctx)?;
|
||||
let lint_rules = get_config_rules_err_empty(
|
||||
lint_options.rules,
|
||||
cli_options.maybe_config_file().as_ref(),
|
||||
start_ctx.maybe_deno_json().map(|c| c.as_ref()),
|
||||
)?;
|
||||
let file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME);
|
||||
let r = lint_stdin(&file_path, lint_rules.rules, lint_config);
|
||||
|
@ -137,16 +167,25 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
|
|||
reporter_lock.lock().close(1);
|
||||
success
|
||||
} else {
|
||||
let target_files = collect_lint_files(cli_options, files.clone())
|
||||
.and_then(|files| {
|
||||
if files.is_empty() {
|
||||
Err(generic_error("No target files found."))
|
||||
} else {
|
||||
Ok(files)
|
||||
}
|
||||
})?;
|
||||
debug!("Found {} files", target_files.len());
|
||||
lint_files(factory, lint_options, lint_config, target_files).await?
|
||||
let mut linter = WorkspaceLinter::new(
|
||||
factory.caches()?.clone(),
|
||||
factory.module_graph_creator().await?.clone(),
|
||||
cli_options.workspace.clone(),
|
||||
&workspace_lint_options,
|
||||
);
|
||||
let paths_with_options_batches =
|
||||
resolve_paths_with_options_batches(cli_options, &lint_flags)?;
|
||||
for paths_with_options in paths_with_options_batches {
|
||||
linter
|
||||
.lint_files(
|
||||
paths_with_options.options,
|
||||
lint_config.clone(),
|
||||
paths_with_options.ctx,
|
||||
paths_with_options.paths,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
linter.finish()
|
||||
};
|
||||
if !success {
|
||||
std::process::exit(1);
|
||||
|
@ -156,121 +195,202 @@ pub async fn lint(flags: Flags, lint_flags: LintFlags) -> Result<(), AnyError> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn lint_files(
|
||||
factory: CliFactory,
|
||||
lint_options: LintOptions,
|
||||
lint_config: LintConfig,
|
||||
struct PathsWithOptions {
|
||||
ctx: WorkspaceMemberContext,
|
||||
paths: Vec<PathBuf>,
|
||||
) -> Result<bool, AnyError> {
|
||||
let caches = factory.caches()?;
|
||||
let maybe_config_file = factory.cli_options().maybe_config_file().as_ref();
|
||||
let lint_rules =
|
||||
get_config_rules_err_empty(lint_options.rules, maybe_config_file)?;
|
||||
let incremental_cache = Arc::new(IncrementalCache::new(
|
||||
caches.lint_incremental_cache_db(),
|
||||
&lint_rules.incremental_cache_state(),
|
||||
&paths,
|
||||
));
|
||||
let target_files_len = paths.len();
|
||||
let reporter_kind = lint_options.reporter_kind;
|
||||
// todo(dsherret): abstract away this lock behind a performant interface
|
||||
let reporter_lock =
|
||||
Arc::new(Mutex::new(create_reporter(reporter_kind.clone())));
|
||||
let has_error = Arc::new(AtomicFlag::default());
|
||||
options: LintOptions,
|
||||
}
|
||||
|
||||
let mut futures = Vec::with_capacity(2);
|
||||
if lint_rules.no_slow_types {
|
||||
if let Some(config_file) = maybe_config_file {
|
||||
let members = config_file.to_workspace_members()?;
|
||||
let has_error = has_error.clone();
|
||||
let reporter_lock = reporter_lock.clone();
|
||||
let module_graph_creator = factory.module_graph_creator().await?.clone();
|
||||
let path_urls = paths
|
||||
.iter()
|
||||
.filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
|
||||
.collect::<HashSet<_>>();
|
||||
futures.push(deno_core::unsync::spawn(async move {
|
||||
let graph = module_graph_creator
|
||||
.create_and_validate_publish_graph(&members, true)
|
||||
.await?;
|
||||
// todo(dsherret): this isn't exactly correct as linting isn't properly
|
||||
// setup to handle workspaces. Iterating over the workspace members
|
||||
// should be done at a higher level because it also needs to take into
|
||||
// account the config per workspace member.
|
||||
for member in &members {
|
||||
let export_urls = member.config_file.resolve_export_value_urls()?;
|
||||
if !export_urls.iter().any(|url| path_urls.contains(url)) {
|
||||
continue; // entrypoint is not specified, so skip
|
||||
}
|
||||
let diagnostics = no_slow_types::collect_no_slow_type_diagnostics(
|
||||
&export_urls,
|
||||
&graph,
|
||||
);
|
||||
if !diagnostics.is_empty() {
|
||||
has_error.raise();
|
||||
let mut reporter = reporter_lock.lock();
|
||||
for diagnostic in &diagnostics {
|
||||
reporter
|
||||
.visit_diagnostic(LintOrCliDiagnostic::FastCheck(diagnostic));
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}));
|
||||
fn resolve_paths_with_options_batches(
|
||||
cli_options: &CliOptions,
|
||||
lint_flags: &LintFlags,
|
||||
) -> Result<Vec<PathsWithOptions>, AnyError> {
|
||||
let members_lint_options =
|
||||
cli_options.resolve_lint_options_for_members(lint_flags)?;
|
||||
let mut paths_with_options_batches =
|
||||
Vec::with_capacity(members_lint_options.len());
|
||||
for (ctx, lint_options) in members_lint_options {
|
||||
let files = collect_lint_files(cli_options, lint_options.files.clone())?;
|
||||
if !files.is_empty() {
|
||||
paths_with_options_batches.push(PathsWithOptions {
|
||||
ctx,
|
||||
paths: files,
|
||||
options: lint_options,
|
||||
});
|
||||
}
|
||||
}
|
||||
if paths_with_options_batches.is_empty() {
|
||||
return Err(generic_error("No target files found."));
|
||||
}
|
||||
Ok(paths_with_options_batches)
|
||||
}
|
||||
|
||||
type WorkspaceModuleGraphFuture =
|
||||
SharedLocal<LocalBoxFuture<'static, Result<Rc<ModuleGraph>, Rc<AnyError>>>>;
|
||||
|
||||
struct WorkspaceLinter {
|
||||
caches: Arc<Caches>,
|
||||
module_graph_creator: Arc<ModuleGraphCreator>,
|
||||
workspace: Arc<Workspace>,
|
||||
reporter_lock: Arc<Mutex<Box<dyn LintReporter + Send>>>,
|
||||
workspace_module_graph: Option<WorkspaceModuleGraphFuture>,
|
||||
has_error: Arc<AtomicFlag>,
|
||||
file_count: usize,
|
||||
}
|
||||
|
||||
impl WorkspaceLinter {
|
||||
pub fn new(
|
||||
caches: Arc<Caches>,
|
||||
module_graph_creator: Arc<ModuleGraphCreator>,
|
||||
workspace: Arc<Workspace>,
|
||||
workspace_options: &WorkspaceLintOptions,
|
||||
) -> Self {
|
||||
let reporter_lock =
|
||||
Arc::new(Mutex::new(create_reporter(workspace_options.reporter_kind)));
|
||||
Self {
|
||||
caches,
|
||||
module_graph_creator,
|
||||
workspace,
|
||||
reporter_lock,
|
||||
workspace_module_graph: None,
|
||||
has_error: Default::default(),
|
||||
file_count: 0,
|
||||
}
|
||||
}
|
||||
|
||||
futures.push({
|
||||
let has_error = has_error.clone();
|
||||
let linter = create_linter(lint_rules.rules);
|
||||
let reporter_lock = reporter_lock.clone();
|
||||
let incremental_cache = incremental_cache.clone();
|
||||
let lint_config = lint_config.clone();
|
||||
let fix = lint_options.fix;
|
||||
deno_core::unsync::spawn(async move {
|
||||
run_parallelized(paths, {
|
||||
move |file_path| {
|
||||
let file_text = deno_ast::strip_bom(fs::read_to_string(&file_path)?);
|
||||
pub async fn lint_files(
|
||||
&mut self,
|
||||
lint_options: LintOptions,
|
||||
lint_config: LintConfig,
|
||||
member_ctx: WorkspaceMemberContext,
|
||||
paths: Vec<PathBuf>,
|
||||
) -> Result<(), AnyError> {
|
||||
self.file_count += paths.len();
|
||||
|
||||
// don't bother rechecking this file if it didn't have any diagnostics before
|
||||
if incremental_cache.is_file_same(&file_path, &file_text) {
|
||||
return Ok(());
|
||||
let lint_rules = get_config_rules_err_empty(
|
||||
lint_options.rules,
|
||||
member_ctx.maybe_deno_json().map(|c| c.as_ref()),
|
||||
)?;
|
||||
let incremental_cache = Arc::new(IncrementalCache::new(
|
||||
self.caches.lint_incremental_cache_db(),
|
||||
&lint_rules.incremental_cache_state(),
|
||||
&paths,
|
||||
));
|
||||
|
||||
let mut futures = Vec::with_capacity(2);
|
||||
if lint_rules.no_slow_types {
|
||||
if self.workspace_module_graph.is_none() {
|
||||
let module_graph_creator = self.module_graph_creator.clone();
|
||||
let packages = self.workspace.jsr_packages_for_publish();
|
||||
self.workspace_module_graph = Some(
|
||||
async move {
|
||||
module_graph_creator
|
||||
.create_and_validate_publish_graph(&packages, true)
|
||||
.await
|
||||
.map(Rc::new)
|
||||
.map_err(Rc::new)
|
||||
}
|
||||
|
||||
let r = lint_file(&linter, &file_path, file_text, lint_config, fix);
|
||||
if let Ok((file_source, file_diagnostics)) = &r {
|
||||
if file_diagnostics.is_empty() {
|
||||
// update the incremental cache if there were no diagnostics
|
||||
incremental_cache.update_file(
|
||||
&file_path,
|
||||
// ensure the returned text is used here as it may have been modified via --fix
|
||||
file_source.text(),
|
||||
)
|
||||
.boxed_local()
|
||||
.shared_local(),
|
||||
);
|
||||
}
|
||||
let workspace_module_graph_future =
|
||||
self.workspace_module_graph.as_ref().unwrap().clone();
|
||||
let publish_config = member_ctx.maybe_package_config();
|
||||
if let Some(publish_config) = publish_config {
|
||||
let has_error = self.has_error.clone();
|
||||
let reporter_lock = self.reporter_lock.clone();
|
||||
let path_urls = paths
|
||||
.iter()
|
||||
.filter_map(|p| ModuleSpecifier::from_file_path(p).ok())
|
||||
.collect::<HashSet<_>>();
|
||||
futures.push(
|
||||
async move {
|
||||
let graph = workspace_module_graph_future
|
||||
.await
|
||||
.map_err(|err| anyhow!("{:#}", err))?;
|
||||
let export_urls =
|
||||
publish_config.config_file.resolve_export_value_urls()?;
|
||||
if !export_urls.iter().any(|url| path_urls.contains(url)) {
|
||||
return Ok(()); // entrypoint is not specified, so skip
|
||||
}
|
||||
let diagnostics = no_slow_types::collect_no_slow_type_diagnostics(
|
||||
&export_urls,
|
||||
&graph,
|
||||
);
|
||||
if !diagnostics.is_empty() {
|
||||
has_error.raise();
|
||||
let mut reporter = reporter_lock.lock();
|
||||
for diagnostic in &diagnostics {
|
||||
reporter
|
||||
.visit_diagnostic(LintOrCliDiagnostic::FastCheck(diagnostic));
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
.boxed_local(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let success = handle_lint_result(
|
||||
&file_path.to_string_lossy(),
|
||||
r,
|
||||
reporter_lock.clone(),
|
||||
);
|
||||
if !success {
|
||||
has_error.raise();
|
||||
futures.push({
|
||||
let has_error = self.has_error.clone();
|
||||
let linter = create_linter(lint_rules.rules);
|
||||
let reporter_lock = self.reporter_lock.clone();
|
||||
let incremental_cache = incremental_cache.clone();
|
||||
let lint_config = lint_config.clone();
|
||||
let fix = lint_options.fix;
|
||||
async move {
|
||||
run_parallelized(paths, {
|
||||
move |file_path| {
|
||||
let file_text =
|
||||
deno_ast::strip_bom(fs::read_to_string(&file_path)?);
|
||||
|
||||
// don't bother rechecking this file if it didn't have any diagnostics before
|
||||
if incremental_cache.is_file_same(&file_path, &file_text) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let r = lint_file(&linter, &file_path, file_text, lint_config, fix);
|
||||
if let Ok((file_source, file_diagnostics)) = &r {
|
||||
if file_diagnostics.is_empty() {
|
||||
// update the incremental cache if there were no diagnostics
|
||||
incremental_cache.update_file(
|
||||
&file_path,
|
||||
// ensure the returned text is used here as it may have been modified via --fix
|
||||
file_source.text(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
let success = handle_lint_result(
|
||||
&file_path.to_string_lossy(),
|
||||
r,
|
||||
reporter_lock.clone(),
|
||||
);
|
||||
if !success {
|
||||
has_error.raise();
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
.boxed_local()
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
.await
|
||||
})
|
||||
});
|
||||
deno_core::futures::future::try_join_all(futures).await?;
|
||||
|
||||
deno_core::futures::future::try_join_all(futures).await?;
|
||||
incremental_cache.wait_completion().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
incremental_cache.wait_completion().await;
|
||||
reporter_lock.lock().close(target_files_len);
|
||||
|
||||
Ok(!has_error.is_raised())
|
||||
pub fn finish(self) -> bool {
|
||||
debug!("Found {} files", self.file_count);
|
||||
self.reporter_lock.lock().close(self.file_count);
|
||||
!self.has_error.is_raised() // success
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_lint_files(
|
||||
|
@ -692,9 +812,8 @@ impl LintReporter for PrettyLintReporter {
|
|||
}
|
||||
|
||||
match check_count {
|
||||
n if n <= 1 => info!("Checked {} file", n),
|
||||
n if n > 1 => info!("Checked {} files", n),
|
||||
_ => unreachable!(),
|
||||
1 => info!("Checked 1 file"),
|
||||
n => info!("Checked {} files", n),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -744,9 +863,8 @@ impl LintReporter for CompactLintReporter {
|
|||
}
|
||||
|
||||
match check_count {
|
||||
n if n <= 1 => info!("Checked {} file", n),
|
||||
n if n > 1 => info!("Checked {} files", n),
|
||||
_ => unreachable!(),
|
||||
1 => info!("Checked 1 file"),
|
||||
n => info!("Checked {} files", n),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -910,9 +1028,8 @@ pub fn get_configured_rules(
  maybe_config_file: Option<&deno_config::ConfigFile>,
) -> ConfiguredRules {
  const NO_SLOW_TYPES_NAME: &str = "no-slow-types";
  let implicit_no_slow_types = maybe_config_file
    .map(|c| c.is_package() || c.json.workspace.is_some())
    .unwrap_or(false);
  let implicit_no_slow_types =
    maybe_config_file.map(|c| c.is_package()).unwrap_or(false);
  let no_slow_types = implicit_no_slow_types
    && !rules
      .exclude
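The hunk above narrows when the `no-slow-types` lint rule is implicitly enabled: previously any config file carrying a `workspace` key qualified, now only a config that is itself a package does. A minimal sketch of that gating, with a hypothetical stand-in for `deno_config::ConfigFile` (in the real type, `is_package()` reflects the presence of `name`, `version`, and `exports`):

```rust
const NO_SLOW_TYPES_NAME: &str = "no-slow-types";

// Hypothetical stand-in for deno_config::ConfigFile, for illustration only.
struct ConfigFile {
  name: Option<String>,
  version: Option<String>,
  exports: Option<String>,
}

impl ConfigFile {
  fn is_package(&self) -> bool {
    self.name.is_some() && self.version.is_some() && self.exports.is_some()
  }
}

fn no_slow_types_enabled(
  maybe_config_file: Option<&ConfigFile>,
  excluded_rules: &[String],
) -> bool {
  let implicit_no_slow_types =
    maybe_config_file.map(|c| c.is_package()).unwrap_or(false);
  implicit_no_slow_types
    && !excluded_rules.iter().any(|r| r == NO_SLOW_TYPES_NAME)
}
```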
@@ -11,9 +11,8 @@ use std::sync::Arc;
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use deno_ast::ModuleSpecifier;
use deno_config::glob::FilePatterns;
use deno_config::ConfigFile;
use deno_config::WorkspaceMemberConfig;
use deno_config::workspace::JsrPackageConfig;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;

@@ -27,7 +26,6 @@ use deno_core::serde_json::Value;
use deno_runtime::deno_fetch::reqwest;
use deno_runtime::deno_fs::FileSystem;
use deno_terminal::colors;
use import_map::ImportMap;
use lsp_types::Url;
use serde::Deserialize;
use serde::Serialize;

@@ -44,7 +42,6 @@ use crate::cache::ParsedSourceCache;
use crate::factory::CliFactory;
use crate::graph_util::ModuleGraphCreator;
use crate::http_util::HttpClient;
use crate::resolver::MappedSpecifierResolver;
use crate::resolver::SloppyImportsResolver;
use crate::tools::check::CheckOptions;
use crate::tools::lint::no_slow_types;

@@ -84,27 +81,28 @@ pub async fn publish(
  let auth_method =
    get_auth_method(publish_flags.token, publish_flags.dry_run)?;

  let import_map = cli_factory
    .maybe_import_map()
    .await?
    .clone()
    .unwrap_or_else(|| {
      Arc::new(ImportMap::new(Url::parse("file:///dev/null").unwrap()))
    });
  let workspace_resolver = cli_factory.workspace_resolver().await?.clone();

  let directory_path = cli_factory.cli_options().initial_cwd();

  let mapped_resolver = Arc::new(MappedSpecifierResolver::new(
    Some(import_map),
    cli_factory.package_json_deps_provider().clone(),
  ));
  let cli_options = cli_factory.cli_options();
  let Some(config_file) = cli_options.maybe_config_file() else {
    bail!(
      "Couldn't find a deno.json, deno.jsonc, jsr.json or jsr.jsonc configuration file in {}.",
      directory_path.display()
    );
  };
  let publish_configs = cli_options.workspace.jsr_packages_for_publish();
  if publish_configs.is_empty() {
    match cli_options.workspace.resolve_start_ctx().maybe_deno_json() {
      Some(deno_json) => {
        debug_assert!(!deno_json.is_package());
        bail!(
          "Missing 'name', 'version' and 'exports' field in '{}'.",
          deno_json.specifier
        );
      }
      None => {
        bail!(
          "Couldn't find a deno.json, deno.jsonc, jsr.json or jsr.jsonc configuration file in {}.",
          directory_path.display()
        );
      }
    }
  }

  let diagnostics_collector = PublishDiagnosticsCollector::default();
  let publish_preparer = PublishPreparer::new(

@@ -114,14 +112,14 @@ pub async fn publish(
    cli_factory.type_checker().await?.clone(),
    cli_factory.fs().clone(),
    cli_factory.cli_options().clone(),
    mapped_resolver,
    workspace_resolver,
  );

  let prepared_data = publish_preparer
    .prepare_packages_for_publishing(
      publish_flags.allow_slow_types,
      &diagnostics_collector,
      config_file.clone(),
      publish_configs,
    )
    .await?;

@@ -193,8 +191,8 @@ struct PublishPreparer {
  source_cache: Arc<ParsedSourceCache>,
  type_checker: Arc<TypeChecker>,
  cli_options: Arc<CliOptions>,
  mapped_resolver: Arc<MappedSpecifierResolver>,
  sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
  workspace_resolver: Arc<WorkspaceResolver>,
}

impl PublishPreparer {

@@ -205,7 +203,7 @@ impl PublishPreparer {
    type_checker: Arc<TypeChecker>,
    fs: Arc<dyn FileSystem>,
    cli_options: Arc<CliOptions>,
    mapped_resolver: Arc<MappedSpecifierResolver>,
    workspace_resolver: Arc<WorkspaceResolver>,
  ) -> Self {
    let sloppy_imports_resolver = if cli_options.unstable_sloppy_imports() {
      Some(Arc::new(SloppyImportsResolver::new(fs.clone())))

@@ -218,8 +216,8 @@ impl PublishPreparer {
      source_cache,
      type_checker,
      cli_options,
      mapped_resolver,
      sloppy_imports_resolver,
      workspace_resolver,
    }
  }

@@ -227,11 +225,9 @@ impl PublishPreparer {
    &self,
    allow_slow_types: bool,
    diagnostics_collector: &PublishDiagnosticsCollector,
    deno_json: ConfigFile,
    publish_configs: Vec<JsrPackageConfig>,
  ) -> Result<PreparePackagesData, AnyError> {
    let members = deno_json.to_workspace_members()?;

    if members.len() > 1 {
    if publish_configs.len() > 1 {
      log::info!("Publishing a workspace...");
    }

@@ -240,31 +236,24 @@ impl PublishPreparer {
      .build_and_check_graph_for_publish(
        allow_slow_types,
        diagnostics_collector,
        &members,
        &publish_configs,
      )
      .await?;

    let mut package_by_name = HashMap::with_capacity(members.len());
    let mut package_by_name = HashMap::with_capacity(publish_configs.len());
    let publish_order_graph =
      publish_order::build_publish_order_graph(&graph, &members)?;
      publish_order::build_publish_order_graph(&graph, &publish_configs)?;

    let results = members
    let results = publish_configs
      .into_iter()
      .map(|member| {
        let graph = graph.clone();
        async move {
          let package = self
            .prepare_publish(
              &member.package_name,
              &member.config_file,
              graph,
              diagnostics_collector,
            )
            .prepare_publish(&member, graph, diagnostics_collector)
            .await
            .with_context(|| {
              format!("Failed preparing '{}'.", member.package_name)
            })?;
          Ok::<_, AnyError>((member.package_name, package))
            .with_context(|| format!("Failed preparing '{}'.", member.name))?;
          Ok::<_, AnyError>((member.name, package))
        }
        .boxed()
      })

@@ -284,12 +273,15 @@ impl PublishPreparer {
    &self,
    allow_slow_types: bool,
    diagnostics_collector: &PublishDiagnosticsCollector,
    packages: &[WorkspaceMemberConfig],
    package_configs: &[JsrPackageConfig],
  ) -> Result<Arc<deno_graph::ModuleGraph>, deno_core::anyhow::Error> {
    let build_fast_check_graph = !allow_slow_types;
    let graph = self
      .module_graph_creator
      .create_and_validate_publish_graph(packages, build_fast_check_graph)
      .create_and_validate_publish_graph(
        package_configs,
        build_fast_check_graph,
      )
      .await?;

    // todo(dsherret): move to lint rule

@@ -335,7 +327,7 @@ impl PublishPreparer {
    } else {
      log::info!("Checking for slow types in the public API...");
      let mut any_pkg_had_diagnostics = false;
      for package in packages {
      for package in package_configs {
        let export_urls = package.config_file.resolve_export_value_urls()?;
        let diagnostics =
          no_slow_types::collect_no_slow_type_diagnostics(&export_urls, &graph);

@@ -389,14 +381,14 @@ impl PublishPreparer {
  #[allow(clippy::too_many_arguments)]
  async fn prepare_publish(
    &self,
    package_name: &str,
    deno_json: &ConfigFile,
    package: &JsrPackageConfig,
    graph: Arc<deno_graph::ModuleGraph>,
    diagnostics_collector: &PublishDiagnosticsCollector,
  ) -> Result<Rc<PreparedPublishPackage>, AnyError> {
    static SUGGESTED_ENTRYPOINTS: [&str; 4] =
      ["mod.ts", "mod.js", "index.ts", "index.js"];

    let deno_json = &package.config_file;
    let config_path = deno_json.specifier.to_file_path().unwrap();
    let root_dir = config_path.parent().unwrap().to_path_buf();
    let Some(version) = deno_json.json.version.clone() else {

@@ -418,32 +410,29 @@ impl PublishPreparer {
  "version": "{}",
  "exports": "{}"
}}"#,
      package_name,
      package.name,
      version,
      suggested_entrypoint.unwrap_or("<path_to_entrypoint>")
    );

    bail!(
      "You did not specify an entrypoint to \"{}\" package in {}. Add `exports` mapping in the configuration file, eg:\n{}",
      package_name,
      package.name,
      deno_json.specifier,
      exports_content
    );
  }
  let Some(name_no_at) = package_name.strip_prefix('@') else {
  let Some(name_no_at) = package.name.strip_prefix('@') else {
    bail!("Invalid package name, use '@<scope_name>/<package_name> format");
  };
  let Some((scope, name_no_scope)) = name_no_at.split_once('/') else {
    bail!("Invalid package name, use '@<scope_name>/<package_name> format");
  };
  let file_patterns = deno_json
    .to_publish_config()?
    .map(|c| c.files)
    .unwrap_or_else(|| FilePatterns::new_with_base(root_dir.to_path_buf()));
  let file_patterns = package.member_ctx.to_publish_config()?.files;

  let tarball = deno_core::unsync::spawn_blocking({
    let diagnostics_collector = diagnostics_collector.clone();
    let mapped_resolver = self.mapped_resolver.clone();
    let workspace_resolver = self.workspace_resolver.clone();
    let sloppy_imports_resolver = self.sloppy_imports_resolver.clone();
    let cli_options = self.cli_options.clone();
    let source_cache = self.source_cache.clone();

@@ -451,8 +440,8 @@ impl PublishPreparer {
    move || {
      let bare_node_builtins = cli_options.unstable_bare_node_builtins();
      let unfurler = SpecifierUnfurler::new(
        &mapped_resolver,
        sloppy_imports_resolver.as_deref(),
        &workspace_resolver,
        bare_node_builtins,
      );
      let root_specifier =

@@ -482,7 +471,7 @@ impl PublishPreparer {
    })
    .await??;

  log::debug!("Tarball size ({}): {}", package_name, tarball.bytes.len());
  log::debug!("Tarball size ({}): {}", package.name, tarball.bytes.len());

  Ok(Rc::new(PreparedPublishPackage {
    scope: scope.to_string(),
@@ -49,7 +49,7 @@ impl DenoConfigFormat {
}

enum DenoOrPackageJson {
  Deno(deno_config::ConfigFile, DenoConfigFormat),
  Deno(Arc<deno_config::ConfigFile>, DenoConfigFormat),
  Npm(Arc<deno_node::PackageJson>, Option<FmtOptionsConfig>),
}

@@ -87,7 +86,6 @@ impl DenoOrPackageJson {
      DenoOrPackageJson::Deno(deno, ..) => deno
        .to_fmt_config()
        .ok()
        .flatten()
        .map(|f| f.options)
        .unwrap_or_default(),
      DenoOrPackageJson::Npm(_, config) => config.clone().unwrap_or_default(),

@@ -122,9 +121,10 @@ impl DenoOrPackageJson {
  /// the new config
  fn from_flags(flags: Flags) -> Result<(Self, CliFactory), AnyError> {
    let factory = CliFactory::from_flags(flags.clone())?;
    let options = factory.cli_options().clone();
    let options = factory.cli_options();
    let start_ctx = options.workspace.resolve_start_ctx();

    match (options.maybe_config_file(), options.maybe_package_json()) {
    match (start_ctx.maybe_deno_json(), start_ctx.maybe_pkg_json()) {
      // when both are present, for now,
      // default to deno.json
      (Some(deno), Some(_) | None) => Ok((

@@ -141,20 +141,17 @@ impl DenoOrPackageJson {
        std::fs::write(options.initial_cwd().join("deno.json"), "{}\n")
          .context("Failed to create deno.json file")?;
        log::info!("Created deno.json configuration file.");
        let new_factory = CliFactory::from_flags(flags.clone())?;
        let new_options = new_factory.cli_options().clone();
        let factory = CliFactory::from_flags(flags.clone())?;
        let options = factory.cli_options().clone();
        let start_ctx = options.workspace.resolve_start_ctx();
        Ok((
          DenoOrPackageJson::Deno(
            new_options
              .maybe_config_file()
              .as_ref()
              .ok_or_else(|| {
                anyhow!("config not found, but it was just created")
              })?
              .clone(),
            start_ctx.maybe_deno_json().cloned().ok_or_else(|| {
              anyhow!("config not found, but it was just created")
            })?,
            DenoConfigFormat::Json,
          ),
          new_factory,
          factory,
        ))
      }
    }
@@ -5,7 +5,7 @@ use std::collections::HashSet;
use std::collections::VecDeque;

use deno_ast::ModuleSpecifier;
use deno_config::WorkspaceMemberConfig;
use deno_config::workspace::JsrPackageConfig;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_graph::ModuleGraph;

@@ -114,7 +114,7 @@ impl PublishOrderGraph {

pub fn build_publish_order_graph(
  graph: &ModuleGraph,
  roots: &[WorkspaceMemberConfig],
  roots: &[JsrPackageConfig],
) -> Result<PublishOrderGraph, AnyError> {
  let packages = build_pkg_deps(graph, roots)?;
  Ok(build_publish_order_graph_from_pkgs_deps(packages))

@@ -122,18 +122,23 @@ pub fn build_publish_order_graph(

fn build_pkg_deps(
  graph: &deno_graph::ModuleGraph,
  roots: &[WorkspaceMemberConfig],
  roots: &[JsrPackageConfig],
) -> Result<HashMap<String, HashSet<String>>, AnyError> {
  let mut members = HashMap::with_capacity(roots.len());
  let mut seen_modules = HashSet::with_capacity(graph.modules().count());
  let roots = roots
    .iter()
    .map(|r| (ModuleSpecifier::from_file_path(&r.dir_path).unwrap(), r))
    .map(|r| {
      (
        ModuleSpecifier::from_directory_path(r.config_file.dir_path()).unwrap(),
        r,
      )
    })
    .collect::<Vec<_>>();
  for (root_dir_url, root) in &roots {
  for (root_dir_url, pkg_config) in &roots {
    let mut deps = HashSet::new();
    let mut pending = VecDeque::new();
    pending.extend(root.config_file.resolve_export_value_urls()?);
    pending.extend(pkg_config.config_file.resolve_export_value_urls()?);
    while let Some(specifier) = pending.pop_front() {
      let Some(module) = graph.get(&specifier).and_then(|m| m.js()) else {
        continue;

@@ -168,12 +173,12 @@ fn build_pkg_deps(
            specifier.as_str().starts_with(dir_url.as_str())
          });
          if let Some(root) = found_root {
            deps.insert(root.1.package_name.clone());
            deps.insert(root.1.name.clone());
          }
        }
      }
    }
    members.insert(root.package_name.clone(), deps);
    members.insert(pkg_config.name.clone(), deps);
  }
  Ok(members)
}
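`build_pkg_deps` above returns a map from JSR package name to the names of the workspace packages it depends on; `build_publish_order_graph_from_pkgs_deps` (not shown in this diff) turns that map into an order where a package is only published after its dependencies. A sketch of the underlying idea as a batched topological sort, assuming an acyclic dependency map; the real `PublishOrderGraph` hands out packages one at a time as their dependencies finish publishing:

```rust
use std::collections::{HashMap, HashSet};

/// Groups packages into publish batches so that every package appears
/// in a later batch than all of its workspace dependencies.
fn publish_batches(
  mut deps: HashMap<String, HashSet<String>>,
) -> Vec<Vec<String>> {
  let mut batches = Vec::new();
  while !deps.is_empty() {
    // ready = packages whose remaining deps have all been published
    let mut ready: Vec<String> = deps
      .iter()
      .filter(|(_, d)| d.is_empty())
      .map(|(name, _)| name.clone())
      .collect();
    assert!(!ready.is_empty(), "cycle in workspace dependencies");
    ready.sort(); // deterministic output
    for name in &ready {
      deps.remove(name);
    }
    for remaining in deps.values_mut() {
      for name in &ready {
        remaining.remove(name);
      }
    }
    batches.push(ready);
  }
  batches
}
```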
@@ -3,6 +3,9 @@
use deno_ast::ParsedSource;
use deno_ast::SourceRange;
use deno_ast::SourceTextInfo;
use deno_config::package_json::PackageJsonDepValue;
use deno_config::workspace::MappedResolution;
use deno_config::workspace::WorkspaceResolver;
use deno_core::ModuleSpecifier;
use deno_graph::DependencyDescriptor;
use deno_graph::DynamicTemplatePart;

@@ -10,7 +13,6 @@ use deno_graph::ParserModuleAnalyzer;
use deno_graph::TypeScriptReference;
use deno_runtime::deno_node::is_builtin_node_module;

use crate::resolver::MappedSpecifierResolver;
use crate::resolver::SloppyImportsResolver;

#[derive(Debug, Clone)]

@@ -39,20 +41,20 @@ impl SpecifierUnfurlerDiagnostic {
}

pub struct SpecifierUnfurler<'a> {
  mapped_resolver: &'a MappedSpecifierResolver,
  sloppy_imports_resolver: Option<&'a SloppyImportsResolver>,
  workspace_resolver: &'a WorkspaceResolver,
  bare_node_builtins: bool,
}

impl<'a> SpecifierUnfurler<'a> {
  pub fn new(
    mapped_resolver: &'a MappedSpecifierResolver,
    sloppy_imports_resolver: Option<&'a SloppyImportsResolver>,
    workspace_resolver: &'a WorkspaceResolver,
    bare_node_builtins: bool,
  ) -> Self {
    Self {
      mapped_resolver,
      sloppy_imports_resolver,
      workspace_resolver,
      bare_node_builtins,
    }
  }

@@ -62,12 +64,46 @@ impl<'a> SpecifierUnfurler<'a> {
    referrer: &ModuleSpecifier,
    specifier: &str,
  ) -> Option<String> {
    let resolved =
      if let Ok(resolved) = self.mapped_resolver.resolve(specifier, referrer) {
        resolved.into_specifier()
      } else {
        None
      };
    let resolved = if let Ok(resolved) =
      self.workspace_resolver.resolve(specifier, referrer)
    {
      match resolved {
        MappedResolution::Normal(specifier)
        | MappedResolution::ImportMap(specifier) => Some(specifier),
        MappedResolution::PackageJson {
          sub_path,
          dep_result,
          ..
        } => match dep_result {
          Ok(dep) => match dep {
            PackageJsonDepValue::Req(req) => ModuleSpecifier::parse(&format!(
              "npm:{}{}",
              req,
              sub_path
                .as_ref()
                .map(|s| format!("/{}", s))
                .unwrap_or_default()
            ))
            .ok(),
            PackageJsonDepValue::Workspace(_) => {
              log::warn!(
                "package.json workspace entries are not implemented yet for publishing."
              );
              None
            }
          },
          Err(err) => {
            log::warn!(
              "Ignoring failed to resolve package.json dependency. {:#}",
              err
            );
            None
          }
        },
      }
    } else {
      None
    };
    let resolved = match resolved {
      Some(resolved) => resolved,
      None if self.bare_node_builtins && is_builtin_node_module(specifier) => {

@@ -305,8 +341,6 @@ fn to_range(
mod tests {
  use std::sync::Arc;

  use crate::args::PackageJsonDepsProvider;

  use super::*;
  use deno_ast::MediaType;
  use deno_ast::ModuleSpecifier;

@@ -355,19 +389,17 @@ mod tests {
      }
    }),
  );
  let mapped_resolver = MappedSpecifierResolver::new(
    Some(Arc::new(import_map)),
    Arc::new(PackageJsonDepsProvider::new(Some(
      package_json.resolve_local_package_json_version_reqs(),
    ))),
  let workspace_resolver = WorkspaceResolver::new_raw(
    Some(import_map),
    vec![Arc::new(package_json)],
    deno_config::workspace::PackageJsonDepResolution::Enabled,
  );

  let fs = Arc::new(RealFs);
  let sloppy_imports_resolver = SloppyImportsResolver::new(fs);

  let unfurler = SpecifierUnfurler::new(
    &mapped_resolver,
    Some(&sloppy_imports_resolver),
    &workspace_resolver,
    true,
  );
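In the `PackageJsonDepValue::Req` arm above, the unfurler rewrites a bare import that resolved through package.json into an `npm:` specifier, appending any subpath from the original import. The string-building step in isolation (here `req` is the rendered package requirement, e.g. `preact@^10.19.6`):

```rust
/// Builds the npm: specifier written into published source, mirroring
/// the `format!("npm:{}{}", req, ...)` call in the diff above.
fn npm_specifier(req: &str, sub_path: Option<&str>) -> String {
  format!(
    "npm:{}{}",
    req,
    sub_path.map(|s| format!("/{}", s)).unwrap_or_default()
  )
}

fn main() {
  assert_eq!(npm_specifier("preact@^10.19.6", None), "npm:preact@^10.19.6");
  assert_eq!(
    npm_specifier("preact@^10.19.6", Some("hooks")),
    "npm:preact@^10.19.6/hooks"
  );
}
```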
@@ -8,24 +8,30 @@ use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
use crate::npm::ManagedCliNpmResolver;
use crate::util::fs::canonicalize_path;
use deno_config::workspace::TaskOrScript;
use deno_config::workspace::Workspace;
use deno_config::workspace::WorkspaceTasksConfig;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::futures::future::LocalBoxFuture;
use deno_core::normalize_path;
use deno_runtime::deno_node::NodeResolver;
use deno_semver::package::PackageNv;
use deno_task_shell::ExecutableCommand;
use deno_task_shell::ExecuteResult;
use deno_task_shell::ShellCommand;
use deno_task_shell::ShellCommandContext;
use indexmap::IndexMap;
use lazy_regex::Lazy;
use regex::Regex;
use std::borrow::Cow;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use tokio::task::LocalSet;

// WARNING: Do not depend on this env var in user code. It's not stable API.

@@ -38,27 +44,10 @@ pub async fn execute_script(
) -> Result<i32, AnyError> {
  let factory = CliFactory::from_flags(flags)?;
  let cli_options = factory.cli_options();
  let tasks_config = cli_options.resolve_tasks_config()?;
  let maybe_package_json = cli_options.maybe_package_json();
  let package_json_scripts = maybe_package_json
    .as_ref()
    .and_then(|p| p.scripts.clone())
    .unwrap_or_default();

  let task_name = match &task_flags.task {
    Some(task) => task,
    None => {
      print_available_tasks(
        &mut std::io::stdout(),
        &tasks_config,
        &package_json_scripts,
      )?;
      return Ok(1);
    }
  };
  let npm_resolver = factory.npm_resolver().await?;
  let node_resolver = factory.node_resolver().await?;
  let env_vars = real_env_vars();
  let start_ctx = cli_options.workspace.resolve_start_ctx();
  if !start_ctx.has_deno_or_pkg_json() {
    bail!("deno task couldn't find deno.json(c). See https://deno.land/manual@v{}/getting_started/configuration_file", env!("CARGO_PKG_VERSION"))
  }
  let force_use_pkg_json = std::env::var_os(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME)
    .map(|v| {
      // always remove so sub processes don't inherit this env var

@@ -66,118 +55,113 @@ pub async fn execute_script(
      v == "1"
    })
    .unwrap_or(false);
  let tasks_config = start_ctx.to_tasks_config()?;
  let tasks_config = if force_use_pkg_json {
    tasks_config.with_only_pkg_json()
  } else {
    tasks_config
  };

  if let Some(
    deno_config::Task::Definition(script)
    | deno_config::Task::Commented {
      definition: script, ..
    },
  ) = tasks_config.get(task_name).filter(|_| !force_use_pkg_json)
  {
    let config_file_url = cli_options.maybe_config_file_specifier().unwrap();
    let config_file_path = if config_file_url.scheme() == "file" {
      config_file_url.to_file_path().unwrap()
    } else {
      bail!("Only local configuration files are supported")
    };
    let cwd = match task_flags.cwd {
      Some(path) => canonicalize_path(&PathBuf::from(path))
        .context("failed canonicalizing --cwd")?,
      None => config_file_path.parent().unwrap().to_owned(),
    };

    let custom_commands =
      resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?;
    run_task(RunTaskOptions {
      task_name,
      script,
      cwd: &cwd,
      init_cwd: cli_options.initial_cwd(),
      env_vars,
      argv: cli_options.argv(),
      custom_commands,
      root_node_modules_dir: npm_resolver
        .root_node_modules_path()
        .map(|p| p.as_path()),
    })
    .await
  } else if package_json_scripts.contains_key(task_name) {
    let package_json_deps_provider = factory.package_json_deps_provider();

    if let Some(package_deps) = package_json_deps_provider.deps() {
      for (key, value) in package_deps {
        if let Err(err) = value {
          log::info!(
            "{} Ignoring dependency '{}' in package.json because its version requirement failed to parse: {:#}",
            colors::yellow("Warning"),
            key,
            err,
          );
        }
      }
  let task_name = match &task_flags.task {
    Some(task) => task,
    None => {
      print_available_tasks(
        &mut std::io::stdout(),
        &cli_options.workspace,
        &tasks_config,
      )?;
      return Ok(1);
    }
  };

    // ensure the npm packages are installed if using a node_modules
    // directory and managed resolver
    if cli_options.has_node_modules_dir() {
      if let Some(npm_resolver) = npm_resolver.as_managed() {
        npm_resolver.ensure_top_level_package_json_install().await?;
      }
    }
  let npm_resolver = factory.npm_resolver().await?;
  let node_resolver = factory.node_resolver().await?;
  let env_vars = real_env_vars();

    let cwd = match task_flags.cwd {
      Some(path) => canonicalize_path(&PathBuf::from(path))?,
      None => maybe_package_json
        .as_ref()
        .unwrap()
        .path
        .parent()
        .unwrap()
        .to_owned(),
    };
  match tasks_config.task(task_name) {
    Some((dir_url, task_or_script)) => match task_or_script {
      TaskOrScript::Task(_tasks, script) => {
        let cwd = match task_flags.cwd {
          Some(path) => canonicalize_path(&PathBuf::from(path))
            .context("failed canonicalizing --cwd")?,
          None => normalize_path(dir_url.to_file_path().unwrap()),
        };

    // At this point we already checked if the task name exists in package.json.
    // We can therefore check for "pre" and "post" scripts too, since we're only
    // dealing with package.json here and not deno.json
    let task_names = vec![
      format!("pre{}", task_name),
      task_name.clone(),
      format!("post{}", task_name),
    ];
    let custom_commands =
      resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?;
    for task_name in &task_names {
      if let Some(script) = package_json_scripts.get(task_name) {
        let exit_code = run_task(RunTaskOptions {
        let custom_commands =
          resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?;
        run_task(RunTaskOptions {
          task_name,
          script,
          cwd: &cwd,
          init_cwd: cli_options.initial_cwd(),
          env_vars: env_vars.clone(),
          env_vars,
          argv: cli_options.argv(),
          custom_commands: custom_commands.clone(),
          custom_commands,
          root_node_modules_dir: npm_resolver
            .root_node_modules_path()
            .map(|p| p.as_path()),
        })
        .await?;
        if exit_code > 0 {
          return Ok(exit_code);
        }
        .await
      }
    }
      TaskOrScript::Script(scripts, _script) => {
        // ensure the npm packages are installed if using a node_modules
        // directory and managed resolver
        if cli_options.has_node_modules_dir() {
          if let Some(npm_resolver) = npm_resolver.as_managed() {
            npm_resolver.ensure_top_level_package_json_install().await?;
          }
        }

    Ok(0)
  } else {
    log::error!("Task not found: {task_name}");
    if log::log_enabled!(log::Level::Error) {
      print_available_tasks(
        &mut std::io::stderr(),
        &tasks_config,
        &package_json_scripts,
      )?;
        let cwd = match task_flags.cwd {
          Some(path) => canonicalize_path(&PathBuf::from(path))?,
          None => normalize_path(dir_url.to_file_path().unwrap()),
        };

        // At this point we already checked if the task name exists in package.json.
        // We can therefore check for "pre" and "post" scripts too, since we're only
        // dealing with package.json here and not deno.json
        let task_names = vec![
          format!("pre{}", task_name),
          task_name.clone(),
          format!("post{}", task_name),
        ];
        let custom_commands =
          resolve_custom_commands(npm_resolver.as_ref(), node_resolver)?;
        for task_name in &task_names {
          if let Some(script) = scripts.get(task_name) {
            let exit_code = run_task(RunTaskOptions {
              task_name,
              script,
              cwd: &cwd,
              init_cwd: cli_options.initial_cwd(),
              env_vars: env_vars.clone(),
              argv: cli_options.argv(),
              custom_commands: custom_commands.clone(),
              root_node_modules_dir: npm_resolver
                .root_node_modules_path()
                .map(|p| p.as_path()),
            })
            .await?;
            if exit_code > 0 {
              return Ok(exit_code);
            }
          }
        }

        Ok(0)
      }
    },
    None => {
      log::error!("Task not found: {task_name}");
      if log::log_enabled!(log::Level::Error) {
        print_available_tasks(
          &mut std::io::stderr(),
          &cli_options.workspace,
          &tasks_config,
        )?;
      }
      Ok(1)
    }
    Ok(1)
  }
}

@@ -282,53 +266,92 @@ fn real_env_vars() -> HashMap<String, String> {

fn print_available_tasks(
  writer: &mut dyn std::io::Write,
  tasks_config: &IndexMap<String, deno_config::Task>,
  package_json_scripts: &IndexMap<String, String>,
  workspace: &Arc<Workspace>,
  tasks_config: &WorkspaceTasksConfig,
) -> Result<(), std::io::Error> {
  writeln!(writer, "{}", colors::green("Available tasks:"))?;
  let is_cwd_root_dir = tasks_config.root.is_none();

  if tasks_config.is_empty() && package_json_scripts.is_empty() {
  if tasks_config.is_empty() {
    writeln!(
      writer,
      " {}",
      colors::red("No tasks found in configuration file")
    )?;
  } else {
    for (is_deno, (key, task)) in tasks_config
      .iter()
      .map(|(k, t)| (true, (k, t.clone())))
      .chain(
        package_json_scripts
          .iter()
          .filter(|(key, _)| !tasks_config.contains_key(*key))
          .map(|(k, v)| (false, (k, deno_config::Task::Definition(v.clone())))),
      )
    {
      writeln!(
        writer,
        "- {}{}",
        colors::cyan(key),
        if is_deno {
          "".to_string()
        } else {
          format!(" {}", colors::italic_gray("(package.json)"))
        }
      )?;
      let definition = match &task {
        deno_config::Task::Definition(definition) => definition,
        deno_config::Task::Commented { definition, .. } => definition,
    let mut seen_task_names =
      HashSet::with_capacity(tasks_config.tasks_count());
    for maybe_config in [&tasks_config.member, &tasks_config.root] {
      let Some(config) = maybe_config else {
        continue;
      };
      if let deno_config::Task::Commented { comments, .. } = &task {
        let slash_slash = colors::italic_gray("//");
        for comment in comments {
          writeln!(
            writer,
            " {slash_slash} {}",
            colors::italic_gray(comment)
          )?;
      for (is_root, is_deno, (key, task)) in config
        .deno_json
        .as_ref()
        .map(|config| {
          let is_root = !is_cwd_root_dir
            && config.folder_url == *workspace.root_folder().0.as_ref();
          config
            .tasks
            .iter()
            .map(move |(k, t)| (is_root, true, (k, Cow::Borrowed(t))))
        })
        .into_iter()
        .flatten()
        .chain(
          config
            .package_json
            .as_ref()
            .map(|config| {
              let is_root = !is_cwd_root_dir
                && config.folder_url == *workspace.root_folder().0.as_ref();
              config.tasks.iter().map(move |(k, v)| {
                (
                  is_root,
                  false,
                  (k, Cow::Owned(deno_config::Task::Definition(v.clone()))),
                )
              })
            })
            .into_iter()
            .flatten(),
        )
      {
        if !seen_task_names.insert(key) {
          continue; // already seen
        }
        writeln!(
          writer,
          "- {}{}",
          colors::cyan(key),
          if is_root {
            if is_deno {
              format!(" {}", colors::italic_gray("(workspace)"))
            } else {
              format!(" {}", colors::italic_gray("(workspace package.json)"))
            }
          } else if is_deno {
            "".to_string()
          } else {
            format!(" {}", colors::italic_gray("(package.json)"))
          }
        )?;
        let definition = match task.as_ref() {
          deno_config::Task::Definition(definition) => definition,
          deno_config::Task::Commented { definition, .. } => definition,
        };
        if let deno_config::Task::Commented { comments, .. } = task.as_ref() {
          let slash_slash = colors::italic_gray("//");
          for comment in comments {
            writeln!(
              writer,
              " {slash_slash} {}",
              colors::italic_gray(comment)
            )?;
          }
        }
        writeln!(writer, " {definition}")?;
      }
      writeln!(writer, " {definition}")?;
    }
  }
}
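For package.json scripts (and only those; deno.json tasks deliberately do not get this), `deno task foo` also runs `prefoo` and `postfoo` when they exist, and a nonzero exit code from any script in the chain stops the rest, as the `TaskOrScript::Script` arm above shows. The ordering logic reduced to a sketch, with the task runner stubbed out:

```rust
use std::collections::HashMap;

/// Runs pre<name>, <name>, post<name> in order, stopping at the first
/// nonzero exit code. `run` stands in for the diff's run_task call.
fn run_with_pre_post(
  scripts: &HashMap<String, String>,
  task_name: &str,
  mut run: impl FnMut(&str) -> i32,
) -> i32 {
  let task_names = [
    format!("pre{}", task_name),
    task_name.to_string(),
    format!("post{}", task_name),
  ];
  for name in &task_names {
    if let Some(script) = scripts.get(name) {
      let exit_code = run(script);
      if exit_code > 0 {
        return exit_code; // a failing pre/main script skips the rest
      }
    }
  }
  0
}
```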
@@ -1705,11 +1705,17 @@ fn collect_specifiers_with_test_mode(
async fn fetch_specifiers_with_test_mode(
  cli_options: &CliOptions,
  file_fetcher: &FileFetcher,
  files: FilePatterns,
  member_patterns: impl Iterator<Item = FilePatterns>,
  doc: &bool,
) -> Result<Vec<(ModuleSpecifier, TestMode)>, AnyError> {
  let mut specifiers_with_mode =
    collect_specifiers_with_test_mode(cli_options, files, doc)?;
  let mut specifiers_with_mode = member_patterns
    .map(|files| {
      collect_specifiers_with_test_mode(cli_options, files.clone(), doc)
    })
    .collect::<Result<Vec<_>, _>>()?
    .into_iter()
    .flatten()
    .collect::<Vec<_>>();

  for (specifier, mode) in &mut specifiers_with_mode {
    let file = file_fetcher

@@ -1731,7 +1737,8 @@ pub async fn run_tests(
) -> Result<(), AnyError> {
  let factory = CliFactory::from_flags(flags)?;
  let cli_options = factory.cli_options();
  let test_options = cli_options.resolve_test_options(test_flags)?;
  let workspace_test_options =
    cli_options.resolve_workspace_test_options(&test_flags);
  let file_fetcher = factory.file_fetcher()?;
  // Various test files should not share the same permissions in terms of
  // `PermissionsContainer` - otherwise granting/revoking permissions in one

@@ -1740,15 +1747,17 @@ pub async fn run_tests(
    Permissions::from_options(&cli_options.permissions_options()?)?;
  let log_level = cli_options.log_level();

  let members_with_test_options =
    cli_options.resolve_test_options_for_members(&test_flags)?;
  let specifiers_with_mode = fetch_specifiers_with_test_mode(
    cli_options,
    file_fetcher,
    test_options.files.clone(),
    &test_options.doc,
    members_with_test_options.into_iter().map(|(_, v)| v.files),
    &workspace_test_options.doc,
  )
  .await?;

  if !test_options.allow_none && specifiers_with_mode.is_empty() {
  if !workspace_test_options.allow_none && specifiers_with_mode.is_empty() {
    return Err(generic_error("No test modules found"));
  }

@@ -1761,7 +1770,7 @@ pub async fn run_tests(
  )
  .await?;

  if test_options.no_run {
  if workspace_test_options.no_run {
    return Ok(());
  }

@@ -1787,16 +1796,16 @@ pub async fn run_tests(
      ))
    },
  )?,
  concurrent_jobs: test_options.concurrent_jobs,
  fail_fast: test_options.fail_fast,
  concurrent_jobs: workspace_test_options.concurrent_jobs,
  fail_fast: workspace_test_options.fail_fast,
  log_level,
  filter: test_options.filter.is_some(),
  reporter: test_options.reporter,
  junit_path: test_options.junit_path,
  filter: workspace_test_options.filter.is_some(),
  reporter: workspace_test_options.reporter,
  junit_path: workspace_test_options.junit_path,
  specifier: TestSpecifierOptions {
    filter: TestFilter::from_flag(&test_options.filter),
    shuffle: test_options.shuffle,
    trace_leaks: test_options.trace_leaks,
    filter: TestFilter::from_flag(&workspace_test_options.filter),
    shuffle: workspace_test_options.shuffle,
    trace_leaks: workspace_test_options.trace_leaks,
  },
  },
)

@@ -1838,34 +1847,47 @@ pub async fn run_tests_with_watch(
  let factory = CliFactoryBuilder::new()
    .build_from_flags_for_watcher(flags, watcher_communicator.clone())?;
  let cli_options = factory.cli_options();
  let test_options = cli_options.resolve_test_options(test_flags)?;
  let workspace_test_options =
    cli_options.resolve_workspace_test_options(&test_flags);

  let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
  if let Some(set) = &test_options.files.include {
    let watch_paths = set.base_paths();
    if !watch_paths.is_empty() {
      let _ = watcher_communicator.watch_paths(watch_paths);
    }
  }

  let graph_kind = cli_options.type_check_mode().as_graph_kind();
  let log_level = cli_options.log_level();
  let cli_options = cli_options.clone();
  let module_graph_creator = factory.module_graph_creator().await?;
  let file_fetcher = factory.file_fetcher()?;
  let test_modules = if test_options.doc {
    collect_specifiers(
      test_options.files.clone(),
      cli_options.vendor_dir_path().map(ToOwned::to_owned),
      |e| is_supported_test_ext(e.path),
    )
  } else {
    collect_specifiers(
      test_options.files.clone(),
      cli_options.vendor_dir_path().map(ToOwned::to_owned),
      is_supported_test_path_predicate,
    )
  }?;
  let members_with_test_options =
    cli_options.resolve_test_options_for_members(&test_flags)?;
  let watch_paths = members_with_test_options
    .iter()
    .filter_map(|(_, test_options)| {
      test_options
        .files
        .include
        .as_ref()
        .map(|set| set.base_paths())
    })
    .flatten()
    .collect::<Vec<_>>();
  let _ = watcher_communicator.watch_paths(watch_paths);
  let test_modules = members_with_test_options
    .iter()
    .map(|(_, test_options)| {
      collect_specifiers(
        test_options.files.clone(),
        cli_options.vendor_dir_path().map(ToOwned::to_owned),
        if workspace_test_options.doc {
          Box::new(|e: WalkEntry| is_supported_test_ext(e.path))
            as Box<dyn Fn(WalkEntry) -> bool>
        } else {
          Box::new(is_supported_test_path_predicate)
        },
      )
    })
    .collect::<Result<Vec<_>, _>>()?
    .into_iter()
    .flatten()
    .collect::<Vec<_>>();

  let permissions =
    Permissions::from_options(&cli_options.permissions_options()?)?;

@@ -1898,8 +1920,8 @@ pub async fn run_tests_with_watch(
      let specifiers_with_mode = fetch_specifiers_with_test_mode(
        &cli_options,
        file_fetcher,
        test_options.files.clone(),
        &test_options.doc,
        members_with_test_options.into_iter().map(|(_, v)| v.files),
        &workspace_test_options.doc,
      )
      .await?
      .into_iter()

@@ -1915,7 +1937,7 @@ pub async fn run_tests_with_watch(
      )
      .await?;

      if test_options.no_run {
      if workspace_test_options.no_run {
        return Ok(());
      }

@@ -1938,16 +1960,16 @@ pub async fn run_tests_with_watch(
          ))
        },
      )?,
      concurrent_jobs: test_options.concurrent_jobs,
      fail_fast: test_options.fail_fast,
      concurrent_jobs: workspace_test_options.concurrent_jobs,
      fail_fast: workspace_test_options.fail_fast,
      log_level,
      filter: test_options.filter.is_some(),
      reporter: test_options.reporter,
      junit_path: test_options.junit_path,
      filter: workspace_test_options.filter.is_some(),
      reporter: workspace_test_options.reporter,
      junit_path: workspace_test_options.junit_path,
      specifier: TestSpecifierOptions {
        filter: TestFilter::from_flag(&test_options.filter),
        shuffle: test_options.shuffle,
        trace_leaks: test_options.trace_leaks,
        filter: TestFilter::from_flag(&workspace_test_options.filter),
        shuffle: workspace_test_options.shuffle,
        trace_leaks: workspace_test_options.trace_leaks,
      },
    },
  )
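The test collection above now runs once per workspace member and merges the results; the `collect::<Result<Vec<_>, _>>()?` step short-circuits on the first member that errors and only then flattens. The pattern in isolation, as a generic sketch:

```rust
/// Collect-then-flatten over per-member results: any Err wins,
/// otherwise all member vectors are merged into one.
fn collect_all<T, E>(
  members: impl Iterator<Item = Result<Vec<T>, E>>,
) -> Result<Vec<T>, E> {
  Ok(
    members
      .collect::<Result<Vec<_>, E>>()? // first error short-circuits
      .into_iter()
      .flatten()
      .collect(),
  )
}

fn main() {
  let ok: Result<Vec<i32>, &str> =
    collect_all([Ok(vec![1, 2]), Ok(vec![3])].into_iter());
  assert_eq!(ok.unwrap(), vec![1, 2, 3]);
  let err: Result<Vec<i32>, &str> =
    collect_all([Ok(vec![1]), Err("bad member")].into_iter());
  assert_eq!(err.unwrap_err(), "bad member");
}
```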
12 cli/tools/vendor/build.rs (vendored)

@@ -81,8 +81,8 @@ pub async fn build<
  build_graph,
  parsed_source_cache,
  output_dir,
  maybe_original_import_map: original_import_map,
  maybe_jsx_import_source: jsx_import_source,
  maybe_original_import_map,
  maybe_jsx_import_source,
  resolver,
  environment,
} = input;

@@ -90,12 +90,12 @@ pub async fn build<
  let output_dir_specifier =
    ModuleSpecifier::from_directory_path(output_dir).unwrap();

  if let Some(original_im) = &original_import_map {
  if let Some(original_im) = &maybe_original_import_map {
    validate_original_import_map(original_im, &output_dir_specifier)?;
  }

  // add the jsx import source to the entry points to ensure it is always vendored
  if let Some(jsx_import_source) = jsx_import_source {
  if let Some(jsx_import_source) = maybe_jsx_import_source {
    if let Some(specifier_text) = jsx_import_source.maybe_specifier_text() {
      if let Ok(specifier) = resolver.resolve(
        &specifier_text,

@@ -171,8 +171,8 @@ pub async fn build<
    graph: &graph,
    modules: &all_modules,
    mappings: &mappings,
    original_import_map,
    jsx_import_source,
    maybe_original_import_map,
    maybe_jsx_import_source,
    resolver,
    parsed_source_cache,
  })?;
16 cli/tools/vendor/import_map.rs (vendored)

@@ -59,7 +59,7 @@ impl<'a> ImportMapBuilder<'a> {

pub fn into_import_map(
  self,
  original_import_map: Option<&ImportMap>,
  maybe_original_import_map: Option<&ImportMap>,
) -> ImportMap {
  fn get_local_imports(
    new_relative_path: &str,

@@ -99,7 +99,7 @@ impl<'a> ImportMapBuilder<'a> {

  let mut import_map = ImportMap::new(self.base_dir.clone());

  if let Some(original_im) = original_import_map {
  if let Some(original_im) = maybe_original_import_map {
    let original_base_dir = ModuleSpecifier::from_directory_path(
      original_im
        .base_url()

@@ -183,8 +183,8 @@ pub struct BuildImportMapInput<'a> {
  pub modules: &'a [&'a Module],
  pub graph: &'a ModuleGraph,
  pub mappings: &'a Mappings,
  pub original_import_map: Option<&'a ImportMap>,
  pub jsx_import_source: Option<&'a JsxImportSourceConfig>,
  pub maybe_original_import_map: Option<&'a ImportMap>,
  pub maybe_jsx_import_source: Option<&'a JsxImportSourceConfig>,
  pub resolver: &'a dyn deno_graph::source::Resolver,
  pub parsed_source_cache: &'a ParsedSourceCache,
}

@@ -197,8 +197,8 @@ pub fn build_import_map(
  modules,
  graph,
  mappings,
  original_import_map,
  jsx_import_source,
  maybe_original_import_map,
  maybe_jsx_import_source,
  resolver,
  parsed_source_cache,
} = input;

@@ -212,7 +212,7 @@ pub fn build_import_map(
  }

  // add the jsx import source to the destination import map, if mapped in the original import map
  if let Some(jsx_import_source) = jsx_import_source {
  if let Some(jsx_import_source) = maybe_jsx_import_source {
    if let Some(specifier_text) = jsx_import_source.maybe_specifier_text() {
      if let Ok(resolved_url) = resolver.resolve(
        &specifier_text,

@@ -228,7 +228,7 @@ pub fn build_import_map(
    }
  }

  Ok(builder.into_import_map(original_import_map).to_json())
  Ok(builder.into_import_map(maybe_original_import_map).to_json())
}

fn visit_modules(
25 cli/tools/vendor/mod.rs (vendored)

@@ -48,10 +48,17 @@ pub async fn vendor(
  validate_options(&mut cli_options, &output_dir)?;
  let factory = CliFactory::from_cli_options(Arc::new(cli_options));
  let cli_options = factory.cli_options();
  if cli_options.workspace.config_folders().len() > 1 {
    bail!("deno vendor is not supported in a workspace. Set `\"vendor\": true` in the workspace deno.json file instead");
  }
  let entry_points =
    resolve_entry_points(&vendor_flags, cli_options.initial_cwd())?;
  let jsx_import_source = cli_options.to_maybe_jsx_import_source_config()?;
  let jsx_import_source =
    cli_options.workspace.to_maybe_jsx_import_source_config()?;
  let module_graph_creator = factory.module_graph_creator().await?.clone();
  let workspace_resolver = factory.workspace_resolver().await?;
  let root_folder = cli_options.workspace.root_folder().1;
  let maybe_config_file = root_folder.deno_json.as_ref();
  let output = build::build(build::BuildInput {
    entry_points,
    build_graph: move |entry_points| {

@@ -64,7 +71,7 @@ pub async fn vendor(
    },
    parsed_source_cache: factory.parsed_source_cache(),
    output_dir: &output_dir,
    maybe_original_import_map: factory.maybe_import_map().await?.as_deref(),
    maybe_original_import_map: workspace_resolver.maybe_import_map(),
    maybe_jsx_import_source: jsx_import_source.as_ref(),
    resolver: factory.resolver().await?.as_graph_resolver(),
    environment: &build::RealVendorEnvironment,

@@ -91,7 +98,7 @@ pub async fn vendor(
  let try_add_import_map = vendored_count > 0;
  let modified_result = maybe_update_config_file(
    &output_dir,
    cli_options,
    maybe_config_file,
    try_add_import_map,
    try_add_node_modules_dir,
  );

@@ -100,8 +107,9 @@ pub async fn vendor(
  if modified_result.added_node_modules_dir {
    let node_modules_path =
      cli_options.node_modules_dir_path().cloned().or_else(|| {
        cli_options
          .maybe_config_file_specifier()
        maybe_config_file
          .as_ref()
          .map(|d| &d.specifier)
          .filter(|c| c.scheme() == "file")
          .and_then(|c| c.to_file_path().ok())
          .map(|config_path| config_path.parent().unwrap().join("node_modules"))

@@ -176,7 +184,7 @@ fn validate_options(
  let import_map_specifier = options
    .resolve_specified_import_map_specifier()?
    .or_else(|| {
      let config_file = options.maybe_config_file().as_ref()?;
      let config_file = options.workspace.root_folder().1.deno_json.as_ref()?;
      config_file
        .to_import_map_specifier()
        .ok()

@@ -229,12 +237,12 @@ fn validate_options(

fn maybe_update_config_file(
  output_dir: &Path,
  options: &CliOptions,
  maybe_config_file: Option<&Arc<ConfigFile>>,
  try_add_import_map: bool,
  try_add_node_modules_dir: bool,
) -> ModifiedResult {
  assert!(output_dir.is_absolute());
  let config_file = match options.maybe_config_file() {
  let config_file = match maybe_config_file {
    Some(config_file) => config_file,
    None => return ModifiedResult::default(),
  };

@@ -245,7 +253,6 @@ fn maybe_update_config_file(
  let fmt_config_options = config_file
    .to_fmt_config()
    .ok()
    .flatten()
    .map(|config| config.options)
    .unwrap_or_default();
  let result = update_config_file(
18 cli/tools/vendor/test.rs (vendored)

@@ -8,6 +8,7 @@ use std::path::PathBuf;
use std::sync::Arc;

use deno_ast::ModuleSpecifier;
use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;

@@ -182,7 +183,7 @@ pub struct VendorOutput {
pub struct VendorTestBuilder {
  entry_points: Vec<ModuleSpecifier>,
  loader: TestLoader,
  original_import_map: Option<ImportMap>,
  maybe_original_import_map: Option<ImportMap>,
  environment: TestVendorEnvironment,
  jsx_import_source_config: Option<JsxImportSourceConfig>,
}

@@ -207,7 +208,7 @@ impl VendorTestBuilder {
    &mut self,
    import_map: ImportMap,
  ) -> &mut Self {
    self.original_import_map = Some(import_map);
    self.maybe_original_import_map = Some(import_map);
    self
  }

@@ -234,7 +235,7 @@ impl VendorTestBuilder {
    let parsed_source_cache = ParsedSourceCache::default();
    let resolver = Arc::new(build_resolver(
      self.jsx_import_source_config.clone(),
      self.original_import_map.clone(),
      self.maybe_original_import_map.clone(),
    ));
    super::build::build(super::build::BuildInput {
      entry_points,

@@ -257,7 +258,7 @@ impl VendorTestBuilder {
      },
      parsed_source_cache: &parsed_source_cache,
      output_dir: &output_dir,
      maybe_original_import_map: self.original_import_map.as_ref(),
      maybe_original_import_map: self.maybe_original_import_map.as_ref(),
      maybe_jsx_import_source: self.jsx_import_source_config.as_ref(),
      resolver: resolver.as_graph_resolver(),
      environment: &self.environment,

@@ -287,15 +288,18 @@ impl VendorTestBuilder {

fn build_resolver(
  maybe_jsx_import_source_config: Option<JsxImportSourceConfig>,
  original_import_map: Option<ImportMap>,
  maybe_original_import_map: Option<ImportMap>,
) -> CliGraphResolver {
  CliGraphResolver::new(CliGraphResolverOptions {
    node_resolver: None,
    npm_resolver: None,
    sloppy_imports_resolver: None,
    package_json_deps_provider: Default::default(),
    workspace_resolver: Arc::new(WorkspaceResolver::new_raw(
      maybe_original_import_map,
      Vec::new(),
      deno_config::workspace::PackageJsonDepResolution::Enabled,
    )),
    maybe_jsx_import_source_config,
    maybe_import_map: original_import_map.map(Arc::new),
    maybe_vendor_dir: None,
    bare_node_builtins_enabled: false,
  })
38 cli/util/collections.rs (new file)

@@ -0,0 +1,38 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::marker::PhantomData;

pub struct CheckedSet<T: std::hash::Hash + ?Sized> {
  _kind: PhantomData<T>,
  checked: std::collections::HashSet<u64>,
}

impl<T: std::hash::Hash + ?Sized> Default for CheckedSet<T> {
  fn default() -> Self {
    Self {
      _kind: Default::default(),
      checked: Default::default(),
    }
  }
}

impl<T: std::hash::Hash + ?Sized> CheckedSet<T> {
  pub fn with_capacity(capacity: usize) -> Self {
    Self {
      _kind: PhantomData,
      checked: std::collections::HashSet::with_capacity(capacity),
    }
  }

  pub fn insert(&mut self, value: &T) -> bool {
    self.checked.insert(self.get_hash(value))
  }

  fn get_hash(&self, value: &T) -> u64 {
    use std::collections::hash_map::DefaultHasher;
    use std::hash::Hasher;
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
  }
}
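`CheckedSet` stores only the 64-bit hash of each inserted value, so it can de-duplicate unsized values such as `str` or `Path` without cloning or owning them; the trade-off is that a hash collision (astronomically unlikely with 64 bits) would silently treat two distinct values as already seen. Usage, assuming the type above is in scope:

```rust
fn main() {
  let mut checked: CheckedSet<str> = CheckedSet::with_capacity(4);
  assert!(checked.insert("file:///a.ts"));  // first insert returns true
  assert!(!checked.insert("file:///a.ts")); // duplicate returns false
  assert!(checked.insert("file:///b.ts"));
}
```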
@@ -163,6 +163,9 @@ pub struct WatcherCommunicator {

impl WatcherCommunicator {
  pub fn watch_paths(&self, paths: Vec<PathBuf>) -> Result<(), AnyError> {
    if paths.is_empty() {
      return Ok(());
    }
    self.paths_to_watch_tx.send(paths).map_err(AnyError::from)
  }
@@ -2,6 +2,7 @@

// Note: Only add code in this folder that has no application specific logic
pub mod checksum;
pub mod collections;
pub mod console;
pub mod diff;
pub mod display;
@@ -6,7 +6,6 @@ use std::rc::Rc;
use std::sync::Arc;

use deno_ast::ModuleSpecifier;
use deno_config::package_json::PackageJsonDeps;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;

@@ -41,7 +40,6 @@ use deno_runtime::BootstrapOptions;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReqReference;
use deno_terminal::colors;
use tokio::select;

@@ -117,7 +115,6 @@ pub struct CliMainWorkerOptions {
  pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
  pub unstable: bool,
  pub skip_op_registration: bool,
  pub maybe_root_package_json_deps: Option<PackageJsonDeps>,
  pub create_hmr_runner: Option<CreateHmrRunnerCb>,
  pub create_coverage_collector: Option<CreateCoverageCollectorCb>,
}

@@ -479,29 +476,6 @@ impl CliMainWorkerFactory {
    let (main_module, is_main_cjs) = if let Ok(package_ref) =
      NpmPackageReqReference::from_specifier(&main_module)
    {
      let package_ref = if package_ref.req().version_req.version_text() == "*" {
        // When using the wildcard version, select the same version used in the
        // package.json deps in order to prevent adding new dependency version
        shared
          .options
          .maybe_root_package_json_deps
          .as_ref()
          .and_then(|deps| {
            deps
              .values()
              .filter_map(|v| v.as_ref().ok())
              .find(|dep| dep.name == package_ref.req().name)
              .map(|dep| {
                NpmPackageReqReference::new(PackageReqReference {
                  req: dep.clone(),
                  sub_path: package_ref.sub_path().map(|s| s.to_string()),
                })
              })
          })
          .unwrap_or(package_ref)
      } else {
        package_ref
      };
      if let Some(npm_resolver) = shared.npm_resolver.as_managed() {
        npm_resolver
          .add_package_reqs(&[package_ref.req().clone()])
@ -822,7 +822,7 @@ testing[WILDCARD]this
|
|||
.args("compile --output binary main.ts")
|
||||
.run()
|
||||
.assert_exit_code(0)
|
||||
.assert_matches_text("Check file:///[WILDCARD]/main.ts\nCompile file:///[WILDCARD]/main.ts to binary[WILDCARD]\n");
|
||||
.assert_matches_text("Check file:///[WILDLINE]/main.ts\nCompile file:///[WILDLINE]/main.ts to binary[WILDLINE]\n");
|
||||
|
||||
context
|
||||
.new_command()
|
||||
|
@ -835,6 +835,7 @@ testing[WILDCARD]this
|
|||
fn compile_npm_file_system() {
|
||||
run_npm_bin_compile_test(RunNpmBinCompileOptions {
|
||||
input_specifier: "compile/npm_fs/main.ts",
|
||||
copy_temp_dir: Some("compile/npm_fs"),
|
||||
compile_args: vec!["-A"],
|
||||
run_args: vec![],
|
||||
output_file: "compile/npm_fs/main.out",
|
||||
|
@ -849,6 +850,7 @@ fn compile_npm_file_system() {
|
|||
fn compile_npm_bin_esm() {
|
||||
run_npm_bin_compile_test(RunNpmBinCompileOptions {
|
||||
input_specifier: "npm:@denotest/bin/cli-esm",
|
||||
copy_temp_dir: None,
|
||||
compile_args: vec![],
|
||||
run_args: vec!["this", "is", "a", "test"],
|
||||
output_file: "npm/deno_run_esm.out",
|
||||
|
@ -863,6 +865,7 @@ fn compile_npm_bin_esm() {
|
|||
fn compile_npm_bin_cjs() {
|
||||
run_npm_bin_compile_test(RunNpmBinCompileOptions {
|
||||
input_specifier: "npm:@denotest/bin/cli-cjs",
|
||||
copy_temp_dir: None,
|
||||
compile_args: vec![],
|
||||
run_args: vec!["this", "is", "a", "test"],
|
||||
output_file: "npm/deno_run_cjs.out",
|
||||
|
@ -877,6 +880,7 @@ fn compile_npm_bin_cjs() {
|
|||
fn compile_npm_cowsay_main() {
|
||||
run_npm_bin_compile_test(RunNpmBinCompileOptions {
|
||||
input_specifier: "npm:cowsay@1.5.0",
|
||||
copy_temp_dir: None,
|
||||
compile_args: vec!["--allow-read"],
|
||||
run_args: vec!["Hello"],
|
||||
output_file: "npm/deno_run_cowsay.out",
|
||||
|
@ -891,6 +895,7 @@ fn compile_npm_cowsay_main() {
|
|||
fn compile_npm_vfs_implicit_read_permissions() {
|
||||
run_npm_bin_compile_test(RunNpmBinCompileOptions {
|
||||
input_specifier: "compile/vfs_implicit_read_permission/main.ts",
|
||||
copy_temp_dir: Some("compile/vfs_implicit_read_permission"),
|
||||
compile_args: vec![],
|
||||
run_args: vec![],
|
||||
output_file: "compile/vfs_implicit_read_permission/main.out",
|
||||
|
@ -905,6 +910,7 @@ fn compile_npm_vfs_implicit_read_permissions() {
|
|||
fn compile_npm_no_permissions() {
|
||||
run_npm_bin_compile_test(RunNpmBinCompileOptions {
|
||||
input_specifier: "npm:cowsay@1.5.0",
|
||||
copy_temp_dir: None,
|
||||
compile_args: vec![],
|
||||
run_args: vec!["Hello"],
|
||||
output_file: "npm/deno_run_cowsay_no_permissions.out",
|
||||
|
@@ -919,6 +925,7 @@ fn compile_npm_no_permissions() {
 fn compile_npm_cowsay_explicit() {
   run_npm_bin_compile_test(RunNpmBinCompileOptions {
     input_specifier: "npm:cowsay@1.5.0/cowsay",
+    copy_temp_dir: None,
     compile_args: vec!["--allow-read"],
     run_args: vec!["Hello"],
     output_file: "npm/deno_run_cowsay.out",
@@ -933,6 +940,7 @@ fn compile_npm_cowsay_explicit() {
 fn compile_npm_cowthink() {
   run_npm_bin_compile_test(RunNpmBinCompileOptions {
     input_specifier: "npm:cowsay@1.5.0/cowthink",
+    copy_temp_dir: None,
     compile_args: vec!["--allow-read"],
     run_args: vec!["Hello"],
     output_file: "npm/deno_run_cowthink.out",
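
A pattern worth calling out across the callers above, before the struct change below: copy_temp_dir is Some(...) exactly when input_specifier is a local testdata path that has to be copied into the temp cwd, and None when the input is a remote npm: specifier, e.g.:

    input_specifier: "compile/npm_fs/main.ts"  ->  copy_temp_dir: Some("compile/npm_fs")
    input_specifier: "npm:cowsay@1.5.0"        ->  copy_temp_dir: None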
@@ -945,6 +953,7 @@ fn compile_npm_cowthink() {
 
 struct RunNpmBinCompileOptions<'a> {
   input_specifier: &'a str,
+  copy_temp_dir: Option<&'a str>,
   node_modules_dir: bool,
   output_file: &'a str,
   input_name: Option<&'a str>,
@@ -955,15 +964,13 @@ struct RunNpmBinCompileOptions<'a> {
 }
 
 fn run_npm_bin_compile_test(opts: RunNpmBinCompileOptions) {
-  let context = TestContextBuilder::for_npm().use_temp_cwd().build();
-
-  let temp_dir = context.temp_dir();
-  let main_specifier = if opts.input_specifier.starts_with("npm:") {
-    opts.input_specifier.to_string()
-  } else {
-    testdata_path().join(opts.input_specifier).to_string()
-  };
-
+  let builder = TestContextBuilder::for_npm();
+  let context = match opts.copy_temp_dir {
+    Some(copy_temp_dir) => builder.use_copy_temp_dir(copy_temp_dir).build(),
+    None => builder.use_temp_cwd().build(),
+  };
+
+  let temp_dir = context.temp_dir();
   let mut args = vec!["compile".to_string()];
 
   args.extend(opts.compile_args.iter().map(|s| s.to_string()));
@@ -977,7 +984,7 @@ fn run_npm_bin_compile_test(opts: RunNpmBinCompileOptions) {
     args.push(bin_name.to_string());
   }
 
-  args.push(main_specifier);
+  args.push(opts.input_specifier.to_string());
 
   // compile
   let output = context.new_command().args_vec(args).run();
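
As a worked example of the argument assembly above (a sketch only; the output-name handling lives outside the lines shown here): for compile_npm_cowsay_main the vector works out to roughly

    deno compile --allow-read npm:cowsay@1.5.0

after which the test runs the produced binary with its run_args (here "Hello") and compares against npm/deno_run_cowsay.out.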
@@ -1004,7 +1011,13 @@ fn run_npm_bin_compile_test(opts: RunNpmBinCompileOptions) {
 
 #[test]
 fn compile_node_modules_symlink_outside() {
+  // this code is using a canonicalized temp dir because otherwise
+  // it fails on the Windows CI because Deno makes the root directory
+  // a common ancestor of the symlinked temp dir and the canonicalized
+  // temp dir, which causes the warnings to not be surfaced
+  #[allow(deprecated)]
   let context = TestContextBuilder::for_npm()
+    .use_canonicalized_temp_dir()
     .use_copy_temp_dir("compile/node_modules_symlink_outside")
     .cwd("compile/node_modules_symlink_outside")
     .build();
@@ -1014,15 +1027,15 @@ fn compile_node_modules_symlink_outside() {
     .path()
     .join("compile")
     .join("node_modules_symlink_outside");
-  temp_dir.create_dir_all(project_dir.join("node_modules"));
-  temp_dir.create_dir_all(project_dir.join("some_folder"));
-  temp_dir.write(project_dir.join("test.txt"), "5");
+  let symlink_target_dir = temp_dir.path().join("some_folder");
+  project_dir.join("node_modules").create_dir_all();
+  symlink_target_dir.create_dir_all();
+  let symlink_target_file = temp_dir.path().join("target.txt");
+  symlink_target_file.write("5");
+  let symlink_dir = project_dir.join("node_modules").join("symlink_dir");
 
-  // create a symlink in the node_modules directory that points to a folder in the cwd
-  temp_dir.symlink_dir(
-    project_dir.join("some_folder"),
-    project_dir.join("node_modules").join("some_folder"),
-  );
+  // create a symlink in the node_modules directory that points to a folder outside the project
+  temp_dir.symlink_dir(&symlink_target_dir, &symlink_dir);
   // compile folder
   let output = context
     .new_command()
@@ -1032,16 +1045,16 @@ fn compile_node_modules_symlink_outside() {
   output.assert_matches_file(
     "compile/node_modules_symlink_outside/main_compile_folder.out",
   );
-  assert!(project_dir.join("node_modules/some_folder").exists());
+  assert!(symlink_dir.exists());
 
   // Cleanup and remove the folder. The folder test is done separately from
   // the file symlink test because different systems would traverse
   // the directory items in different order.
-  temp_dir.remove_dir_all(project_dir.join("node_modules/some_folder"));
+  symlink_dir.remove_dir_all();
 
   // create a symlink in the node_modules directory that points to a file in the cwd
   temp_dir.symlink_file(
-    project_dir.join("test.txt"),
+    &symlink_target_file,
     project_dir.join("node_modules").join("test.txt"),
   );
   assert!(project_dir.join("node_modules/test.txt").exists());
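
The fixture layout this rewrite produces, sketched (both links now point outside the project directory):

    project_dir/node_modules/symlink_dir -> temp_dir/some_folder   (directory)
    project_dir/node_modules/test.txt    -> temp_dir/target.txt    (file)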
@@ -1154,8 +1167,11 @@ fn granular_unstable_features() {
 
 #[test]
 fn granular_unstable_features_config_file() {
-  let context = TestContextBuilder::new().build();
+  let context = TestContextBuilder::new().use_temp_cwd().build();
   let dir = context.temp_dir();
+  testdata_path()
+    .join("compile/unstable_features.ts")
+    .copy(&dir.path().join("unstable_features.ts"));
   let exe = if cfg!(windows) {
     dir.path().join("app.exe")
   } else {
@@ -1176,7 +1192,7 @@ fn granular_unstable_features_config_file() {
       &dir.path().join("deno.json").to_string(),
       "--output",
       &exe.to_string_lossy(),
-      "./compile/unstable_features.ts",
+      "./unstable_features.ts",
     ])
     .run();
   output.assert_exit_code(0);
@@ -13051,7 +13051,7 @@ fn lsp_deno_json_workspace_fmt_config() {
   temp_dir.write(
     "deno.json",
     json!({
-      "workspaces": ["project1", "project2"],
+      "workspace": ["project1", "project2"],
       "fmt": {
         "semiColons": false,
       },
@@ -13174,7 +13174,7 @@ fn lsp_deno_json_workspace_lint_config() {
   temp_dir.write(
     "deno.json",
     json!({
-      "workspaces": ["project1", "project2"],
+      "workspace": ["project1", "project2"],
       "lint": {
         "rules": {
           "include": ["camelcase"],
@@ -13315,7 +13315,7 @@ fn lsp_deno_json_workspace_import_map() {
   temp_dir.write(
     "project1/deno.json",
     json!({
-      "workspaces": ["project2"],
+      "workspace": ["project2"],
       "imports": {
         "foo": "./foo1.ts",
       },
@@ -13376,7 +13376,7 @@ fn lsp_deno_json_workspace_jsr_resolution() {
   temp_dir.write(
     "deno.json",
     json!({
-      "workspaces": ["project1"],
+      "workspace": ["project1"],
    })
    .to_string(),
  );
@@ -1113,7 +1113,9 @@ fn lock_deno_json_package_json_deps_workspace() {
 
   // deno.json
   let deno_json = temp_dir.join("deno.json");
-  deno_json.write_json(&json!({}));
+  deno_json.write_json(&json!({
+    "nodeModulesDir": true
+  }));
 
   // package.json
   let package_json = temp_dir.join("package.json");
@@ -1147,16 +1149,23 @@ fn lock_deno_json_package_json_deps_workspace() {
   let lockfile = temp_dir.join("deno.lock");
   let esm_basic_integrity =
     get_lockfile_npm_package_integrity(&lockfile, "@denotest/esm-basic@1.0.0");
+  let cjs_default_export_integrity = get_lockfile_npm_package_integrity(
+    &lockfile,
+    "@denotest/cjs-default-export@1.0.0",
+  );
 
+  // no "workspace" because deno isn't smart enough to figure this out yet
+  // since it discovered the package.json in a folder different from the lockfile
   lockfile.assert_matches_json(json!({
     "version": "3",
     "packages": {
       "specifiers": {
+        "npm:@denotest/cjs-default-export@1": "npm:@denotest/cjs-default-export@1.0.0",
         "npm:@denotest/esm-basic@1": "npm:@denotest/esm-basic@1.0.0"
       },
       "npm": {
         "@denotest/cjs-default-export@1.0.0": {
           "integrity": cjs_default_export_integrity,
           "dependencies": {}
         },
         "@denotest/esm-basic@1.0.0": {
           "integrity": esm_basic_integrity,
           "dependencies": {}
@@ -1164,6 +1173,22 @@ fn lock_deno_json_package_json_deps_workspace() {
       }
     },
     "remote": {},
+    "workspace": {
+      "packageJson": {
+        "dependencies": [
+          "npm:@denotest/cjs-default-export@1"
+        ]
+      },
+      "members": {
+        "package-a": {
+          "packageJson": {
+            "dependencies": [
+              "npm:@denotest/esm-basic@1"
+            ]
+          }
+        }
+      }
+    }
   }));
 
   // run a command that causes discovery of the root package.json beside the lockfile
@@ -1201,6 +1226,15 @@ fn lock_deno_json_package_json_deps_workspace() {
         "dependencies": [
           "npm:@denotest/cjs-default-export@1"
         ]
       },
+      "members": {
+        "package-a": {
+          "packageJson": {
+            "dependencies": [
+              "npm:@denotest/esm-basic@1"
+            ]
+          }
+        }
+      }
     }
   });
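
For readability, the directory layout these assertions operate on, reconstructed from the writes in this test (the package-a manifest is implied by the lockfile "members" entry rather than shown in these hunks):

    temp_dir/
      deno.json       { "nodeModulesDir": true }
      deno.lock       (asserted above)
      package.json    depends on npm:@denotest/cjs-default-export@1
      package-a/
        package.json  depends on npm:@denotest/esm-basic@1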
@@ -1547,7 +1547,7 @@ async fn run_watch_dynamic_imports() {
     .unwrap();
   let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child);
   wait_contains("Process started", &mut stderr_lines).await;
-  wait_contains("No package.json file found", &mut stderr_lines).await;
+  wait_contains("Finished config loading.", &mut stderr_lines).await;
 
   wait_contains(
     "Hopefully dynamic import will be watched...",
@@ -1714,7 +1714,7 @@ console.log("Listening...")
     .unwrap();
   let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child);
   wait_contains("Process started", &mut stderr_lines).await;
-  wait_contains("No package.json file found", &mut stderr_lines).await;
+  wait_contains("Finished config loading.", &mut stderr_lines).await;
 
   wait_for_watcher("file_to_watch.js", &mut stderr_lines).await;
   wait_contains("Listening...", &mut stdout_lines).await;
@@ -1787,7 +1787,7 @@ export function foo() {
     .unwrap();
   let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child);
   wait_contains("Process started", &mut stderr_lines).await;
-  wait_contains("No package.json file found", &mut stderr_lines).await;
+  wait_contains("Finished config loading.", &mut stderr_lines).await;
 
   wait_for_watcher("file_to_watch.js", &mut stderr_lines).await;
   wait_contains("5 <h1>Hello</h1>", &mut stdout_lines).await;
@@ -1846,7 +1846,7 @@ export function foo() {
     .unwrap();
   let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child);
   wait_contains("Process started", &mut stderr_lines).await;
-  wait_contains("No package.json file found", &mut stderr_lines).await;
+  wait_contains("Finished config loading.", &mut stderr_lines).await;
 
   wait_for_watcher("file_to_watch.js", &mut stderr_lines).await;
   wait_contains("<h1>asd1</h1>", &mut stdout_lines).await;
@@ -1912,7 +1912,7 @@ export function foo() {
     .unwrap();
   let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child);
   wait_contains("Process started", &mut stderr_lines).await;
-  wait_contains("No package.json file found", &mut stderr_lines).await;
+  wait_contains("Finished config loading.", &mut stderr_lines).await;
 
   wait_for_watcher("file_to_watch.js", &mut stderr_lines).await;
   wait_contains("2 <h1>asd1</h1>", &mut stdout_lines).await;
BIN
tests/registry/npm/@types/lz-string/lz-string-1.3.33.tgz
Normal file
Binary file not shown.
BIN
tests/registry/npm/@types/lz-string/lz-string-1.5.0.tgz
Normal file
Binary file not shown.
113
tests/registry/npm/@types/lz-string/registry.json
Normal file
@@ -0,0 +1,113 @@
{
  "_id": "@types/lz-string",
  "_rev": "554-923923210a37cca16c53a3e8dd472e22",
  "name": "@types/lz-string",
  "description": "Stub TypeScript definitions entry for lz-string, which provides its own types definitions",
  "dist-tags": {
    "latest": "1.5.0"
  },
  "versions": {
    "1.3.33": {
      "name": "@types/lz-string",
      "version": "1.3.33",
      "description": "TypeScript definitions for lz-string",
      "license": "MIT",
      "contributors": [
        {
          "name": "Roman Nikitin",
          "url": "https://github.com/M0ns1gn0r",
          "githubUsername": "M0ns1gn0r"
        }
      ],
      "main": "",
      "types": "index",
      "repository": {
        "type": "git",
        "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git"
      },
      "scripts": {},
      "dependencies": {},
      "typesPublisherContentHash": "37e0f8cf2fb1fe08bdcc8e21278c91217d4a03d1cd6b32fc0eaec30757c6d4b1",
      "typeScriptVersion": "2.0",
      "_id": "@types/lz-string@1.3.33",
      "dist": {
        "integrity": "sha512-yWj3OnlKlwNpq9+Jh/nJkVAD3ta8Abk2kIRpjWpVkDlAD43tn6Q6xk5hurp84ndcq54jBDBGCD/WcIR0pspG0A==",
        "shasum": "de2d6105ea7bcaf67dd1d9451d580700d30473fc",
        "tarball": "http://localhost:4260/@types/lz-string/lz-string-1.3.33.tgz",
        "fileCount": 4,
        "unpackedSize": 5960,
        "npm-signature": "-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v3.0.4\r\nComment: https://openpgpjs.org\r\n\r\nwsFcBAEBCAAQBQJcZIpFCRA9TVsSAnZWagAAYm8QAJCahktTDI8BUR8q+Qra\nzsvv7Vbb20Ti7uoh97yzJiEC8UEWCGnxpZouWr3xoy0FjByYIvGmHqslGohP\nksiikCXiy+5pfT0Yi3M4QeADPlQjqUVTweCoeMmpUaHWGBdqG2kE6tnioCQy\nAL9n/YnQc10b5SE/XYgKHuBN/HJ5tx1Ejcg/o7qJG/2cUe/1K1asIMFUockV\ncgwFXFl8OSMTcA3Bs0C84zIdcaC4njVqUIQOWqdgKbe1vs+O/Zf/OdiYQh9f\nZZMXffwJKVpLSfhOTeDHeD1WMNmiww+FVIikeUIihp7Xahk9YbrLtE5BUSgG\nl9/vNfzUDW+J5oJb6n8k9WojHjte00inzMa1O7QVT7cUC+e5Nup1df0VErNF\nVuaBMUy2o0LViCVcXOYUnDBQCoaKpQ8cIVhtl0VLFrOdyn+a0blcwaNNrvE1\nFKb+OgBqipIDwAx1QghV45MPtRzI/TLYeSZtHoOYVJ8zc11FzjaQ33NZj/5w\nVzMnRkmjpwF5j++JSOa3687iKJTgrJ6XHYliYpxRRpJY3Oa4Nl0/G+xMm1BS\n0ueZuqpM+h2ZMuG7TQOeDKtTll7tsuKwy2UlkkP2uJOVurqJkCvcK/ImG25W\nKENAcoJvsk956vlbvJCdqvIcV5OF5XhgQh10gaAfHl+pJiLbCBhHpeWd95+Y\n5/3T\r\n=MjUN\r\n-----END PGP SIGNATURE-----\r\n",
        "signatures": [
          {
            "keyid": "SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA",
            "sig": "MEUCIHlPUpoP+v+OWyats51tKkKMx97XrotlO8GzoVtS22/KAiEAxLb7ultFaZZIfGVCNeHE/X+J9I58zkNA6a8LKcm2Wns="
          }
        ]
      },
      "maintainers": [
        {
          "name": "types",
          "email": "ts-npm-types@microsoft.com"
        }
      ],
      "_npmUser": {
        "name": "types",
        "email": "ts-npm-types@microsoft.com"
      },
      "directories": {},
      "_npmOperationalInternal": {
        "host": "s3://npm-registry-packages",
        "tmp": "tmp/lz-string_1.3.33_1550092868900_0.5979975733337666"
      },
      "_hasShrinkwrap": false
    },
    "1.5.0": {
      "name": "@types/lz-string",
      "version": "1.5.0",
      "description": "Stub TypeScript definitions entry for lz-string, which provides its own types definitions",
      "main": "",
      "scripts": {},
      "license": "MIT",
      "dependencies": {
        "lz-string": "*"
      },
      "deprecated": "This is a stub types definition. lz-string provides its own type definitions, so you do not need this installed.",
      "_id": "@types/lz-string@1.5.0",
      "dist": {
        "integrity": "sha512-s84fKOrzqqNCAPljhVyC5TjAo6BH4jKHw9NRNFNiRUY5QSgZCmVm5XILlWbisiKl+0OcS7eWihmKGS5akc2iQw==",
        "shasum": "2f15d2dd9dbfb344f3d31aee5a82cf350b037e5f",
        "tarball": "http://localhost:4260/@types/lz-string/lz-string-1.5.0.tgz",
        "fileCount": 4,
        "unpackedSize": 1754,
        "signatures": [
          {
            "keyid": "SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA",
            "sig": "MEQCIFwugI1BNDwbq90OnD5/morYlSnSQheJEnyTkclzw0SKAiAThdPB2+I/hjRlN5URdZcK4v0XXcVnh5xvMSf7SgQZ8A=="
          }
        ],
        "npm-signature": "-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v4.10.10\r\nComment: https://openpgpjs.org\r\n\r\nwsFzBAEBCAAGBQJkECMgACEJED1NWxICdlZqFiEECWMYAoorWMhJKdjhPU1b\r\nEgJ2VmopOQ//U+7G8WFrWQ3ecjTZrMAAqmwWNK1jgA3r0PonmwkiDjQlNAHj\r\nXAfJK8YSuFBrl8buIAkoJT9i+H6bpHIShj5fA4FKVtA1ihcwclAdvvoilwH2\r\nNCvoFeZZgrZB6y5e6AvGDHY67C2DzQ9XhfqYM0myyXS+of2gfznAPVqXwGCs\r\nWW39ee/WAbBEoN2Z1/hEAh+W51hV0HUjs39sbupo0vOHy9GdYuVJtTMeqesF\r\nmCfDDaM1FxbsMFccy8qRsihD26iwBMRa+W3+208gCc0i9xs8wRc+8GQcAGWd\r\nxSrTEgRd8hfBs6bxDKlSD3Qg7pTq3L+HvlUZGL2AHSbC6k/MCNduHhxEcrrj\r\nssFE4iuCievfQsd0CC4rI/8s5MDGwdQ+nldv0rYjsSphjLgHDly0LE1kAbNv\r\nxZWFXmFb7318wmbC38KYDn1I0b6YndHQFu1usVJ+Z107H/mxWRZeRg0THlD8\r\n3LuLEkCJqRddGmLkSQkJ6IZtX8H9EuuhU4ny6Xb3FYFhnXWmw7YSuvrrfSgs\r\nPlLlscCRsXgWYPzQ7h8mOyE4MoHfrjzcgFKIUgWPvW6EprDPAKu28vIXnn7j\r\nG0CiCYL+IWWTqa6pKkOJsE1ILkPYTZj/592zfGPzspl9Kfb/4+IaDMmApBVO\r\n51TMBjyXgYYDajmh6y8/U389X93/bIV/wjY=\r\n=935O\r\n-----END PGP SIGNATURE-----\r\n"
      },
      "_npmUser": {
        "name": "types",
        "email": "ts-npm-types@microsoft.com"
      },
      "directories": {},
      "maintainers": [
        {
          "name": "types",
          "email": "ts-npm-types@microsoft.com"
        }
      ],
      "_npmOperationalInternal": {
        "host": "s3://npm-registry-packages",
        "tmp": "tmp/lz-string_1.5.0_1678779167950_0.5151061207876493"
      },
      "_hasShrinkwrap": false
    }
  },
  "license": "MIT",
  "readmeFilename": "",
  "users": {
    "flumpus-dev": true
  }
}
BIN
tests/registry/npm/lz-string/lz-string-1.3.6.tgz
Normal file
Binary file not shown.
BIN
tests/registry/npm/lz-string/lz-string-1.5.0.tgz
Normal file
Binary file not shown.
165
tests/registry/npm/lz-string/registry.json
Normal file
@@ -0,0 +1,165 @@
{
  "_id": "lz-string",
  "_rev": "45-a265b69aa69ae37972e7a7931a9be325",
  "name": "lz-string",
  "description": "LZ-based compression algorithm",
  "dist-tags": {
    "latest": "1.5.0"
  },
  "versions": {
    "1.3.6": {
      "name": "lz-string",
      "version": "1.3.6",
      "license": "WTFPL",
      "description": "LZ-based compression algorithm",
      "homepage": "http://pieroxy.net/blog/pages/lz-string/index.html",
      "keywords": [
        "lz",
        "compression",
        "string"
      ],
      "main": "libs/lz-string.js",
      "bin": {
        "lz-string": "bin/bin.js"
      },
      "scripts": {},
      "dependencies": {},
      "devDependencies": {},
      "repository": {
        "type": "git",
        "url": "https://github.com/pieroxy/lz-string.git"
      },
      "bugs": {
        "url": "https://github.com/pieroxy/lz-string/issues"
      },
      "directories": {
        "test": "tests"
      },
      "author": {
        "name": "pieroxy",
        "email": "pieroxy@pieroxy.net"
      },
      "_id": "lz-string@1.3.6",
      "dist": {
        "shasum": "cc91b00d3264b15402e428e76dfeb709193bc10f",
        "tarball": "http://localhost:4260/lz-string/lz-string-1.3.6.tgz",
        "integrity": "sha512-gIHN4Nkmln8SrIRAXJ3qzGH7gJ8WjAORiwD+SB3PYW4n4ri+gP257pXSeyw/VGOV+6ZLIkZmNfK4xT6e2U5QIQ==",
        "signatures": [
          {
            "keyid": "SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA",
            "sig": "MEQCICsj8exNp9xi4L5Kz31ojhaj18oeqnD4vzlhr/RMaAIiAiA/3mY8M6oycukeCebQdfWQtZC640OyMjQO11da2GnGGg=="
          }
        ]
      },
      "_from": "./",
      "_npmVersion": "1.3.10",
      "_npmUser": {
        "name": "pieroxy",
        "email": "pieroxy@pieroxy.net"
      },
      "maintainers": [
        {
          "name": "pieroxy",
          "email": "pieroxy@pieroxy.net"
        }
      ]
    },
    "1.5.0": {
      "name": "lz-string",
      "version": "1.5.0",
      "license": "MIT",
      "filename": "lz-string.js",
      "description": "LZ-based compression algorithm",
      "homepage": "http://pieroxy.net/blog/pages/lz-string/index.html",
      "keywords": [
        "lz",
        "compression",
        "string"
      ],
      "main": "libs/lz-string.js",
      "typings": "typings/lz-string.d.ts",
      "bin": {
        "lz-string": "bin/bin.js"
      },
      "scripts": {},
      "dependencies": {},
      "devDependencies": {},
      "repository": {
        "type": "git",
        "url": "git+https://github.com/pieroxy/lz-string.git"
      },
      "bugs": {
        "url": "https://github.com/pieroxy/lz-string/issues"
      },
      "directories": {
        "test": "tests"
      },
      "author": {
        "name": "pieroxy",
        "email": "pieroxy@pieroxy.net"
      },
      "autoupdate": {
        "source": "git",
        "target": "git://github.com/pieroxy/lz-string.git",
        "basePath": "libs/",
        "files": [
          "lz-string.js",
          "lz-string.min.js",
          "base64-string.js"
        ]
      },
      "gitHead": "4a94308c1e684fb98866f7ba1288f3db6d9f8801",
      "_id": "lz-string@1.5.0",
      "_nodeVersion": "16.19.1",
      "_npmVersion": "8.19.3",
      "dist": {
        "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==",
        "shasum": "c1ab50f77887b712621201ba9fd4e3a6ed099941",
        "tarball": "http://localhost:4260/lz-string/lz-string-1.5.0.tgz",
        "fileCount": 16,
        "unpackedSize": 175825,
        "signatures": [
          {
            "keyid": "SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA",
            "sig": "MEQCIDFXe2mJhe/c2RygpDTZFwYF+ZLzmWmrobWbcX05nZzgAiB2NY0LGdJ8X/8K5Y24goCdb/HvaDnCxn4BdQm7jfU/Jw=="
          }
        ],
        "npm-signature": "-----BEGIN PGP SIGNATURE-----\r\nVersion: OpenPGP.js v4.10.10\r\nComment: https://openpgpjs.org\r\n\r\nwsFzBAEBCAAGBQJkAwBbACEJED1NWxICdlZqFiEECWMYAoorWMhJKdjhPU1b\r\nEgJ2VmrCaw/+L77yb5aRlRo8abeR0BMuhftlzyMGzGh+asUdX+afBEOGYTyJ\r\n2XM9fqdpZrtJv3+q9D+gqnLg7MoRQQkmvC+U0HTHEWtEJNaIH1at/IMhi+xB\r\n5/3Jho9VOtLhPto1/ld1CVu0JTxdUTDiTjpE26a4wdd7qMDhjaSJkypjtutn\r\nfwZXUs2YzKZQ1h6RlLSpB2b19KwiVjFsqnV+tIgs1WmjcrC7RxqEtA2yDdt5\r\nfWDM3lLgSGjFkedydnOskMNqLaL9COVzQ8iuFXGeS/NJvhi64gKDcGFl2ztx\r\nQS30dC/ud+EkF3omjN/cFhAnBCcXLvK52MxglR4+Ph4QAa4f3NhbUZbc1i4G\r\nf3Qa8GxOPHAAfR4X7z4E2fKlpybz7it3Sl5SJ8RQo3X24TGR69rM4Flc7G7S\r\ncNUtFXu/zJLmxYlc3u0Qcbx8sbdkg65V9y0n1aFXpwlofPbSqjOp/M4F5Yu4\r\nqQjGV6n8fz7CUb5ZpcEWFgztd+pi+7G0hhbKWrznOPxss9LWjr1j5PbIsY/9\r\nfZNeHynSv7Bkx2X7Cr7UPVZr9zNWLXdT7bxcI3ielAUVAeQRtRB9ostiCGvL\r\nChEZ3dZmIbYAeeSgL/175rpseCxPotDpLJ9xMBcyozfC1bbedA2LFbIkDzwA\r\nDKmVP8Nl733GahX08ZwxYSsoIU6oh9hYTeQ=\r\n=6NYt\r\n-----END PGP SIGNATURE-----\r\n"
      },
      "_npmUser": {
        "name": "pieroxy",
        "email": "pieroxy@pieroxy.net"
      },
      "maintainers": [
        {
          "name": "pieroxy",
          "email": "pieroxy@pieroxy.net"
        }
      ],
      "_npmOperationalInternal": {
        "host": "s3://npm-registry-packages",
        "tmp": "tmp/lz-string_1.5.0_1677918299665_0.8929158378621742"
      },
      "_hasShrinkwrap": false
    }
  },
  "maintainers": [],
  "time": {},
  "repository": {},
  "users": {},
  "homepage": "http://pieroxy.net/blog/pages/lz-string/index.html",
  "keywords": [
    "lz",
    "compression",
    "string"
  ],
  "license": "MIT",
  "readmeFilename": "README.md",
  "author": {
    "name": "pieroxy",
    "email": "pieroxy@pieroxy.net"
  },
  "bugs": {
    "url": "https://github.com/pieroxy/lz-string/issues"
  }
}
13
tests/specs/bench/workspace/__test__.jsonc
Normal file
@@ -0,0 +1,13 @@
{
  "tests": {
    "root": {
      "args": "bench",
      "output": "root.out"
    },
    "package": {
      "args": "bench",
      "cwd": "package-b",
      "output": "package_b.out"
    }
  }
}
6
tests/specs/bench/workspace/deno.json
Normal file
@@ -0,0 +1,6 @@
{
  "workspace": [
    "./package-a",
    "./package-b"
  ]
}
5
tests/specs/bench/workspace/package-a/deno.json
Normal file
@@ -0,0 +1,5 @@
{
  "name": "@scope/a",
  "version": "1.0.0",
  "exports": "./mod.ts"
}
7
tests/specs/bench/workspace/package-a/mod.bench.ts
Normal file
@@ -0,0 +1,7 @@
import { add } from "./mod.ts";

Deno.bench("add", () => {
  if (add(1, 2) !== 3) {
    throw new Error("failed");
  }
});
3
tests/specs/bench/workspace/package-a/mod.ts
Normal file
@@ -0,0 +1,3 @@
export function add(a: number, b: number): number {
  return a + b;
}
5
tests/specs/bench/workspace/package-b/deno.json
Normal file
@@ -0,0 +1,5 @@
{
  "name": "@scope/b",
  "version": "1.0.0",
  "exports": "./mod.ts"
}
7
tests/specs/bench/workspace/package-b/mod.bench.ts
Normal file
@@ -0,0 +1,7 @@
import { addOne } from "./mod.ts";

Deno.bench("addOne", () => {
  if (addOne(1) !== 2) {
    throw new Error("failed");
  }
});
5
tests/specs/bench/workspace/package-b/mod.ts
Normal file
@@ -0,0 +1,5 @@
import { add } from "@scope/a";

export function addOne(a: number): number {
  return add(a, 1);
}
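
The bare "@scope/a" import here resolves through the workspace rather than a registry: the root deno.json lists ./package-a as a member, and package-a/deno.json names itself "@scope/a" with exports "./mod.ts", giving the chain

    import "@scope/a" -> member ./package-a -> exports "./mod.ts" -> package-a/mod.ts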
9
tests/specs/bench/workspace/package_b.out
Normal file
@@ -0,0 +1,9 @@
Check file:///[WILDLINE]/package-b/mod.bench.ts
cpu: [WILDLINE]
runtime: [WILDLINE]

file:///[WILDLINE]/package-b/mod.bench.ts
benchmark[WILDLINE]
---[WILDLINE]
addOne[WILDLINE]

16
tests/specs/bench/workspace/root.out
Normal file
@@ -0,0 +1,16 @@
Check file:///[WILDLINE]/package-a/mod.bench.ts
Check file:///[WILDLINE]/package-b/mod.bench.ts
cpu: [WILDLINE]
runtime: [WILDLINE]

file:///[WILDLINE]/package-a/mod.bench.ts
benchmark[WILDLINE]
---[WILDLINE]
add[WILDLINE]


file:///[WILDLINE]/package-b/mod.bench.ts
benchmark[WILDLINE]
---[WILDLINE]
addOne[WILDLINE]

22
tests/specs/check/workspace/__test__.jsonc
Normal file
@@ -0,0 +1,22 @@
{
  "tests": {
    "root": {
      // todo(dsherret): should be possible to not provide args here
      "args": "check package-a/mod.ts package-b/mod.ts",
      "output": "root.out",
      "exitCode": 1
    },
    "package_a": {
      "args": "check mod.ts",
      "cwd": "package-a",
      "output": "package_a.out",
      "exitCode": 0
    },
    "package_b": {
      "args": "check mod.ts",
      "cwd": "package-b",
      "output": "package_b.out",
      "exitCode": 1
    }
  }
}
6
tests/specs/check/workspace/deno.json
Normal file
@@ -0,0 +1,6 @@
{
  "workspace": [
    "./package-a",
    "./package-b"
  ]
}
5
tests/specs/check/workspace/package-a/deno.json
Normal file
@@ -0,0 +1,5 @@
{
  "name": "@scope/a",
  "version": "1.0.0",
  "exports": "./mod.ts"
}
3
tests/specs/check/workspace/package-a/mod.ts
Normal file
@@ -0,0 +1,3 @@
export function add(a: number, b: number): number {
  return a + b;
}
5
tests/specs/check/workspace/package-b/deno.json
Normal file
@@ -0,0 +1,5 @@
{
  "name": "@scope/b",
  "version": "1.0.0",
  "exports": "./mod.ts"
}
4
tests/specs/check/workspace/package-b/mod.ts
Normal file
@@ -0,0 +1,4 @@
import { add } from "@scope/a";

const test: string = add(1, 2);
console.log(test);
1
tests/specs/check/workspace/package_a.out
Normal file
@@ -0,0 +1 @@
Check file:///[WILDLINE]/package-a/mod.ts
5
tests/specs/check/workspace/package_b.out
Normal file
@@ -0,0 +1,5 @@
Check file:///[WILDLINE]/package-b/mod.ts
error: TS2322 [ERROR]: Type 'number' is not assignable to type 'string'.
const test: string = add(1, 2);
      ~~~~
    at [WILDLINE]
6
tests/specs/check/workspace/root.out
Normal file
@@ -0,0 +1,6 @@
Check file:///[WILDLINE]/package-a/mod.ts
Check file:///[WILDLINE]/package-b/mod.ts
error: TS2322 [ERROR]: Type 'number' is not assignable to type 'string'.
const test: string = add(1, 2);
      ~~~~
    at [WILDLINE]
22
tests/specs/compile/npmrc_auto_install/__test__.jsonc
Normal file
@@ -0,0 +1,22 @@
{
  "tempDir": true,
  "steps": [{
    "if": "unix",
    "args": "compile --output main main.js",
    "output": "[WILDCARD]"
  }, {
    "if": "unix",
    "commandName": "./main",
    "args": [],
    "output": "main.out"
  }, {
    "if": "windows",
    "args": "compile --output main.exe main.js",
    "output": "[WILDCARD]"
  }, {
    "if": "windows",
    "commandName": "./main.exe",
    "args": [],
    "output": "main.out"
  }]
}
3
tests/specs/compile/npmrc_auto_install/deno.json
Normal file
@@ -0,0 +1,3 @@
{
  "nodeModulesDir": true
}
4
tests/specs/compile/npmrc_byonm/.npmrc
Normal file
@@ -0,0 +1,4 @@
@denotest:registry=http://localhost:4261/
//localhost:4261/:_authToken=private-reg-token
@denotest2:registry=http://localhost:4262/
//localhost:4262/:_authToken=private-reg-token2
8
tests/specs/compile/npmrc_byonm/main.js
Normal file
@@ -0,0 +1,8 @@
import { getValue, setValue } from "@denotest/basic";
import * as test from "@denotest2/basic";

console.log(getValue());
setValue(42);
console.log(getValue());

console.log(test.getValue());
3
tests/specs/compile/npmrc_byonm/main.out
Normal file
@@ -0,0 +1,3 @@
0
42
0
8
tests/specs/compile/npmrc_byonm/package.json
Normal file
@@ -0,0 +1,8 @@
{
  "name": "npmrc_test",
  "version": "0.0.1",
  "dependencies": {
    "@denotest/basic": "1.0.0",
    "@denotest2/basic": "1.0.0"
  }
}
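
Reading the npmrc_byonm fixture together: the .npmrc above routes each npm scope to its own registry, so the two dependencies in this package.json resolve as follows (a sketch of the mapping, not additional test output):

    @denotest/basic@1.0.0  -> http://localhost:4261/  (_authToken: private-reg-token)
    @denotest2/basic@1.0.0 -> http://localhost:4262/  (_authToken: private-reg-token2)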
26
tests/specs/fmt/workspace/__test__.jsonc
Normal file
@@ -0,0 +1,26 @@
{
  "tests": {
    "root_fmt": {
      "tempDir": true,
      "args": "fmt",
      "output": "root_fmt.out"
    },
    "root_check": {
      "args": "fmt --check",
      "exitCode": 1,
      "output": "root_check.out"
    },
    "sub_dir_fmt": {
      "tempDir": true,
      "args": "fmt",
      "cwd": "a",
      "output": "a_fmt.out"
    },
    "subdir_check": {
      "args": "fmt --check",
      "cwd": "a",
      "exitCode": 1,
      "output": "a_check.out"
    }
  }
}
1
tests/specs/fmt/workspace/a/a.ts
Normal file
@@ -0,0 +1 @@
console.log("a");
5
tests/specs/fmt/workspace/a/deno.json
Normal file
@@ -0,0 +1,5 @@
{
  "fmt": {
    "semiColons": false
  }
}
6
tests/specs/fmt/workspace/a_check.out
Normal file
@@ -0,0 +1,6 @@

from [WILDLINE]a.ts:
1 | -console.log("a");
1 | +console.log('a')

error: Found 1 not formatted file in 2 files
2
tests/specs/fmt/workspace/a_fmt.out
Normal file
@@ -0,0 +1,2 @@
[WILDLINE]a.ts
Checked 2 files
1
tests/specs/fmt/workspace/b/b.ts
Normal file
@@ -0,0 +1 @@
console.log('a');
5
tests/specs/fmt/workspace/b/deno.json
Normal file
@@ -0,0 +1,5 @@
{
  "fmt": {
    "singleQuote": false
  }
}
9
tests/specs/fmt/workspace/deno.json
Normal file
@@ -0,0 +1,9 @@
{
  "workspace": [
    "./a",
    "./b"
  ],
  "fmt": {
    "singleQuote": true
  }
}
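
Member fmt settings layer over this root config: under a/, "singleQuote": true from the root combines with "semiColons": false from a/deno.json, so a.ts formats to console.log('a') exactly as a_check.out above expects, while b/deno.json overrides "singleQuote" back to false. The effective options per directory, sketched:

    a/ -> { "singleQuote": true, "semiColons": false }
    b/ -> { "singleQuote": false }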
1
tests/specs/fmt/workspace/root.ts
Normal file
@@ -0,0 +1 @@
console.log("root")
Some files were not shown because too many files have changed in this diff.