mirror of
https://github.com/denoland/deno.git
synced 2024-11-21 15:04:11 -05:00
Merge branch 'main' into will/test-buildjet
This commit is contained in:
commit
d489baef01
1213 changed files with 48977 additions and 24996 deletions
17
.dprint.json
17
.dprint.json
|
@ -13,7 +13,9 @@
|
|||
"associations": "**/*.rs",
|
||||
"rustfmt": "rustfmt --config imports_granularity=item"
|
||||
},
|
||||
"includes": ["**/*.{ts,tsx,js,jsx,json,md,toml,rs}"],
|
||||
"includes": [
|
||||
"**/*.{ts,tsx,js,jsx,json,md,toml,rs}"
|
||||
],
|
||||
"excludes": [
|
||||
".cargo_home",
|
||||
".git",
|
||||
|
@ -33,6 +35,8 @@
|
|||
"cli/tests/testdata/byte_order_mark.ts",
|
||||
"cli/tests/testdata/encoding",
|
||||
"cli/tests/testdata/fmt/*",
|
||||
"cli/tests/testdata/lint/glob/*",
|
||||
"cli/tests/testdata/test/glob/*",
|
||||
"cli/tests/testdata/import_assertions/json_with_shebang.json",
|
||||
"cli/tests/testdata/run/inline_js_source_map*",
|
||||
"cli/tests/testdata/malformed_config/*",
|
||||
|
@ -46,14 +50,15 @@
|
|||
"test_util/wpt",
|
||||
"third_party",
|
||||
"tools/node_compat/TODO.md",
|
||||
"tools/node_compat/versions",
|
||||
"tools/node_compat/node",
|
||||
"tools/wpt/expectation.json",
|
||||
"tools/wpt/manifest.json"
|
||||
"tools/wpt/manifest.json",
|
||||
"ext/websocket/autobahn/reports"
|
||||
],
|
||||
"plugins": [
|
||||
"https://plugins.dprint.dev/typescript-0.84.0.wasm",
|
||||
"https://plugins.dprint.dev/json-0.17.0.wasm",
|
||||
"https://plugins.dprint.dev/markdown-0.15.2.wasm",
|
||||
"https://plugins.dprint.dev/typescript-0.85.0.wasm",
|
||||
"https://plugins.dprint.dev/json-0.17.3.wasm",
|
||||
"https://plugins.dprint.dev/markdown-0.15.3.wasm",
|
||||
"https://plugins.dprint.dev/toml-0.5.4.wasm",
|
||||
"https://plugins.dprint.dev/exec-0.3.5.json@d687dda57be0fe9a0088ccdaefa5147649ff24127d8b3ea227536c68ee7abeab"
|
||||
]
|
||||
|
|
2
.github/PULL_REQUEST_TEMPLATE.md
vendored
2
.github/PULL_REQUEST_TEMPLATE.md
vendored
|
@ -1,5 +1,5 @@
|
|||
<!--
|
||||
Before submitting a PR, please read http://deno.land/manual/contributing
|
||||
Before submitting a PR, please read https://deno.com/manual/contributing
|
||||
|
||||
1. Give the PR a descriptive title.
|
||||
|
||||
|
|
4
.github/SECURITY.md
vendored
4
.github/SECURITY.md
vendored
|
@ -48,6 +48,10 @@ may change slightly over time, but in general the model is as follows:
|
|||
that a value set in one web worker can not be accessed by another.
|
||||
- All runtime I/O is considered to be privileged and must always be guarded by a
|
||||
runtime permission. This includes filesystem access, network access, etc.
|
||||
- The only exception to this is runtime storage explosion attacks that are
|
||||
isolated to a part of the file system, caused by evaluated code (for
|
||||
example, caching big dependencies or no limits on runtime caches such as the
|
||||
[Web Cache](https://developer.mozilla.org/en-US/docs/Web/API/Cache) API).
|
||||
- Users should not be able to self-escalate their permissions without explicit
|
||||
consent.
|
||||
- I/O required to build an initial static module graph should always follow the
|
||||
|
|
36
.github/workflows/ci.generate.ts
vendored
36
.github/workflows/ci.generate.ts
vendored
|
@ -2,6 +2,11 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
import * as yaml from "https://deno.land/std@0.173.0/encoding/yaml.ts";
|
||||
|
||||
// Bump this number when you want to purge the cache.
|
||||
// Note: the tools/release/01_bump_crate_versions.ts script will update this version
|
||||
// automatically via regex, so ensure that this line maintains this format.
|
||||
const cacheVersion = 35;
|
||||
|
||||
const Runners = (() => {
|
||||
const ubuntuRunner = "ubuntu-22.04";
|
||||
const ubuntuXlRunner = "buildjet-8vcpu-ubuntu-2204";
|
||||
|
@ -15,9 +20,8 @@ const Runners = (() => {
|
|||
windows: "windows-2022",
|
||||
};
|
||||
})();
|
||||
// bump the number at the start when you want to purge the cache
|
||||
const prCacheKeyPrefix =
|
||||
"20-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-";
|
||||
`${cacheVersion}-cargo-target-\${{ matrix.os }}-\${{ matrix.profile }}-\${{ matrix.job }}-`;
|
||||
|
||||
const installPkgsCommand =
|
||||
"sudo apt-get install --no-install-recommends debootstrap clang lld";
|
||||
|
@ -362,6 +366,10 @@ const ci = {
|
|||
...submoduleStep("./test_util/wpt"),
|
||||
if: "matrix.wpt",
|
||||
},
|
||||
{
|
||||
...submoduleStep("./tools/node_compat/node"),
|
||||
if: "matrix.job == 'lint'",
|
||||
},
|
||||
{
|
||||
name: "Create source tarballs (release, linux)",
|
||||
if: [
|
||||
|
@ -476,7 +484,7 @@ const ci = {
|
|||
"~/.cargo/git/db",
|
||||
].join("\n"),
|
||||
key:
|
||||
"20-cargo-home-${{ matrix.os }}-${{ hashFiles('Cargo.lock') }}",
|
||||
`${cacheVersion}-cargo-home-\${{ matrix.os }}-\${{ hashFiles('Cargo.lock') }}`,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -541,10 +549,21 @@ const ci = {
|
|||
run:
|
||||
"deno run --unstable --allow-write --allow-read --allow-run ./tools/lint.js",
|
||||
},
|
||||
{
|
||||
name: "node_compat/setup.ts --check",
|
||||
if: "matrix.job == 'lint'",
|
||||
run:
|
||||
"deno run --allow-write --allow-read --allow-run=git ./tools/node_compat/setup.ts --check",
|
||||
},
|
||||
{
|
||||
name: "Build debug",
|
||||
if: "matrix.job == 'test' && matrix.profile == 'debug'",
|
||||
run: "cargo build --locked --all-targets",
|
||||
run: [
|
||||
// output fs space before and after building
|
||||
"df -h",
|
||||
"cargo build --locked --all-targets",
|
||||
"df -h",
|
||||
].join("\n"),
|
||||
env: { CARGO_PROFILE_DEV_DEBUG: 0 },
|
||||
},
|
||||
{
|
||||
|
@ -642,6 +661,15 @@ const ci = {
|
|||
run:
|
||||
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/',
|
||||
},
|
||||
{
|
||||
name: "Autobahn testsuite",
|
||||
if: [
|
||||
"matrix.job == 'test' && matrix.profile == 'release' &&",
|
||||
"!startsWith(github.ref, 'refs/tags/') && startsWith(matrix.os, 'ubuntu')",
|
||||
].join("\n"),
|
||||
run:
|
||||
"target/release/deno run -A --unstable ext/websocket/autobahn/fuzzingclient.js",
|
||||
},
|
||||
{
|
||||
name: "Test debug",
|
||||
if: [
|
||||
|
|
22
.github/workflows/ci.yml
vendored
22
.github/workflows/ci.yml
vendored
|
@ -120,6 +120,9 @@ jobs:
|
|||
- name: Clone submodule ./test_util/wpt
|
||||
run: git submodule update --init --recursive --depth=1 -- ./test_util/wpt
|
||||
if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.wpt)'
|
||||
- name: Clone submodule ./tools/node_compat/node
|
||||
run: git submodule update --init --recursive --depth=1 -- ./tools/node_compat/node
|
||||
if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'')'
|
||||
- name: 'Create source tarballs (release, linux)'
|
||||
if: |-
|
||||
!(github.event_name == 'pull_request' && matrix.skip_pr) && (startsWith(matrix.os, 'ubuntu') &&
|
||||
|
@ -290,7 +293,7 @@ jobs:
|
|||
~/.cargo/registry/index
|
||||
~/.cargo/registry/cache
|
||||
~/.cargo/git/db
|
||||
key: '20-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}'
|
||||
key: '35-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}'
|
||||
if: '!(github.event_name == ''pull_request'' && matrix.skip_pr)'
|
||||
- name: Restore cache build output (PR)
|
||||
uses: actions/cache/restore@v3
|
||||
|
@ -302,7 +305,7 @@ jobs:
|
|||
!./target/*/*.zip
|
||||
!./target/*/*.tar.gz
|
||||
key: never_saved
|
||||
restore-keys: '20-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-'
|
||||
restore-keys: '35-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-'
|
||||
- name: Apply and update mtime cache
|
||||
if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (!startsWith(github.ref, ''refs/tags/''))'
|
||||
uses: ./.github/mtime_cache
|
||||
|
@ -328,9 +331,15 @@ jobs:
|
|||
- name: lint.js
|
||||
if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'')'
|
||||
run: deno run --unstable --allow-write --allow-read --allow-run ./tools/lint.js
|
||||
- name: node_compat/setup.ts --check
|
||||
if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''lint'')'
|
||||
run: deno run --allow-write --allow-read --allow-run=git ./tools/node_compat/setup.ts --check
|
||||
- name: Build debug
|
||||
if: '!(github.event_name == ''pull_request'' && matrix.skip_pr) && (matrix.job == ''test'' && matrix.profile == ''debug'')'
|
||||
run: cargo build --locked --all-targets
|
||||
run: |-
|
||||
df -h
|
||||
cargo build --locked --all-targets
|
||||
df -h
|
||||
env:
|
||||
CARGO_PROFILE_DEV_DEBUG: 0
|
||||
- name: Build release
|
||||
|
@ -399,6 +408,11 @@ jobs:
|
|||
env:
|
||||
CLOUDSDK_PYTHON: '${{env.pythonLocation}}\python.exe'
|
||||
run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/'
|
||||
- name: Autobahn testsuite
|
||||
if: |-
|
||||
!(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' && matrix.profile == 'release' &&
|
||||
!startsWith(github.ref, 'refs/tags/') && startsWith(matrix.os, 'ubuntu'))
|
||||
run: target/release/deno run -A --unstable ext/websocket/autobahn/fuzzingclient.js
|
||||
- name: Test debug
|
||||
if: |-
|
||||
!(github.event_name == 'pull_request' && matrix.skip_pr) && (matrix.job == 'test' && matrix.profile == 'debug' &&
|
||||
|
@ -578,7 +592,7 @@ jobs:
|
|||
!./target/*/gn_out
|
||||
!./target/*/*.zip
|
||||
!./target/*/*.tar.gz
|
||||
key: '20-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
|
||||
key: '35-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
|
||||
publish-canary:
|
||||
name: publish canary
|
||||
runs-on: ubuntu-22.04
|
||||
|
|
2
.gitignore
vendored
2
.gitignore
vendored
|
@ -26,3 +26,5 @@ gclient_config.py_entries
|
|||
# WPT generated cert files
|
||||
/tools/wpt/certs/index.txt*
|
||||
/tools/wpt/certs/serial*
|
||||
|
||||
/ext/websocket/autobahn/reports
|
||||
|
|
3
.gitmodules
vendored
3
.gitmodules
vendored
|
@ -10,3 +10,6 @@
|
|||
path = test_util/wpt
|
||||
url = https://github.com/web-platform-tests/wpt.git
|
||||
|
||||
[submodule "tools/node_compat/node"]
|
||||
path = tools/node_compat/node
|
||||
url = https://github.com/denoland/node_test.git
|
||||
|
|
937
Cargo.lock
generated
937
Cargo.lock
generated
File diff suppressed because it is too large
Load diff
81
Cargo.toml
81
Cargo.toml
|
@ -41,38 +41,41 @@ license = "MIT"
|
|||
repository = "https://github.com/denoland/deno"
|
||||
|
||||
[workspace.dependencies]
|
||||
v8 = { version = "0.68.0", default-features = false }
|
||||
deno_ast = { version = "0.25.0", features = ["transpiling"] }
|
||||
v8 = { version = "0.73.0", default-features = false }
|
||||
deno_ast = { version = "0.27.0", features = ["transpiling"] }
|
||||
|
||||
deno_core = { version = "0.181.0", path = "./core" }
|
||||
deno_ops = { version = "0.59.0", path = "./ops" }
|
||||
serde_v8 = { version = "0.92.0", path = "./serde_v8" }
|
||||
deno_runtime = { version = "0.107.0", path = "./runtime" }
|
||||
napi_sym = { version = "0.29.0", path = "./cli/napi/sym" }
|
||||
deno_bench_util = { version = "0.93.0", path = "./bench_util" }
|
||||
deno_core = { version = "0.189.0", path = "./core" }
|
||||
deno_ops = { version = "0.67.0", path = "./ops" }
|
||||
serde_v8 = { version = "0.100.0", path = "./serde_v8" }
|
||||
deno_runtime = { version = "0.115.0", path = "./runtime" }
|
||||
napi_sym = { version = "0.37.0", path = "./cli/napi/sym" }
|
||||
deno_bench_util = { version = "0.101.0", path = "./bench_util" }
|
||||
test_util = { path = "./test_util" }
|
||||
deno_lockfile = "0.13.0"
|
||||
deno_lockfile = "0.14.1"
|
||||
deno_media_type = { version = "0.1.0", features = ["module_specifier"] }
|
||||
deno_npm = "0.6.0"
|
||||
deno_semver = "0.2.1"
|
||||
|
||||
# exts
|
||||
deno_broadcast_channel = { version = "0.93.0", path = "./ext/broadcast_channel" }
|
||||
deno_cache = { version = "0.31.0", path = "./ext/cache" }
|
||||
deno_console = { version = "0.99.0", path = "./ext/console" }
|
||||
deno_crypto = { version = "0.113.0", path = "./ext/crypto" }
|
||||
deno_fetch = { version = "0.123.0", path = "./ext/fetch" }
|
||||
deno_ffi = { version = "0.86.0", path = "./ext/ffi" }
|
||||
deno_fs = { version = "0.9.0", path = "./ext/fs" }
|
||||
deno_http = { version = "0.94.0", path = "./ext/http" }
|
||||
deno_io = { version = "0.9.0", path = "./ext/io" }
|
||||
deno_net = { version = "0.91.0", path = "./ext/net" }
|
||||
deno_node = { version = "0.36.0", path = "./ext/node" }
|
||||
deno_kv = { version = "0.7.0", path = "./ext/kv" }
|
||||
deno_tls = { version = "0.86.0", path = "./ext/tls" }
|
||||
deno_url = { version = "0.99.0", path = "./ext/url" }
|
||||
deno_web = { version = "0.130.0", path = "./ext/web" }
|
||||
deno_webidl = { version = "0.99.0", path = "./ext/webidl" }
|
||||
deno_websocket = { version = "0.104.0", path = "./ext/websocket" }
|
||||
deno_webstorage = { version = "0.94.0", path = "./ext/webstorage" }
|
||||
deno_napi = { version = "0.29.0", path = "./ext/napi" }
|
||||
deno_broadcast_channel = { version = "0.101.0", path = "./ext/broadcast_channel" }
|
||||
deno_cache = { version = "0.39.0", path = "./ext/cache" }
|
||||
deno_console = { version = "0.107.0", path = "./ext/console" }
|
||||
deno_crypto = { version = "0.121.0", path = "./ext/crypto" }
|
||||
deno_fetch = { version = "0.131.0", path = "./ext/fetch" }
|
||||
deno_ffi = { version = "0.94.0", path = "./ext/ffi" }
|
||||
deno_fs = { version = "0.17.0", path = "./ext/fs" }
|
||||
deno_http = { version = "0.102.0", path = "./ext/http" }
|
||||
deno_io = { version = "0.17.0", path = "./ext/io" }
|
||||
deno_net = { version = "0.99.0", path = "./ext/net" }
|
||||
deno_node = { version = "0.44.0", path = "./ext/node" }
|
||||
deno_kv = { version = "0.15.0", path = "./ext/kv" }
|
||||
deno_tls = { version = "0.94.0", path = "./ext/tls" }
|
||||
deno_url = { version = "0.107.0", path = "./ext/url" }
|
||||
deno_web = { version = "0.138.0", path = "./ext/web" }
|
||||
deno_webidl = { version = "0.107.0", path = "./ext/webidl" }
|
||||
deno_websocket = { version = "0.112.0", path = "./ext/websocket" }
|
||||
deno_webstorage = { version = "0.102.0", path = "./ext/webstorage" }
|
||||
deno_napi = { version = "0.37.0", path = "./ext/napi" }
|
||||
|
||||
aes = "=0.8.2"
|
||||
anyhow = "1.0.57"
|
||||
|
@ -88,13 +91,16 @@ data-url = "=0.2.0"
|
|||
dlopen = "0.1.8"
|
||||
encoding_rs = "=0.8.31"
|
||||
ecb = "=0.1.1"
|
||||
fastwebsockets = "=0.3.1"
|
||||
filetime = "0.2.16"
|
||||
flate2 = "=1.0.24"
|
||||
fs3 = "0.5.0"
|
||||
futures = "0.3.21"
|
||||
glob = "0.3.1"
|
||||
hex = "0.4"
|
||||
http = "0.2.9"
|
||||
httparse = "1.8.0"
|
||||
hyper = "0.14.26"
|
||||
hyper = { version = "0.14.26", features = ["runtime", "http1"] }
|
||||
indexmap = { version = "1.9.2", features = ["serde"] }
|
||||
libc = "0.2.126"
|
||||
log = "=0.4.17"
|
||||
|
@ -111,10 +117,10 @@ pretty_assertions = "=1.3.0"
|
|||
rand = "=0.8.5"
|
||||
regex = "^1.7.0"
|
||||
lazy-regex = "2.5.0"
|
||||
reqwest = { version = "0.11.11", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks"] }
|
||||
reqwest = { version = "0.11.18", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks"] }
|
||||
ring = "=0.16.20"
|
||||
rusqlite = { version = "=0.28.0", features = ["unlock_notify", "bundled"] }
|
||||
rustls = "0.20.5"
|
||||
rustls = "0.21.0"
|
||||
rustls-pemfile = "1.0.0"
|
||||
serde = { version = "1.0.149", features = ["derive"] }
|
||||
serde_bytes = "0.11"
|
||||
|
@ -122,19 +128,23 @@ serde_json = "1.0.85"
|
|||
serde_repr = "=0.1.9"
|
||||
sha2 = { version = "0.10.6", features = ["oid"] }
|
||||
signature = "=1.6.4"
|
||||
slab = "0.4"
|
||||
smallvec = "1.8"
|
||||
socket2 = "0.4.7"
|
||||
tar = "=0.4.38"
|
||||
tempfile = "3.4.0"
|
||||
thiserror = "=1.0.38"
|
||||
tokio = { version = "=1.25.0", features = ["full"] }
|
||||
tokio-rustls = "0.23.3"
|
||||
tokio-tungstenite = "0.16.1"
|
||||
thiserror = "1.0.40"
|
||||
tokio = { version = "1.28.1", features = ["full"] }
|
||||
tokio-rustls = "0.24.0"
|
||||
tokio-util = "0.7.4"
|
||||
tower-lsp = { version = "=0.17.0", features = ["proposed"] }
|
||||
url = { version = "2.3.1", features = ["serde", "expose_internals"] }
|
||||
uuid = { version = "1.3.0", features = ["v4"] }
|
||||
zstd = "=0.11.2"
|
||||
elliptic-curve = { version = "0.13.4", features = ["alloc", "arithmetic", "ecdh", "std", "pem"] }
|
||||
p224 = { version = "0.13.0", features = ["ecdh"] }
|
||||
p256 = { version = "0.13.2", features = ["ecdh"] }
|
||||
p384 = { version = "0.13.0", features = ["ecdh"] }
|
||||
|
||||
# crypto
|
||||
rsa = { version = "0.7.0", default-features = false, features = ["std", "pem", "hazmat"] } # hazmat needed for PrehashSigner in ext/node
|
||||
|
@ -152,6 +162,7 @@ nix = "=0.24.2"
|
|||
fwdansi = "=1.1.0"
|
||||
winres = "=0.1.12"
|
||||
winapi = "=0.3.9"
|
||||
windows-sys = { version = "0.48.0", features = ["Win32_Media"] }
|
||||
|
||||
# NB: the `bench` and `release` profiles must remain EXACTLY the same.
|
||||
[profile.release]
|
||||
|
|
176
Releases.md
176
Releases.md
|
@ -6,6 +6,182 @@ https://github.com/denoland/deno/releases
|
|||
We also have one-line install commands at:
|
||||
https://github.com/denoland/deno_install
|
||||
|
||||
### 1.34.1 / 2023.05.29
|
||||
|
||||
- fix(compile): handle when DENO_DIR is readonly (#19257)
|
||||
- fix(compile): implicit read permission to npm vfs (#19281)
|
||||
- fix(compile): improve panic message when stripping root path fails (#19258)
|
||||
- fix(compile): inline symlinks as files outside node_modules dir and warn for
|
||||
directories (#19285)
|
||||
- fix(ext/http): fix a possible memleak in Brotli (#19250)
|
||||
- fix(napi): clear currently registering module slot (#19249)
|
||||
- fix(napi): properly handle arguments in napi_get_cb_info (#19269)
|
||||
- fix(node): http.IncomingMessageForClient.complete (#19302)
|
||||
- fix(node): make 'v8.setFlagsFromString' a noop (#19271)
|
||||
- fix: don't print release notes on version check prompt (#19252)
|
||||
- fix: use proper ALPN protocols if HTTP client is HTTP/1.1 only (#19303)
|
||||
|
||||
### 1.34.0 / 2023.05.24
|
||||
|
||||
- BREAKING(unstable): change return type of Deno.serve() API (#19189)
|
||||
- feat(cli): add `nodeModulesDir` option to config file (#19095)
|
||||
- feat(cli): top-level `exclude` field in `deno.json` (#17778)
|
||||
- feat(ext/fs): add isBlockDevice, isCharDevice, isFifo, isSocket to FileInfo
|
||||
(#19008)
|
||||
- feat(ext/http): Add support for trailers w/internal API (HTTP/2 only) (#19182)
|
||||
- feat(ext/http): Brotli Compression (#19216)
|
||||
- feat(ext/http): ref/unref for server (#19197)
|
||||
- feat(lsp): support lockfile and node_modules directory (#19203)
|
||||
- feat(runtime): Provide environment-configurable options for tokio parameters
|
||||
(#19173)
|
||||
- feat(task): glob expansion (#19084)
|
||||
- feat(unstable): add more options to Deno.createHttpClient (#17385)
|
||||
- feat(vendor): support for npm specifiers (#19186)
|
||||
- feat: add support for globs in the config file and CLI arguments for files
|
||||
(#19102)
|
||||
- feat: top level package.json install when node_modules dir is explicitly opted
|
||||
into (#19233)
|
||||
- fix(ext/node): ClientRequest.setTimeout(0) should remove listeners (#19240)
|
||||
- fix(ext/node): add basic node:worker_threads support (#19192)
|
||||
- fix(ext/web): improve timers resolution for 0ms timeouts (#19212)
|
||||
- fix(napi): add napi_async_init and napi_async_destroy (#19234)
|
||||
- fix(node): add http.Server.unref() (#19201)
|
||||
- fix(node): duplicate node_module suffixes (#19222)
|
||||
- fix(node): fire 'unhandledrejection' event when using node: or npm: imports
|
||||
(#19235)
|
||||
- fix(node): make sure "setImmediate" is not clamped to 4ms (#19213)
|
||||
- fix(npm): `process` not defined in readline (#19184)
|
||||
- fix(npm): better handling of optional peer dependencies (#19236)
|
||||
- fix(npm): create `node_modules/.deno/node_modules` folder (#19242)
|
||||
- fix(npm): run pre and post tasks if present (#19178)
|
||||
- fix(npm): store npm binary command resolution in lockfile (#19219)
|
||||
|
||||
### 1.33.4 / 2023.05.18
|
||||
|
||||
- fix(ext/web): Request higher-resolution timer on Windows if user requests
|
||||
setTimeout w/short delay (#19149)
|
||||
- feat(node/crypto): Builtin Diffie-Hellman Groups (#19137)
|
||||
- feat(node/crypto): Diffie Hellman Support (#18943)
|
||||
- fix(cli/napi): handle finalizers (#19168)
|
||||
- fix(deno/upgrade): allow --version vX.Y.Z (#19139)
|
||||
- fix(dts): move BroadcastChannel type to lib.deno.unstable.d.ts (#19108)
|
||||
- fix(ext/http): Ensure cancelled requests don't crash Deno.serve (#19154)
|
||||
- fix(ext/node): fix whatwg url formatting (#19146)
|
||||
- fix(ext/node): make nodeGlobalThis configurable (#19163)
|
||||
- fix(ext/webidl): change createPromiseConverter (#16367)
|
||||
- fix(ext/websocket): order of ws writes (#19131)
|
||||
- fix(fetch): Correctly decode `multipart/form-data` names and filenames
|
||||
(#19145)
|
||||
- fix(kv): kv.close() interrupts in-flight operations (#19076)
|
||||
- fix(lsp): increase default max heap size to 3Gb (#19115)
|
||||
- fix(napi): BigInt related APIs (#19174)
|
||||
- fix(node): export diagnostics_channel module (#19167)
|
||||
- fix(node): export punycode module (#19151)
|
||||
- fix(node): support passing parent stdio streams (#19171)
|
||||
- fix(npm): add performance.markResourceTiming sham (#19123)
|
||||
- fix(npm): improved optional dependency support (#19135)
|
||||
- fix(runtime): Box the main future to avoid blowing up the stack (#19155)
|
||||
- fix(runtime): Example hello_runtime panic (#19125)
|
||||
- fix: support "fetch" over HTTPS for IP addresses (#18499)
|
||||
|
||||
### 1.33.3 / 2023.05.12
|
||||
|
||||
- feat(compile): unstable npm and node specifier support (#19005)
|
||||
- feat(ext/http): Automatic compression for Deno.serve (#19031)
|
||||
- feat(lsp): ability to configure document pre-load limit (#19097)
|
||||
- feat(node): add `Module.runMain()` (#19080)
|
||||
- fix(cli): upgrade to Typescript 5.0.4 (#19090)
|
||||
- fix(console): handle error when inspecting promise-like (#19083)
|
||||
- fix(core): always report the first error on unhandled rejection (#18992)
|
||||
- fix(core): let V8 drive extension ESM loads (#18997)
|
||||
- fix(dts): align `seekSync` `position` arg with `seek` (#19077)
|
||||
- fix(ext/ffi): Callbacks panic on returning isize (#19022)
|
||||
- fix(ext/ffi): UnsafeCallback can hang with 'deno test' (#19018)
|
||||
- fix(ext/fs): add more context_path (#19101)
|
||||
- fix(ext/http): Ensure Deno.serve works across --watch restarts (#18998)
|
||||
- fix(lsp): hard to soft error when unable to get completion info (#19091)
|
||||
- fix(lsp): preload documents when `deno.documentPreloadLimit` changes (#19103)
|
||||
- fix(node): conditional exports edge case (#19082)
|
||||
- fix(node): expose channels in worker_threads (#19086)
|
||||
- fix(npm): make http2 module available, make 'nodeGlobalThisName' writable
|
||||
(#19092)
|
||||
- fix(runtime): `ChildProcess::kill()` doesn't require additional perms (#15339)
|
||||
- fix(vendor): better handling of redirects (#19063)
|
||||
- perf(ext/ffi): Use `Box<[NativeType]>` in CallbackInfo parameters (#19032)
|
||||
- perf(fmt): faster formatting for minified object literals (#19050)
|
||||
|
||||
### 1.33.2 / 2023.05.04
|
||||
|
||||
- fix(core): Use primordials for methods (#18839)
|
||||
- fix(core): allow esm extensions not included in snapshot (#18980)
|
||||
- fix(core): rebuild when JS sources for snapshotting change (#18976)
|
||||
- fix(ext/io) several sync fs fixes (#18886)
|
||||
- fix(ext/kv): KvU64#valueOf and KvU64 inspect (#18656)
|
||||
- fix(ext/kv): stricter structured clone serializer (#18914)
|
||||
- fix(ext/kv): throw on the Kv constructor (#18978)
|
||||
- fix(ext/node): add missing `release` property to node's `process` (#18923)
|
||||
- fix(ext/url): throw `TypeError` for empty argument (#18896)
|
||||
- fix(ext/websocket): update fastwebsockets to 0.3.1 (#18916)
|
||||
- fix(fmt/json): support formatting number with exponent and no sign (#18894)
|
||||
- fix(node/http): Request.setTimeout(0) should clear (#18949)
|
||||
- fix(npm): canonicalize filename before returning (#18948)
|
||||
- fix(npm): canonicalize search directory when looking for package.json (#18981)
|
||||
- fix(test): disable preventDefault() for beforeunload event (#18911)
|
||||
- perf(core): async op pseudo-codegen and performance work (#18887)
|
||||
- perf(core): use jemalloc for V8 array buffer allocator (#18875)
|
||||
- perf(ext/web): fast path for ws events (#18905)
|
||||
- perf(ext/websocket): use internal dispatch for msg events (#18904)
|
||||
- perf: lazily create RootCertStore (#18938)
|
||||
- perf: lazily retrieve ppid (#18940)
|
||||
- perf: use jemalloc as global allocator (#18957)
|
||||
|
||||
### 1.33.1 / 2023.04.28
|
||||
|
||||
- fix(ext/fetch): subview Uint8Array in Req/Resp (#18890)
|
||||
- fix(ext/websocket): client connect URI (#18892)
|
||||
- fix(ext/websocket): restore op_ws_send_ping (#18891)
|
||||
- fix(repl): don't panic on undefined exception (#18888)
|
||||
|
||||
### 1.33.0 / 2023.04.27
|
||||
|
||||
- BREAKING(unstable): remove "Deno.serve(handler, options)" overload (#18759)
|
||||
- Revert "chore(ext/websocket): Add autobahn|testsuite fuzzingclient (#…
|
||||
(#18856)
|
||||
- feat(bench): add `--no-run` flag (#18433)
|
||||
- feat(cli): don't check permissions for statically analyzable dynamic imports
|
||||
(#18713)
|
||||
- feat(cli): flatten deno.json configuaration (#17799)
|
||||
- feat(ext/ffi): support marking symbols as optional (#18529)
|
||||
- feat(ext/http): Rework Deno.serve using hyper 1.0-rc3 (#18619)
|
||||
- feat(ext/kv): add more atomic operation helpers (#18854)
|
||||
- feat(ext/kv): return ok bool from atomic commit (#18873)
|
||||
- feat(ext/url): `URL.canParse` (#18286)
|
||||
- feat(lint): add `Deno.run` to `no-deprecated-deno-api` (#18869)
|
||||
- feat(node/crypto): Elliptic Curve Diffie-Hellman (ECDH) support (#18832)
|
||||
- feat(node/http): implement ClientRequest.setTimeout() (#18783)
|
||||
- feat(task): introduce built-in `unset` command to `deno task` (#18606)
|
||||
- feat: Deprecate Deno.run API in favor of Deno.Command (#17630) (#18866)
|
||||
- fix(compile): write bytes directly to output file (#18777)
|
||||
- fix(core): Wrap safe collections' argument of primordials (#18750)
|
||||
- fix(coverage): exclude test files (#18748)
|
||||
- fix(dts): `URLPatternComponentResult` groups should have possibly undefined
|
||||
key values (#18643)
|
||||
- fix(ext/node): add crypto.sign|verify methods (#18765)
|
||||
- fix(ext/node): fix hash.flush (#18818)
|
||||
- fix(ext/node): implement asymmetric keygen (#18651)
|
||||
- fix(ext/node): improve vm.runInThisContext (#18767)
|
||||
- fix(ext/node): prime generation (#18861)
|
||||
- fix(lsp): show dependency errors for repeated imports (#18807)
|
||||
- fix(npm): only include top level packages in top level node_modules directory
|
||||
(#18824)
|
||||
- fix(test): allow explicit undefined for boolean test options (#18786)
|
||||
- fix(test): handle dispatched exceptions from test functions (#18853)
|
||||
- perf(ext/http): avoid spread arg deopt in op_http_wait (#18850)
|
||||
- perf(ext/http): optimize away code based on callback length (#18849)
|
||||
- perf(ext/http): optimize for zero or one-packet response streams (#18834)
|
||||
- perf(ext/http): use smi for slab IDs (#18848)
|
||||
- perf(ext/websocket): various performance improvements (#18862)
|
||||
|
||||
### 1.32.5 / 2023.04.18
|
||||
|
||||
- feat(UNSTABLE/kv): AtomicOperation#sum (#18704)
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_bench_util"
|
||||
version = "0.93.0"
|
||||
version = "0.101.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -10,9 +10,7 @@ use crate::profiling::is_profiling;
|
|||
pub fn create_js_runtime(setup: impl FnOnce() -> Vec<Extension>) -> JsRuntime {
|
||||
JsRuntime::new(RuntimeOptions {
|
||||
extensions: setup(),
|
||||
module_loader: Some(
|
||||
std::rc::Rc::new(deno_core::ExtModuleLoader::default()),
|
||||
),
|
||||
module_loader: None,
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno"
|
||||
version = "1.32.5"
|
||||
version = "1.34.1"
|
||||
authors.workspace = true
|
||||
default-run = "deno"
|
||||
edition.workspace = true
|
||||
|
@ -42,15 +42,16 @@ winres.workspace = true
|
|||
[dependencies]
|
||||
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "dep_graph", "module_specifier", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
|
||||
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
|
||||
deno_doc = "0.61.0"
|
||||
deno_emit = "0.19.0"
|
||||
deno_graph = "=0.47.1"
|
||||
deno_lint = { version = "0.43.0", features = ["docs"] }
|
||||
deno_doc = "=0.63.1"
|
||||
deno_emit = "=0.24.0"
|
||||
deno_graph = "=0.49.0"
|
||||
deno_lint = { version = "=0.47.0", features = ["docs"] }
|
||||
deno_lockfile.workspace = true
|
||||
deno_npm = "0.3.0"
|
||||
deno_npm.workspace = true
|
||||
deno_runtime = { workspace = true, features = ["dont_create_runtime_snapshot", "include_js_files_for_snapshotting"] }
|
||||
deno_semver = "0.2.1"
|
||||
deno_task_shell = "0.11.0"
|
||||
deno_semver.workspace = true
|
||||
deno_task_shell = "=0.12.0"
|
||||
eszip = "=0.43.0"
|
||||
napi_sym.workspace = true
|
||||
|
||||
async-trait.workspace = true
|
||||
|
@ -65,24 +66,26 @@ clap_complete_fig = "=4.1.2"
|
|||
console_static_text.workspace = true
|
||||
data-url.workspace = true
|
||||
dissimilar = "=1.0.4"
|
||||
dprint-plugin-json = "=0.17.0"
|
||||
dprint-plugin-markdown = "=0.15.2"
|
||||
dprint-plugin-typescript = "=0.84.0"
|
||||
dprint-plugin-json = "=0.17.3"
|
||||
dprint-plugin-markdown = "=0.15.3"
|
||||
dprint-plugin-typescript = "=0.85.0"
|
||||
encoding_rs.workspace = true
|
||||
env_logger = "=0.9.0"
|
||||
eszip = "=0.40.0"
|
||||
fancy-regex = "=0.10.0"
|
||||
fastwebsockets.workspace = true
|
||||
flate2.workspace = true
|
||||
fs3.workspace = true
|
||||
glob = "0.3.1"
|
||||
http.workspace = true
|
||||
hyper.workspace = true
|
||||
import_map = "=0.15.0"
|
||||
indexmap.workspace = true
|
||||
jsonc-parser = { version = "=0.21.0", features = ["serde"] }
|
||||
jsonc-parser = { version = "=0.21.1", features = ["serde"] }
|
||||
lazy-regex.workspace = true
|
||||
libc.workspace = true
|
||||
log = { workspace = true, features = ["serde"] }
|
||||
lsp-types.workspace = true
|
||||
monch = "=0.4.1"
|
||||
monch = "=0.4.2"
|
||||
notify.workspace = true
|
||||
once_cell.workspace = true
|
||||
os_pipe.workspace = true
|
||||
|
|
|
@ -279,7 +279,7 @@ impl Serialize for TsConfig {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
|
||||
#[serde(default, deny_unknown_fields)]
|
||||
pub struct LintRulesConfig {
|
||||
pub tags: Option<Vec<String>>,
|
||||
|
@ -287,7 +287,7 @@ pub struct LintRulesConfig {
|
|||
pub exclude: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
|
||||
#[serde(default, deny_unknown_fields)]
|
||||
struct SerializedFilesConfig {
|
||||
pub include: Vec<String>,
|
||||
|
@ -299,26 +299,25 @@ impl SerializedFilesConfig {
|
|||
self,
|
||||
config_file_specifier: &ModuleSpecifier,
|
||||
) -> Result<FilesConfig, AnyError> {
|
||||
let config_dir = specifier_parent(config_file_specifier);
|
||||
let config_dir =
|
||||
specifier_to_file_path(&specifier_parent(config_file_specifier))?;
|
||||
Ok(FilesConfig {
|
||||
include: self
|
||||
.include
|
||||
.into_iter()
|
||||
.map(|p| {
|
||||
let url = config_dir.join(&p)?;
|
||||
specifier_to_file_path(&url)
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?,
|
||||
.map(|p| config_dir.join(p))
|
||||
.collect::<Vec<_>>(),
|
||||
exclude: self
|
||||
.exclude
|
||||
.into_iter()
|
||||
.map(|p| {
|
||||
let url = config_dir.join(&p)?;
|
||||
specifier_to_file_path(&url)
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?,
|
||||
.map(|p| config_dir.join(p))
|
||||
.collect::<Vec<_>>(),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.include.is_empty() && self.exclude.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Eq, PartialEq)]
|
||||
|
@ -344,13 +343,66 @@ impl FilesConfig {
|
|||
self.include.is_empty()
|
||||
|| self.include.iter().any(|i| file_path.starts_with(i))
|
||||
}
|
||||
|
||||
fn extend(self, rhs: Self) -> Self {
|
||||
Self {
|
||||
include: [self.include, rhs.include].concat(),
|
||||
exclude: [self.exclude, rhs.exclude].concat(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
/// Choose between flat and nested files configuration.
|
||||
///
|
||||
/// `files` has precedence over `deprecated_files`.
|
||||
/// when `deprecated_files` is present, a warning is logged.
|
||||
///
|
||||
/// caveat: due to default values, it's not possible to distinguish between
|
||||
/// an empty configuration and a configuration with default values.
|
||||
/// `{ "files": {} }` is equivalent to `{ "files": { "include": [], "exclude": [] } }`
|
||||
/// and it wouldn't be able to emit warning for `{ "files": {}, "exclude": [] }`.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `files` - Flat configuration.
|
||||
/// * `deprecated_files` - Nested configuration. ("Files")
|
||||
fn choose_files(
|
||||
files: SerializedFilesConfig,
|
||||
deprecated_files: SerializedFilesConfig,
|
||||
) -> SerializedFilesConfig {
|
||||
const DEPRECATED_FILES: &str =
|
||||
"Warning: \"files\" configuration is deprecated";
|
||||
const FLAT_CONFIG: &str = "\"include\" and \"exclude\"";
|
||||
|
||||
let (files_nonempty, deprecated_files_nonempty) =
|
||||
(!files.is_empty(), !deprecated_files.is_empty());
|
||||
|
||||
match (files_nonempty, deprecated_files_nonempty) {
|
||||
(true, true) => {
|
||||
log::warn!("{DEPRECATED_FILES} and ignored by {FLAT_CONFIG}.");
|
||||
files
|
||||
}
|
||||
(true, false) => files,
|
||||
(false, true) => {
|
||||
log::warn!("{DEPRECATED_FILES}. Please use {FLAT_CONFIG} instead.");
|
||||
deprecated_files
|
||||
}
|
||||
(false, false) => SerializedFilesConfig::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// `lint` config representation for serde
|
||||
///
|
||||
/// fields `include` and `exclude` are expanded from [SerializedFilesConfig].
|
||||
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
|
||||
#[serde(default, deny_unknown_fields)]
|
||||
struct SerializedLintConfig {
|
||||
pub rules: LintRulesConfig,
|
||||
pub files: SerializedFilesConfig,
|
||||
pub include: Vec<String>,
|
||||
pub exclude: Vec<String>,
|
||||
|
||||
#[serde(rename = "files")]
|
||||
pub deprecated_files: SerializedFilesConfig,
|
||||
pub report: Option<String>,
|
||||
}
|
||||
|
||||
|
@ -359,22 +411,33 @@ impl SerializedLintConfig {
|
|||
self,
|
||||
config_file_specifier: &ModuleSpecifier,
|
||||
) -> Result<LintConfig, AnyError> {
|
||||
let (include, exclude) = (self.include, self.exclude);
|
||||
let files = SerializedFilesConfig { include, exclude };
|
||||
|
||||
Ok(LintConfig {
|
||||
rules: self.rules,
|
||||
files: self.files.into_resolved(config_file_specifier)?,
|
||||
files: choose_files(files, self.deprecated_files)
|
||||
.into_resolved(config_file_specifier)?,
|
||||
report: self.report,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
#[derive(Clone, Debug, Default, PartialEq)]
|
||||
pub struct LintConfig {
|
||||
pub rules: LintRulesConfig,
|
||||
pub files: FilesConfig,
|
||||
pub report: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
|
||||
impl LintConfig {
|
||||
pub fn with_files(self, files: FilesConfig) -> Self {
|
||||
let files = self.files.extend(files);
|
||||
Self { files, ..self }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq)]
|
||||
#[serde(deny_unknown_fields, rename_all = "camelCase")]
|
||||
pub enum ProseWrap {
|
||||
Always,
|
||||
|
@ -382,7 +445,7 @@ pub enum ProseWrap {
|
|||
Preserve,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
|
||||
#[serde(default, deny_unknown_fields, rename_all = "camelCase")]
|
||||
pub struct FmtOptionsConfig {
|
||||
pub use_tabs: Option<bool>,
|
||||
|
@ -393,11 +456,75 @@ pub struct FmtOptionsConfig {
|
|||
pub semi_colons: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
#[serde(default, deny_unknown_fields)]
|
||||
impl FmtOptionsConfig {
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.use_tabs.is_none()
|
||||
&& self.line_width.is_none()
|
||||
&& self.indent_width.is_none()
|
||||
&& self.single_quote.is_none()
|
||||
&& self.prose_wrap.is_none()
|
||||
&& self.semi_colons.is_none()
|
||||
}
|
||||
}
|
||||
|
||||
/// Choose between flat and nested fmt options.
|
||||
///
|
||||
/// `options` has precedence over `deprecated_options`.
|
||||
/// when `deprecated_options` is present, a warning is logged.
|
||||
///
|
||||
/// caveat: due to default values, it's not possible to distinguish between
|
||||
/// an empty configuration and a configuration with default values.
|
||||
/// `{ "fmt": {} } is equivalent to `{ "fmt": { "options": {} } }`
|
||||
/// and it wouldn't be able to emit warning for `{ "fmt": { "options": {}, "semiColons": "false" } }`.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `options` - Flat options.
|
||||
/// * `deprecated_options` - Nested files configuration ("option").
|
||||
fn choose_fmt_options(
|
||||
options: FmtOptionsConfig,
|
||||
deprecated_options: FmtOptionsConfig,
|
||||
) -> FmtOptionsConfig {
|
||||
const DEPRECATED_OPTIONS: &str =
|
||||
"Warning: \"options\" configuration is deprecated";
|
||||
const FLAT_OPTION: &str = "\"flat\" options";
|
||||
|
||||
let (options_nonempty, deprecated_options_nonempty) =
|
||||
(!options.is_empty(), !deprecated_options.is_empty());
|
||||
|
||||
match (options_nonempty, deprecated_options_nonempty) {
|
||||
(true, true) => {
|
||||
log::warn!("{DEPRECATED_OPTIONS} and ignored by {FLAT_OPTION}.");
|
||||
options
|
||||
}
|
||||
(true, false) => options,
|
||||
(false, true) => {
|
||||
log::warn!("{DEPRECATED_OPTIONS}. Please use {FLAT_OPTION} instead.");
|
||||
deprecated_options
|
||||
}
|
||||
(false, false) => FmtOptionsConfig::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// `fmt` config representation for serde
|
||||
///
|
||||
/// fields from `use_tabs`..`semi_colons` are expanded from [FmtOptionsConfig].
|
||||
/// fields `include` and `exclude` are expanded from [SerializedFilesConfig].
|
||||
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
|
||||
#[serde(default, deny_unknown_fields, rename_all = "camelCase")]
|
||||
struct SerializedFmtConfig {
|
||||
pub options: FmtOptionsConfig,
|
||||
pub files: SerializedFilesConfig,
|
||||
pub use_tabs: Option<bool>,
|
||||
pub line_width: Option<u32>,
|
||||
pub indent_width: Option<u8>,
|
||||
pub single_quote: Option<bool>,
|
||||
pub prose_wrap: Option<ProseWrap>,
|
||||
pub semi_colons: Option<bool>,
|
||||
#[serde(rename = "options")]
|
||||
pub deprecated_options: FmtOptionsConfig,
|
||||
pub include: Vec<String>,
|
||||
pub exclude: Vec<String>,
|
||||
#[serde(rename = "files")]
|
||||
pub deprecated_files: SerializedFilesConfig,
|
||||
}
|
||||
|
||||
impl SerializedFmtConfig {
|
||||
|
@ -405,23 +532,48 @@ impl SerializedFmtConfig {
|
|||
self,
|
||||
config_file_specifier: &ModuleSpecifier,
|
||||
) -> Result<FmtConfig, AnyError> {
|
||||
let (include, exclude) = (self.include, self.exclude);
|
||||
let files = SerializedFilesConfig { include, exclude };
|
||||
let options = FmtOptionsConfig {
|
||||
use_tabs: self.use_tabs,
|
||||
line_width: self.line_width,
|
||||
indent_width: self.indent_width,
|
||||
single_quote: self.single_quote,
|
||||
prose_wrap: self.prose_wrap,
|
||||
semi_colons: self.semi_colons,
|
||||
};
|
||||
|
||||
Ok(FmtConfig {
|
||||
options: self.options,
|
||||
files: self.files.into_resolved(config_file_specifier)?,
|
||||
options: choose_fmt_options(options, self.deprecated_options),
|
||||
files: choose_files(files, self.deprecated_files)
|
||||
.into_resolved(config_file_specifier)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
#[derive(Clone, Debug, Default, PartialEq)]
|
||||
pub struct FmtConfig {
|
||||
pub options: FmtOptionsConfig,
|
||||
pub files: FilesConfig,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
impl FmtConfig {
|
||||
pub fn with_files(self, files: FilesConfig) -> Self {
|
||||
let files = self.files.extend(files);
|
||||
Self { files, ..self }
|
||||
}
|
||||
}
|
||||
|
||||
/// `test` config representation for serde
|
||||
///
|
||||
/// fields `include` and `exclude` are expanded from [SerializedFilesConfig].
|
||||
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
|
||||
#[serde(default, deny_unknown_fields)]
|
||||
struct SerializedTestConfig {
|
||||
pub files: SerializedFilesConfig,
|
||||
pub include: Vec<String>,
|
||||
pub exclude: Vec<String>,
|
||||
#[serde(rename = "files")]
|
||||
pub deprecated_files: SerializedFilesConfig,
|
||||
}
|
||||
|
||||
impl SerializedTestConfig {
|
||||
|
@ -429,21 +581,38 @@ impl SerializedTestConfig {
|
|||
self,
|
||||
config_file_specifier: &ModuleSpecifier,
|
||||
) -> Result<TestConfig, AnyError> {
|
||||
let (include, exclude) = (self.include, self.exclude);
|
||||
let files = SerializedFilesConfig { include, exclude };
|
||||
|
||||
Ok(TestConfig {
|
||||
files: self.files.into_resolved(config_file_specifier)?,
|
||||
files: choose_files(files, self.deprecated_files)
|
||||
.into_resolved(config_file_specifier)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
#[derive(Clone, Debug, Default, PartialEq)]
|
||||
pub struct TestConfig {
|
||||
pub files: FilesConfig,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
impl TestConfig {
|
||||
pub fn with_files(self, files: FilesConfig) -> Self {
|
||||
let files = self.files.extend(files);
|
||||
Self { files }
|
||||
}
|
||||
}
|
||||
|
||||
/// `bench` config representation for serde
|
||||
///
|
||||
/// fields `include` and `exclude` are expanded from [SerializedFilesConfig].
|
||||
#[derive(Clone, Debug, Default, Deserialize, PartialEq)]
|
||||
#[serde(default, deny_unknown_fields)]
|
||||
struct SerializedBenchConfig {
|
||||
pub files: SerializedFilesConfig,
|
||||
pub include: Vec<String>,
|
||||
pub exclude: Vec<String>,
|
||||
#[serde(rename = "files")]
|
||||
pub deprecated_files: SerializedFilesConfig,
|
||||
}
|
||||
|
||||
impl SerializedBenchConfig {
|
||||
|
@ -451,18 +620,29 @@ impl SerializedBenchConfig {
|
|||
self,
|
||||
config_file_specifier: &ModuleSpecifier,
|
||||
) -> Result<BenchConfig, AnyError> {
|
||||
let (include, exclude) = (self.include, self.exclude);
|
||||
let files = SerializedFilesConfig { include, exclude };
|
||||
|
||||
Ok(BenchConfig {
|
||||
files: self.files.into_resolved(config_file_specifier)?,
|
||||
files: choose_files(files, self.deprecated_files)
|
||||
.into_resolved(config_file_specifier)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
#[derive(Clone, Debug, Default, PartialEq)]
|
||||
pub struct BenchConfig {
|
||||
pub files: FilesConfig,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
impl BenchConfig {
|
||||
pub fn with_files(self, files: FilesConfig) -> Self {
|
||||
let files = self.files.extend(files);
|
||||
Self { files }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, PartialEq)]
|
||||
#[serde(untagged)]
|
||||
pub enum LockConfig {
|
||||
Bool(bool),
|
||||
|
@ -482,12 +662,14 @@ pub struct ConfigFileJson {
|
|||
pub test: Option<Value>,
|
||||
pub bench: Option<Value>,
|
||||
pub lock: Option<Value>,
|
||||
pub exclude: Option<Value>,
|
||||
pub node_modules_dir: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ConfigFile {
|
||||
pub specifier: ModuleSpecifier,
|
||||
pub json: ConfigFileJson,
|
||||
json: ConfigFileJson,
|
||||
}
|
||||
|
||||
impl ConfigFile {
|
||||
|
@ -603,11 +785,11 @@ impl ConfigFile {
|
|||
config_path.display()
|
||||
)
|
||||
})?;
|
||||
Self::from_specifier(&config_specifier)
|
||||
Self::from_specifier(config_specifier)
|
||||
}
|
||||
|
||||
pub fn from_specifier(specifier: &ModuleSpecifier) -> Result<Self, AnyError> {
|
||||
let config_path = specifier_to_file_path(specifier)?;
|
||||
pub fn from_specifier(specifier: ModuleSpecifier) -> Result<Self, AnyError> {
|
||||
let config_path = specifier_to_file_path(&specifier)?;
|
||||
let config_text = match std::fs::read_to_string(config_path) {
|
||||
Ok(text) => text,
|
||||
Err(err) => bail!(
|
||||
|
@ -619,10 +801,7 @@ impl ConfigFile {
|
|||
Self::new(&config_text, specifier)
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
text: &str,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Result<Self, AnyError> {
|
||||
pub fn new(text: &str, specifier: ModuleSpecifier) -> Result<Self, AnyError> {
|
||||
let jsonc =
|
||||
match jsonc_parser::parse_to_serde_value(text, &Default::default()) {
|
||||
Ok(None) => json!({}),
|
||||
|
@ -643,10 +822,7 @@ impl ConfigFile {
|
|||
};
|
||||
let json: ConfigFileJson = serde_json::from_value(jsonc)?;
|
||||
|
||||
Ok(Self {
|
||||
specifier: specifier.to_owned(),
|
||||
json,
|
||||
})
|
||||
Ok(Self { specifier, json })
|
||||
}
|
||||
|
||||
/// Returns true if the configuration indicates that JavaScript should be
|
||||
|
@ -679,6 +855,10 @@ impl ConfigFile {
|
|||
self.json.import_map.clone()
|
||||
}
|
||||
|
||||
pub fn node_modules_dir(&self) -> Option<bool> {
|
||||
self.json.node_modules_dir
|
||||
}
|
||||
|
||||
pub fn to_import_map_value(&self) -> Value {
|
||||
let mut value = serde_json::Map::with_capacity(2);
|
||||
if let Some(imports) = &self.json.imports {
|
||||
|
@ -694,44 +874,105 @@ impl ConfigFile {
|
|||
self.json.imports.is_some() || self.json.scopes.is_some()
|
||||
}
|
||||
|
||||
pub fn to_fmt_config(&self) -> Result<Option<FmtConfig>, AnyError> {
|
||||
if let Some(config) = self.json.fmt.clone() {
|
||||
let fmt_config: SerializedFmtConfig = serde_json::from_value(config)
|
||||
.context("Failed to parse \"fmt\" configuration")?;
|
||||
Ok(Some(fmt_config.into_resolved(&self.specifier)?))
|
||||
pub fn to_files_config(&self) -> Result<Option<FilesConfig>, AnyError> {
|
||||
let exclude: Vec<String> = if let Some(exclude) = self.json.exclude.clone()
|
||||
{
|
||||
serde_json::from_value(exclude)
|
||||
.context("Failed to parse \"exclude\" configuration")?
|
||||
} else {
|
||||
Ok(None)
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
let raw_files_config = SerializedFilesConfig {
|
||||
exclude,
|
||||
..Default::default()
|
||||
};
|
||||
Ok(Some(raw_files_config.into_resolved(&self.specifier)?))
|
||||
}
|
||||
|
||||
pub fn to_fmt_config(&self) -> Result<Option<FmtConfig>, AnyError> {
|
||||
let files_config = self.to_files_config()?;
|
||||
let fmt_config = match self.json.fmt.clone() {
|
||||
Some(config) => {
|
||||
let fmt_config: SerializedFmtConfig = serde_json::from_value(config)
|
||||
.context("Failed to parse \"fmt\" configuration")?;
|
||||
Some(fmt_config.into_resolved(&self.specifier)?)
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
if files_config.is_none() && fmt_config.is_none() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let fmt_config = fmt_config.unwrap_or_default();
|
||||
let files_config = files_config.unwrap_or_default();
|
||||
|
||||
Ok(Some(fmt_config.with_files(files_config)))
|
||||
}
|
||||
|
||||
pub fn to_lint_config(&self) -> Result<Option<LintConfig>, AnyError> {
|
||||
if let Some(config) = self.json.lint.clone() {
|
||||
let lint_config: SerializedLintConfig = serde_json::from_value(config)
|
||||
.context("Failed to parse \"lint\" configuration")?;
|
||||
Ok(Some(lint_config.into_resolved(&self.specifier)?))
|
||||
} else {
|
||||
Ok(None)
|
||||
let files_config = self.to_files_config()?;
|
||||
let lint_config = match self.json.lint.clone() {
|
||||
Some(config) => {
|
||||
let lint_config: SerializedLintConfig = serde_json::from_value(config)
|
||||
.context("Failed to parse \"lint\" configuration")?;
|
||||
Some(lint_config.into_resolved(&self.specifier)?)
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
if files_config.is_none() && lint_config.is_none() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let lint_config = lint_config.unwrap_or_default();
|
||||
let files_config = files_config.unwrap_or_default();
|
||||
|
||||
Ok(Some(lint_config.with_files(files_config)))
|
||||
}
|
||||
|
||||
pub fn to_test_config(&self) -> Result<Option<TestConfig>, AnyError> {
|
||||
if let Some(config) = self.json.test.clone() {
|
||||
let test_config: SerializedTestConfig = serde_json::from_value(config)
|
||||
.context("Failed to parse \"test\" configuration")?;
|
||||
Ok(Some(test_config.into_resolved(&self.specifier)?))
|
||||
} else {
|
||||
Ok(None)
|
||||
let files_config = self.to_files_config()?;
|
||||
let test_config = match self.json.test.clone() {
|
||||
Some(config) => {
|
||||
let test_config: SerializedTestConfig = serde_json::from_value(config)
|
||||
.context("Failed to parse \"test\" configuration")?;
|
||||
Some(test_config.into_resolved(&self.specifier)?)
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
if files_config.is_none() && test_config.is_none() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let test_config = test_config.unwrap_or_default();
|
||||
let files_config = files_config.unwrap_or_default();
|
||||
|
||||
Ok(Some(test_config.with_files(files_config)))
|
||||
}
|
||||
|
||||
pub fn to_bench_config(&self) -> Result<Option<BenchConfig>, AnyError> {
|
||||
if let Some(config) = self.json.bench.clone() {
|
||||
let bench_config: SerializedBenchConfig = serde_json::from_value(config)
|
||||
.context("Failed to parse \"bench\" configuration")?;
|
||||
Ok(Some(bench_config.into_resolved(&self.specifier)?))
|
||||
} else {
|
||||
Ok(None)
|
||||
let files_config = self.to_files_config()?;
|
||||
let bench_config = match self.json.bench.clone() {
|
||||
Some(config) => {
|
||||
let bench_config: SerializedBenchConfig =
|
||||
serde_json::from_value(config)
|
||||
.context("Failed to parse \"bench\" configuration")?;
|
||||
Some(bench_config.into_resolved(&self.specifier)?)
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
if files_config.is_none() && bench_config.is_none() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let bench_config = bench_config.unwrap_or_default();
|
||||
let files_config = files_config.unwrap_or_default();
|
||||
|
||||
Ok(Some(bench_config.with_files(files_config)))
|
||||
}
|
||||
|
||||
/// Return any tasks that are defined in the configuration file as a sequence
|
||||
|
@ -837,6 +1078,26 @@ impl ConfigFile {
|
|||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_lockfile_path(&self) -> Result<Option<PathBuf>, AnyError> {
|
||||
match self.to_lock_config()? {
|
||||
Some(LockConfig::Bool(lock)) if !lock => Ok(None),
|
||||
Some(LockConfig::PathBuf(lock)) => Ok(Some(
|
||||
self
|
||||
.specifier
|
||||
.to_file_path()
|
||||
.unwrap()
|
||||
.parent()
|
||||
.unwrap()
|
||||
.join(lock),
|
||||
)),
|
||||
_ => {
|
||||
let mut path = self.specifier.to_file_path().unwrap();
|
||||
path.set_file_name("deno.lock");
|
||||
Ok(Some(path))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents the "default" type library that should be used when type
|
||||
|
@ -999,6 +1260,12 @@ mod tests {
|
|||
use deno_core::serde_json::json;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
fn unpack_object<T>(result: Result<Option<T>, AnyError>, name: &str) -> T {
|
||||
result
|
||||
.unwrap_or_else(|err| panic!("error parsing {name} object but got {err}"))
|
||||
.unwrap_or_else(|| panic!("{name} object should be defined"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn read_config_file_absolute() {
|
||||
let path = test_util::testdata_path().join("module_graph/tsconfig.json");
|
||||
|
@ -1043,27 +1310,21 @@ mod tests {
|
|||
"strict": true
|
||||
},
|
||||
"lint": {
|
||||
"files": {
|
||||
"include": ["src/"],
|
||||
"exclude": ["src/testdata/"]
|
||||
},
|
||||
"include": ["src/"],
|
||||
"exclude": ["src/testdata/"],
|
||||
"rules": {
|
||||
"tags": ["recommended"],
|
||||
"include": ["ban-untagged-todo"]
|
||||
}
|
||||
},
|
||||
"fmt": {
|
||||
"files": {
|
||||
"include": ["src/"],
|
||||
"exclude": ["src/testdata/"]
|
||||
},
|
||||
"options": {
|
||||
"useTabs": true,
|
||||
"lineWidth": 80,
|
||||
"indentWidth": 4,
|
||||
"singleQuote": true,
|
||||
"proseWrap": "preserve"
|
||||
}
|
||||
"include": ["src/"],
|
||||
"exclude": ["src/testdata/"],
|
||||
"useTabs": true,
|
||||
"lineWidth": 80,
|
||||
"indentWidth": 4,
|
||||
"singleQuote": true,
|
||||
"proseWrap": "preserve"
|
||||
},
|
||||
"tasks": {
|
||||
"build": "deno run --allow-read --allow-write build.ts",
|
||||
|
@ -1072,9 +1333,9 @@ mod tests {
|
|||
}"#;
|
||||
let config_dir = ModuleSpecifier::parse("file:///deno/").unwrap();
|
||||
let config_specifier = config_dir.join("tsconfig.json").unwrap();
|
||||
let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
|
||||
let (options_value, ignored) =
|
||||
config_file.to_compiler_options().expect("error parsing");
|
||||
let config_file =
|
||||
ConfigFile::new(config_text, config_specifier.clone()).unwrap();
|
||||
let (options_value, ignored) = config_file.to_compiler_options().unwrap();
|
||||
assert!(options_value.is_object());
|
||||
let options = options_value.as_object().unwrap();
|
||||
assert!(options.contains_key("strict"));
|
||||
|
@ -1087,38 +1348,38 @@ mod tests {
|
|||
}),
|
||||
);
|
||||
|
||||
let lint_config = config_file
|
||||
.to_lint_config()
|
||||
.expect("error parsing lint object")
|
||||
.expect("lint object should be defined");
|
||||
assert_eq!(lint_config.files.include, vec![PathBuf::from("/deno/src/")]);
|
||||
assert_eq!(
|
||||
lint_config.files.exclude,
|
||||
vec![PathBuf::from("/deno/src/testdata/")]
|
||||
unpack_object(config_file.to_lint_config(), "lint"),
|
||||
LintConfig {
|
||||
files: FilesConfig {
|
||||
include: vec![PathBuf::from("/deno/src/")],
|
||||
exclude: vec![PathBuf::from("/deno/src/testdata/")],
|
||||
},
|
||||
rules: LintRulesConfig {
|
||||
include: Some(vec!["ban-untagged-todo".to_string()]),
|
||||
exclude: None,
|
||||
tags: Some(vec!["recommended".to_string()]),
|
||||
},
|
||||
..Default::default()
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
lint_config.rules.include,
|
||||
Some(vec!["ban-untagged-todo".to_string()])
|
||||
unpack_object(config_file.to_fmt_config(), "fmt"),
|
||||
FmtConfig {
|
||||
files: FilesConfig {
|
||||
include: vec![PathBuf::from("/deno/src/")],
|
||||
exclude: vec![PathBuf::from("/deno/src/testdata/")],
|
||||
},
|
||||
options: FmtOptionsConfig {
|
||||
use_tabs: Some(true),
|
||||
line_width: Some(80),
|
||||
indent_width: Some(4),
|
||||
single_quote: Some(true),
|
||||
prose_wrap: Some(ProseWrap::Preserve),
|
||||
..Default::default()
|
||||
},
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
lint_config.rules.tags,
|
||||
Some(vec!["recommended".to_string()])
|
||||
);
|
||||
assert!(lint_config.rules.exclude.is_none());
|
||||
|
||||
let fmt_config = config_file
|
||||
.to_fmt_config()
|
||||
.expect("error parsing fmt object")
|
||||
.expect("fmt object should be defined");
|
||||
assert_eq!(fmt_config.files.include, vec![PathBuf::from("/deno/src/")]);
|
||||
assert_eq!(
|
||||
fmt_config.files.exclude,
|
||||
vec![PathBuf::from("/deno/src/testdata/")],
|
||||
);
|
||||
assert_eq!(fmt_config.options.use_tabs, Some(true));
|
||||
assert_eq!(fmt_config.options.line_width, Some(80));
|
||||
assert_eq!(fmt_config.options.indent_width, Some(4));
|
||||
assert_eq!(fmt_config.options.single_quote, Some(true));
|
||||
|
||||
let tasks_config = config_file.to_tasks_config().unwrap().unwrap();
|
||||
assert_eq!(
|
||||
|
@ -1131,14 +1392,135 @@ mod tests {
|
|||
);
|
||||
}
|
||||
|
||||
/// if either "include" or "exclude" is specified, "files" is ignored
|
||||
#[test]
|
||||
fn test_parse_config_with_deprecated_files_field() {
|
||||
let config_text = r#"{
|
||||
"lint": {
|
||||
"files": { "include": ["foo/"], "exclude": ["bar/"] },
|
||||
"include": ["src/"]
|
||||
},
|
||||
"fmt": {
|
||||
"files": { "include": ["foo/"], "exclude": ["bar/"] },
|
||||
"exclude": ["dist/"]
|
||||
},
|
||||
"bench": {
|
||||
"files": { "include": ["foo/"] },
|
||||
"include": ["src/"]
|
||||
},
|
||||
"test": {
|
||||
"files": { "include": ["foo/"] },
|
||||
"include": ["src/"]
|
||||
}
|
||||
}"#;
|
||||
let config_dir = ModuleSpecifier::parse("file:///deno/").unwrap();
|
||||
let config_specifier = config_dir.join("tsconfig.json").unwrap();
|
||||
let config_file = ConfigFile::new(config_text, config_specifier).unwrap();
|
||||
|
||||
let lint_files = unpack_object(config_file.to_lint_config(), "lint").files;
|
||||
assert_eq!(
|
||||
lint_files,
|
||||
FilesConfig {
|
||||
include: vec![PathBuf::from("/deno/src/")],
|
||||
exclude: vec![],
|
||||
}
|
||||
);
|
||||
|
||||
let fmt_files = unpack_object(config_file.to_fmt_config(), "fmt").files;
|
||||
assert_eq!(
|
||||
fmt_files,
|
||||
FilesConfig {
|
||||
exclude: vec![PathBuf::from("/deno/dist/")],
|
||||
include: vec![],
|
||||
}
|
||||
);
|
||||
|
||||
let test_include = unpack_object(config_file.to_test_config(), "test")
|
||||
.files
|
||||
.include;
|
||||
assert_eq!(test_include, vec![PathBuf::from("/deno/src/")]);
|
||||
|
||||
let bench_include = unpack_object(config_file.to_bench_config(), "bench")
|
||||
.files
|
||||
.include;
|
||||
assert_eq!(bench_include, vec![PathBuf::from("/deno/src/")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_config_with_deprecated_files_field_only() {
|
||||
let config_text = r#"{
|
||||
"lint": { "files": { "include": ["src/"] } },
|
||||
"fmt": { "files": { "include": ["src/"] } },
|
||||
"test": { "files": { "exclude": ["dist/"] } },
|
||||
"bench": { "files": { "exclude": ["dist/"] } }
|
||||
}"#;
|
||||
let config_dir = ModuleSpecifier::parse("file:///deno/").unwrap();
|
||||
let config_specifier = config_dir.join("tsconfig.json").unwrap();
|
||||
let config_file = ConfigFile::new(config_text, config_specifier).unwrap();
|
||||
|
||||
let lint_include = unpack_object(config_file.to_lint_config(), "lint")
|
||||
.files
|
||||
.include;
|
||||
assert_eq!(lint_include, vec![PathBuf::from("/deno/src/")]);
|
||||
|
||||
let fmt_include = unpack_object(config_file.to_fmt_config(), "fmt")
|
||||
.files
|
||||
.include;
|
||||
assert_eq!(fmt_include, vec![PathBuf::from("/deno/src/")]);
|
||||
|
||||
let test_exclude = unpack_object(config_file.to_test_config(), "test")
|
||||
.files
|
||||
.exclude;
|
||||
assert_eq!(test_exclude, vec![PathBuf::from("/deno/dist/")]);
|
||||
|
||||
let bench_exclude = unpack_object(config_file.to_bench_config(), "bench")
|
||||
.files
|
||||
.exclude;
|
||||
assert_eq!(bench_exclude, vec![PathBuf::from("/deno/dist/")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_config_with_deprecated_fmt_options() {
|
||||
let config_text_both = r#"{
|
||||
"fmt": {
|
||||
"options": {
|
||||
"semiColons": true
|
||||
},
|
||||
"semiColons": false
|
||||
}
|
||||
}"#;
|
||||
let config_text_deprecated = r#"{
|
||||
"fmt": {
|
||||
"options": {
|
||||
"semiColons": true
|
||||
}
|
||||
}
|
||||
}"#;
|
||||
let config_specifier =
|
||||
ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
|
||||
let config_file_both =
|
||||
ConfigFile::new(config_text_both, config_specifier.clone()).unwrap();
|
||||
let config_file_deprecated =
|
||||
ConfigFile::new(config_text_deprecated, config_specifier).unwrap();
|
||||
|
||||
fn unpack_options(config_file: ConfigFile) -> FmtOptionsConfig {
|
||||
unpack_object(config_file.to_fmt_config(), "fmt").options
|
||||
}
|
||||
|
||||
let fmt_options_both = unpack_options(config_file_both);
|
||||
assert_eq!(fmt_options_both.semi_colons, Some(false));
|
||||
|
||||
let fmt_options_deprecated = unpack_options(config_file_deprecated);
|
||||
assert_eq!(fmt_options_deprecated.semi_colons, Some(true));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_config_with_empty_file() {
|
||||
let config_text = "";
|
||||
let config_specifier =
|
||||
ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
|
||||
let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
|
||||
let (options_value, _) =
|
||||
config_file.to_compiler_options().expect("error parsing");
|
||||
let config_file = ConfigFile::new(config_text, config_specifier).unwrap();
|
||||
let (options_value, _) = config_file.to_compiler_options().unwrap();
|
||||
assert!(options_value.is_object());
|
||||
}
|
||||
|
||||
|
@ -1147,19 +1529,75 @@ mod tests {
|
|||
let config_text = r#"//{"foo":"bar"}"#;
|
||||
let config_specifier =
|
||||
ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
|
||||
let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
|
||||
let (options_value, _) =
|
||||
config_file.to_compiler_options().expect("error parsing");
|
||||
let config_file = ConfigFile::new(config_text, config_specifier).unwrap();
|
||||
let (options_value, _) = config_file.to_compiler_options().unwrap();
|
||||
assert!(options_value.is_object());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_config_with_global_files() {
|
||||
let config_text = r#"{
|
||||
"exclude": ["foo/"],
|
||||
"test": {
|
||||
"exclude": ["npm/"],
|
||||
},
|
||||
"bench": {}
|
||||
}"#;
|
||||
let config_specifier =
|
||||
ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
|
||||
let config_file = ConfigFile::new(config_text, config_specifier).unwrap();
|
||||
|
||||
let (options_value, _) = config_file.to_compiler_options().unwrap();
|
||||
assert!(options_value.is_object());
|
||||
|
||||
let test_config = config_file.to_test_config().unwrap().unwrap();
|
||||
assert_eq!(test_config.files.include, Vec::<PathBuf>::new());
|
||||
assert_eq!(
|
||||
test_config.files.exclude,
|
||||
vec![PathBuf::from("/deno/npm/"), PathBuf::from("/deno/foo/")]
|
||||
);
|
||||
|
||||
let bench_config = config_file.to_bench_config().unwrap().unwrap();
|
||||
assert_eq!(
|
||||
bench_config.files.exclude,
|
||||
vec![PathBuf::from("/deno/foo/")]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_config_with_global_files_only() {
|
||||
let config_text = r#"{
|
||||
"exclude": ["npm/"]
|
||||
}"#;
|
||||
let config_specifier =
|
||||
ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
|
||||
let config_file = ConfigFile::new(config_text, config_specifier).unwrap();
|
||||
|
||||
let (options_value, _) = config_file.to_compiler_options().unwrap();
|
||||
assert!(options_value.is_object());
|
||||
|
||||
let empty_include = Vec::<PathBuf>::new();
|
||||
|
||||
let files_config = config_file.to_files_config().unwrap().unwrap();
|
||||
assert_eq!(files_config.include, empty_include);
|
||||
assert_eq!(files_config.exclude, vec![PathBuf::from("/deno/npm/")]);
|
||||
|
||||
let lint_config = config_file.to_lint_config().unwrap().unwrap();
|
||||
assert_eq!(lint_config.files.include, empty_include);
|
||||
assert_eq!(lint_config.files.exclude, vec![PathBuf::from("/deno/npm/")]);
|
||||
|
||||
let fmt_config = config_file.to_fmt_config().unwrap().unwrap();
|
||||
assert_eq!(fmt_config.files.include, empty_include);
|
||||
assert_eq!(fmt_config.files.exclude, vec![PathBuf::from("/deno/npm/")],);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_config_with_invalid_file() {
|
||||
let config_text = "{foo:bar}";
|
||||
let config_specifier =
|
||||
ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
|
||||
// Emit error: Unable to parse config file JSON "<config_path>" because of Unexpected token on line 1 column 6.
|
||||
assert!(ConfigFile::new(config_text, &config_specifier).is_err());
|
||||
assert!(ConfigFile::new(config_text, config_specifier).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -1168,7 +1606,7 @@ mod tests {
|
|||
let config_specifier =
|
||||
ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
|
||||
// Emit error: config file JSON "<config_path>" should be an object
|
||||
assert!(ConfigFile::new(config_text, &config_specifier).is_err());
|
||||
assert!(ConfigFile::new(config_text, config_specifier).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -1280,7 +1718,7 @@ mod tests {
|
|||
fn run_task_error_test(config_text: &str, expected_error: &str) {
|
||||
let config_dir = ModuleSpecifier::parse("file:///deno/").unwrap();
|
||||
let config_specifier = config_dir.join("tsconfig.json").unwrap();
|
||||
let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
|
||||
let config_file = ConfigFile::new(config_text, config_specifier).unwrap();
|
||||
assert_eq!(
|
||||
config_file
|
||||
.resolve_tasks_config()
|
||||
|
|
|
@ -9,6 +9,7 @@ use clap::Command;
|
|||
use clap::ValueHint;
|
||||
use deno_core::resolve_url_or_path;
|
||||
use deno_core::url::Url;
|
||||
use deno_graph::GraphKind;
|
||||
use deno_runtime::permissions::parse_sys_kind;
|
||||
use log::debug;
|
||||
use log::Level;
|
||||
|
@ -255,6 +256,25 @@ pub enum TypeCheckMode {
|
|||
Local,
|
||||
}
|
||||
|
||||
impl TypeCheckMode {
|
||||
/// Gets if type checking will occur under this mode.
|
||||
pub fn is_true(&self) -> bool {
|
||||
match self {
|
||||
Self::None => false,
|
||||
Self::Local | Self::All => true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets the corresponding module `GraphKind` that should be created
|
||||
/// for the current `TypeCheckMode`.
|
||||
pub fn as_graph_kind(&self) -> GraphKind {
|
||||
match self.is_true() {
|
||||
true => GraphKind::All,
|
||||
false => GraphKind::CodeOnly,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for TypeCheckMode {
|
||||
fn default() -> Self {
|
||||
Self::None
|
||||
|
@ -527,8 +547,11 @@ impl Flags {
|
|||
.ok()
|
||||
}
|
||||
Task(_) | Check(_) | Coverage(_) | Cache(_) | Info(_) | Eval(_)
|
||||
| Test(_) | Bench(_) | Repl(_) => std::env::current_dir().ok(),
|
||||
_ => None,
|
||||
| Test(_) | Bench(_) | Repl(_) | Compile(_) => {
|
||||
std::env::current_dir().ok()
|
||||
}
|
||||
Bundle(_) | Completions(_) | Doc(_) | Fmt(_) | Init(_) | Install(_)
|
||||
| Uninstall(_) | Lsp | Lint(_) | Types | Upgrade(_) | Vendor(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1340,7 +1363,7 @@ TypeScript compiler cache: Subdirectory containing TS compiler output.",
|
|||
.arg(lock_arg())
|
||||
.arg(config_arg())
|
||||
.arg(import_map_arg())
|
||||
.arg(local_npm_arg())
|
||||
.arg(node_modules_dir_arg())
|
||||
.arg(
|
||||
Arg::new("json")
|
||||
.long("json")
|
||||
|
@ -1859,6 +1882,7 @@ Remote modules and multiple modules may also be specified:
|
|||
.arg(config_arg())
|
||||
.arg(import_map_arg())
|
||||
.arg(lock_arg())
|
||||
.arg(node_modules_dir_arg())
|
||||
.arg(reload_arg())
|
||||
.arg(ca_file_arg())
|
||||
}
|
||||
|
@ -1872,7 +1896,7 @@ fn compile_args_without_check_args(app: Command) -> Command {
|
|||
.arg(import_map_arg())
|
||||
.arg(no_remote_arg())
|
||||
.arg(no_npm_arg())
|
||||
.arg(local_npm_arg())
|
||||
.arg(node_modules_dir_arg())
|
||||
.arg(config_arg())
|
||||
.arg(no_config_arg())
|
||||
.arg(reload_arg())
|
||||
|
@ -1882,6 +1906,90 @@ fn compile_args_without_check_args(app: Command) -> Command {
|
|||
.arg(ca_file_arg())
|
||||
}
|
||||
|
||||
static ALLOW_READ_HELP: &str = concat!(
|
||||
"Allow file system read access. Optionally specify allowed paths.\n",
|
||||
"Docs: https://deno.land/manual@v",
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
"/basics/permissions\n",
|
||||
"Examples:\n",
|
||||
" --allow-read\n",
|
||||
" --allow-read=\"/etc,/var/log.txt\""
|
||||
);
|
||||
|
||||
static ALLOW_WRITE_HELP: &str = concat!(
|
||||
"Allow file system write access. Optionally specify allowed paths.\n",
|
||||
"Docs: https://deno.land/manual@v",
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
"/basics/permissions\n",
|
||||
"Examples:\n",
|
||||
" --allow-write\n",
|
||||
" --allow-write=\"/etc,/var/log.txt\""
|
||||
);
|
||||
|
||||
static ALLOW_NET_HELP: &str = concat!(
|
||||
"Allow network access. Optionally specify allowed IP addresses and host names, with ports as necessary.\n",
|
||||
"Docs: https://deno.land/manual@v",
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
"/basics/permissions\n",
|
||||
"Examples:\n",
|
||||
" --allow-net\n",
|
||||
" --allow-net=\"localhost:8080,deno.land\""
|
||||
);
|
||||
|
||||
static ALLOW_ENV_HELP: &str = concat!(
|
||||
"Allow access to system environment information. Optionally specify accessible environment variables.\n",
|
||||
"Docs: https://deno.land/manual@v",
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
"/basics/permissions\n",
|
||||
"Examples:\n",
|
||||
" --allow-env\n",
|
||||
" --allow-env=\"PORT,HOME,PATH\""
|
||||
);
|
||||
|
||||
static ALLOW_SYS_HELP: &str = concat!(
|
||||
"Allow access to OS information. Optionally allow specific APIs by function name.\n",
|
||||
"Docs: https://deno.land/manual@v",
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
"/basics/permissions\n",
|
||||
"Examples:\n",
|
||||
" --allow-sys\n",
|
||||
" --allow-sys=\"systemMemoryInfo,osRelease\""
|
||||
);
|
||||
|
||||
static ALLOW_RUN_HELP: &str = concat!(
|
||||
"Allow running subprocesses. Optionally specify allowed runnable program names.\n",
|
||||
"Docs: https://deno.land/manual@v",
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
"/basics/permissions\n",
|
||||
"Examples:\n",
|
||||
" --allow-run\n",
|
||||
" --allow-run=\"whoami,ps\""
|
||||
);
|
||||
|
||||
static ALLOW_FFI_HELP: &str = concat!(
|
||||
"(Unstable) Allow loading dynamic libraries. Optionally specify allowed directories or files.\n",
|
||||
"Docs: https://deno.land/manual@v",
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
"/basics/permissions\n",
|
||||
"Examples:\n",
|
||||
" --allow-ffi\n",
|
||||
" --allow-ffi=\"./libfoo.so\""
|
||||
);
|
||||
|
||||
static ALLOW_HRTIME_HELP: &str = concat!(
|
||||
"Allow high-resolution time measurement. Note: this can enable timing attacks and fingerprinting.\n",
|
||||
"Docs: https://deno.land/manual@v",
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
"/basics/permissions\n"
|
||||
);
|
||||
|
||||
static ALLOW_ALL_HELP: &str = concat!(
|
||||
"Allow all permissions. Learn more about permissions in Deno:\n",
|
||||
"https://deno.land/manual@v",
|
||||
env!("CARGO_PKG_VERSION"),
|
||||
"/basics/permissions\n"
|
||||
);
|
||||
|
||||
fn permission_args(app: Command) -> Command {
|
||||
app
|
||||
.arg(
|
||||
|
@ -1890,7 +1998,8 @@ fn permission_args(app: Command) -> Command {
|
|||
.num_args(0..)
|
||||
.use_value_delimiter(true)
|
||||
.require_equals(true)
|
||||
.help("Allow file system read access")
|
||||
.value_name("PATH")
|
||||
.help(ALLOW_READ_HELP)
|
||||
.value_parser(value_parser!(PathBuf))
|
||||
.value_hint(ValueHint::AnyPath),
|
||||
)
|
||||
|
@ -1900,7 +2009,8 @@ fn permission_args(app: Command) -> Command {
|
|||
.num_args(0..)
|
||||
.use_value_delimiter(true)
|
||||
.require_equals(true)
|
||||
.help("Allow file system write access")
|
||||
.value_name("PATH")
|
||||
.help(ALLOW_WRITE_HELP)
|
||||
.value_parser(value_parser!(PathBuf))
|
||||
.value_hint(ValueHint::AnyPath),
|
||||
)
|
||||
|
@ -1910,7 +2020,8 @@ fn permission_args(app: Command) -> Command {
|
|||
.num_args(0..)
|
||||
.use_value_delimiter(true)
|
||||
.require_equals(true)
|
||||
.help("Allow network access")
|
||||
.value_name("IP_OR_HOSTNAME")
|
||||
.help(ALLOW_NET_HELP)
|
||||
.value_parser(flags_allow_net::validator),
|
||||
)
|
||||
.arg(unsafely_ignore_certificate_errors_arg())
|
||||
|
@ -1920,7 +2031,8 @@ fn permission_args(app: Command) -> Command {
|
|||
.num_args(0..)
|
||||
.use_value_delimiter(true)
|
||||
.require_equals(true)
|
||||
.help("Allow environment access")
|
||||
.value_name("VARIABLE_NAME")
|
||||
.help(ALLOW_ENV_HELP)
|
||||
.value_parser(|key: &str| {
|
||||
if key.is_empty() || key.contains(&['=', '\0'] as &[char]) {
|
||||
return Err(format!("invalid key \"{key}\""));
|
||||
|
@ -1939,7 +2051,8 @@ fn permission_args(app: Command) -> Command {
|
|||
.num_args(0..)
|
||||
.use_value_delimiter(true)
|
||||
.require_equals(true)
|
||||
.help("Allow access to system info")
|
||||
.value_name("API_NAME")
|
||||
.help(ALLOW_SYS_HELP)
|
||||
.value_parser(|key: &str| parse_sys_kind(key).map(ToString::to_string)),
|
||||
)
|
||||
.arg(
|
||||
|
@ -1948,7 +2061,8 @@ fn permission_args(app: Command) -> Command {
|
|||
.num_args(0..)
|
||||
.use_value_delimiter(true)
|
||||
.require_equals(true)
|
||||
.help("Allow running subprocesses"),
|
||||
.value_name("PROGRAM_NAME")
|
||||
.help(ALLOW_RUN_HELP),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("allow-ffi")
|
||||
|
@ -1956,7 +2070,8 @@ fn permission_args(app: Command) -> Command {
|
|||
.num_args(0..)
|
||||
.use_value_delimiter(true)
|
||||
.require_equals(true)
|
||||
.help("Allow loading dynamic libraries")
|
||||
.value_name("PATH")
|
||||
.help(ALLOW_FFI_HELP)
|
||||
.value_parser(value_parser!(PathBuf))
|
||||
.value_hint(ValueHint::AnyPath),
|
||||
)
|
||||
|
@ -1964,14 +2079,14 @@ fn permission_args(app: Command) -> Command {
|
|||
Arg::new("allow-hrtime")
|
||||
.long("allow-hrtime")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Allow high resolution time measurement"),
|
||||
.help(ALLOW_HRTIME_HELP),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("allow-all")
|
||||
.short('A')
|
||||
.long("allow-all")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Allow all permissions"),
|
||||
.help(ALLOW_ALL_HELP),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("prompt")
|
||||
|
@ -2230,7 +2345,7 @@ fn check_arg(checks_local_by_default: bool) -> Arg {
|
|||
default, so adding --check is redundant.
|
||||
If the value of '--check=all' is supplied, diagnostic errors from remote modules
|
||||
will be included.
|
||||
|
||||
|
||||
Alternatively, the 'deno check' subcommand can be used.",
|
||||
)
|
||||
} else {
|
||||
|
@ -2330,14 +2445,14 @@ fn no_npm_arg() -> Arg {
|
|||
.help("Do not resolve npm modules")
|
||||
}
|
||||
|
||||
fn local_npm_arg() -> Arg {
|
||||
fn node_modules_dir_arg() -> Arg {
|
||||
Arg::new("node-modules-dir")
|
||||
.long("node-modules-dir")
|
||||
.num_args(0..=1)
|
||||
.value_parser(value_parser!(bool))
|
||||
.default_missing_value("true")
|
||||
.require_equals(true)
|
||||
.help("Creates a local node_modules folder")
|
||||
.help("Enables or disables the use of a local node_modules folder for npm packages")
|
||||
}
|
||||
|
||||
fn unsafely_ignore_certificate_errors_arg() -> Arg {
|
||||
|
@ -2625,7 +2740,7 @@ fn info_parse(flags: &mut Flags, matches: &mut ArgMatches) {
|
|||
import_map_arg_parse(flags, matches);
|
||||
location_arg_parse(flags, matches);
|
||||
ca_file_arg_parse(flags, matches);
|
||||
local_npm_args_parse(flags, matches);
|
||||
node_modules_dir_arg_parse(flags, matches);
|
||||
lock_arg_parse(flags, matches);
|
||||
no_lock_arg_parse(flags, matches);
|
||||
no_remote_arg_parse(flags, matches);
|
||||
|
@ -2745,7 +2860,8 @@ fn run_parse(flags: &mut Flags, matches: &mut ArgMatches) {
|
|||
fn task_parse(flags: &mut Flags, matches: &mut ArgMatches) {
|
||||
flags.config_flag = matches
|
||||
.remove_one::<String>("config")
|
||||
.map_or(ConfigFlag::Discover, ConfigFlag::Path);
|
||||
.map(ConfigFlag::Path)
|
||||
.unwrap_or(ConfigFlag::Discover);
|
||||
|
||||
let mut task_flags = TaskFlags {
|
||||
cwd: matches.remove_one::<String>("cwd"),
|
||||
|
@ -2880,6 +2996,7 @@ fn vendor_parse(flags: &mut Flags, matches: &mut ArgMatches) {
|
|||
config_args_parse(flags, matches);
|
||||
import_map_arg_parse(flags, matches);
|
||||
lock_arg_parse(flags, matches);
|
||||
node_modules_dir_arg_parse(flags, matches);
|
||||
reload_arg_parse(flags, matches);
|
||||
|
||||
flags.subcommand = DenoSubcommand::Vendor(VendorFlags {
|
||||
|
@ -2905,7 +3022,7 @@ fn compile_args_without_check_parse(
|
|||
import_map_arg_parse(flags, matches);
|
||||
no_remote_arg_parse(flags, matches);
|
||||
no_npm_arg_parse(flags, matches);
|
||||
local_npm_args_parse(flags, matches);
|
||||
node_modules_dir_arg_parse(flags, matches);
|
||||
config_args_parse(flags, matches);
|
||||
reload_arg_parse(flags, matches);
|
||||
lock_args_parse(flags, matches);
|
||||
|
@ -3159,7 +3276,7 @@ fn no_npm_arg_parse(flags: &mut Flags, matches: &mut ArgMatches) {
|
|||
}
|
||||
}
|
||||
|
||||
fn local_npm_args_parse(flags: &mut Flags, matches: &mut ArgMatches) {
|
||||
fn node_modules_dir_arg_parse(flags: &mut Flags, matches: &mut ArgMatches) {
|
||||
flags.node_modules_dir = matches.remove_one::<bool>("node-modules-dir");
|
||||
}
|
||||
|
||||
|
|
|
@ -17,7 +17,6 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
|||
use deno_npm::NpmPackageId;
|
||||
use deno_semver::npm::NpmPackageReq;
|
||||
|
||||
use crate::args::config_file::LockConfig;
|
||||
use crate::args::ConfigFile;
|
||||
use crate::npm::CliNpmRegistryApi;
|
||||
use crate::Flags;
|
||||
|
@ -45,22 +44,9 @@ pub fn discover(
|
|||
None => match maybe_config_file {
|
||||
Some(config_file) => {
|
||||
if config_file.specifier.scheme() == "file" {
|
||||
match config_file.to_lock_config()? {
|
||||
Some(LockConfig::Bool(lock)) if !lock => {
|
||||
return Ok(None);
|
||||
}
|
||||
Some(LockConfig::PathBuf(lock)) => config_file
|
||||
.specifier
|
||||
.to_file_path()
|
||||
.unwrap()
|
||||
.parent()
|
||||
.unwrap()
|
||||
.join(lock),
|
||||
_ => {
|
||||
let mut path = config_file.specifier.to_file_path().unwrap();
|
||||
path.set_file_name("deno.lock");
|
||||
path
|
||||
}
|
||||
match config_file.resolve_lockfile_path()? {
|
||||
Some(path) => path,
|
||||
None => return Ok(None),
|
||||
}
|
||||
} else {
|
||||
return Ok(None);
|
||||
|
@ -96,7 +82,7 @@ pub async fn snapshot_from_lockfile(
|
|||
// now fill the packages except for the dist information
|
||||
let mut packages = Vec::with_capacity(lockfile.content.npm.packages.len());
|
||||
for (key, package) in &lockfile.content.npm.packages {
|
||||
let pkg_id = NpmPackageId::from_serialized(key)?;
|
||||
let id = NpmPackageId::from_serialized(key)?;
|
||||
|
||||
// collect the dependencies
|
||||
let mut dependencies = HashMap::with_capacity(package.dependencies.len());
|
||||
|
@ -106,19 +92,20 @@ pub async fn snapshot_from_lockfile(
|
|||
}
|
||||
|
||||
packages.push(SerializedNpmResolutionSnapshotPackage {
|
||||
pkg_id,
|
||||
dist: Default::default(), // temporarily empty
|
||||
id,
|
||||
dependencies,
|
||||
// temporarily empty
|
||||
os: Default::default(),
|
||||
cpu: Default::default(),
|
||||
dist: Default::default(),
|
||||
optional_dependencies: Default::default(),
|
||||
});
|
||||
}
|
||||
(root_packages, packages)
|
||||
};
|
||||
|
||||
// now that the lockfile is dropped, fetch the package version information
|
||||
let pkg_nvs = packages
|
||||
.iter()
|
||||
.map(|p| p.pkg_id.nv.clone())
|
||||
.collect::<Vec<_>>();
|
||||
let pkg_nvs = packages.iter().map(|p| p.id.nv.clone()).collect::<Vec<_>>();
|
||||
let get_version_infos = || {
|
||||
FuturesOrdered::from_iter(pkg_nvs.iter().map(|nv| async move {
|
||||
let package_info = api.package_info(&nv.name).await?;
|
||||
|
@ -131,11 +118,17 @@ pub async fn snapshot_from_lockfile(
|
|||
}))
|
||||
};
|
||||
let mut version_infos = get_version_infos();
|
||||
|
||||
let mut i = 0;
|
||||
while let Some(result) = version_infos.next().await {
|
||||
packages[i].dist = match result {
|
||||
Ok(version_info) => version_info.dist,
|
||||
match result {
|
||||
Ok(version_info) => {
|
||||
let mut package = &mut packages[i];
|
||||
package.dist = version_info.dist;
|
||||
package.cpu = version_info.cpu;
|
||||
package.os = version_info.os;
|
||||
package.optional_dependencies =
|
||||
version_info.optional_dependencies.into_keys().collect();
|
||||
}
|
||||
Err(err) => {
|
||||
if api.mark_force_reload() {
|
||||
// reset and try again
|
||||
|
@ -146,7 +139,7 @@ pub async fn snapshot_from_lockfile(
|
|||
return Err(err);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
i += 1;
|
||||
}
|
||||
|
|
409
cli/args/mod.rs
409
cli/args/mod.rs
|
@ -8,11 +8,13 @@ mod lockfile;
|
|||
pub mod package_json;
|
||||
|
||||
pub use self::import_map::resolve_import_map_from_specifier;
|
||||
use self::lockfile::snapshot_from_lockfile;
|
||||
pub use self::lockfile::snapshot_from_lockfile;
|
||||
use self::package_json::PackageJsonDeps;
|
||||
use ::import_map::ImportMap;
|
||||
use deno_core::resolve_url_or_path;
|
||||
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_runtime::deno_tls::RootCertStoreProvider;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use indexmap::IndexMap;
|
||||
|
||||
|
@ -32,6 +34,7 @@ pub use config_file::TsTypeLib;
|
|||
pub use flags::*;
|
||||
pub use lockfile::Lockfile;
|
||||
pub use lockfile::LockfileError;
|
||||
pub use package_json::PackageJsonDepsProvider;
|
||||
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow::anyhow;
|
||||
|
@ -52,6 +55,7 @@ use deno_runtime::deno_tls::webpki_roots;
|
|||
use deno_runtime::inspector_server::InspectorServer;
|
||||
use deno_runtime::permissions::PermissionsOptions;
|
||||
use once_cell::sync::Lazy;
|
||||
use once_cell::sync::OnceCell;
|
||||
use std::collections::HashMap;
|
||||
use std::env;
|
||||
use std::io::BufReader;
|
||||
|
@ -61,8 +65,8 @@ use std::num::NonZeroUsize;
|
|||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use thiserror::Error;
|
||||
|
||||
use crate::cache::DenoDir;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::npm::CliNpmRegistryApi;
|
||||
use crate::npm::NpmProcessState;
|
||||
|
@ -133,7 +137,7 @@ impl BenchOptions {
|
|||
files: resolve_files(
|
||||
maybe_bench_config.map(|c| c.files),
|
||||
Some(bench_flags.files),
|
||||
),
|
||||
)?,
|
||||
filter: bench_flags.filter,
|
||||
json: bench_flags.json,
|
||||
no_run: bench_flags.no_run,
|
||||
|
@ -178,7 +182,7 @@ impl FmtOptions {
|
|||
files: resolve_files(
|
||||
maybe_config_files,
|
||||
maybe_fmt_flags.map(|f| f.files),
|
||||
),
|
||||
)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -248,7 +252,7 @@ impl TestOptions {
|
|||
files: resolve_files(
|
||||
maybe_test_config.map(|c| c.files),
|
||||
Some(test_flags.files),
|
||||
),
|
||||
)?,
|
||||
allow_none: test_flags.allow_none,
|
||||
concurrent_jobs: test_flags
|
||||
.concurrent_jobs
|
||||
|
@ -343,7 +347,7 @@ impl LintOptions {
|
|||
Ok(Self {
|
||||
reporter_kind: maybe_reporter_kind.unwrap_or_default(),
|
||||
is_stdin,
|
||||
files: resolve_files(maybe_config_files, Some(maybe_file_flags)),
|
||||
files: resolve_files(maybe_config_files, Some(maybe_file_flags))?,
|
||||
rules: resolve_lint_rules_options(
|
||||
maybe_config_rules,
|
||||
maybe_rules_tags,
|
||||
|
@ -401,13 +405,62 @@ fn discover_package_json(
|
|||
Ok(None)
|
||||
}
|
||||
|
||||
struct CliRootCertStoreProvider {
|
||||
cell: OnceCell<RootCertStore>,
|
||||
maybe_root_path: Option<PathBuf>,
|
||||
maybe_ca_stores: Option<Vec<String>>,
|
||||
maybe_ca_data: Option<CaData>,
|
||||
}
|
||||
|
||||
impl CliRootCertStoreProvider {
|
||||
pub fn new(
|
||||
maybe_root_path: Option<PathBuf>,
|
||||
maybe_ca_stores: Option<Vec<String>>,
|
||||
maybe_ca_data: Option<CaData>,
|
||||
) -> Self {
|
||||
Self {
|
||||
cell: Default::default(),
|
||||
maybe_root_path,
|
||||
maybe_ca_stores,
|
||||
maybe_ca_data,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RootCertStoreProvider for CliRootCertStoreProvider {
|
||||
fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> {
|
||||
self
|
||||
.cell
|
||||
.get_or_try_init(|| {
|
||||
get_root_cert_store(
|
||||
self.maybe_root_path.clone(),
|
||||
self.maybe_ca_stores.clone(),
|
||||
self.maybe_ca_data.clone(),
|
||||
)
|
||||
})
|
||||
.map_err(|e| e.into())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Error, Debug, Clone)]
|
||||
pub enum RootCertStoreLoadError {
|
||||
#[error(
|
||||
"Unknown certificate store \"{0}\" specified (allowed: \"system,mozilla\")"
|
||||
)]
|
||||
UnknownStore(String),
|
||||
#[error("Unable to add pem file to certificate store: {0}")]
|
||||
FailedAddPemFile(String),
|
||||
#[error("Failed opening CA file: {0}")]
|
||||
CaFileOpenError(String),
|
||||
}
|
||||
|
||||
/// Create and populate a root cert store based on the passed options and
|
||||
/// environment.
|
||||
pub fn get_root_cert_store(
|
||||
maybe_root_path: Option<PathBuf>,
|
||||
maybe_ca_stores: Option<Vec<String>>,
|
||||
maybe_ca_data: Option<CaData>,
|
||||
) -> Result<RootCertStore, AnyError> {
|
||||
) -> Result<RootCertStore, RootCertStoreLoadError> {
|
||||
let mut root_cert_store = RootCertStore::empty();
|
||||
let ca_stores: Vec<String> = maybe_ca_stores
|
||||
.or_else(|| {
|
||||
|
@ -444,7 +497,7 @@ pub fn get_root_cert_store(
|
|||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(anyhow!("Unknown certificate store \"{}\" specified (allowed: \"system,mozilla\")", store));
|
||||
return Err(RootCertStoreLoadError::UnknownStore(store.clone()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -459,7 +512,9 @@ pub fn get_root_cert_store(
|
|||
} else {
|
||||
PathBuf::from(ca_file)
|
||||
};
|
||||
let certfile = std::fs::File::open(ca_file)?;
|
||||
let certfile = std::fs::File::open(ca_file).map_err(|err| {
|
||||
RootCertStoreLoadError::CaFileOpenError(err.to_string())
|
||||
})?;
|
||||
let mut reader = BufReader::new(certfile);
|
||||
rustls_pemfile::certs(&mut reader)
|
||||
}
|
||||
|
@ -474,10 +529,7 @@ pub fn get_root_cert_store(
|
|||
root_cert_store.add_parsable_certificates(&certs);
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(anyhow!(
|
||||
"Unable to add pem file to certificate store: {}",
|
||||
e
|
||||
));
|
||||
return Err(RootCertStoreLoadError::FailedAddPemFile(e.to_string()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -505,7 +557,7 @@ struct CliOptionOverrides {
|
|||
import_map_specifier: Option<Option<ModuleSpecifier>>,
|
||||
}
|
||||
|
||||
/// Holds the resolved options of many sources used by sub commands
|
||||
/// Holds the resolved options of many sources used by subcommands
|
||||
/// and provides some helper function for creating common objects.
|
||||
pub struct CliOptions {
|
||||
// the source of the options is a detail the rest of the
|
||||
|
@ -524,7 +576,7 @@ impl CliOptions {
|
|||
flags: Flags,
|
||||
initial_cwd: PathBuf,
|
||||
maybe_config_file: Option<ConfigFile>,
|
||||
maybe_lockfile: Option<Lockfile>,
|
||||
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
maybe_package_json: Option<PackageJson>,
|
||||
) -> Result<Self, AnyError> {
|
||||
if let Some(insecure_allowlist) =
|
||||
|
@ -541,7 +593,6 @@ impl CliOptions {
|
|||
eprintln!("{}", colors::yellow(msg));
|
||||
}
|
||||
|
||||
let maybe_lockfile = maybe_lockfile.map(|l| Arc::new(Mutex::new(l)));
|
||||
let maybe_node_modules_folder = resolve_local_node_modules_folder(
|
||||
&initial_cwd,
|
||||
&flags,
|
||||
|
@ -594,7 +645,7 @@ impl CliOptions {
|
|||
flags,
|
||||
initial_cwd,
|
||||
maybe_config_file,
|
||||
maybe_lock_file,
|
||||
maybe_lock_file.map(|l| Arc::new(Mutex::new(l))),
|
||||
maybe_package_json,
|
||||
)
|
||||
}
|
||||
|
@ -636,8 +687,40 @@ impl CliOptions {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn resolve_deno_dir(&self) -> Result<DenoDir, AnyError> {
|
||||
Ok(DenoDir::new(self.maybe_custom_root())?)
|
||||
pub fn npm_system_info(&self) -> NpmSystemInfo {
|
||||
match self.sub_command() {
|
||||
DenoSubcommand::Compile(CompileFlags {
|
||||
target: Some(target),
|
||||
..
|
||||
}) => {
|
||||
// the values of NpmSystemInfo align with the possible values for the
|
||||
// `arch` and `platform` fields of Node.js' `process` global:
|
||||
// https://nodejs.org/api/process.html
|
||||
match target.as_str() {
|
||||
"aarch64-apple-darwin" => NpmSystemInfo {
|
||||
os: "darwin".to_string(),
|
||||
cpu: "arm64".to_string(),
|
||||
},
|
||||
"x86_64-apple-darwin" => NpmSystemInfo {
|
||||
os: "darwin".to_string(),
|
||||
cpu: "x64".to_string(),
|
||||
},
|
||||
"x86_64-unknown-linux-gnu" => NpmSystemInfo {
|
||||
os: "linux".to_string(),
|
||||
cpu: "x64".to_string(),
|
||||
},
|
||||
"x86_64-pc-windows-msvc" => NpmSystemInfo {
|
||||
os: "win32".to_string(),
|
||||
cpu: "x64".to_string(),
|
||||
},
|
||||
value => {
|
||||
log::warn!("Not implemented NPM system info for target '{value}'. Using current system default. This may impact NPM ");
|
||||
NpmSystemInfo::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => NpmSystemInfo::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Based on an optional command line import map path and an optional
|
||||
|
@ -753,7 +836,7 @@ impl CliOptions {
|
|||
return Ok(Some(state.snapshot.clone().into_valid()?));
|
||||
}
|
||||
|
||||
if let Some(lockfile) = self.maybe_lock_file() {
|
||||
if let Some(lockfile) = self.maybe_lockfile() {
|
||||
if !lockfile.lock().overwrite {
|
||||
return Ok(Some(
|
||||
snapshot_from_lockfile(lockfile.clone(), api)
|
||||
|
@ -792,6 +875,15 @@ impl CliOptions {
|
|||
self.maybe_node_modules_folder.clone()
|
||||
}
|
||||
|
||||
pub fn node_modules_dir_enablement(&self) -> Option<bool> {
|
||||
self.flags.node_modules_dir.or_else(|| {
|
||||
self
|
||||
.maybe_config_file
|
||||
.as_ref()
|
||||
.and_then(|c| c.node_modules_dir())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn node_modules_dir_specifier(&self) -> Option<ModuleSpecifier> {
|
||||
self
|
||||
.maybe_node_modules_folder
|
||||
|
@ -799,12 +891,14 @@ impl CliOptions {
|
|||
.map(|path| ModuleSpecifier::from_directory_path(path).unwrap())
|
||||
}
|
||||
|
||||
pub fn resolve_root_cert_store(&self) -> Result<RootCertStore, AnyError> {
|
||||
get_root_cert_store(
|
||||
pub fn resolve_root_cert_store_provider(
|
||||
&self,
|
||||
) -> Arc<dyn RootCertStoreProvider> {
|
||||
Arc::new(CliRootCertStoreProvider::new(
|
||||
None,
|
||||
self.flags.ca_stores.clone(),
|
||||
self.flags.ca_data.clone(),
|
||||
)
|
||||
))
|
||||
}
|
||||
|
||||
pub fn resolve_ts_config_for_emit(
|
||||
|
@ -817,30 +911,6 @@ impl CliOptions {
|
|||
)
|
||||
}
|
||||
|
||||
/// Resolves the storage key to use based on the current flags, config, or main module.
|
||||
pub fn resolve_storage_key(
|
||||
&self,
|
||||
main_module: &ModuleSpecifier,
|
||||
) -> Option<String> {
|
||||
if let Some(location) = &self.flags.location {
|
||||
// if a location is set, then the ascii serialization of the location is
|
||||
// used, unless the origin is opaque, and then no storage origin is set, as
|
||||
// we can't expect the origin to be reproducible
|
||||
let storage_origin = location.origin();
|
||||
if storage_origin.is_tuple() {
|
||||
Some(storage_origin.ascii_serialization())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else if let Some(config_file) = &self.maybe_config_file {
|
||||
// otherwise we will use the path to the config file
|
||||
Some(config_file.specifier.to_string())
|
||||
} else {
|
||||
// otherwise we will use the path to the main module
|
||||
Some(main_module.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_inspector_server(&self) -> Option<InspectorServer> {
|
||||
let maybe_inspect_host = self
|
||||
.flags
|
||||
|
@ -851,7 +921,7 @@ impl CliOptions {
|
|||
.map(|host| InspectorServer::new(host, version::get_user_agent()))
|
||||
}
|
||||
|
||||
pub fn maybe_lock_file(&self) -> Option<Arc<Mutex<Lockfile>>> {
|
||||
pub fn maybe_lockfile(&self) -> Option<Arc<Mutex<Lockfile>>> {
|
||||
self.maybe_lockfile.clone()
|
||||
}
|
||||
|
||||
|
@ -1039,12 +1109,8 @@ impl CliOptions {
|
|||
&self.flags.location
|
||||
}
|
||||
|
||||
pub fn maybe_custom_root(&self) -> Option<PathBuf> {
|
||||
self
|
||||
.flags
|
||||
.cache_path
|
||||
.clone()
|
||||
.or_else(|| env::var("DENO_DIR").map(String::into).ok())
|
||||
pub fn maybe_custom_root(&self) -> &Option<PathBuf> {
|
||||
&self.flags.cache_path
|
||||
}
|
||||
|
||||
pub fn no_clear_screen(&self) -> bool {
|
||||
|
@ -1089,20 +1155,6 @@ impl CliOptions {
|
|||
&self.flags.subcommand
|
||||
}
|
||||
|
||||
pub fn trace_ops(&self) -> bool {
|
||||
match self.sub_command() {
|
||||
DenoSubcommand::Test(flags) => flags.trace_ops,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn shuffle_tests(&self) -> Option<u64> {
|
||||
match self.sub_command() {
|
||||
DenoSubcommand::Test(flags) => flags.shuffle,
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn type_check_mode(&self) -> TypeCheckMode {
|
||||
self.flags.type_check_mode
|
||||
}
|
||||
|
@ -1131,14 +1183,17 @@ fn resolve_local_node_modules_folder(
|
|||
maybe_config_file: Option<&ConfigFile>,
|
||||
maybe_package_json: Option<&PackageJson>,
|
||||
) -> Result<Option<PathBuf>, AnyError> {
|
||||
let path = if flags.node_modules_dir == Some(false) {
|
||||
let use_node_modules_dir = flags
|
||||
.node_modules_dir
|
||||
.or_else(|| maybe_config_file.and_then(|c| c.node_modules_dir()));
|
||||
let path = if use_node_modules_dir == Some(false) {
|
||||
return Ok(None);
|
||||
} else if let Some(state) = &*NPM_PROCESS_STATE {
|
||||
return Ok(state.local_node_modules_path.as_ref().map(PathBuf::from));
|
||||
} else if let Some(package_json_path) = maybe_package_json.map(|c| &c.path) {
|
||||
// always auto-discover the local_node_modules_folder when a package.json exists
|
||||
package_json_path.parent().unwrap().join("node_modules")
|
||||
} else if flags.node_modules_dir.is_none() {
|
||||
} else if use_node_modules_dir.is_none() {
|
||||
return Ok(None);
|
||||
} else if let Some(config_path) = maybe_config_file
|
||||
.as_ref()
|
||||
|
@ -1216,13 +1271,90 @@ fn resolve_import_map_specifier(
|
|||
Ok(None)
|
||||
}
|
||||
|
||||
pub struct StorageKeyResolver(Option<Option<String>>);
|
||||
|
||||
impl StorageKeyResolver {
|
||||
pub fn from_options(options: &CliOptions) -> Self {
|
||||
Self(if let Some(location) = &options.flags.location {
|
||||
// if a location is set, then the ascii serialization of the location is
|
||||
// used, unless the origin is opaque, and then no storage origin is set, as
|
||||
// we can't expect the origin to be reproducible
|
||||
let storage_origin = location.origin();
|
||||
if storage_origin.is_tuple() {
|
||||
Some(Some(storage_origin.ascii_serialization()))
|
||||
} else {
|
||||
Some(None)
|
||||
}
|
||||
} else {
|
||||
// otherwise we will use the path to the config file or None to
|
||||
// fall back to using the main module's path
|
||||
options
|
||||
.maybe_config_file
|
||||
.as_ref()
|
||||
.map(|config_file| Some(config_file.specifier.to_string()))
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates a storage key resolver that will always resolve to being empty.
|
||||
pub fn empty() -> Self {
|
||||
Self(Some(None))
|
||||
}
|
||||
|
||||
/// Resolves the storage key to use based on the current flags, config, or main module.
|
||||
pub fn resolve_storage_key(
|
||||
&self,
|
||||
main_module: &ModuleSpecifier,
|
||||
) -> Option<String> {
|
||||
// use the stored value or fall back to using the path of the main module.
|
||||
if let Some(maybe_value) = &self.0 {
|
||||
maybe_value.clone()
|
||||
} else {
|
||||
Some(main_module.to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn expand_globs(paths: &[PathBuf]) -> Result<Vec<PathBuf>, AnyError> {
|
||||
let mut new_paths = vec![];
|
||||
for path in paths {
|
||||
let path_str = path.to_string_lossy();
|
||||
if path_str.chars().any(|c| matches!(c, '*' | '?')) {
|
||||
// Escape brackets - we currently don't support them, because with introduction
|
||||
// of glob expansion paths like "pages/[id].ts" would suddenly start giving
|
||||
// wrong results. We might want to revisit that in the future.
|
||||
let escaped_path_str = path_str.replace('[', "[[]").replace(']', "[]]");
|
||||
let globbed_paths = glob::glob_with(
|
||||
&escaped_path_str,
|
||||
// Matches what `deno_task_shell` does
|
||||
glob::MatchOptions {
|
||||
// false because it should work the same way on case insensitive file systems
|
||||
case_sensitive: false,
|
||||
// true because it copies what sh does
|
||||
require_literal_separator: true,
|
||||
// true because it copies with sh does—these files are considered "hidden"
|
||||
require_literal_leading_dot: true,
|
||||
},
|
||||
)
|
||||
.with_context(|| format!("Failed to expand glob: \"{}\"", path_str))?;
|
||||
|
||||
for globbed_path_result in globbed_paths {
|
||||
new_paths.push(globbed_path_result?);
|
||||
}
|
||||
} else {
|
||||
new_paths.push(path.clone());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(new_paths)
|
||||
}
|
||||
|
||||
/// Collect included and ignored files. CLI flags take precedence
|
||||
/// over config file, i.e. if there's `files.ignore` in config file
|
||||
/// and `--ignore` CLI flag, only the flag value is taken into account.
|
||||
fn resolve_files(
|
||||
maybe_files_config: Option<FilesConfig>,
|
||||
maybe_file_flags: Option<FileFlags>,
|
||||
) -> FilesConfig {
|
||||
) -> Result<FilesConfig, AnyError> {
|
||||
let mut result = maybe_files_config.unwrap_or_default();
|
||||
if let Some(file_flags) = maybe_file_flags {
|
||||
if !file_flags.include.is_empty() {
|
||||
|
@ -1232,7 +1364,16 @@ fn resolve_files(
|
|||
result.exclude = file_flags.ignore;
|
||||
}
|
||||
}
|
||||
result
|
||||
// Now expand globs if there are any
|
||||
if !result.include.is_empty() {
|
||||
result.include = expand_globs(&result.include)?;
|
||||
}
|
||||
|
||||
if !result.exclude.is_empty() {
|
||||
result.exclude = expand_globs(&result.exclude)?;
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
/// Resolves the no_prompt value based on the cli flags and environment.
|
||||
|
@ -1240,14 +1381,25 @@ pub fn resolve_no_prompt(flags: &Flags) -> bool {
|
|||
flags.no_prompt || has_flag_env_var("DENO_NO_PROMPT")
|
||||
}
|
||||
|
||||
fn has_flag_env_var(name: &str) -> bool {
|
||||
pub fn has_flag_env_var(name: &str) -> bool {
|
||||
let value = env::var(name);
|
||||
matches!(value.as_ref().map(|s| s.as_str()), Ok("1"))
|
||||
}
|
||||
|
||||
pub fn npm_pkg_req_ref_to_binary_command(
|
||||
req_ref: &NpmPackageReqReference,
|
||||
) -> String {
|
||||
let binary_name = req_ref
|
||||
.sub_path
|
||||
.as_deref()
|
||||
.unwrap_or(req_ref.req.name.as_str());
|
||||
binary_name.to_string()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[cfg(not(windows))]
|
||||
#[test]
|
||||
|
@ -1257,7 +1409,7 @@ mod test {
|
|||
}"#;
|
||||
let config_specifier =
|
||||
ModuleSpecifier::parse("file:///deno/deno.jsonc").unwrap();
|
||||
let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
|
||||
let config_file = ConfigFile::new(config_text, config_specifier).unwrap();
|
||||
let actual = resolve_import_map_specifier(
|
||||
None,
|
||||
Some(&config_file),
|
||||
|
@ -1278,7 +1430,7 @@ mod test {
|
|||
}"#;
|
||||
let config_specifier =
|
||||
ModuleSpecifier::parse("file:///deno/deno.jsonc").unwrap();
|
||||
let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
|
||||
let config_file = ConfigFile::new(config_text, config_specifier).unwrap();
|
||||
let actual = resolve_import_map_specifier(
|
||||
None,
|
||||
Some(&config_file),
|
||||
|
@ -1301,7 +1453,7 @@ mod test {
|
|||
}"#;
|
||||
let config_specifier =
|
||||
ModuleSpecifier::parse("https://example.com/deno.jsonc").unwrap();
|
||||
let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
|
||||
let config_file = ConfigFile::new(config_text, config_specifier).unwrap();
|
||||
let actual = resolve_import_map_specifier(
|
||||
None,
|
||||
Some(&config_file),
|
||||
|
@ -1325,7 +1477,7 @@ mod test {
|
|||
let cwd = &std::env::current_dir().unwrap();
|
||||
let config_specifier =
|
||||
ModuleSpecifier::parse("file:///deno/deno.jsonc").unwrap();
|
||||
let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
|
||||
let config_file = ConfigFile::new(config_text, config_specifier).unwrap();
|
||||
let actual = resolve_import_map_specifier(
|
||||
Some("import-map.json"),
|
||||
Some(&config_file),
|
||||
|
@ -1347,7 +1499,8 @@ mod test {
|
|||
}"#;
|
||||
let config_specifier =
|
||||
ModuleSpecifier::parse("file:///deno/deno.jsonc").unwrap();
|
||||
let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
|
||||
let config_file =
|
||||
ConfigFile::new(config_text, config_specifier.clone()).unwrap();
|
||||
let actual = resolve_import_map_specifier(
|
||||
None,
|
||||
Some(&config_file),
|
||||
|
@ -1363,7 +1516,7 @@ mod test {
|
|||
let config_text = r#"{}"#;
|
||||
let config_specifier =
|
||||
ModuleSpecifier::parse("file:///deno/deno.jsonc").unwrap();
|
||||
let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
|
||||
let config_file = ConfigFile::new(config_text, config_specifier).unwrap();
|
||||
let actual = resolve_import_map_specifier(
|
||||
None,
|
||||
Some(&config_file),
|
||||
|
@ -1381,4 +1534,102 @@ mod test {
|
|||
let actual = actual.unwrap();
|
||||
assert_eq!(actual, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn storage_key_resolver_test() {
|
||||
let resolver = StorageKeyResolver(None);
|
||||
let specifier = ModuleSpecifier::parse("file:///a.ts").unwrap();
|
||||
assert_eq!(
|
||||
resolver.resolve_storage_key(&specifier),
|
||||
Some(specifier.to_string())
|
||||
);
|
||||
let resolver = StorageKeyResolver(Some(None));
|
||||
assert_eq!(resolver.resolve_storage_key(&specifier), None);
|
||||
let resolver = StorageKeyResolver(Some(Some("value".to_string())));
|
||||
assert_eq!(
|
||||
resolver.resolve_storage_key(&specifier),
|
||||
Some("value".to_string())
|
||||
);
|
||||
|
||||
// test empty
|
||||
let resolver = StorageKeyResolver::empty();
|
||||
assert_eq!(resolver.resolve_storage_key(&specifier), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn resolve_files_test() {
|
||||
use test_util::TempDir;
|
||||
let temp_dir = TempDir::new();
|
||||
|
||||
temp_dir.create_dir_all("data");
|
||||
temp_dir.create_dir_all("nested");
|
||||
temp_dir.create_dir_all("nested/foo");
|
||||
temp_dir.create_dir_all("nested/fizz");
|
||||
temp_dir.create_dir_all("pages");
|
||||
|
||||
temp_dir.write("data/tes.ts", "");
|
||||
temp_dir.write("data/test1.js", "");
|
||||
temp_dir.write("data/test1.ts", "");
|
||||
temp_dir.write("data/test12.ts", "");
|
||||
|
||||
temp_dir.write("nested/foo/foo.ts", "");
|
||||
temp_dir.write("nested/foo/bar.ts", "");
|
||||
temp_dir.write("nested/foo/fizz.ts", "");
|
||||
temp_dir.write("nested/foo/bazz.ts", "");
|
||||
|
||||
temp_dir.write("nested/fizz/foo.ts", "");
|
||||
temp_dir.write("nested/fizz/bar.ts", "");
|
||||
temp_dir.write("nested/fizz/fizz.ts", "");
|
||||
temp_dir.write("nested/fizz/bazz.ts", "");
|
||||
|
||||
temp_dir.write("pages/[id].ts", "");
|
||||
|
||||
let error = resolve_files(
|
||||
Some(FilesConfig {
|
||||
include: vec![temp_dir.path().join("data/**********.ts")],
|
||||
exclude: vec![],
|
||||
}),
|
||||
None,
|
||||
)
|
||||
.unwrap_err();
|
||||
assert!(error.to_string().starts_with("Failed to expand glob"));
|
||||
|
||||
let resolved_files = resolve_files(
|
||||
Some(FilesConfig {
|
||||
include: vec![
|
||||
temp_dir.path().join("data/test1.?s"),
|
||||
temp_dir.path().join("nested/foo/*.ts"),
|
||||
temp_dir.path().join("nested/fizz/*.ts"),
|
||||
temp_dir.path().join("pages/[id].ts"),
|
||||
],
|
||||
exclude: vec![temp_dir.path().join("nested/**/*bazz.ts")],
|
||||
}),
|
||||
None,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
resolved_files.include,
|
||||
vec![
|
||||
temp_dir.path().join("data/test1.js"),
|
||||
temp_dir.path().join("data/test1.ts"),
|
||||
temp_dir.path().join("nested/foo/bar.ts"),
|
||||
temp_dir.path().join("nested/foo/bazz.ts"),
|
||||
temp_dir.path().join("nested/foo/fizz.ts"),
|
||||
temp_dir.path().join("nested/foo/foo.ts"),
|
||||
temp_dir.path().join("nested/fizz/bar.ts"),
|
||||
temp_dir.path().join("nested/fizz/bazz.ts"),
|
||||
temp_dir.path().join("nested/fizz/fizz.ts"),
|
||||
temp_dir.path().join("nested/fizz/foo.ts"),
|
||||
temp_dir.path().join("pages/[id].ts"),
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
resolved_files.exclude,
|
||||
vec![
|
||||
temp_dir.path().join("nested/fizz/bazz.ts"),
|
||||
temp_dir.path().join("nested/foo/bazz.ts"),
|
||||
]
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -28,6 +28,33 @@ pub enum PackageJsonDepValueParseError {
|
|||
pub type PackageJsonDeps =
|
||||
BTreeMap<String, Result<NpmPackageReq, PackageJsonDepValueParseError>>;
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct PackageJsonDepsProvider(Option<PackageJsonDeps>);
|
||||
|
||||
impl PackageJsonDepsProvider {
|
||||
pub fn new(deps: Option<PackageJsonDeps>) -> Self {
|
||||
Self(deps)
|
||||
}
|
||||
|
||||
pub fn deps(&self) -> Option<&PackageJsonDeps> {
|
||||
self.0.as_ref()
|
||||
}
|
||||
|
||||
pub fn reqs(&self) -> Vec<&NpmPackageReq> {
|
||||
match &self.0 {
|
||||
Some(deps) => {
|
||||
let mut package_reqs = deps
|
||||
.values()
|
||||
.filter_map(|r| r.as_ref().ok())
|
||||
.collect::<Vec<_>>();
|
||||
package_reqs.sort(); // deterministic resolution
|
||||
package_reqs
|
||||
}
|
||||
None => Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets an application level package.json's npm package requirements.
|
||||
///
|
||||
/// Note that this function is not general purpose. It is specifically for
|
||||
|
|
|
@ -17,4 +17,6 @@ async function bench(fun) {
|
|||
}
|
||||
|
||||
const core = Deno[Deno.internal].core;
|
||||
bench(() => core.opAsync("op_void_async"));
|
||||
const ops = core.ops;
|
||||
const opVoidAsync = ops.op_void_async;
|
||||
bench(() => opVoidAsync());
|
||||
|
|
|
@ -17,4 +17,6 @@ async function bench(fun) {
|
|||
}
|
||||
|
||||
const core = Deno[Deno.internal].core;
|
||||
bench(() => core.opAsync("op_void_async_deferred"));
|
||||
const ops = core.ops;
|
||||
const opVoidAsyncDeferred = ops.op_void_async_deferred;
|
||||
bench(() => opVoidAsyncDeferred());
|
||||
|
|
|
@ -99,6 +99,7 @@ pub fn benchmark(
|
|||
"run",
|
||||
"--allow-all",
|
||||
"--unstable",
|
||||
"--enable-testing-features-do-not-use",
|
||||
path,
|
||||
&server_addr(port),
|
||||
],
|
||||
|
|
|
@ -7,4 +7,4 @@ const [hostname, port] = addr.split(":");
|
|||
const app = new Hono();
|
||||
app.get("/", (c) => c.text("Hello, World!"));
|
||||
|
||||
Deno.serve(app.fetch, { port: Number(port), hostname });
|
||||
Deno.serve({ port: Number(port), hostname }, app.fetch);
|
||||
|
|
|
@ -11,4 +11,4 @@ function handler() {
|
|||
return new Response(file.readable);
|
||||
}
|
||||
|
||||
serve(handler, { hostname, port: Number(port) });
|
||||
serve({ hostname, port: Number(port) }, handler);
|
||||
|
|
|
@ -8,4 +8,4 @@ function handler() {
|
|||
return new Response("Hello World");
|
||||
}
|
||||
|
||||
serve(handler, { hostname, port, reusePort: true });
|
||||
serve({ hostname, port, reusePort: true }, handler);
|
||||
|
|
|
@ -40,7 +40,10 @@ struct FixtureMessage {
|
|||
/// the end of the document and does a level of hovering and gets quick fix
|
||||
/// code actions.
|
||||
fn bench_big_file_edits(deno_exe: &Path) -> Duration {
|
||||
let mut client = LspClientBuilder::new().deno_exe(deno_exe).build();
|
||||
let mut client = LspClientBuilder::new()
|
||||
.use_diagnostic_sync(false)
|
||||
.deno_exe(deno_exe)
|
||||
.build();
|
||||
client.initialize_default();
|
||||
|
||||
client.write_notification(
|
||||
|
@ -102,7 +105,10 @@ fn bench_big_file_edits(deno_exe: &Path) -> Duration {
|
|||
}
|
||||
|
||||
fn bench_code_lens(deno_exe: &Path) -> Duration {
|
||||
let mut client = LspClientBuilder::new().deno_exe(deno_exe).build();
|
||||
let mut client = LspClientBuilder::new()
|
||||
.use_diagnostic_sync(false)
|
||||
.deno_exe(deno_exe)
|
||||
.build();
|
||||
client.initialize_default();
|
||||
|
||||
client.write_notification(
|
||||
|
@ -152,7 +158,10 @@ fn bench_code_lens(deno_exe: &Path) -> Duration {
|
|||
}
|
||||
|
||||
fn bench_find_replace(deno_exe: &Path) -> Duration {
|
||||
let mut client = LspClientBuilder::new().deno_exe(deno_exe).build();
|
||||
let mut client = LspClientBuilder::new()
|
||||
.use_diagnostic_sync(false)
|
||||
.deno_exe(deno_exe)
|
||||
.build();
|
||||
client.initialize_default();
|
||||
|
||||
for i in 0..10 {
|
||||
|
@ -238,7 +247,10 @@ fn bench_find_replace(deno_exe: &Path) -> Duration {
|
|||
|
||||
/// A test that starts up the LSP, opens a single line document, and exits.
|
||||
fn bench_startup_shutdown(deno_exe: &Path) -> Duration {
|
||||
let mut client = LspClientBuilder::new().deno_exe(deno_exe).build();
|
||||
let mut client = LspClientBuilder::new()
|
||||
.use_diagnostic_sync(false)
|
||||
.deno_exe(deno_exe)
|
||||
.build();
|
||||
client.initialize_default();
|
||||
|
||||
client.write_notification(
|
||||
|
|
|
@ -11,7 +11,7 @@ use test_util::lsp::LspClientBuilder;
|
|||
// Intended to match the benchmark in quick-lint-js
|
||||
// https://github.com/quick-lint/quick-lint-js/blob/35207e6616267c6c81be63f47ce97ec2452d60df/benchmark/benchmark-lsp/lsp-benchmarks.cpp#L223-L268
|
||||
fn incremental_change_wait(bench: &mut Bencher) {
|
||||
let mut client = LspClientBuilder::new().build();
|
||||
let mut client = LspClientBuilder::new().use_diagnostic_sync(false).build();
|
||||
client.initialize_default();
|
||||
|
||||
client.write_notification(
|
||||
|
|
12
cli/bench/testdata/deno_upgrade_http.js
vendored
12
cli/bench/testdata/deno_upgrade_http.js
vendored
|
@ -1,12 +0,0 @@
|
|||
const { serve, upgradeHttpRaw } = Deno;
|
||||
const u8 = Deno[Deno.internal].core.encode(
|
||||
"HTTP/1.1 101 Switching Protocols\r\n\r\n",
|
||||
);
|
||||
|
||||
async function handler(req) {
|
||||
const [conn, _firstPacket] = upgradeHttpRaw(req);
|
||||
await conn.write(u8);
|
||||
await conn.close();
|
||||
}
|
||||
|
||||
serve(handler, { hostname: "127.0.0.1", port: 9000 });
|
|
@ -22,4 +22,4 @@ function handler(request) {
|
|||
return response;
|
||||
}
|
||||
|
||||
serve(handler, { port: parseInt(port), hostname: "0.0.0.0" });
|
||||
serve({ port: parseInt(port), hostname: "0.0.0.0" }, handler);
|
||||
|
|
31
cli/build.rs
31
cli/build.rs
|
@ -2,13 +2,14 @@
|
|||
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_core::snapshot_util::*;
|
||||
use deno_core::Extension;
|
||||
use deno_core::ExtensionFileSource;
|
||||
use deno_core::ExtensionFileSourceCode;
|
||||
use deno_runtime::deno_cache::SqliteBackedCache;
|
||||
use deno_runtime::deno_fs::StdFs;
|
||||
use deno_runtime::deno_http::DefaultHttpPropertyExtractor;
|
||||
use deno_runtime::deno_kv::sqlite::SqliteDbHandler;
|
||||
use deno_runtime::permissions::PermissionsContainer;
|
||||
use deno_runtime::*;
|
||||
|
@ -39,7 +40,7 @@ mod ts {
|
|||
|
||||
let node_built_in_module_names = SUPPORTED_BUILTIN_NODE_MODULES
|
||||
.iter()
|
||||
.map(|s| s.name)
|
||||
.map(|p| p.module_name())
|
||||
.collect::<Vec<&str>>();
|
||||
let build_libs = state.borrow::<Vec<&str>>();
|
||||
json!({
|
||||
|
@ -261,7 +262,7 @@ mod ts {
|
|||
)
|
||||
.unwrap();
|
||||
|
||||
create_snapshot(CreateSnapshotOptions {
|
||||
let output = create_snapshot(CreateSnapshotOptions {
|
||||
cargo_manifest_dir: env!("CARGO_MANIFEST_DIR"),
|
||||
snapshot_path,
|
||||
startup_snapshot: None,
|
||||
|
@ -288,6 +289,9 @@ mod ts {
|
|||
})),
|
||||
snapshot_module_load_cb: None,
|
||||
});
|
||||
for path in output.files_loaded_during_snapshot {
|
||||
println!("cargo:rerun-if-changed={}", path.display());
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn version() -> String {
|
||||
|
@ -309,9 +313,11 @@ mod ts {
|
|||
// deps = [runtime]
|
||||
deno_core::extension!(
|
||||
cli,
|
||||
esm_entry_point = "ext:cli/99_main.js",
|
||||
esm = [
|
||||
dir "js",
|
||||
"40_testing.js"
|
||||
"40_testing.js",
|
||||
"99_main.js"
|
||||
],
|
||||
customizer = |ext: &mut deno_core::ExtensionBuilder| {
|
||||
ext.esm(vec![ExtensionFileSource {
|
||||
|
@ -323,9 +329,11 @@ deno_core::extension!(
|
|||
}
|
||||
);
|
||||
|
||||
fn create_cli_snapshot(snapshot_path: PathBuf) {
|
||||
#[must_use = "The files listed by create_cli_snapshot should be printed as 'cargo:rerun-if-changed' lines"]
|
||||
fn create_cli_snapshot(snapshot_path: PathBuf) -> CreateSnapshotOutput {
|
||||
// NOTE(bartlomieju): ordering is important here, keep it in sync with
|
||||
// `runtime/worker.rs`, `runtime/web_worker.rs` and `runtime/build.rs`!
|
||||
let fs = Arc::new(deno_fs::RealFs);
|
||||
let extensions: Vec<Extension> = vec![
|
||||
deno_webidl::deno_webidl::init_ops(),
|
||||
deno_console::deno_console::init_ops(),
|
||||
|
@ -358,10 +366,10 @@ fn create_cli_snapshot(snapshot_path: PathBuf) {
|
|||
false, // No --unstable.
|
||||
),
|
||||
deno_napi::deno_napi::init_ops::<PermissionsContainer>(),
|
||||
deno_http::deno_http::init_ops(),
|
||||
deno_http::deno_http::init_ops::<DefaultHttpPropertyExtractor>(),
|
||||
deno_io::deno_io::init_ops(Default::default()),
|
||||
deno_fs::deno_fs::init_ops::<_, PermissionsContainer>(false, StdFs),
|
||||
deno_node::deno_node::init_ops::<deno_runtime::RuntimeNodeEnv>(None),
|
||||
deno_fs::deno_fs::init_ops::<PermissionsContainer>(false, fs.clone()),
|
||||
deno_node::deno_node::init_ops::<PermissionsContainer>(None, fs),
|
||||
cli::init_ops_and_esm(), // NOTE: This needs to be init_ops_and_esm!
|
||||
];
|
||||
|
||||
|
@ -463,7 +471,7 @@ fn main() {
|
|||
);
|
||||
|
||||
let ts_version = ts::version();
|
||||
debug_assert_eq!(ts_version, "5.0.3"); // bump this assertion when it changes
|
||||
debug_assert_eq!(ts_version, "5.0.4"); // bump this assertion when it changes
|
||||
println!("cargo:rustc-env=TS_VERSION={}", ts_version);
|
||||
println!("cargo:rerun-if-env-changed=TS_VERSION");
|
||||
|
||||
|
@ -477,7 +485,10 @@ fn main() {
|
|||
ts::create_compiler_snapshot(compiler_snapshot_path, &c);
|
||||
|
||||
let cli_snapshot_path = o.join("CLI_SNAPSHOT.bin");
|
||||
create_cli_snapshot(cli_snapshot_path);
|
||||
let output = create_cli_snapshot(cli_snapshot_path);
|
||||
for path in output.files_loaded_during_snapshot {
|
||||
println!("cargo:rerun-if-changed={}", path.display())
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
|
|
24
cli/cache/cache_db.rs
vendored
24
cli/cache/cache_db.rs
vendored
|
@ -3,6 +3,7 @@
|
|||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::parking_lot::MutexGuard;
|
||||
use deno_core::task::spawn_blocking;
|
||||
use deno_runtime::deno_webstorage::rusqlite;
|
||||
use deno_runtime::deno_webstorage::rusqlite::Connection;
|
||||
use deno_runtime::deno_webstorage::rusqlite::OptionalExtension;
|
||||
|
@ -95,7 +96,7 @@ impl Drop for CacheDB {
|
|||
// Hand off SQLite connection to another thread to do the surprisingly expensive cleanup
|
||||
let inner = inner.into_inner().into_inner();
|
||||
if let Some(conn) = inner {
|
||||
tokio::task::spawn_blocking(move || {
|
||||
spawn_blocking(move || {
|
||||
drop(conn);
|
||||
log::trace!(
|
||||
"Cleaned up SQLite connection at {}",
|
||||
|
@ -108,7 +109,6 @@ impl Drop for CacheDB {
|
|||
}
|
||||
|
||||
impl CacheDB {
|
||||
#[cfg(test)]
|
||||
pub fn in_memory(
|
||||
config: &'static CacheDBConfiguration,
|
||||
version: &'static str,
|
||||
|
@ -168,7 +168,7 @@ impl CacheDB {
|
|||
fn spawn_eager_init_thread(&self) {
|
||||
let clone = self.clone();
|
||||
debug_assert!(tokio::runtime::Handle::try_current().is_ok());
|
||||
tokio::task::spawn_blocking(move || {
|
||||
spawn_blocking(move || {
|
||||
let lock = clone.conn.lock();
|
||||
clone.initialize(&lock);
|
||||
});
|
||||
|
@ -261,7 +261,9 @@ impl CacheDB {
|
|||
};
|
||||
|
||||
// Failed, try deleting it
|
||||
log::warn!(
|
||||
let is_tty = atty::is(atty::Stream::Stderr);
|
||||
log::log!(
|
||||
if is_tty { log::Level::Warn } else { log::Level::Trace },
|
||||
"Could not initialize cache database '{}', deleting and retrying... ({err:?})",
|
||||
path.to_string_lossy()
|
||||
);
|
||||
|
@ -275,7 +277,12 @@ impl CacheDB {
|
|||
|
||||
match self.config.on_failure {
|
||||
CacheFailure::InMemory => {
|
||||
log::error!(
|
||||
log::log!(
|
||||
if is_tty {
|
||||
log::Level::Error
|
||||
} else {
|
||||
log::Level::Trace
|
||||
},
|
||||
"Failed to open cache file '{}', opening in-memory cache.",
|
||||
path.to_string_lossy()
|
||||
);
|
||||
|
@ -284,7 +291,12 @@ impl CacheDB {
|
|||
))
|
||||
}
|
||||
CacheFailure::Blackhole => {
|
||||
log::error!(
|
||||
log::log!(
|
||||
if is_tty {
|
||||
log::Level::Error
|
||||
} else {
|
||||
log::Level::Trace
|
||||
},
|
||||
"Failed to open cache file '{}', performance may be degraded.",
|
||||
path.to_string_lossy()
|
||||
);
|
||||
|
|
66
cli/cache/caches.rs
vendored
66
cli/cache/caches.rs
vendored
|
@ -1,19 +1,20 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use once_cell::sync::OnceCell;
|
||||
|
||||
use super::cache_db::CacheDB;
|
||||
use super::cache_db::CacheDBConfiguration;
|
||||
use super::check::TYPE_CHECK_CACHE_DB;
|
||||
use super::deno_dir::DenoDirProvider;
|
||||
use super::incremental::INCREMENTAL_CACHE_DB;
|
||||
use super::node::NODE_ANALYSIS_CACHE_DB;
|
||||
use super::parsed_source::PARSED_SOURCE_CACHE_DB;
|
||||
use super::DenoDir;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Caches {
|
||||
dir_provider: Arc<DenoDirProvider>,
|
||||
fmt_incremental_cache_db: OnceCell<CacheDB>,
|
||||
lint_incremental_cache_db: OnceCell<CacheDB>,
|
||||
dep_analysis_db: OnceCell<CacheDB>,
|
||||
|
@ -22,53 +23,90 @@ pub struct Caches {
|
|||
}
|
||||
|
||||
impl Caches {
|
||||
pub fn new(dir: Arc<DenoDirProvider>) -> Self {
|
||||
Self {
|
||||
dir_provider: dir,
|
||||
fmt_incremental_cache_db: Default::default(),
|
||||
lint_incremental_cache_db: Default::default(),
|
||||
dep_analysis_db: Default::default(),
|
||||
node_analysis_db: Default::default(),
|
||||
type_checking_cache_db: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn make_db(
|
||||
cell: &OnceCell<CacheDB>,
|
||||
config: &'static CacheDBConfiguration,
|
||||
path: PathBuf,
|
||||
path: Option<PathBuf>,
|
||||
) -> CacheDB {
|
||||
cell
|
||||
.get_or_init(|| CacheDB::from_path(config, path, crate::version::deno()))
|
||||
.get_or_init(|| {
|
||||
if let Some(path) = path {
|
||||
CacheDB::from_path(config, path, crate::version::deno())
|
||||
} else {
|
||||
CacheDB::in_memory(config, crate::version::deno())
|
||||
}
|
||||
})
|
||||
.clone()
|
||||
}
|
||||
|
||||
pub fn fmt_incremental_cache_db(&self, dir: &DenoDir) -> CacheDB {
|
||||
pub fn fmt_incremental_cache_db(&self) -> CacheDB {
|
||||
Self::make_db(
|
||||
&self.fmt_incremental_cache_db,
|
||||
&INCREMENTAL_CACHE_DB,
|
||||
dir.fmt_incremental_cache_db_file_path(),
|
||||
self
|
||||
.dir_provider
|
||||
.get_or_create()
|
||||
.ok()
|
||||
.map(|dir| dir.fmt_incremental_cache_db_file_path()),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn lint_incremental_cache_db(&self, dir: &DenoDir) -> CacheDB {
|
||||
pub fn lint_incremental_cache_db(&self) -> CacheDB {
|
||||
Self::make_db(
|
||||
&self.lint_incremental_cache_db,
|
||||
&INCREMENTAL_CACHE_DB,
|
||||
dir.lint_incremental_cache_db_file_path(),
|
||||
self
|
||||
.dir_provider
|
||||
.get_or_create()
|
||||
.ok()
|
||||
.map(|dir| dir.lint_incremental_cache_db_file_path()),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn dep_analysis_db(&self, dir: &DenoDir) -> CacheDB {
|
||||
pub fn dep_analysis_db(&self) -> CacheDB {
|
||||
Self::make_db(
|
||||
&self.dep_analysis_db,
|
||||
&PARSED_SOURCE_CACHE_DB,
|
||||
dir.dep_analysis_db_file_path(),
|
||||
self
|
||||
.dir_provider
|
||||
.get_or_create()
|
||||
.ok()
|
||||
.map(|dir| dir.dep_analysis_db_file_path()),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn node_analysis_db(&self, dir: &DenoDir) -> CacheDB {
|
||||
pub fn node_analysis_db(&self) -> CacheDB {
|
||||
Self::make_db(
|
||||
&self.node_analysis_db,
|
||||
&NODE_ANALYSIS_CACHE_DB,
|
||||
dir.node_analysis_db_file_path(),
|
||||
self
|
||||
.dir_provider
|
||||
.get_or_create()
|
||||
.ok()
|
||||
.map(|dir| dir.node_analysis_db_file_path()),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn type_checking_cache_db(&self, dir: &DenoDir) -> CacheDB {
|
||||
pub fn type_checking_cache_db(&self) -> CacheDB {
|
||||
Self::make_db(
|
||||
&self.type_checking_cache_db,
|
||||
&TYPE_CHECK_CACHE_DB,
|
||||
dir.type_checking_cache_db_file_path(),
|
||||
self
|
||||
.dir_provider
|
||||
.get_or_create()
|
||||
.ok()
|
||||
.map(|dir| dir.type_checking_cache_db_file_path()),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
28
cli/cache/deno_dir.rs
vendored
28
cli/cache/deno_dir.rs
vendored
|
@ -1,10 +1,36 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use once_cell::sync::OnceCell;
|
||||
|
||||
use super::DiskCache;
|
||||
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Lazily creates the deno dir which might be useful in scenarios
|
||||
/// where functionality wants to continue if the DENO_DIR can't be created.
|
||||
pub struct DenoDirProvider {
|
||||
maybe_custom_root: Option<PathBuf>,
|
||||
deno_dir: OnceCell<std::io::Result<DenoDir>>,
|
||||
}
|
||||
|
||||
impl DenoDirProvider {
|
||||
pub fn new(maybe_custom_root: Option<PathBuf>) -> Self {
|
||||
Self {
|
||||
maybe_custom_root,
|
||||
deno_dir: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_or_create(&self) -> Result<&DenoDir, std::io::Error> {
|
||||
self
|
||||
.deno_dir
|
||||
.get_or_init(|| DenoDir::new(self.maybe_custom_root.clone()))
|
||||
.as_ref()
|
||||
.map_err(|err| std::io::Error::new(err.kind(), err.to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
/// `DenoDir` serves as coordinator for multiple `DiskCache`s containing them
|
||||
/// in single directory that can be controlled with `$DENO_DIR` env variable.
|
||||
#[derive(Clone)]
|
||||
|
@ -18,6 +44,8 @@ pub struct DenoDir {
|
|||
|
||||
impl DenoDir {
|
||||
pub fn new(maybe_custom_root: Option<PathBuf>) -> std::io::Result<Self> {
|
||||
let maybe_custom_root =
|
||||
maybe_custom_root.or_else(|| env::var("DENO_DIR").map(String::into).ok());
|
||||
let root: PathBuf = if let Some(root) = maybe_custom_root {
|
||||
root
|
||||
} else if let Some(cache_dir) = dirs::cache_dir() {
|
||||
|
|
5
cli/cache/incremental.rs
vendored
5
cli/cache/incremental.rs
vendored
|
@ -7,9 +7,10 @@ use std::path::PathBuf;
|
|||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::task::spawn;
|
||||
use deno_core::task::JoinHandle;
|
||||
use deno_runtime::deno_webstorage::rusqlite::params;
|
||||
use serde::Serialize;
|
||||
use tokio::task::JoinHandle;
|
||||
|
||||
use super::cache_db::CacheDB;
|
||||
use super::cache_db::CacheDBConfiguration;
|
||||
|
@ -93,7 +94,7 @@ impl IncrementalCacheInner {
|
|||
tokio::sync::mpsc::unbounded_channel::<ReceiverMessage>();
|
||||
|
||||
// sqlite isn't `Sync`, so we do all the updating on a dedicated task
|
||||
let handle = tokio::task::spawn(async move {
|
||||
let handle = spawn(async move {
|
||||
while let Some(message) = receiver.recv().await {
|
||||
match message {
|
||||
ReceiverMessage::Update(path, hash) => {
|
||||
|
|
18
cli/cache/mod.rs
vendored
18
cli/cache/mod.rs
vendored
|
@ -30,6 +30,7 @@ pub use caches::Caches;
|
|||
pub use check::TypeCheckCache;
|
||||
pub use common::FastInsecureHasher;
|
||||
pub use deno_dir::DenoDir;
|
||||
pub use deno_dir::DenoDirProvider;
|
||||
pub use disk_cache::DiskCache;
|
||||
pub use emit::EmitCache;
|
||||
pub use http_cache::CachedUrlMetadata;
|
||||
|
@ -45,10 +46,9 @@ pub const CACHE_PERM: u32 = 0o644;
|
|||
/// a concise interface to the DENO_DIR when building module graphs.
|
||||
pub struct FetchCacher {
|
||||
emit_cache: EmitCache,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
file_fetcher: Arc<FileFetcher>,
|
||||
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
|
||||
root_permissions: PermissionsContainer,
|
||||
permissions: PermissionsContainer,
|
||||
cache_info_enabled: bool,
|
||||
maybe_local_node_modules_url: Option<ModuleSpecifier>,
|
||||
}
|
||||
|
@ -58,16 +58,14 @@ impl FetchCacher {
|
|||
emit_cache: EmitCache,
|
||||
file_fetcher: Arc<FileFetcher>,
|
||||
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
permissions: PermissionsContainer,
|
||||
maybe_local_node_modules_url: Option<ModuleSpecifier>,
|
||||
) -> Self {
|
||||
Self {
|
||||
emit_cache,
|
||||
dynamic_permissions,
|
||||
file_fetcher,
|
||||
file_header_overrides,
|
||||
root_permissions,
|
||||
permissions,
|
||||
cache_info_enabled: false,
|
||||
maybe_local_node_modules_url,
|
||||
}
|
||||
|
@ -105,7 +103,7 @@ impl Loader for FetchCacher {
|
|||
fn load(
|
||||
&mut self,
|
||||
specifier: &ModuleSpecifier,
|
||||
is_dynamic: bool,
|
||||
_is_dynamic: bool,
|
||||
) -> LoadFuture {
|
||||
if let Some(node_modules_url) = self.maybe_local_node_modules_url.as_ref() {
|
||||
// The specifier might be in a completely different symlinked tree than
|
||||
|
@ -124,11 +122,7 @@ impl Loader for FetchCacher {
|
|||
}
|
||||
}
|
||||
|
||||
let permissions = if is_dynamic {
|
||||
self.dynamic_permissions.clone()
|
||||
} else {
|
||||
self.root_permissions.clone()
|
||||
};
|
||||
let permissions = self.permissions.clone();
|
||||
let file_fetcher = self.file_fetcher.clone();
|
||||
let file_header_overrides = self.file_header_overrides.clone();
|
||||
let specifier = specifier.clone();
|
||||
|
|
8
cli/cache/node.rs
vendored
8
cli/cache/node.rs
vendored
|
@ -42,14 +42,6 @@ pub struct NodeAnalysisCache {
|
|||
}
|
||||
|
||||
impl NodeAnalysisCache {
|
||||
#[cfg(test)]
|
||||
pub fn new_in_memory() -> Self {
|
||||
Self::new(CacheDB::in_memory(
|
||||
&NODE_ANALYSIS_CACHE_DB,
|
||||
crate::version::deno(),
|
||||
))
|
||||
}
|
||||
|
||||
pub fn new(db: CacheDB) -> Self {
|
||||
Self {
|
||||
inner: NodeAnalysisCacheInner::new(db),
|
||||
|
|
|
@ -2,4 +2,4 @@
|
|||
|
||||
// WARNING: Ensure this is the only deno_std version reference as this
|
||||
// is automatically updated by the version bump workflow.
|
||||
pub const CURRENT_STD_URL_STR: &str = "https://deno.land/std@0.184.0/";
|
||||
pub const CURRENT_STD_URL_STR: &str = "https://deno.land/std@0.190.0/";
|
||||
|
|
728
cli/factory.rs
Normal file
728
cli/factory.rs
Normal file
|
@ -0,0 +1,728 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::args::npm_pkg_req_ref_to_binary_command;
|
||||
use crate::args::CliOptions;
|
||||
use crate::args::DenoSubcommand;
|
||||
use crate::args::Flags;
|
||||
use crate::args::Lockfile;
|
||||
use crate::args::PackageJsonDepsProvider;
|
||||
use crate::args::StorageKeyResolver;
|
||||
use crate::args::TsConfigType;
|
||||
use crate::cache::Caches;
|
||||
use crate::cache::DenoDir;
|
||||
use crate::cache::DenoDirProvider;
|
||||
use crate::cache::EmitCache;
|
||||
use crate::cache::HttpCache;
|
||||
use crate::cache::NodeAnalysisCache;
|
||||
use crate::cache::ParsedSourceCache;
|
||||
use crate::emit::Emitter;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::graph_util::ModuleGraphBuilder;
|
||||
use crate::graph_util::ModuleGraphContainer;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::module_loader::CjsResolutionStore;
|
||||
use crate::module_loader::CliModuleLoaderFactory;
|
||||
use crate::module_loader::ModuleLoadPreparer;
|
||||
use crate::module_loader::NpmModuleLoader;
|
||||
use crate::node::CliCjsEsmCodeAnalyzer;
|
||||
use crate::node::CliNodeCodeTranslator;
|
||||
use crate::npm::create_npm_fs_resolver;
|
||||
use crate::npm::CliNpmRegistryApi;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::npm::NpmCache;
|
||||
use crate::npm::NpmPackageFsResolver;
|
||||
use crate::npm::NpmResolution;
|
||||
use crate::npm::PackageJsonDepsInstaller;
|
||||
use crate::resolver::CliGraphResolver;
|
||||
use crate::standalone::DenoCompileBinaryWriter;
|
||||
use crate::tools::check::TypeChecker;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
use crate::util::progress_bar::ProgressBarStyle;
|
||||
use crate::watcher::FileWatcher;
|
||||
use crate::watcher::FileWatcherReporter;
|
||||
use crate::worker::CliMainWorkerFactory;
|
||||
use crate::worker::CliMainWorkerOptions;
|
||||
use crate::worker::HasNodeSpecifierChecker;
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
|
||||
use deno_graph::GraphKind;
|
||||
use deno_runtime::deno_fs;
|
||||
use deno_runtime::deno_node::analyze::NodeCodeTranslator;
|
||||
use deno_runtime::deno_node::NodeResolver;
|
||||
use deno_runtime::deno_tls::RootCertStoreProvider;
|
||||
use deno_runtime::deno_web::BlobStore;
|
||||
use deno_runtime::inspector_server::InspectorServer;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use import_map::ImportMap;
|
||||
use log::warn;
|
||||
use std::cell::RefCell;
|
||||
use std::future::Future;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub struct CliFactoryBuilder {
|
||||
maybe_sender: Option<tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>>,
|
||||
}
|
||||
|
||||
impl CliFactoryBuilder {
|
||||
pub fn new() -> Self {
|
||||
Self { maybe_sender: None }
|
||||
}
|
||||
|
||||
pub fn with_watcher(
|
||||
mut self,
|
||||
sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>,
|
||||
) -> Self {
|
||||
self.maybe_sender = Some(sender);
|
||||
self
|
||||
}
|
||||
|
||||
pub async fn build_from_flags(
|
||||
self,
|
||||
flags: Flags,
|
||||
) -> Result<CliFactory, AnyError> {
|
||||
Ok(self.build_from_cli_options(Arc::new(CliOptions::from_flags(flags)?)))
|
||||
}
|
||||
|
||||
pub fn build_from_cli_options(self, options: Arc<CliOptions>) -> CliFactory {
|
||||
CliFactory {
|
||||
maybe_sender: RefCell::new(self.maybe_sender),
|
||||
options,
|
||||
services: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct Deferred<T>(once_cell::unsync::OnceCell<T>);
|
||||
|
||||
impl<T> Default for Deferred<T> {
|
||||
fn default() -> Self {
|
||||
Self(once_cell::unsync::OnceCell::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Deferred<T> {
|
||||
pub fn get_or_try_init(
|
||||
&self,
|
||||
create: impl FnOnce() -> Result<T, AnyError>,
|
||||
) -> Result<&T, AnyError> {
|
||||
self.0.get_or_try_init(create)
|
||||
}
|
||||
|
||||
pub fn get_or_init(&self, create: impl FnOnce() -> T) -> &T {
|
||||
self.0.get_or_init(create)
|
||||
}
|
||||
|
||||
pub async fn get_or_try_init_async(
|
||||
&self,
|
||||
create: impl Future<Output = Result<T, AnyError>>,
|
||||
) -> Result<&T, AnyError> {
|
||||
if self.0.get().is_none() {
|
||||
// todo(dsherret): it would be more ideal if this enforced a
|
||||
// single executor and then we could make some initialization
|
||||
// concurrent
|
||||
let val = create.await?;
|
||||
_ = self.0.set(val);
|
||||
}
|
||||
Ok(self.0.get().unwrap())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct CliFactoryServices {
|
||||
deno_dir_provider: Deferred<Arc<DenoDirProvider>>,
|
||||
caches: Deferred<Arc<Caches>>,
|
||||
file_fetcher: Deferred<Arc<FileFetcher>>,
|
||||
http_client: Deferred<Arc<HttpClient>>,
|
||||
emit_cache: Deferred<EmitCache>,
|
||||
emitter: Deferred<Arc<Emitter>>,
|
||||
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
|
||||
graph_container: Deferred<Arc<ModuleGraphContainer>>,
|
||||
lockfile: Deferred<Option<Arc<Mutex<Lockfile>>>>,
|
||||
maybe_import_map: Deferred<Option<Arc<ImportMap>>>,
|
||||
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
|
||||
root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
|
||||
blob_store: Deferred<BlobStore>,
|
||||
parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
|
||||
resolver: Deferred<Arc<CliGraphResolver>>,
|
||||
file_watcher: Deferred<Arc<FileWatcher>>,
|
||||
maybe_file_watcher_reporter: Deferred<Option<FileWatcherReporter>>,
|
||||
module_graph_builder: Deferred<Arc<ModuleGraphBuilder>>,
|
||||
module_load_preparer: Deferred<Arc<ModuleLoadPreparer>>,
|
||||
node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>,
|
||||
node_resolver: Deferred<Arc<NodeResolver>>,
|
||||
npm_api: Deferred<Arc<CliNpmRegistryApi>>,
|
||||
npm_cache: Deferred<Arc<NpmCache>>,
|
||||
npm_resolver: Deferred<Arc<CliNpmResolver>>,
|
||||
npm_resolution: Deferred<Arc<NpmResolution>>,
|
||||
package_json_deps_provider: Deferred<Arc<PackageJsonDepsProvider>>,
|
||||
package_json_deps_installer: Deferred<Arc<PackageJsonDepsInstaller>>,
|
||||
text_only_progress_bar: Deferred<ProgressBar>,
|
||||
type_checker: Deferred<Arc<TypeChecker>>,
|
||||
cjs_resolutions: Deferred<Arc<CjsResolutionStore>>,
|
||||
}
|
||||
|
||||
pub struct CliFactory {
|
||||
maybe_sender:
|
||||
RefCell<Option<tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>>>,
|
||||
options: Arc<CliOptions>,
|
||||
services: CliFactoryServices,
|
||||
}
|
||||
|
||||
impl CliFactory {
|
||||
pub async fn from_flags(flags: Flags) -> Result<Self, AnyError> {
|
||||
CliFactoryBuilder::new().build_from_flags(flags).await
|
||||
}
|
||||
|
||||
pub fn from_cli_options(options: Arc<CliOptions>) -> Self {
|
||||
CliFactoryBuilder::new().build_from_cli_options(options)
|
||||
}
|
||||
|
||||
pub fn cli_options(&self) -> &Arc<CliOptions> {
|
||||
&self.options
|
||||
}
|
||||
|
||||
pub fn deno_dir_provider(&self) -> &Arc<DenoDirProvider> {
|
||||
self.services.deno_dir_provider.get_or_init(|| {
|
||||
Arc::new(DenoDirProvider::new(
|
||||
self.options.maybe_custom_root().clone(),
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn deno_dir(&self) -> Result<&DenoDir, AnyError> {
|
||||
Ok(self.deno_dir_provider().get_or_create()?)
|
||||
}
|
||||
|
||||
pub fn caches(&self) -> Result<&Arc<Caches>, AnyError> {
|
||||
self.services.caches.get_or_try_init(|| {
|
||||
let caches = Arc::new(Caches::new(self.deno_dir_provider().clone()));
|
||||
// Warm up the caches we know we'll likely need based on the CLI mode
|
||||
match self.options.sub_command() {
|
||||
DenoSubcommand::Run(_) => {
|
||||
_ = caches.dep_analysis_db();
|
||||
_ = caches.node_analysis_db();
|
||||
}
|
||||
DenoSubcommand::Check(_) => {
|
||||
_ = caches.dep_analysis_db();
|
||||
_ = caches.node_analysis_db();
|
||||
_ = caches.type_checking_cache_db();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
Ok(caches)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn blob_store(&self) -> &BlobStore {
|
||||
self.services.blob_store.get_or_init(BlobStore::default)
|
||||
}
|
||||
|
||||
pub fn root_cert_store_provider(&self) -> &Arc<dyn RootCertStoreProvider> {
|
||||
self
|
||||
.services
|
||||
.root_cert_store_provider
|
||||
.get_or_init(|| self.options.resolve_root_cert_store_provider())
|
||||
}
|
||||
|
||||
pub fn text_only_progress_bar(&self) -> &ProgressBar {
|
||||
self
|
||||
.services
|
||||
.text_only_progress_bar
|
||||
.get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly))
|
||||
}
|
||||
|
||||
pub fn http_client(&self) -> &Arc<HttpClient> {
|
||||
self.services.http_client.get_or_init(|| {
|
||||
Arc::new(HttpClient::new(
|
||||
Some(self.root_cert_store_provider().clone()),
|
||||
self.options.unsafely_ignore_certificate_errors().clone(),
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn file_fetcher(&self) -> Result<&Arc<FileFetcher>, AnyError> {
|
||||
self.services.file_fetcher.get_or_try_init(|| {
|
||||
Ok(Arc::new(FileFetcher::new(
|
||||
HttpCache::new(&self.deno_dir()?.deps_folder_path()),
|
||||
self.options.cache_setting(),
|
||||
!self.options.no_remote(),
|
||||
self.http_client().clone(),
|
||||
self.blob_store().clone(),
|
||||
Some(self.text_only_progress_bar().clone()),
|
||||
)))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn fs(&self) -> &Arc<dyn deno_fs::FileSystem> {
|
||||
self.services.fs.get_or_init(|| Arc::new(deno_fs::RealFs))
|
||||
}
|
||||
|
||||
pub fn maybe_lockfile(&self) -> &Option<Arc<Mutex<Lockfile>>> {
|
||||
self
|
||||
.services
|
||||
.lockfile
|
||||
.get_or_init(|| self.options.maybe_lockfile())
|
||||
}
|
||||
|
||||
pub fn npm_cache(&self) -> Result<&Arc<NpmCache>, AnyError> {
|
||||
self.services.npm_cache.get_or_try_init(|| {
|
||||
Ok(Arc::new(NpmCache::new(
|
||||
self.deno_dir()?.npm_folder_path(),
|
||||
self.options.cache_setting(),
|
||||
self.http_client().clone(),
|
||||
self.text_only_progress_bar().clone(),
|
||||
)))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn npm_api(&self) -> Result<&Arc<CliNpmRegistryApi>, AnyError> {
|
||||
self.services.npm_api.get_or_try_init(|| {
|
||||
Ok(Arc::new(CliNpmRegistryApi::new(
|
||||
CliNpmRegistryApi::default_url().to_owned(),
|
||||
self.npm_cache()?.clone(),
|
||||
self.http_client().clone(),
|
||||
self.text_only_progress_bar().clone(),
|
||||
)))
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn npm_resolution(&self) -> Result<&Arc<NpmResolution>, AnyError> {
|
||||
self
|
||||
.services
|
||||
.npm_resolution
|
||||
.get_or_try_init_async(async {
|
||||
let npm_api = self.npm_api()?;
|
||||
Ok(Arc::new(NpmResolution::from_serialized(
|
||||
npm_api.clone(),
|
||||
self
|
||||
.options
|
||||
.resolve_npm_resolution_snapshot(npm_api)
|
||||
.await?,
|
||||
self.maybe_lockfile().as_ref().cloned(),
|
||||
)))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn npm_resolver(&self) -> Result<&Arc<CliNpmResolver>, AnyError> {
|
||||
self
|
||||
.services
|
||||
.npm_resolver
|
||||
.get_or_try_init_async(async {
|
||||
let npm_resolution = self.npm_resolution().await?;
|
||||
let fs = self.fs().clone();
|
||||
let npm_fs_resolver = create_npm_fs_resolver(
|
||||
fs.clone(),
|
||||
self.npm_cache()?.clone(),
|
||||
self.text_only_progress_bar(),
|
||||
CliNpmRegistryApi::default_url().to_owned(),
|
||||
npm_resolution.clone(),
|
||||
self.options.node_modules_dir_path(),
|
||||
self.options.npm_system_info(),
|
||||
);
|
||||
Ok(Arc::new(CliNpmResolver::new(
|
||||
fs.clone(),
|
||||
npm_resolution.clone(),
|
||||
npm_fs_resolver,
|
||||
self.maybe_lockfile().as_ref().cloned(),
|
||||
)))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn create_node_modules_npm_fs_resolver(
|
||||
&self,
|
||||
node_modules_dir_path: PathBuf,
|
||||
) -> Result<Arc<dyn NpmPackageFsResolver>, AnyError> {
|
||||
Ok(create_npm_fs_resolver(
|
||||
self.fs().clone(),
|
||||
self.npm_cache()?.clone(),
|
||||
self.text_only_progress_bar(),
|
||||
CliNpmRegistryApi::default_url().to_owned(),
|
||||
self.npm_resolution().await?.clone(),
|
||||
// when an explicit path is provided here, it will create the
|
||||
// local node_modules variant of an npm fs resolver
|
||||
Some(node_modules_dir_path),
|
||||
self.options.npm_system_info(),
|
||||
))
|
||||
}
|
||||
|
||||
pub fn package_json_deps_provider(&self) -> &Arc<PackageJsonDepsProvider> {
|
||||
self.services.package_json_deps_provider.get_or_init(|| {
|
||||
Arc::new(PackageJsonDepsProvider::new(
|
||||
self.options.maybe_package_json_deps(),
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn package_json_deps_installer(
|
||||
&self,
|
||||
) -> Result<&Arc<PackageJsonDepsInstaller>, AnyError> {
|
||||
self
|
||||
.services
|
||||
.package_json_deps_installer
|
||||
.get_or_try_init_async(async {
|
||||
Ok(Arc::new(PackageJsonDepsInstaller::new(
|
||||
self.package_json_deps_provider().clone(),
|
||||
self.npm_api()?.clone(),
|
||||
self.npm_resolution().await?.clone(),
|
||||
)))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn maybe_import_map(
|
||||
&self,
|
||||
) -> Result<&Option<Arc<ImportMap>>, AnyError> {
|
||||
self
|
||||
.services
|
||||
.maybe_import_map
|
||||
.get_or_try_init_async(async {
|
||||
Ok(
|
||||
self
|
||||
.options
|
||||
.resolve_import_map(self.file_fetcher()?)
|
||||
.await?
|
||||
.map(Arc::new),
|
||||
)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn resolver(&self) -> Result<&Arc<CliGraphResolver>, AnyError> {
|
||||
self
|
||||
.services
|
||||
.resolver
|
||||
.get_or_try_init_async(async {
|
||||
Ok(Arc::new(CliGraphResolver::new(
|
||||
self.options.to_maybe_jsx_import_source_config(),
|
||||
self.maybe_import_map().await?.clone(),
|
||||
self.options.no_npm(),
|
||||
self.npm_api()?.clone(),
|
||||
self.npm_resolution().await?.clone(),
|
||||
self.package_json_deps_provider().clone(),
|
||||
self.package_json_deps_installer().await?.clone(),
|
||||
)))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub fn file_watcher(&self) -> Result<&Arc<FileWatcher>, AnyError> {
|
||||
self.services.file_watcher.get_or_try_init(|| {
|
||||
let watcher = FileWatcher::new(
|
||||
self.options.clone(),
|
||||
self.cjs_resolutions().clone(),
|
||||
self.graph_container().clone(),
|
||||
self.maybe_file_watcher_reporter().clone(),
|
||||
self.parsed_source_cache()?.clone(),
|
||||
);
|
||||
watcher.init_watcher();
|
||||
Ok(Arc::new(watcher))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn maybe_file_watcher_reporter(&self) -> &Option<FileWatcherReporter> {
|
||||
let maybe_sender = self.maybe_sender.borrow_mut().take();
|
||||
self
|
||||
.services
|
||||
.maybe_file_watcher_reporter
|
||||
.get_or_init(|| maybe_sender.map(FileWatcherReporter::new))
|
||||
}
|
||||
|
||||
pub fn emit_cache(&self) -> Result<&EmitCache, AnyError> {
|
||||
self.services.emit_cache.get_or_try_init(|| {
|
||||
Ok(EmitCache::new(self.deno_dir()?.gen_cache.clone()))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn parsed_source_cache(
|
||||
&self,
|
||||
) -> Result<&Arc<ParsedSourceCache>, AnyError> {
|
||||
self.services.parsed_source_cache.get_or_try_init(|| {
|
||||
Ok(Arc::new(ParsedSourceCache::new(
|
||||
self.caches()?.dep_analysis_db(),
|
||||
)))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn emitter(&self) -> Result<&Arc<Emitter>, AnyError> {
|
||||
self.services.emitter.get_or_try_init(|| {
|
||||
let ts_config_result = self
|
||||
.options
|
||||
.resolve_ts_config_for_emit(TsConfigType::Emit)?;
|
||||
if let Some(ignored_options) = ts_config_result.maybe_ignored_options {
|
||||
warn!("{}", ignored_options);
|
||||
}
|
||||
let emit_options: deno_ast::EmitOptions =
|
||||
ts_config_result.ts_config.into();
|
||||
Ok(Arc::new(Emitter::new(
|
||||
self.emit_cache()?.clone(),
|
||||
self.parsed_source_cache()?.clone(),
|
||||
emit_options,
|
||||
)))
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn node_resolver(&self) -> Result<&Arc<NodeResolver>, AnyError> {
|
||||
self
|
||||
.services
|
||||
.node_resolver
|
||||
.get_or_try_init_async(async {
|
||||
Ok(Arc::new(NodeResolver::new(
|
||||
self.fs().clone(),
|
||||
self.npm_resolver().await?.clone(),
|
||||
)))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn node_code_translator(
|
||||
&self,
|
||||
) -> Result<&Arc<CliNodeCodeTranslator>, AnyError> {
|
||||
self
|
||||
.services
|
||||
.node_code_translator
|
||||
.get_or_try_init_async(async {
|
||||
let caches = self.caches()?;
|
||||
let node_analysis_cache =
|
||||
NodeAnalysisCache::new(caches.node_analysis_db());
|
||||
let cjs_esm_analyzer = CliCjsEsmCodeAnalyzer::new(node_analysis_cache);
|
||||
|
||||
Ok(Arc::new(NodeCodeTranslator::new(
|
||||
cjs_esm_analyzer,
|
||||
self.fs().clone(),
|
||||
self.node_resolver().await?.clone(),
|
||||
self.npm_resolver().await?.clone(),
|
||||
)))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn type_checker(&self) -> Result<&Arc<TypeChecker>, AnyError> {
|
||||
self
|
||||
.services
|
||||
.type_checker
|
||||
.get_or_try_init_async(async {
|
||||
Ok(Arc::new(TypeChecker::new(
|
||||
self.caches()?.clone(),
|
||||
self.options.clone(),
|
||||
self.node_resolver().await?.clone(),
|
||||
self.npm_resolver().await?.clone(),
|
||||
)))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn module_graph_builder(
|
||||
&self,
|
||||
) -> Result<&Arc<ModuleGraphBuilder>, AnyError> {
|
||||
self
|
||||
.services
|
||||
.module_graph_builder
|
||||
.get_or_try_init_async(async {
|
||||
Ok(Arc::new(ModuleGraphBuilder::new(
|
||||
self.options.clone(),
|
||||
self.resolver().await?.clone(),
|
||||
self.npm_resolver().await?.clone(),
|
||||
self.parsed_source_cache()?.clone(),
|
||||
self.maybe_lockfile().clone(),
|
||||
self.emit_cache()?.clone(),
|
||||
self.file_fetcher()?.clone(),
|
||||
self.type_checker().await?.clone(),
|
||||
)))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub fn graph_container(&self) -> &Arc<ModuleGraphContainer> {
|
||||
self.services.graph_container.get_or_init(|| {
|
||||
let graph_kind = match self.options.sub_command() {
|
||||
// todo(dsherret): ideally the graph container would not be used
|
||||
// for deno cache because it doesn't dynamically load modules
|
||||
DenoSubcommand::Cache(_) => GraphKind::All,
|
||||
_ => self.options.type_check_mode().as_graph_kind(),
|
||||
};
|
||||
Arc::new(ModuleGraphContainer::new(graph_kind))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn maybe_inspector_server(&self) -> &Option<Arc<InspectorServer>> {
|
||||
self
|
||||
.services
|
||||
.maybe_inspector_server
|
||||
.get_or_init(|| self.options.resolve_inspector_server().map(Arc::new))
|
||||
}
|
||||
|
||||
pub async fn module_load_preparer(
|
||||
&self,
|
||||
) -> Result<&Arc<ModuleLoadPreparer>, AnyError> {
|
||||
self
|
||||
.services
|
||||
.module_load_preparer
|
||||
.get_or_try_init_async(async {
|
||||
Ok(Arc::new(ModuleLoadPreparer::new(
|
||||
self.options.clone(),
|
||||
self.graph_container().clone(),
|
||||
self.maybe_lockfile().clone(),
|
||||
self.maybe_file_watcher_reporter().clone(),
|
||||
self.module_graph_builder().await?.clone(),
|
||||
self.parsed_source_cache()?.clone(),
|
||||
self.text_only_progress_bar().clone(),
|
||||
self.resolver().await?.clone(),
|
||||
self.type_checker().await?.clone(),
|
||||
)))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub fn cjs_resolutions(&self) -> &Arc<CjsResolutionStore> {
|
||||
self.services.cjs_resolutions.get_or_init(Default::default)
|
||||
}
|
||||
|
||||
pub async fn create_compile_binary_writer(
|
||||
&self,
|
||||
) -> Result<DenoCompileBinaryWriter, AnyError> {
|
||||
Ok(DenoCompileBinaryWriter::new(
|
||||
self.file_fetcher()?,
|
||||
self.http_client(),
|
||||
self.deno_dir()?,
|
||||
self.npm_api()?,
|
||||
self.npm_cache()?,
|
||||
self.npm_resolution().await?,
|
||||
self.npm_resolver().await?,
|
||||
self.options.npm_system_info(),
|
||||
self.package_json_deps_provider(),
|
||||
))
|
||||
}
|
||||
|
||||
/// Gets a function that can be used to create a CliMainWorkerFactory
|
||||
/// for a file watcher.
|
||||
pub async fn create_cli_main_worker_factory_func(
|
||||
&self,
|
||||
) -> Result<Arc<dyn Fn() -> CliMainWorkerFactory>, AnyError> {
|
||||
let emitter = self.emitter()?.clone();
|
||||
let graph_container = self.graph_container().clone();
|
||||
let module_load_preparer = self.module_load_preparer().await?.clone();
|
||||
let parsed_source_cache = self.parsed_source_cache()?.clone();
|
||||
let resolver = self.resolver().await?.clone();
|
||||
let blob_store = self.blob_store().clone();
|
||||
let cjs_resolutions = self.cjs_resolutions().clone();
|
||||
let node_code_translator = self.node_code_translator().await?.clone();
|
||||
let options = self.cli_options().clone();
|
||||
let main_worker_options = self.create_cli_main_worker_options()?;
|
||||
let fs = self.fs().clone();
|
||||
let root_cert_store_provider = self.root_cert_store_provider().clone();
|
||||
let node_resolver = self.node_resolver().await?.clone();
|
||||
let npm_resolver = self.npm_resolver().await?.clone();
|
||||
let maybe_inspector_server = self.maybe_inspector_server().clone();
|
||||
let maybe_lockfile = self.maybe_lockfile().clone();
|
||||
Ok(Arc::new(move || {
|
||||
CliMainWorkerFactory::new(
|
||||
StorageKeyResolver::from_options(&options),
|
||||
npm_resolver.clone(),
|
||||
node_resolver.clone(),
|
||||
Box::new(CliHasNodeSpecifierChecker(graph_container.clone())),
|
||||
blob_store.clone(),
|
||||
Box::new(CliModuleLoaderFactory::new(
|
||||
&options,
|
||||
emitter.clone(),
|
||||
graph_container.clone(),
|
||||
module_load_preparer.clone(),
|
||||
parsed_source_cache.clone(),
|
||||
resolver.clone(),
|
||||
NpmModuleLoader::new(
|
||||
cjs_resolutions.clone(),
|
||||
node_code_translator.clone(),
|
||||
fs.clone(),
|
||||
node_resolver.clone(),
|
||||
),
|
||||
)),
|
||||
root_cert_store_provider.clone(),
|
||||
fs.clone(),
|
||||
maybe_inspector_server.clone(),
|
||||
maybe_lockfile.clone(),
|
||||
main_worker_options.clone(),
|
||||
)
|
||||
}))
|
||||
}
|
||||
|
||||
pub async fn create_cli_main_worker_factory(
|
||||
&self,
|
||||
) -> Result<CliMainWorkerFactory, AnyError> {
|
||||
let node_resolver = self.node_resolver().await?;
|
||||
let fs = self.fs();
|
||||
Ok(CliMainWorkerFactory::new(
|
||||
StorageKeyResolver::from_options(&self.options),
|
||||
self.npm_resolver().await?.clone(),
|
||||
node_resolver.clone(),
|
||||
Box::new(CliHasNodeSpecifierChecker(self.graph_container().clone())),
|
||||
self.blob_store().clone(),
|
||||
Box::new(CliModuleLoaderFactory::new(
|
||||
&self.options,
|
||||
self.emitter()?.clone(),
|
||||
self.graph_container().clone(),
|
||||
self.module_load_preparer().await?.clone(),
|
||||
self.parsed_source_cache()?.clone(),
|
||||
self.resolver().await?.clone(),
|
||||
NpmModuleLoader::new(
|
||||
self.cjs_resolutions().clone(),
|
||||
self.node_code_translator().await?.clone(),
|
||||
fs.clone(),
|
||||
node_resolver.clone(),
|
||||
),
|
||||
)),
|
||||
self.root_cert_store_provider().clone(),
|
||||
self.fs().clone(),
|
||||
self.maybe_inspector_server().clone(),
|
||||
self.maybe_lockfile().clone(),
|
||||
self.create_cli_main_worker_options()?,
|
||||
))
|
||||
}
|
||||
|
||||
fn create_cli_main_worker_options(
|
||||
&self,
|
||||
) -> Result<CliMainWorkerOptions, AnyError> {
|
||||
Ok(CliMainWorkerOptions {
|
||||
argv: self.options.argv().clone(),
|
||||
log_level: self.options.log_level().unwrap_or(log::Level::Info).into(),
|
||||
coverage_dir: self.options.coverage_dir(),
|
||||
enable_testing_features: self.options.enable_testing_features(),
|
||||
has_node_modules_dir: self.options.has_node_modules_dir(),
|
||||
inspect_brk: self.options.inspect_brk().is_some(),
|
||||
inspect_wait: self.options.inspect_wait().is_some(),
|
||||
is_inspecting: self.options.is_inspecting(),
|
||||
is_npm_main: self.options.is_npm_main(),
|
||||
location: self.options.location_flag().clone(),
|
||||
maybe_binary_npm_command_name: {
|
||||
let mut maybe_binary_command_name = None;
|
||||
if let DenoSubcommand::Run(flags) = self.options.sub_command() {
|
||||
if let Ok(pkg_ref) = NpmPackageReqReference::from_str(&flags.script) {
|
||||
// if the user ran a binary command, we'll need to set process.argv[0]
|
||||
// to be the name of the binary command instead of deno
|
||||
maybe_binary_command_name =
|
||||
Some(npm_pkg_req_ref_to_binary_command(&pkg_ref));
|
||||
}
|
||||
}
|
||||
maybe_binary_command_name
|
||||
},
|
||||
origin_data_folder_path: Some(self.deno_dir()?.origin_data_folder_path()),
|
||||
seed: self.options.seed(),
|
||||
unsafely_ignore_certificate_errors: self
|
||||
.options
|
||||
.unsafely_ignore_certificate_errors()
|
||||
.clone(),
|
||||
unstable: self.options.unstable(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
struct CliHasNodeSpecifierChecker(Arc<ModuleGraphContainer>);
|
||||
|
||||
impl HasNodeSpecifierChecker for CliHasNodeSpecifierChecker {
|
||||
fn has_node_specifier(&self) -> bool {
|
||||
self.0.graph().has_node_specifier
|
||||
}
|
||||
}
|
|
@ -178,7 +178,7 @@ pub struct FileFetcher {
|
|||
cache: FileCache,
|
||||
cache_setting: CacheSetting,
|
||||
pub http_cache: HttpCache,
|
||||
http_client: HttpClient,
|
||||
http_client: Arc<HttpClient>,
|
||||
blob_store: BlobStore,
|
||||
download_log_level: log::Level,
|
||||
progress_bar: Option<ProgressBar>,
|
||||
|
@ -189,7 +189,7 @@ impl FileFetcher {
|
|||
http_cache: HttpCache,
|
||||
cache_setting: CacheSetting,
|
||||
allow_remote: bool,
|
||||
http_client: HttpClient,
|
||||
http_client: Arc<HttpClient>,
|
||||
blob_store: BlobStore,
|
||||
progress_bar: Option<ProgressBar>,
|
||||
) -> Self {
|
||||
|
@ -660,7 +660,7 @@ async fn fetch_once<'a>(
|
|||
http_client: &HttpClient,
|
||||
args: FetchOnceArgs<'a>,
|
||||
) -> Result<FetchOnceResult, AnyError> {
|
||||
let mut request = http_client.get_no_redirect(args.url.clone());
|
||||
let mut request = http_client.get_no_redirect(args.url.clone())?;
|
||||
|
||||
if let Some(etag) = args.maybe_etag {
|
||||
let if_none_match_val = HeaderValue::from_str(&etag)?;
|
||||
|
@ -744,6 +744,7 @@ mod tests {
|
|||
use deno_core::resolve_url;
|
||||
use deno_core::url::Url;
|
||||
use deno_runtime::deno_fetch::create_http_client;
|
||||
use deno_runtime::deno_fetch::CreateHttpClientOptions;
|
||||
use deno_runtime::deno_web::Blob;
|
||||
use deno_runtime::deno_web::InMemoryBlobPart;
|
||||
use std::fs::read;
|
||||
|
@ -769,7 +770,7 @@ mod tests {
|
|||
HttpCache::new(&location),
|
||||
cache_setting,
|
||||
true,
|
||||
HttpClient::new(None, None).unwrap(),
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
blob_store.clone(),
|
||||
None,
|
||||
);
|
||||
|
@ -1207,7 +1208,7 @@ mod tests {
|
|||
HttpCache::new(&location),
|
||||
CacheSetting::ReloadAll,
|
||||
true,
|
||||
HttpClient::new(None, None).unwrap(),
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
BlobStore::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1232,7 +1233,7 @@ mod tests {
|
|||
HttpCache::new(&location),
|
||||
CacheSetting::Use,
|
||||
true,
|
||||
HttpClient::new(None, None).unwrap(),
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
BlobStore::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1257,7 +1258,7 @@ mod tests {
|
|||
HttpCache::new(&location),
|
||||
CacheSetting::Use,
|
||||
true,
|
||||
HttpClient::new(None, None).unwrap(),
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
BlobStore::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1398,7 +1399,7 @@ mod tests {
|
|||
HttpCache::new(&location),
|
||||
CacheSetting::Use,
|
||||
true,
|
||||
HttpClient::new(None, None).unwrap(),
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
BlobStore::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1426,7 +1427,7 @@ mod tests {
|
|||
HttpCache::new(&location),
|
||||
CacheSetting::Use,
|
||||
true,
|
||||
HttpClient::new(None, None).unwrap(),
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
BlobStore::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1525,7 +1526,7 @@ mod tests {
|
|||
HttpCache::new(&location),
|
||||
CacheSetting::Use,
|
||||
false,
|
||||
HttpClient::new(None, None).unwrap(),
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
BlobStore::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1550,7 +1551,7 @@ mod tests {
|
|||
HttpCache::new(&location),
|
||||
CacheSetting::Only,
|
||||
true,
|
||||
HttpClient::new(None, None).unwrap(),
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
BlobStore::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1558,7 +1559,7 @@ mod tests {
|
|||
HttpCache::new(&location),
|
||||
CacheSetting::Use,
|
||||
true,
|
||||
HttpClient::new(None, None).unwrap(),
|
||||
Arc::new(HttpClient::new(None, None)),
|
||||
BlobStore::default(),
|
||||
None,
|
||||
);
|
||||
|
@ -1746,7 +1747,7 @@ mod tests {
|
|||
|
||||
fn create_test_client() -> HttpClient {
|
||||
HttpClient::from_client(
|
||||
create_http_client("test_client", None, vec![], None, None, None)
|
||||
create_http_client("test_client", CreateHttpClientOptions::default())
|
||||
.unwrap(),
|
||||
)
|
||||
}
|
||||
|
@ -1943,17 +1944,16 @@ mod tests {
|
|||
let client = HttpClient::from_client(
|
||||
create_http_client(
|
||||
version::get_user_agent(),
|
||||
None,
|
||||
vec![read(
|
||||
test_util::testdata_path()
|
||||
.join("tls/RootCA.pem")
|
||||
.to_str()
|
||||
.unwrap(),
|
||||
)
|
||||
.unwrap()],
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
CreateHttpClientOptions {
|
||||
ca_certs: vec![read(
|
||||
test_util::testdata_path()
|
||||
.join("tls/RootCA.pem")
|
||||
.to_str()
|
||||
.unwrap(),
|
||||
)
|
||||
.unwrap()],
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
|
@ -1982,15 +1982,11 @@ mod tests {
|
|||
async fn test_fetch_with_default_certificate_store() {
|
||||
let _http_server_guard = test_util::http_server();
|
||||
// Relies on external http server with a valid mozilla root CA cert.
|
||||
let url = Url::parse("https://deno.land").unwrap();
|
||||
let url = Url::parse("https://deno.land/x").unwrap();
|
||||
let client = HttpClient::from_client(
|
||||
create_http_client(
|
||||
version::get_user_agent(),
|
||||
None, // This will load mozilla certs by default
|
||||
vec![],
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
CreateHttpClientOptions::default(),
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
|
@ -2021,15 +2017,24 @@ mod tests {
|
|||
#[ignore] // https://github.com/denoland/deno/issues/12561
|
||||
async fn test_fetch_with_empty_certificate_store() {
|
||||
use deno_runtime::deno_tls::rustls::RootCertStore;
|
||||
use deno_runtime::deno_tls::RootCertStoreProvider;
|
||||
|
||||
struct ValueRootCertStoreProvider(RootCertStore);
|
||||
|
||||
impl RootCertStoreProvider for ValueRootCertStoreProvider {
|
||||
fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> {
|
||||
Ok(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
let _http_server_guard = test_util::http_server();
|
||||
// Relies on external http server with a valid mozilla root CA cert.
|
||||
let url = Url::parse("https://deno.land").unwrap();
|
||||
let client = HttpClient::new(
|
||||
Some(RootCertStore::empty()), // no certs loaded at all
|
||||
// no certs loaded at all
|
||||
Some(Arc::new(ValueRootCertStoreProvider(RootCertStore::empty()))),
|
||||
None,
|
||||
)
|
||||
.unwrap();
|
||||
);
|
||||
|
||||
let result = fetch_once(
|
||||
&client,
|
||||
|
@ -2059,17 +2064,16 @@ mod tests {
|
|||
let client = HttpClient::from_client(
|
||||
create_http_client(
|
||||
version::get_user_agent(),
|
||||
None,
|
||||
vec![read(
|
||||
test_util::testdata_path()
|
||||
.join("tls/RootCA.pem")
|
||||
.to_str()
|
||||
.unwrap(),
|
||||
)
|
||||
.unwrap()],
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
CreateHttpClientOptions {
|
||||
ca_certs: vec![read(
|
||||
test_util::testdata_path()
|
||||
.join("tls/RootCA.pem")
|
||||
.to_str()
|
||||
.unwrap(),
|
||||
)
|
||||
.unwrap()],
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
|
@ -2104,17 +2108,16 @@ mod tests {
|
|||
let client = HttpClient::from_client(
|
||||
create_http_client(
|
||||
version::get_user_agent(),
|
||||
None,
|
||||
vec![read(
|
||||
test_util::testdata_path()
|
||||
.join("tls/RootCA.pem")
|
||||
.to_str()
|
||||
.unwrap(),
|
||||
)
|
||||
.unwrap()],
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
CreateHttpClientOptions {
|
||||
ca_certs: vec![read(
|
||||
test_util::testdata_path()
|
||||
.join("tls/RootCA.pem")
|
||||
.to_str()
|
||||
.unwrap(),
|
||||
)
|
||||
.unwrap()],
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
|
@ -2166,17 +2169,16 @@ mod tests {
|
|||
let client = HttpClient::from_client(
|
||||
create_http_client(
|
||||
version::get_user_agent(),
|
||||
None,
|
||||
vec![read(
|
||||
test_util::testdata_path()
|
||||
.join("tls/RootCA.pem")
|
||||
.to_str()
|
||||
.unwrap(),
|
||||
)
|
||||
.unwrap()],
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
CreateHttpClientOptions {
|
||||
ca_certs: vec![read(
|
||||
test_util::testdata_path()
|
||||
.join("tls/RootCA.pem")
|
||||
.to_str()
|
||||
.unwrap(),
|
||||
)
|
||||
.unwrap()],
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
|
|
|
@ -3,13 +3,12 @@
|
|||
use crate::args::CliOptions;
|
||||
use crate::args::Lockfile;
|
||||
use crate::args::TsTypeLib;
|
||||
use crate::args::TypeCheckMode;
|
||||
use crate::cache;
|
||||
use crate::cache::ParsedSourceCache;
|
||||
use crate::colors;
|
||||
use crate::errors::get_error_class_name;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::npm::NpmPackageResolver;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::resolver::CliGraphResolver;
|
||||
use crate::tools::check;
|
||||
use crate::tools::check::TypeChecker;
|
||||
|
@ -23,12 +22,14 @@ use deno_core::ModuleSpecifier;
|
|||
use deno_core::TaskQueue;
|
||||
use deno_core::TaskQueuePermit;
|
||||
use deno_graph::source::Loader;
|
||||
use deno_graph::GraphKind;
|
||||
use deno_graph::Module;
|
||||
use deno_graph::ModuleError;
|
||||
use deno_graph::ModuleGraph;
|
||||
use deno_graph::ModuleGraphError;
|
||||
use deno_graph::ResolutionError;
|
||||
use deno_graph::SpecifierError;
|
||||
use deno_runtime::deno_node;
|
||||
use deno_runtime::permissions::PermissionsContainer;
|
||||
use import_map::ImportMapError;
|
||||
use std::collections::HashMap;
|
||||
|
@ -55,7 +56,7 @@ pub fn graph_valid_with_cli_options(
|
|||
roots,
|
||||
GraphValidOptions {
|
||||
is_vendoring: false,
|
||||
follow_type_only: options.type_check_mode() != TypeCheckMode::None,
|
||||
follow_type_only: options.type_check_mode().is_true(),
|
||||
check_js: options.check_js(),
|
||||
},
|
||||
)
|
||||
|
@ -165,7 +166,7 @@ pub fn graph_lock_or_exit(graph: &ModuleGraph, lockfile: &mut Lockfile) {
|
|||
pub struct ModuleGraphBuilder {
|
||||
options: Arc<CliOptions>,
|
||||
resolver: Arc<CliGraphResolver>,
|
||||
npm_resolver: Arc<NpmPackageResolver>,
|
||||
npm_resolver: Arc<CliNpmResolver>,
|
||||
parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
emit_cache: cache::EmitCache,
|
||||
|
@ -178,7 +179,7 @@ impl ModuleGraphBuilder {
|
|||
pub fn new(
|
||||
options: Arc<CliOptions>,
|
||||
resolver: Arc<CliGraphResolver>,
|
||||
npm_resolver: Arc<NpmPackageResolver>,
|
||||
npm_resolver: Arc<CliNpmResolver>,
|
||||
parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
emit_cache: cache::EmitCache,
|
||||
|
@ -199,6 +200,7 @@ impl ModuleGraphBuilder {
|
|||
|
||||
pub async fn create_graph_with_loader(
|
||||
&self,
|
||||
graph_kind: GraphKind,
|
||||
roots: Vec<ModuleSpecifier>,
|
||||
loader: &mut dyn Loader,
|
||||
) -> Result<deno_graph::ModuleGraph, AnyError> {
|
||||
|
@ -209,7 +211,7 @@ impl ModuleGraphBuilder {
|
|||
let graph_npm_resolver = cli_resolver.as_graph_npm_resolver();
|
||||
let analyzer = self.parsed_source_cache.as_analyzer();
|
||||
|
||||
let mut graph = ModuleGraph::default();
|
||||
let mut graph = ModuleGraph::new(graph_kind);
|
||||
self
|
||||
.build_graph_with_npm_resolution(
|
||||
&mut graph,
|
||||
|
@ -226,9 +228,7 @@ impl ModuleGraphBuilder {
|
|||
)
|
||||
.await?;
|
||||
|
||||
if graph.has_node_specifier
|
||||
&& self.options.type_check_mode() != TypeCheckMode::None
|
||||
{
|
||||
if graph.has_node_specifier && self.options.type_check_mode().is_true() {
|
||||
self
|
||||
.npm_resolver
|
||||
.inject_synthetic_types_node_package()
|
||||
|
@ -248,7 +248,8 @@ impl ModuleGraphBuilder {
|
|||
let graph_resolver = cli_resolver.as_graph_resolver();
|
||||
let graph_npm_resolver = cli_resolver.as_graph_npm_resolver();
|
||||
let analyzer = self.parsed_source_cache.as_analyzer();
|
||||
let mut graph = ModuleGraph::default();
|
||||
let graph_kind = self.options.type_check_mode().as_graph_kind();
|
||||
let mut graph = ModuleGraph::new(graph_kind);
|
||||
self
|
||||
.build_graph_with_npm_resolution(
|
||||
&mut graph,
|
||||
|
@ -271,7 +272,7 @@ impl ModuleGraphBuilder {
|
|||
graph_lock_or_exit(&graph, &mut lockfile.lock());
|
||||
}
|
||||
|
||||
if self.options.type_check_mode() != TypeCheckMode::None {
|
||||
if self.options.type_check_mode().is_true() {
|
||||
self
|
||||
.type_checker
|
||||
.check(
|
||||
|
@ -295,6 +296,12 @@ impl ModuleGraphBuilder {
|
|||
loader: &mut dyn deno_graph::source::Loader,
|
||||
options: deno_graph::BuildOptions<'a>,
|
||||
) -> Result<(), AnyError> {
|
||||
// ensure an "npm install" is done if the user has explicitly
|
||||
// opted into using a node_modules directory
|
||||
if self.options.node_modules_dir_enablement() == Some(true) {
|
||||
self.resolver.force_top_level_package_json_install().await?;
|
||||
}
|
||||
|
||||
graph.build(roots, loader, options).await;
|
||||
|
||||
// ensure that the top level package.json is installed if a
|
||||
|
@ -313,33 +320,31 @@ impl ModuleGraphBuilder {
|
|||
|
||||
/// Creates the default loader used for creating a graph.
|
||||
pub fn create_graph_loader(&self) -> cache::FetchCacher {
|
||||
self.create_fetch_cacher(
|
||||
PermissionsContainer::allow_all(),
|
||||
PermissionsContainer::allow_all(),
|
||||
)
|
||||
self.create_fetch_cacher(PermissionsContainer::allow_all())
|
||||
}
|
||||
|
||||
pub fn create_fetch_cacher(
|
||||
&self,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
permissions: PermissionsContainer,
|
||||
) -> cache::FetchCacher {
|
||||
cache::FetchCacher::new(
|
||||
self.emit_cache.clone(),
|
||||
self.file_fetcher.clone(),
|
||||
self.options.resolve_file_header_overrides(),
|
||||
root_permissions,
|
||||
dynamic_permissions,
|
||||
permissions,
|
||||
self.options.node_modules_dir_specifier(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn create_graph(
|
||||
&self,
|
||||
graph_kind: GraphKind,
|
||||
roots: Vec<ModuleSpecifier>,
|
||||
) -> Result<deno_graph::ModuleGraph, AnyError> {
|
||||
let mut cache = self.create_graph_loader();
|
||||
self.create_graph_with_loader(roots, &mut cache).await
|
||||
self
|
||||
.create_graph_with_loader(graph_kind, roots, &mut cache)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -349,10 +354,10 @@ pub fn error_for_any_npm_specifier(
|
|||
for module in graph.modules() {
|
||||
match module {
|
||||
Module::Npm(module) => {
|
||||
bail!("npm specifiers have not yet been implemented for this sub command (https://github.com/denoland/deno/issues/15960). Found: {}", module.specifier)
|
||||
bail!("npm specifiers have not yet been implemented for this subcommand (https://github.com/denoland/deno/issues/15960). Found: {}", module.specifier)
|
||||
}
|
||||
Module::Node(module) => {
|
||||
bail!("Node specifiers have not yet been implemented for this sub command (https://github.com/denoland/deno/issues/15960). Found: node:{}", module.module_name)
|
||||
bail!("Node specifiers have not yet been implemented for this subcommand (https://github.com/denoland/deno/issues/15960). Found: node:{}", module.module_name)
|
||||
}
|
||||
Module::Esm(_) | Module::Json(_) | Module::External(_) => {}
|
||||
}
|
||||
|
@ -376,9 +381,8 @@ pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String {
|
|||
pub fn get_resolution_error_bare_node_specifier(
|
||||
error: &ResolutionError,
|
||||
) -> Option<&str> {
|
||||
get_resolution_error_bare_specifier(error).filter(|specifier| {
|
||||
crate::node::resolve_builtin_node_module(specifier).is_ok()
|
||||
})
|
||||
get_resolution_error_bare_specifier(error)
|
||||
.filter(|specifier| deno_node::is_builtin_node_module(specifier))
|
||||
}
|
||||
|
||||
fn get_resolution_error_bare_specifier(
|
||||
|
@ -403,15 +407,15 @@ fn get_resolution_error_bare_specifier(
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
#[derive(Debug)]
|
||||
struct GraphData {
|
||||
graph: Arc<ModuleGraph>,
|
||||
checked_libs: HashMap<TsTypeLib, HashSet<ModuleSpecifier>>,
|
||||
}
|
||||
|
||||
/// Holds the `ModuleGraph` and what parts of it are type checked.
|
||||
#[derive(Default)]
|
||||
pub struct ModuleGraphContainer {
|
||||
graph_kind: GraphKind,
|
||||
// Allow only one request to update the graph data at a time,
|
||||
// but allow other requests to read from it at any time even
|
||||
// while another request is updating the data.
|
||||
|
@ -420,8 +424,19 @@ pub struct ModuleGraphContainer {
|
|||
}
|
||||
|
||||
impl ModuleGraphContainer {
|
||||
pub fn new(graph_kind: GraphKind) -> Self {
|
||||
Self {
|
||||
graph_kind,
|
||||
update_queue: Default::default(),
|
||||
graph_data: Arc::new(RwLock::new(GraphData {
|
||||
graph: Arc::new(ModuleGraph::new(graph_kind)),
|
||||
checked_libs: Default::default(),
|
||||
})),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn clear(&self) {
|
||||
self.graph_data.write().graph = Default::default();
|
||||
self.graph_data.write().graph = Arc::new(ModuleGraph::new(self.graph_kind));
|
||||
}
|
||||
|
||||
/// Acquires a permit to modify the module graph without other code
|
||||
|
|
|
@ -15,8 +15,10 @@ use deno_runtime::deno_fetch::create_http_client;
|
|||
use deno_runtime::deno_fetch::reqwest;
|
||||
use deno_runtime::deno_fetch::reqwest::header::LOCATION;
|
||||
use deno_runtime::deno_fetch::reqwest::Response;
|
||||
use deno_runtime::deno_tls::rustls::RootCertStore;
|
||||
use deno_runtime::deno_fetch::CreateHttpClientOptions;
|
||||
use deno_runtime::deno_tls::RootCertStoreProvider;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use std::time::SystemTime;
|
||||
|
||||
|
@ -217,34 +219,67 @@ impl CacheSemantics {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct HttpClient(reqwest::Client);
|
||||
pub struct HttpClient {
|
||||
options: CreateHttpClientOptions,
|
||||
root_cert_store_provider: Option<Arc<dyn RootCertStoreProvider>>,
|
||||
cell: once_cell::sync::OnceCell<reqwest::Client>,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for HttpClient {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("HttpClient")
|
||||
.field("options", &self.options)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl HttpClient {
|
||||
pub fn new(
|
||||
root_cert_store: Option<RootCertStore>,
|
||||
root_cert_store_provider: Option<Arc<dyn RootCertStoreProvider>>,
|
||||
unsafely_ignore_certificate_errors: Option<Vec<String>>,
|
||||
) -> Result<Self, AnyError> {
|
||||
Ok(HttpClient::from_client(create_http_client(
|
||||
get_user_agent(),
|
||||
root_cert_store,
|
||||
vec![],
|
||||
None,
|
||||
unsafely_ignore_certificate_errors,
|
||||
None,
|
||||
)?))
|
||||
) -> Self {
|
||||
Self {
|
||||
options: CreateHttpClientOptions {
|
||||
unsafely_ignore_certificate_errors,
|
||||
..Default::default()
|
||||
},
|
||||
root_cert_store_provider,
|
||||
cell: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn from_client(client: reqwest::Client) -> Self {
|
||||
Self(client)
|
||||
let result = Self {
|
||||
options: Default::default(),
|
||||
root_cert_store_provider: Default::default(),
|
||||
cell: Default::default(),
|
||||
};
|
||||
result.cell.set(client).unwrap();
|
||||
result
|
||||
}
|
||||
|
||||
fn client(&self) -> Result<&reqwest::Client, AnyError> {
|
||||
self.cell.get_or_try_init(|| {
|
||||
create_http_client(
|
||||
get_user_agent(),
|
||||
CreateHttpClientOptions {
|
||||
root_cert_store: match &self.root_cert_store_provider {
|
||||
Some(provider) => Some(provider.get_or_try_init()?.clone()),
|
||||
None => None,
|
||||
},
|
||||
..self.options.clone()
|
||||
},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
/// Do a GET request without following redirects.
|
||||
pub fn get_no_redirect<U: reqwest::IntoUrl>(
|
||||
&self,
|
||||
url: U,
|
||||
) -> reqwest::RequestBuilder {
|
||||
self.0.get(url)
|
||||
) -> Result<reqwest::RequestBuilder, AnyError> {
|
||||
Ok(self.client()?.get(url))
|
||||
}
|
||||
|
||||
pub async fn download_text<U: reqwest::IntoUrl>(
|
||||
|
@ -306,12 +341,13 @@ impl HttpClient {
|
|||
url: U,
|
||||
) -> Result<Response, AnyError> {
|
||||
let mut url = url.into_url()?;
|
||||
let mut response = self.get_no_redirect(url.clone()).send().await?;
|
||||
let mut response = self.get_no_redirect(url.clone())?.send().await?;
|
||||
let status = response.status();
|
||||
if status.is_redirection() {
|
||||
for _ in 0..5 {
|
||||
let new_url = resolve_redirect_from_response(&url, &response)?;
|
||||
let new_response = self.get_no_redirect(new_url.clone()).send().await?;
|
||||
let new_response =
|
||||
self.get_no_redirect(new_url.clone())?.send().await?;
|
||||
let status = new_response.status();
|
||||
if status.is_redirection() {
|
||||
response = new_response;
|
||||
|
@ -357,7 +393,7 @@ mod test {
|
|||
#[tokio::test]
|
||||
async fn test_http_client_download_redirect() {
|
||||
let _http_server_guard = test_util::http_server();
|
||||
let client = HttpClient::new(None, None).unwrap();
|
||||
let client = HttpClient::new(None, None);
|
||||
|
||||
// make a request to the redirect server
|
||||
let text = client
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
const core = globalThis.Deno.core;
|
||||
const ops = core.ops;
|
||||
import { setExitHandler } from "ext:runtime/30_os.js";
|
||||
import { Console } from "ext:deno_console/02_console.js";
|
||||
import { Console } from "ext:deno_console/01_console.js";
|
||||
import { serializePermissions } from "ext:runtime/10_permissions.js";
|
||||
import { assert } from "ext:deno_web/00_infra.js";
|
||||
const primordials = globalThis.__bootstrap.primordials;
|
||||
|
@ -21,7 +21,7 @@ const {
|
|||
MapPrototypeSet,
|
||||
MathCeil,
|
||||
ObjectKeys,
|
||||
ObjectPrototypeHasOwnProperty,
|
||||
ObjectHasOwn,
|
||||
ObjectPrototypeIsPrototypeOf,
|
||||
Promise,
|
||||
SafeArrayIterator,
|
||||
|
@ -50,7 +50,7 @@ function opSanitizerDelay() {
|
|||
return new Promise((resolve) => {
|
||||
setTimeout(() => {
|
||||
ArrayPrototypePush(opSanitizerDelayResolveQueue, resolve);
|
||||
}, 0);
|
||||
}, 1);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -83,8 +83,8 @@ const OP_DETAILS = {
|
|||
"op_dns_resolve": ["resolve a DNS name", "awaiting the result of a `Deno.resolveDns` call"],
|
||||
"op_fdatasync_async": ["flush pending data operations for a file to disk", "awaiting the result of a `Deno.fdatasync` call"],
|
||||
"op_fetch_send": ["send a HTTP request", "awaiting the result of a `fetch` call"],
|
||||
"op_ffi_call_nonblocking": ["do a non blocking ffi call", "awaiting the returned promise"] ,
|
||||
"op_ffi_call_ptr_nonblocking": ["do a non blocking ffi call", "awaiting the returned promise"],
|
||||
"op_ffi_call_nonblocking": ["do a non blocking ffi call", "awaiting the returned promise"],
|
||||
"op_ffi_call_ptr_nonblocking": ["do a non blocking ffi call", "awaiting the returned promise"],
|
||||
"op_flock_async": ["lock a file", "awaiting the result of a `Deno.flock` call"],
|
||||
"op_fs_events_poll": ["get the next file system event", "breaking out of a for await loop looping over `Deno.FsEvents`"],
|
||||
"op_fstat_async": ["get file metadata", "awaiting the result of a `Deno.File#fstat` call"],
|
||||
|
@ -124,11 +124,14 @@ const OP_DETAILS = {
|
|||
"op_tls_start": ["start a TLS connection", "awaiting a `Deno.startTls` call"],
|
||||
"op_truncate_async": ["truncate a file", "awaiting the result of a `Deno.truncate` call"],
|
||||
"op_utime_async": ["change file timestamps", "awaiting the result of a `Deno.utime` call"],
|
||||
"op_worker_recv_message": ["receive a message from a web worker", "terminating a `Worker`"],
|
||||
"op_worker_recv_message": ["receive a message from a web worker", "terminating a `Worker`"],
|
||||
"op_ws_close": ["close a WebSocket", "awaiting until the `close` event is emitted on a `WebSocket`, or the `WebSocketStream#closed` promise resolves"],
|
||||
"op_ws_create": ["create a WebSocket", "awaiting until the `open` event is emitted on a `WebSocket`, or the result of a `WebSocketStream#connection` promise"],
|
||||
"op_ws_next_event": ["receive the next message on a WebSocket", "closing a `WebSocket` or `WebSocketStream`"],
|
||||
"op_ws_send": ["send a message on a WebSocket", "closing a `WebSocket` or `WebSocketStream`"],
|
||||
"op_ws_send_text": ["send a message on a WebSocket", "closing a `WebSocket` or `WebSocketStream`"],
|
||||
"op_ws_send_binary": ["send a message on a WebSocket", "closing a `WebSocket` or `WebSocketStream`"],
|
||||
"op_ws_send_ping": ["send a message on a WebSocket", "closing a `WebSocket` or `WebSocketStream`"],
|
||||
"op_ws_send_pong": ["send a message on a WebSocket", "closing a `WebSocket` or `WebSocketStream`"],
|
||||
};
|
||||
|
||||
// Wrap test function in additional assertion that makes sure
|
||||
|
@ -163,7 +166,7 @@ function assertOps(fn) {
|
|||
|
||||
const details = [];
|
||||
for (const key in post.ops) {
|
||||
if (!ObjectPrototypeHasOwnProperty(post.ops, key)) {
|
||||
if (!ObjectHasOwn(post.ops, key)) {
|
||||
continue;
|
||||
}
|
||||
const preOp = pre.ops[key] ??
|
||||
|
|
3
cli/js/99_main.js
Normal file
3
cli/js/99_main.js
Normal file
|
@ -0,0 +1,3 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
import "ext:cli/40_testing.js";
|
||||
import "ext:cli/runtime/js/99_main.js";
|
|
@ -1,10 +1,10 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
///!
|
||||
///! Provides information about what capabilities that are supported by the
|
||||
///! language server, which helps determine what messages are sent from the
|
||||
///! client.
|
||||
///!
|
||||
//!
|
||||
//! Provides information about what capabilities that are supported by the
|
||||
//! language server, which helps determine what messages are sent from the
|
||||
//! client.
|
||||
//!
|
||||
use deno_core::serde_json::json;
|
||||
use tower_lsp::lsp_types::*;
|
||||
|
||||
|
|
|
@ -8,6 +8,7 @@ use deno_core::anyhow::bail;
|
|||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::Value;
|
||||
use deno_core::task::spawn;
|
||||
use tower_lsp::lsp_types as lsp;
|
||||
use tower_lsp::lsp_types::ConfigurationItem;
|
||||
|
||||
|
@ -26,13 +27,6 @@ pub enum TestingNotification {
|
|||
Progress(testing_lsp_custom::TestRunProgressParams),
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)]
|
||||
pub enum LspClientKind {
|
||||
#[default]
|
||||
CodeEditor,
|
||||
Repl,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Client(Arc<dyn ClientTrait>);
|
||||
|
||||
|
@ -51,10 +45,6 @@ impl Client {
|
|||
Self(Arc::new(ReplClient))
|
||||
}
|
||||
|
||||
pub fn kind(&self) -> LspClientKind {
|
||||
self.0.kind()
|
||||
}
|
||||
|
||||
/// Gets additional methods that should only be called outside
|
||||
/// the LSP's lock to prevent deadlocking scenarios.
|
||||
pub fn when_outside_lsp_lock(&self) -> OutsideLockClient {
|
||||
|
@ -67,15 +57,29 @@ impl Client {
|
|||
) {
|
||||
// do on a task in case the caller currently is in the lsp lock
|
||||
let client = self.0.clone();
|
||||
tokio::task::spawn(async move {
|
||||
spawn(async move {
|
||||
client.send_registry_state_notification(params).await;
|
||||
});
|
||||
}
|
||||
|
||||
/// This notification is sent to the client during internal testing
|
||||
/// purposes only in order to let the test client know when the latest
|
||||
/// diagnostics have been published.
|
||||
pub fn send_diagnostic_batch_notification(
|
||||
&self,
|
||||
params: lsp_custom::DiagnosticBatchNotificationParams,
|
||||
) {
|
||||
// do on a task in case the caller currently is in the lsp lock
|
||||
let client = self.0.clone();
|
||||
spawn(async move {
|
||||
client.send_diagnostic_batch_notification(params).await;
|
||||
});
|
||||
}
|
||||
|
||||
pub fn send_test_notification(&self, params: TestingNotification) {
|
||||
// do on a task in case the caller currently is in the lsp lock
|
||||
let client = self.0.clone();
|
||||
tokio::task::spawn(async move {
|
||||
spawn(async move {
|
||||
client.send_test_notification(params).await;
|
||||
});
|
||||
}
|
||||
|
@ -88,7 +92,7 @@ impl Client {
|
|||
// do on a task in case the caller currently is in the lsp lock
|
||||
let client = self.0.clone();
|
||||
let message = message.to_string();
|
||||
tokio::task::spawn(async move {
|
||||
spawn(async move {
|
||||
client.show_message(message_type, message).await;
|
||||
});
|
||||
}
|
||||
|
@ -160,7 +164,6 @@ impl OutsideLockClient {
|
|||
|
||||
#[async_trait]
|
||||
trait ClientTrait: Send + Sync {
|
||||
fn kind(&self) -> LspClientKind;
|
||||
async fn publish_diagnostics(
|
||||
&self,
|
||||
uri: lsp::Url,
|
||||
|
@ -171,6 +174,10 @@ trait ClientTrait: Send + Sync {
|
|||
&self,
|
||||
params: lsp_custom::RegistryStateNotificationParams,
|
||||
);
|
||||
async fn send_diagnostic_batch_notification(
|
||||
&self,
|
||||
params: lsp_custom::DiagnosticBatchNotificationParams,
|
||||
);
|
||||
async fn send_test_notification(&self, params: TestingNotification);
|
||||
async fn specifier_configurations(
|
||||
&self,
|
||||
|
@ -189,10 +196,6 @@ struct TowerClient(tower_lsp::Client);
|
|||
|
||||
#[async_trait]
|
||||
impl ClientTrait for TowerClient {
|
||||
fn kind(&self) -> LspClientKind {
|
||||
LspClientKind::CodeEditor
|
||||
}
|
||||
|
||||
async fn publish_diagnostics(
|
||||
&self,
|
||||
uri: lsp::Url,
|
||||
|
@ -212,6 +215,16 @@ impl ClientTrait for TowerClient {
|
|||
.await
|
||||
}
|
||||
|
||||
async fn send_diagnostic_batch_notification(
|
||||
&self,
|
||||
params: lsp_custom::DiagnosticBatchNotificationParams,
|
||||
) {
|
||||
self
|
||||
.0
|
||||
.send_notification::<lsp_custom::DiagnosticBatchNotification>(params)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn send_test_notification(&self, notification: TestingNotification) {
|
||||
match notification {
|
||||
TestingNotification::Module(params) => {
|
||||
|
@ -312,10 +325,6 @@ struct ReplClient;
|
|||
|
||||
#[async_trait]
|
||||
impl ClientTrait for ReplClient {
|
||||
fn kind(&self) -> LspClientKind {
|
||||
LspClientKind::Repl
|
||||
}
|
||||
|
||||
async fn publish_diagnostics(
|
||||
&self,
|
||||
_uri: lsp::Url,
|
||||
|
@ -330,6 +339,12 @@ impl ClientTrait for ReplClient {
|
|||
) {
|
||||
}
|
||||
|
||||
async fn send_diagnostic_batch_notification(
|
||||
&self,
|
||||
_params: lsp_custom::DiagnosticBatchNotificationParams,
|
||||
) {
|
||||
}
|
||||
|
||||
async fn send_test_notification(&self, _params: TestingNotification) {}
|
||||
|
||||
async fn specifier_configurations(
|
||||
|
|
|
@ -230,13 +230,14 @@ async fn resolve_implementation_code_lens(
|
|||
) -> Result<lsp::CodeLens, AnyError> {
|
||||
let asset_or_doc = language_server.get_asset_or_document(&data.specifier)?;
|
||||
let line_index = asset_or_doc.line_index();
|
||||
let req = tsc::RequestMethod::GetImplementation((
|
||||
data.specifier.clone(),
|
||||
line_index.offset_tsc(code_lens.range.start)?,
|
||||
));
|
||||
let snapshot = language_server.snapshot();
|
||||
let maybe_implementations: Option<Vec<tsc::ImplementationLocation>> =
|
||||
language_server.ts_server.request(snapshot, req).await?;
|
||||
let maybe_implementations = language_server
|
||||
.ts_server
|
||||
.get_implementations(
|
||||
language_server.snapshot(),
|
||||
data.specifier.clone(),
|
||||
line_index.offset_tsc(code_lens.range.start)?,
|
||||
)
|
||||
.await?;
|
||||
if let Some(implementations) = maybe_implementations {
|
||||
let mut locations = Vec::new();
|
||||
for implementation in implementations {
|
||||
|
@ -325,12 +326,12 @@ async fn resolve_references_code_lens(
|
|||
let asset_or_document =
|
||||
language_server.get_asset_or_document(&data.specifier)?;
|
||||
let line_index = asset_or_document.line_index();
|
||||
let snapshot = language_server.snapshot();
|
||||
|
||||
let maybe_referenced_symbols = language_server
|
||||
.ts_server
|
||||
.find_references(
|
||||
snapshot,
|
||||
&data.specifier,
|
||||
language_server.snapshot(),
|
||||
data.specifier.clone(),
|
||||
line_index.offset_tsc(code_lens.range.start)?,
|
||||
)
|
||||
.await?;
|
||||
|
@ -391,7 +392,7 @@ pub async fn collect(
|
|||
code_lenses.extend(
|
||||
collect_tsc(
|
||||
specifier,
|
||||
&config.get_workspace_settings(),
|
||||
config.workspace_settings(),
|
||||
line_index,
|
||||
navigation_tree,
|
||||
)
|
||||
|
|
|
@ -519,7 +519,7 @@ mod tests {
|
|||
source_fixtures: &[(&str, &str)],
|
||||
location: &Path,
|
||||
) -> Documents {
|
||||
let mut documents = Documents::new(location, Default::default());
|
||||
let mut documents = Documents::new(location);
|
||||
for (specifier, source, version, language_id) in fixtures {
|
||||
let specifier =
|
||||
resolve_url(specifier).expect("failed to create specifier");
|
||||
|
|
|
@ -265,6 +265,10 @@ fn default_to_true() -> bool {
|
|||
true
|
||||
}
|
||||
|
||||
fn default_document_preload_limit() -> usize {
|
||||
1000
|
||||
}
|
||||
|
||||
fn empty_string_none<'de, D: serde::Deserializer<'de>>(
|
||||
d: D,
|
||||
) -> Result<Option<String>, D::Error> {
|
||||
|
@ -318,6 +322,10 @@ pub struct WorkspaceSettings {
|
|||
#[serde(default = "default_to_true")]
|
||||
pub lint: bool,
|
||||
|
||||
/// Limits the number of files that can be preloaded by the language server.
|
||||
#[serde(default = "default_document_preload_limit")]
|
||||
pub document_preload_limit: usize,
|
||||
|
||||
/// A flag that indicates if Dene should validate code against the unstable
|
||||
/// APIs for the workspace.
|
||||
#[serde(default)]
|
||||
|
@ -354,6 +362,7 @@ impl Default for WorkspaceSettings {
|
|||
inlay_hints: Default::default(),
|
||||
internal_debug: false,
|
||||
lint: true,
|
||||
document_preload_limit: default_document_preload_limit(),
|
||||
suggest: Default::default(),
|
||||
testing: Default::default(),
|
||||
tls_certificate: None,
|
||||
|
@ -439,8 +448,8 @@ impl Config {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn get_workspace_settings(&self) -> WorkspaceSettings {
|
||||
self.settings.workspace.clone()
|
||||
pub fn workspace_settings(&self) -> &WorkspaceSettings {
|
||||
&self.settings.workspace
|
||||
}
|
||||
|
||||
/// Set the workspace settings directly, which occurs during initialization
|
||||
|
@ -714,7 +723,7 @@ mod tests {
|
|||
.set_workspace_settings(json!({}))
|
||||
.expect("could not update");
|
||||
assert_eq!(
|
||||
config.get_workspace_settings(),
|
||||
config.workspace_settings().clone(),
|
||||
WorkspaceSettings {
|
||||
enable: false,
|
||||
enable_paths: Vec::new(),
|
||||
|
@ -750,6 +759,7 @@ mod tests {
|
|||
},
|
||||
internal_debug: false,
|
||||
lint: true,
|
||||
document_preload_limit: 1_000,
|
||||
suggest: CompletionSettings {
|
||||
complete_function_calls: false,
|
||||
names: true,
|
||||
|
@ -778,7 +788,7 @@ mod tests {
|
|||
.set_workspace_settings(json!({ "cache": "" }))
|
||||
.expect("could not update");
|
||||
assert_eq!(
|
||||
config.get_workspace_settings(),
|
||||
config.workspace_settings().clone(),
|
||||
WorkspaceSettings::default()
|
||||
);
|
||||
}
|
||||
|
@ -790,7 +800,7 @@ mod tests {
|
|||
.set_workspace_settings(json!({ "import_map": "" }))
|
||||
.expect("could not update");
|
||||
assert_eq!(
|
||||
config.get_workspace_settings(),
|
||||
config.workspace_settings().clone(),
|
||||
WorkspaceSettings::default()
|
||||
);
|
||||
}
|
||||
|
@ -802,7 +812,7 @@ mod tests {
|
|||
.set_workspace_settings(json!({ "tls_certificate": "" }))
|
||||
.expect("could not update");
|
||||
assert_eq!(
|
||||
config.get_workspace_settings(),
|
||||
config.workspace_settings().clone(),
|
||||
WorkspaceSettings::default()
|
||||
);
|
||||
}
|
||||
|
@ -814,7 +824,7 @@ mod tests {
|
|||
.set_workspace_settings(json!({ "config": "" }))
|
||||
.expect("could not update");
|
||||
assert_eq!(
|
||||
config.get_workspace_settings(),
|
||||
config.workspace_settings().clone(),
|
||||
WorkspaceSettings::default()
|
||||
);
|
||||
}
|
||||
|
|
|
@ -16,7 +16,7 @@ use super::tsc::TsServer;
|
|||
use crate::args::LintOptions;
|
||||
use crate::graph_util;
|
||||
use crate::graph_util::enhanced_resolution_error_message;
|
||||
use crate::node;
|
||||
use crate::lsp::lsp_custom::DiagnosticBatchNotificationParams;
|
||||
use crate::tools::lint::get_configured_rules;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
|
@ -26,15 +26,19 @@ use deno_core::resolve_url;
|
|||
use deno_core::serde::Deserialize;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::json;
|
||||
use deno_core::task::spawn;
|
||||
use deno_core::task::JoinHandle;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_graph::Resolution;
|
||||
use deno_graph::ResolutionError;
|
||||
use deno_graph::SpecifierError;
|
||||
use deno_lint::rules::LintRule;
|
||||
use deno_runtime::deno_node;
|
||||
use deno_runtime::tokio_util::create_basic_runtime;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use log::error;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::atomic::AtomicUsize;
|
||||
use std::sync::Arc;
|
||||
use std::thread;
|
||||
use tokio::sync::mpsc;
|
||||
|
@ -43,14 +47,18 @@ use tokio::time::Duration;
|
|||
use tokio_util::sync::CancellationToken;
|
||||
use tower_lsp::lsp_types as lsp;
|
||||
|
||||
pub type SnapshotForDiagnostics =
|
||||
(Arc<StateSnapshot>, Arc<ConfigSnapshot>, LintOptions);
|
||||
#[derive(Debug)]
|
||||
pub struct DiagnosticServerUpdateMessage {
|
||||
pub snapshot: Arc<StateSnapshot>,
|
||||
pub config: Arc<ConfigSnapshot>,
|
||||
pub lint_options: LintOptions,
|
||||
}
|
||||
|
||||
pub type DiagnosticRecord =
|
||||
(ModuleSpecifier, Option<i32>, Vec<lsp::Diagnostic>);
|
||||
pub type DiagnosticVec = Vec<DiagnosticRecord>;
|
||||
type DiagnosticMap =
|
||||
HashMap<ModuleSpecifier, (Option<i32>, Vec<lsp::Diagnostic>)>;
|
||||
type TsDiagnosticsMap = HashMap<String, Vec<crate::tsc::Diagnostic>>;
|
||||
type DiagnosticsByVersionMap = HashMap<Option<i32>, Vec<lsp::Diagnostic>>;
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -144,13 +152,55 @@ impl TsDiagnosticsStore {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn should_send_diagnostic_batch_index_notifications() -> bool {
|
||||
crate::args::has_flag_env_var(
|
||||
"DENO_DONT_USE_INTERNAL_LSP_DIAGNOSTIC_SYNC_FLAG",
|
||||
)
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct DiagnosticBatchCounter(Option<Arc<AtomicUsize>>);
|
||||
|
||||
impl Default for DiagnosticBatchCounter {
|
||||
fn default() -> Self {
|
||||
if should_send_diagnostic_batch_index_notifications() {
|
||||
Self(Some(Default::default()))
|
||||
} else {
|
||||
Self(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DiagnosticBatchCounter {
|
||||
pub fn inc(&self) -> Option<usize> {
|
||||
self
|
||||
.0
|
||||
.as_ref()
|
||||
.map(|value| value.fetch_add(1, std::sync::atomic::Ordering::SeqCst) + 1)
|
||||
}
|
||||
|
||||
pub fn get(&self) -> Option<usize> {
|
||||
self
|
||||
.0
|
||||
.as_ref()
|
||||
.map(|value| value.load(std::sync::atomic::Ordering::SeqCst))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct ChannelMessage {
|
||||
message: DiagnosticServerUpdateMessage,
|
||||
batch_index: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct DiagnosticsServer {
|
||||
channel: Option<mpsc::UnboundedSender<SnapshotForDiagnostics>>,
|
||||
channel: Option<mpsc::UnboundedSender<ChannelMessage>>,
|
||||
ts_diagnostics: TsDiagnosticsStore,
|
||||
client: Client,
|
||||
performance: Arc<Performance>,
|
||||
ts_server: Arc<TsServer>,
|
||||
batch_counter: DiagnosticBatchCounter,
|
||||
}
|
||||
|
||||
impl DiagnosticsServer {
|
||||
|
@ -165,6 +215,7 @@ impl DiagnosticsServer {
|
|||
client,
|
||||
performance,
|
||||
ts_server,
|
||||
batch_counter: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -186,7 +237,7 @@ impl DiagnosticsServer {
|
|||
|
||||
#[allow(unused_must_use)]
|
||||
pub fn start(&mut self) {
|
||||
let (tx, mut rx) = mpsc::unbounded_channel::<SnapshotForDiagnostics>();
|
||||
let (tx, mut rx) = mpsc::unbounded_channel::<ChannelMessage>();
|
||||
self.channel = Some(tx);
|
||||
let client = self.client.clone();
|
||||
let performance = self.performance.clone();
|
||||
|
@ -198,23 +249,33 @@ impl DiagnosticsServer {
|
|||
|
||||
runtime.block_on(async {
|
||||
let mut token = CancellationToken::new();
|
||||
let mut ts_handle: Option<tokio::task::JoinHandle<()>> = None;
|
||||
let mut lint_handle: Option<tokio::task::JoinHandle<()>> = None;
|
||||
let mut deps_handle: Option<tokio::task::JoinHandle<()>> = None;
|
||||
let mut ts_handle: Option<JoinHandle<()>> = None;
|
||||
let mut lint_handle: Option<JoinHandle<()>> = None;
|
||||
let mut deps_handle: Option<JoinHandle<()>> = None;
|
||||
let diagnostics_publisher = DiagnosticsPublisher::new(client.clone());
|
||||
|
||||
loop {
|
||||
match rx.recv().await {
|
||||
// channel has closed
|
||||
None => break,
|
||||
Some((snapshot, config, lint_options)) => {
|
||||
Some(message) => {
|
||||
let ChannelMessage {
|
||||
message:
|
||||
DiagnosticServerUpdateMessage {
|
||||
snapshot,
|
||||
config,
|
||||
lint_options,
|
||||
},
|
||||
batch_index,
|
||||
} = message;
|
||||
|
||||
// cancel the previous run
|
||||
token.cancel();
|
||||
token = CancellationToken::new();
|
||||
diagnostics_publisher.clear().await;
|
||||
|
||||
let previous_ts_handle = ts_handle.take();
|
||||
ts_handle = Some(tokio::spawn({
|
||||
ts_handle = Some(spawn({
|
||||
let performance = performance.clone();
|
||||
let diagnostics_publisher = diagnostics_publisher.clone();
|
||||
let ts_server = ts_server.clone();
|
||||
|
@ -254,6 +315,7 @@ impl DiagnosticsServer {
|
|||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
let messages_len = diagnostics.len();
|
||||
if !token.is_cancelled() {
|
||||
ts_diagnostics_store.update(&diagnostics);
|
||||
diagnostics_publisher.publish(diagnostics, &token).await;
|
||||
|
@ -262,11 +324,22 @@ impl DiagnosticsServer {
|
|||
performance.measure(mark);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(batch_index) = batch_index {
|
||||
diagnostics_publisher
|
||||
.client
|
||||
.send_diagnostic_batch_notification(
|
||||
DiagnosticBatchNotificationParams {
|
||||
batch_index,
|
||||
messages_len,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
let previous_deps_handle = deps_handle.take();
|
||||
deps_handle = Some(tokio::spawn({
|
||||
deps_handle = Some(spawn({
|
||||
let performance = performance.clone();
|
||||
let diagnostics_publisher = diagnostics_publisher.clone();
|
||||
let token = token.clone();
|
||||
|
@ -285,16 +358,30 @@ impl DiagnosticsServer {
|
|||
)
|
||||
.await;
|
||||
|
||||
diagnostics_publisher.publish(diagnostics, &token).await;
|
||||
|
||||
let messages_len = diagnostics.len();
|
||||
if !token.is_cancelled() {
|
||||
performance.measure(mark);
|
||||
diagnostics_publisher.publish(diagnostics, &token).await;
|
||||
|
||||
if !token.is_cancelled() {
|
||||
performance.measure(mark);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(batch_index) = batch_index {
|
||||
diagnostics_publisher
|
||||
.client
|
||||
.send_diagnostic_batch_notification(
|
||||
DiagnosticBatchNotificationParams {
|
||||
batch_index,
|
||||
messages_len,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
let previous_lint_handle = lint_handle.take();
|
||||
lint_handle = Some(tokio::spawn({
|
||||
lint_handle = Some(spawn({
|
||||
let performance = performance.clone();
|
||||
let diagnostics_publisher = diagnostics_publisher.clone();
|
||||
let token = token.clone();
|
||||
|
@ -314,10 +401,24 @@ impl DiagnosticsServer {
|
|||
)
|
||||
.await;
|
||||
|
||||
diagnostics_publisher.publish(diagnostics, &token).await;
|
||||
|
||||
let messages_len = diagnostics.len();
|
||||
if !token.is_cancelled() {
|
||||
performance.measure(mark);
|
||||
diagnostics_publisher.publish(diagnostics, &token).await;
|
||||
|
||||
if !token.is_cancelled() {
|
||||
performance.measure(mark);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(batch_index) = batch_index {
|
||||
diagnostics_publisher
|
||||
.client
|
||||
.send_diagnostic_batch_notification(
|
||||
DiagnosticBatchNotificationParams {
|
||||
batch_index,
|
||||
messages_len,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
@ -328,15 +429,23 @@ impl DiagnosticsServer {
|
|||
});
|
||||
}
|
||||
|
||||
pub fn latest_batch_index(&self) -> Option<usize> {
|
||||
self.batch_counter.get()
|
||||
}
|
||||
|
||||
pub fn update(
|
||||
&self,
|
||||
message: SnapshotForDiagnostics,
|
||||
message: DiagnosticServerUpdateMessage,
|
||||
) -> Result<(), AnyError> {
|
||||
// todo(dsherret): instead of queuing up messages, it would be better to
|
||||
// instead only store the latest message (ex. maybe using a
|
||||
// tokio::sync::watch::channel)
|
||||
if let Some(tx) = &self.channel {
|
||||
tx.send(message).map_err(|err| err.into())
|
||||
tx.send(ChannelMessage {
|
||||
message,
|
||||
batch_index: self.batch_counter.inc(),
|
||||
})
|
||||
.map_err(|err| err.into())
|
||||
} else {
|
||||
Err(anyhow!("diagnostics server not started"))
|
||||
}
|
||||
|
@ -469,8 +578,8 @@ async fn generate_lint_diagnostics(
|
|||
}
|
||||
|
||||
// ignore any npm package files
|
||||
if let Some(npm_resolver) = &snapshot.maybe_npm_resolver {
|
||||
if npm_resolver.in_npm_package(document.specifier()) {
|
||||
if let Some(node_resolver) = &snapshot.maybe_node_resolver {
|
||||
if node_resolver.in_npm_package(document.specifier()) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
@ -539,10 +648,9 @@ async fn generate_ts_diagnostics(
|
|||
let (enabled_specifiers, disabled_specifiers) = specifiers
|
||||
.into_iter()
|
||||
.partition::<Vec<_>, _>(|s| config.specifier_enabled(s));
|
||||
let ts_diagnostics_map: TsDiagnosticsMap = if !enabled_specifiers.is_empty() {
|
||||
let req = tsc::RequestMethod::GetDiagnostics(enabled_specifiers);
|
||||
let ts_diagnostics_map = if !enabled_specifiers.is_empty() {
|
||||
ts_server
|
||||
.request_with_cancellation(snapshot.clone(), req, token)
|
||||
.get_diagnostics(snapshot.clone(), enabled_specifiers, token)
|
||||
.await?
|
||||
} else {
|
||||
Default::default()
|
||||
|
@ -857,24 +965,24 @@ impl DenoDiagnostic {
|
|||
}
|
||||
|
||||
fn diagnose_resolution(
|
||||
diagnostics: &mut Vec<lsp::Diagnostic>,
|
||||
lsp_diagnostics: &mut Vec<lsp::Diagnostic>,
|
||||
snapshot: &language_server::StateSnapshot,
|
||||
resolution: &Resolution,
|
||||
is_dynamic: bool,
|
||||
maybe_assert_type: Option<&str>,
|
||||
ranges: Vec<lsp::Range>,
|
||||
) {
|
||||
let mut diagnostics = vec![];
|
||||
match resolution {
|
||||
Resolution::Ok(resolved) => {
|
||||
let specifier = &resolved.specifier;
|
||||
let range = documents::to_lsp_range(&resolved.range);
|
||||
// If the module is a remote module and has a `X-Deno-Warning` header, we
|
||||
// want a warning diagnostic with that message.
|
||||
if let Some(metadata) = snapshot.cache_metadata.get(specifier) {
|
||||
if let Some(message) =
|
||||
metadata.get(&cache::MetadataKey::Warning).cloned()
|
||||
{
|
||||
diagnostics
|
||||
.push(DenoDiagnostic::DenoWarn(message).to_lsp_diagnostic(&range));
|
||||
diagnostics.push(DenoDiagnostic::DenoWarn(message));
|
||||
}
|
||||
}
|
||||
if let Some(doc) = snapshot.documents.get(specifier) {
|
||||
|
@ -883,13 +991,10 @@ fn diagnose_resolution(
|
|||
// diagnostic that indicates this. This then allows us to issue a code
|
||||
// action to replace the specifier with the final redirected one.
|
||||
if doc_specifier != specifier {
|
||||
diagnostics.push(
|
||||
DenoDiagnostic::Redirect {
|
||||
from: specifier.clone(),
|
||||
to: doc_specifier.clone(),
|
||||
}
|
||||
.to_lsp_diagnostic(&range),
|
||||
);
|
||||
diagnostics.push(DenoDiagnostic::Redirect {
|
||||
from: specifier.clone(),
|
||||
to: doc_specifier.clone(),
|
||||
});
|
||||
}
|
||||
if doc.media_type() == MediaType::Json {
|
||||
match maybe_assert_type {
|
||||
|
@ -900,13 +1005,10 @@ fn diagnose_resolution(
|
|||
// not provide a potentially incorrect diagnostic.
|
||||
None if is_dynamic => (),
|
||||
// The module has an incorrect assertion type, diagnostic
|
||||
Some(assert_type) => diagnostics.push(
|
||||
DenoDiagnostic::InvalidAssertType(assert_type.to_string())
|
||||
.to_lsp_diagnostic(&range),
|
||||
),
|
||||
Some(assert_type) => diagnostics
|
||||
.push(DenoDiagnostic::InvalidAssertType(assert_type.to_string())),
|
||||
// The module is missing an assertion type, diagnostic
|
||||
None => diagnostics
|
||||
.push(DenoDiagnostic::NoAssertType.to_lsp_diagnostic(&range)),
|
||||
None => diagnostics.push(DenoDiagnostic::NoAssertType),
|
||||
}
|
||||
}
|
||||
} else if let Ok(pkg_ref) =
|
||||
|
@ -914,38 +1016,25 @@ fn diagnose_resolution(
|
|||
{
|
||||
if let Some(npm_resolver) = &snapshot.maybe_npm_resolver {
|
||||
// show diagnostics for npm package references that aren't cached
|
||||
if npm_resolver
|
||||
.resolve_pkg_id_from_pkg_req(&pkg_ref.req)
|
||||
.is_err()
|
||||
{
|
||||
diagnostics.push(
|
||||
DenoDiagnostic::NoCacheNpm(pkg_ref, specifier.clone())
|
||||
.to_lsp_diagnostic(&range),
|
||||
);
|
||||
if !npm_resolver.is_pkg_req_folder_cached(&pkg_ref.req) {
|
||||
diagnostics
|
||||
.push(DenoDiagnostic::NoCacheNpm(pkg_ref, specifier.clone()));
|
||||
}
|
||||
}
|
||||
} else if let Some(module_name) = specifier.as_str().strip_prefix("node:")
|
||||
{
|
||||
if node::resolve_builtin_node_module(module_name).is_err() {
|
||||
diagnostics.push(
|
||||
DenoDiagnostic::InvalidNodeSpecifier(specifier.clone())
|
||||
.to_lsp_diagnostic(&range),
|
||||
);
|
||||
if !deno_node::is_builtin_node_module(module_name) {
|
||||
diagnostics
|
||||
.push(DenoDiagnostic::InvalidNodeSpecifier(specifier.clone()));
|
||||
} else if let Some(npm_resolver) = &snapshot.maybe_npm_resolver {
|
||||
// check that a @types/node package exists in the resolver
|
||||
let types_node_ref =
|
||||
NpmPackageReqReference::from_str("npm:@types/node").unwrap();
|
||||
if npm_resolver
|
||||
.resolve_pkg_id_from_pkg_req(&types_node_ref.req)
|
||||
.is_err()
|
||||
{
|
||||
diagnostics.push(
|
||||
DenoDiagnostic::NoCacheNpm(
|
||||
types_node_ref,
|
||||
ModuleSpecifier::parse("npm:@types/node").unwrap(),
|
||||
)
|
||||
.to_lsp_diagnostic(&range),
|
||||
);
|
||||
if !npm_resolver.is_pkg_req_folder_cached(&types_node_ref.req) {
|
||||
diagnostics.push(DenoDiagnostic::NoCacheNpm(
|
||||
types_node_ref,
|
||||
ModuleSpecifier::parse("npm:@types/node").unwrap(),
|
||||
));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -958,17 +1047,21 @@ fn diagnose_resolution(
|
|||
"blob" => DenoDiagnostic::NoCacheBlob,
|
||||
_ => DenoDiagnostic::NoCache(specifier.clone()),
|
||||
};
|
||||
diagnostics.push(deno_diagnostic.to_lsp_diagnostic(&range));
|
||||
diagnostics.push(deno_diagnostic);
|
||||
}
|
||||
}
|
||||
// The specifier resolution resulted in an error, so we want to issue a
|
||||
// diagnostic for that.
|
||||
Resolution::Err(err) => diagnostics.push(
|
||||
DenoDiagnostic::ResolutionError(*err.clone())
|
||||
.to_lsp_diagnostic(&documents::to_lsp_range(err.range())),
|
||||
),
|
||||
Resolution::Err(err) => {
|
||||
diagnostics.push(DenoDiagnostic::ResolutionError(*err.clone()))
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
for range in ranges {
|
||||
for diagnostic in &diagnostics {
|
||||
lsp_diagnostics.push(diagnostic.to_lsp_diagnostic(&range));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate diagnostics related to a dependency. The dependency is analyzed to
|
||||
|
@ -1005,17 +1098,43 @@ fn diagnose_dependency(
|
|||
diagnose_resolution(
|
||||
diagnostics,
|
||||
snapshot,
|
||||
&dependency.maybe_code,
|
||||
dependency.is_dynamic,
|
||||
dependency.maybe_assert_type.as_deref(),
|
||||
);
|
||||
diagnose_resolution(
|
||||
diagnostics,
|
||||
snapshot,
|
||||
&dependency.maybe_type,
|
||||
if dependency.maybe_code.is_none() {
|
||||
&dependency.maybe_type
|
||||
} else {
|
||||
&dependency.maybe_code
|
||||
},
|
||||
dependency.is_dynamic,
|
||||
dependency.maybe_assert_type.as_deref(),
|
||||
dependency
|
||||
.imports
|
||||
.iter()
|
||||
.map(|i| documents::to_lsp_range(&i.range))
|
||||
.collect(),
|
||||
);
|
||||
// TODO(nayeemrmn): This is a crude way of detecting `@deno-types` which has
|
||||
// a different specifier and therefore needs a separate call to
|
||||
// `diagnose_resolution()`. It would be much cleaner if that were modelled as
|
||||
// a separate dependency: https://github.com/denoland/deno_graph/issues/247.
|
||||
if !dependency.maybe_type.is_none()
|
||||
&& !dependency
|
||||
.imports
|
||||
.iter()
|
||||
.any(|i| dependency.maybe_type.includes(&i.range.start).is_some())
|
||||
{
|
||||
let range = match &dependency.maybe_type {
|
||||
Resolution::Ok(resolved) => documents::to_lsp_range(&resolved.range),
|
||||
Resolution::Err(error) => documents::to_lsp_range(error.range()),
|
||||
Resolution::None => unreachable!(),
|
||||
};
|
||||
diagnose_resolution(
|
||||
diagnostics,
|
||||
snapshot,
|
||||
&dependency.maybe_type,
|
||||
dependency.is_dynamic,
|
||||
dependency.maybe_assert_type.as_deref(),
|
||||
vec![range],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate diagnostics that come from Deno module resolution logic (like
|
||||
|
@ -1079,7 +1198,7 @@ mod tests {
|
|||
location: &Path,
|
||||
maybe_import_map: Option<(&str, &str)>,
|
||||
) -> StateSnapshot {
|
||||
let mut documents = Documents::new(location, Default::default());
|
||||
let mut documents = Documents::new(location);
|
||||
for (specifier, source, version, language_id) in fixtures {
|
||||
let specifier =
|
||||
resolve_url(specifier).expect("failed to create specifier");
|
||||
|
@ -1376,4 +1495,81 @@ let c: number = "a";
|
|||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn duplicate_diagnostics_for_duplicate_imports() {
|
||||
let temp_dir = TempDir::new();
|
||||
let (snapshot, _) = setup(
|
||||
&temp_dir,
|
||||
&[(
|
||||
"file:///a.ts",
|
||||
r#"
|
||||
// @deno-types="bad.d.ts"
|
||||
import "bad.js";
|
||||
import "bad.js";
|
||||
"#,
|
||||
1,
|
||||
LanguageId::TypeScript,
|
||||
)],
|
||||
None,
|
||||
);
|
||||
let config = mock_config();
|
||||
let token = CancellationToken::new();
|
||||
let actual = generate_deno_diagnostics(&snapshot, &config, token).await;
|
||||
assert_eq!(actual.len(), 1);
|
||||
let (_, _, diagnostics) = actual.first().unwrap();
|
||||
assert_eq!(
|
||||
json!(diagnostics),
|
||||
json!([
|
||||
{
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 2,
|
||||
"character": 15
|
||||
},
|
||||
"end": {
|
||||
"line": 2,
|
||||
"character": 23
|
||||
}
|
||||
},
|
||||
"severity": 1,
|
||||
"code": "import-prefix-missing",
|
||||
"source": "deno",
|
||||
"message": "Relative import path \"bad.js\" not prefixed with / or ./ or ../",
|
||||
},
|
||||
{
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 3,
|
||||
"character": 15
|
||||
},
|
||||
"end": {
|
||||
"line": 3,
|
||||
"character": 23
|
||||
}
|
||||
},
|
||||
"severity": 1,
|
||||
"code": "import-prefix-missing",
|
||||
"source": "deno",
|
||||
"message": "Relative import path \"bad.js\" not prefixed with / or ./ or ../",
|
||||
},
|
||||
{
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 1,
|
||||
"character": 23
|
||||
},
|
||||
"end": {
|
||||
"line": 1,
|
||||
"character": 33
|
||||
}
|
||||
},
|
||||
"severity": 1,
|
||||
"code": "import-prefix-missing",
|
||||
"source": "deno",
|
||||
"message": "Relative import path \"bad.d.ts\" not prefixed with / or ./ or ../",
|
||||
},
|
||||
])
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use super::cache::calculate_fs_version;
|
||||
use super::client::LspClientKind;
|
||||
use super::text::LineIndex;
|
||||
use super::tsc;
|
||||
use super::tsc::AssetDocument;
|
||||
|
@ -17,8 +16,6 @@ use crate::file_fetcher::get_source_from_bytes;
|
|||
use crate::file_fetcher::map_content_type;
|
||||
use crate::file_fetcher::SUPPORTED_SCHEMES;
|
||||
use crate::lsp::logging::lsp_warn;
|
||||
use crate::node::CliNodeResolver;
|
||||
use crate::node::NodeResolution;
|
||||
use crate::npm::CliNpmRegistryApi;
|
||||
use crate::npm::NpmResolution;
|
||||
use crate::npm::PackageJsonDepsInstaller;
|
||||
|
@ -37,7 +34,10 @@ use deno_core::url;
|
|||
use deno_core::ModuleSpecifier;
|
||||
use deno_graph::GraphImport;
|
||||
use deno_graph::Resolution;
|
||||
use deno_runtime::deno_node;
|
||||
use deno_runtime::deno_node::NodeResolution;
|
||||
use deno_runtime::deno_node::NodeResolutionMode;
|
||||
use deno_runtime::deno_node::NodeResolver;
|
||||
use deno_runtime::deno_node::PackageJson;
|
||||
use deno_runtime::permissions::PermissionsContainer;
|
||||
use deno_semver::npm::NpmPackageReq;
|
||||
|
@ -45,6 +45,7 @@ use deno_semver::npm::NpmPackageReqReference;
|
|||
use indexmap::IndexMap;
|
||||
use lsp::Url;
|
||||
use once_cell::sync::Lazy;
|
||||
use package_json::PackageJsonDepsProvider;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
|
@ -791,6 +792,16 @@ fn get_document_path(
|
|||
}
|
||||
}
|
||||
|
||||
pub struct UpdateDocumentConfigOptions<'a> {
|
||||
pub enabled_urls: Vec<Url>,
|
||||
pub document_preload_limit: usize,
|
||||
pub maybe_import_map: Option<Arc<import_map::ImportMap>>,
|
||||
pub maybe_config_file: Option<&'a ConfigFile>,
|
||||
pub maybe_package_json: Option<&'a PackageJson>,
|
||||
pub npm_registry_api: Arc<CliNpmRegistryApi>,
|
||||
pub npm_resolution: Arc<NpmResolution>,
|
||||
}
|
||||
|
||||
/// Specify the documents to include on a `documents.documents(...)` call.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum DocumentsFilter {
|
||||
|
@ -816,8 +827,6 @@ pub struct Documents {
|
|||
open_docs: HashMap<ModuleSpecifier, Document>,
|
||||
/// Documents stored on the file system.
|
||||
file_system_docs: Arc<Mutex<FileSystemDocuments>>,
|
||||
/// Kind of the client that is using the documents.
|
||||
lsp_client_kind: LspClientKind,
|
||||
/// Hash of the config used for resolution. When the hash changes we update
|
||||
/// dependencies.
|
||||
resolver_config_hash: u64,
|
||||
|
@ -837,14 +846,13 @@ pub struct Documents {
|
|||
}
|
||||
|
||||
impl Documents {
|
||||
pub fn new(location: &Path, lsp_client_kind: LspClientKind) -> Self {
|
||||
pub fn new(location: &Path) -> Self {
|
||||
Self {
|
||||
cache: HttpCache::new(location),
|
||||
dirty: true,
|
||||
dependents_map: Default::default(),
|
||||
open_docs: HashMap::default(),
|
||||
file_system_docs: Default::default(),
|
||||
lsp_client_kind,
|
||||
resolver_config_hash: 0,
|
||||
imports: Default::default(),
|
||||
resolver: Default::default(),
|
||||
|
@ -1055,7 +1063,7 @@ impl Documents {
|
|||
&self,
|
||||
specifiers: Vec<String>,
|
||||
referrer_doc: &AssetOrDocument,
|
||||
maybe_node_resolver: Option<&Arc<CliNodeResolver>>,
|
||||
maybe_node_resolver: Option<&Arc<NodeResolver>>,
|
||||
) -> Vec<Option<(ModuleSpecifier, MediaType)>> {
|
||||
let referrer = referrer_doc.specifier();
|
||||
let dependencies = match referrer_doc {
|
||||
|
@ -1073,7 +1081,7 @@ impl Documents {
|
|||
&specifier,
|
||||
referrer,
|
||||
NodeResolutionMode::Types,
|
||||
&mut PermissionsContainer::allow_all(),
|
||||
&PermissionsContainer::allow_all(),
|
||||
)
|
||||
.ok()
|
||||
.flatten(),
|
||||
|
@ -1082,7 +1090,7 @@ impl Documents {
|
|||
}
|
||||
}
|
||||
if let Some(module_name) = specifier.strip_prefix("node:") {
|
||||
if crate::node::resolve_builtin_node_module(module_name).is_ok() {
|
||||
if deno_node::is_builtin_node_module(module_name) {
|
||||
// return itself for node: specifiers because during type checking
|
||||
// we resolve to the ambient modules in the @types/node package
|
||||
// rather than deno_std/node
|
||||
|
@ -1159,22 +1167,16 @@ impl Documents {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub fn update_config(
|
||||
&mut self,
|
||||
enabled_urls: Vec<Url>,
|
||||
maybe_import_map: Option<Arc<import_map::ImportMap>>,
|
||||
maybe_config_file: Option<&ConfigFile>,
|
||||
maybe_package_json: Option<&PackageJson>,
|
||||
npm_registry_api: Arc<CliNpmRegistryApi>,
|
||||
npm_resolution: Arc<NpmResolution>,
|
||||
) {
|
||||
pub fn update_config(&mut self, options: UpdateDocumentConfigOptions) {
|
||||
fn calculate_resolver_config_hash(
|
||||
enabled_urls: &[Url],
|
||||
document_preload_limit: usize,
|
||||
maybe_import_map: Option<&import_map::ImportMap>,
|
||||
maybe_jsx_config: Option<&JsxImportSourceConfig>,
|
||||
maybe_package_json_deps: Option<&PackageJsonDeps>,
|
||||
) -> u64 {
|
||||
let mut hasher = FastInsecureHasher::default();
|
||||
hasher.write_hashable(&document_preload_limit);
|
||||
hasher.write_hashable(&{
|
||||
// ensure these are sorted so the hashing is deterministic
|
||||
let mut enabled_urls = enabled_urls.to_vec();
|
||||
|
@ -1206,33 +1208,35 @@ impl Documents {
|
|||
hasher.finish()
|
||||
}
|
||||
|
||||
let maybe_package_json_deps = maybe_package_json.map(|package_json| {
|
||||
package_json::get_local_package_json_version_reqs(package_json)
|
||||
});
|
||||
let maybe_jsx_config =
|
||||
maybe_config_file.and_then(|cf| cf.to_maybe_jsx_import_source_config());
|
||||
let maybe_package_json_deps =
|
||||
options.maybe_package_json.map(|package_json| {
|
||||
package_json::get_local_package_json_version_reqs(package_json)
|
||||
});
|
||||
let maybe_jsx_config = options
|
||||
.maybe_config_file
|
||||
.and_then(|cf| cf.to_maybe_jsx_import_source_config());
|
||||
let new_resolver_config_hash = calculate_resolver_config_hash(
|
||||
&enabled_urls,
|
||||
maybe_import_map.as_deref(),
|
||||
&options.enabled_urls,
|
||||
options.document_preload_limit,
|
||||
options.maybe_import_map.as_deref(),
|
||||
maybe_jsx_config.as_ref(),
|
||||
maybe_package_json_deps.as_ref(),
|
||||
);
|
||||
let deps_installer = Arc::new(PackageJsonDepsInstaller::new(
|
||||
npm_registry_api.clone(),
|
||||
npm_resolution.clone(),
|
||||
maybe_package_json_deps,
|
||||
));
|
||||
let deps_provider =
|
||||
Arc::new(PackageJsonDepsProvider::new(maybe_package_json_deps));
|
||||
let deps_installer = Arc::new(PackageJsonDepsInstaller::no_op());
|
||||
self.resolver = Arc::new(CliGraphResolver::new(
|
||||
maybe_jsx_config,
|
||||
maybe_import_map,
|
||||
options.maybe_import_map,
|
||||
false,
|
||||
npm_registry_api,
|
||||
npm_resolution,
|
||||
options.npm_registry_api,
|
||||
options.npm_resolution,
|
||||
deps_provider,
|
||||
deps_installer,
|
||||
));
|
||||
self.imports = Arc::new(
|
||||
if let Some(Ok(imports)) =
|
||||
maybe_config_file.map(|cf| cf.to_maybe_imports())
|
||||
options.maybe_config_file.map(|cf| cf.to_maybe_imports())
|
||||
{
|
||||
imports
|
||||
.into_iter()
|
||||
|
@ -1252,14 +1256,21 @@ impl Documents {
|
|||
|
||||
// only refresh the dependencies if the underlying configuration has changed
|
||||
if self.resolver_config_hash != new_resolver_config_hash {
|
||||
self.refresh_dependencies(enabled_urls);
|
||||
self.refresh_dependencies(
|
||||
options.enabled_urls,
|
||||
options.document_preload_limit,
|
||||
);
|
||||
self.resolver_config_hash = new_resolver_config_hash;
|
||||
}
|
||||
|
||||
self.dirty = true;
|
||||
}
|
||||
|
||||
fn refresh_dependencies(&mut self, enabled_urls: Vec<Url>) {
|
||||
fn refresh_dependencies(
|
||||
&mut self,
|
||||
enabled_urls: Vec<Url>,
|
||||
document_preload_limit: usize,
|
||||
) {
|
||||
let resolver = self.resolver.as_graph_resolver();
|
||||
for doc in self.open_docs.values_mut() {
|
||||
if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
|
||||
|
@ -1269,51 +1280,73 @@ impl Documents {
|
|||
|
||||
// update the file system documents
|
||||
let mut fs_docs = self.file_system_docs.lock();
|
||||
match self.lsp_client_kind {
|
||||
LspClientKind::CodeEditor => {
|
||||
let mut not_found_docs =
|
||||
fs_docs.docs.keys().cloned().collect::<HashSet<_>>();
|
||||
let open_docs = &mut self.open_docs;
|
||||
if document_preload_limit > 0 {
|
||||
let mut not_found_docs =
|
||||
fs_docs.docs.keys().cloned().collect::<HashSet<_>>();
|
||||
let open_docs = &mut self.open_docs;
|
||||
|
||||
log::debug!("Preloading documents from enabled urls...");
|
||||
for specifier in PreloadDocumentFinder::from_enabled_urls(&enabled_urls)
|
||||
log::debug!("Preloading documents from enabled urls...");
|
||||
let mut finder = PreloadDocumentFinder::from_enabled_urls_with_limit(
|
||||
&enabled_urls,
|
||||
document_preload_limit,
|
||||
);
|
||||
for specifier in finder.by_ref() {
|
||||
// mark this document as having been found
|
||||
not_found_docs.remove(&specifier);
|
||||
|
||||
if !open_docs.contains_key(&specifier)
|
||||
&& !fs_docs.docs.contains_key(&specifier)
|
||||
{
|
||||
// mark this document as having been found
|
||||
not_found_docs.remove(&specifier);
|
||||
|
||||
if !open_docs.contains_key(&specifier)
|
||||
&& !fs_docs.docs.contains_key(&specifier)
|
||||
{
|
||||
fs_docs.refresh_document(&self.cache, resolver, &specifier);
|
||||
} else {
|
||||
// update the existing entry to have the new resolver
|
||||
if let Some(doc) = fs_docs.docs.get_mut(&specifier) {
|
||||
if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
|
||||
*doc = new_doc;
|
||||
}
|
||||
fs_docs.refresh_document(&self.cache, resolver, &specifier);
|
||||
} else {
|
||||
// update the existing entry to have the new resolver
|
||||
if let Some(doc) = fs_docs.docs.get_mut(&specifier) {
|
||||
if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
|
||||
*doc = new_doc;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if finder.hit_limit() {
|
||||
lsp_warn!(
|
||||
concat!(
|
||||
"Hit the language server document preload limit of {} file system entries. ",
|
||||
"You may want to use the \"deno.enablePaths\" configuration setting to only have Deno ",
|
||||
"partially enable a workspace or increase the limit via \"deno.documentPreloadLimit\". ",
|
||||
"In cases where Deno ends up using too much memory, you may want to lower the limit."
|
||||
),
|
||||
document_preload_limit,
|
||||
);
|
||||
|
||||
// since we hit the limit, just update everything to use the new resolver
|
||||
for uri in not_found_docs {
|
||||
if let Some(doc) = fs_docs.docs.get_mut(&uri) {
|
||||
if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
|
||||
*doc = new_doc;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// clean up and remove any documents that weren't found
|
||||
for uri in not_found_docs {
|
||||
fs_docs.docs.remove(&uri);
|
||||
}
|
||||
}
|
||||
LspClientKind::Repl => {
|
||||
// This log statement is used in the tests to ensure preloading doesn't
|
||||
// happen, which is not useful in the repl and could be very expensive
|
||||
// if the repl is launched from a directory with a lot of descendants.
|
||||
log::debug!("Skipping document preload for repl.");
|
||||
} else {
|
||||
// This log statement is used in the tests to ensure preloading doesn't
|
||||
// happen, which is not useful in the repl and could be very expensive
|
||||
// if the repl is launched from a directory with a lot of descendants.
|
||||
log::debug!("Skipping document preload.");
|
||||
|
||||
// for the repl, just update to use the new resolver
|
||||
for doc in fs_docs.docs.values_mut() {
|
||||
if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
|
||||
*doc = new_doc;
|
||||
}
|
||||
// just update to use the new resolver
|
||||
for doc in fs_docs.docs.values_mut() {
|
||||
if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
|
||||
*doc = new_doc;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fs_docs.dirty = true;
|
||||
}
|
||||
|
||||
|
@ -1417,7 +1450,7 @@ impl Documents {
|
|||
fn resolve_dependency(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
maybe_node_resolver: Option<&Arc<CliNodeResolver>>,
|
||||
maybe_node_resolver: Option<&Arc<NodeResolver>>,
|
||||
) -> Option<(ModuleSpecifier, MediaType)> {
|
||||
if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(specifier) {
|
||||
return node_resolve_npm_req_ref(npm_ref, maybe_node_resolver);
|
||||
|
@ -1452,7 +1485,7 @@ impl Documents {
|
|||
|
||||
fn node_resolve_npm_req_ref(
|
||||
npm_req_ref: NpmPackageReqReference,
|
||||
maybe_node_resolver: Option<&Arc<CliNodeResolver>>,
|
||||
maybe_node_resolver: Option<&Arc<NodeResolver>>,
|
||||
) -> Option<(ModuleSpecifier, MediaType)> {
|
||||
maybe_node_resolver.map(|node_resolver| {
|
||||
NodeResolution::into_specifier_and_media_type(
|
||||
|
@ -1460,7 +1493,7 @@ fn node_resolve_npm_req_ref(
|
|||
.resolve_npm_req_reference(
|
||||
&npm_req_ref,
|
||||
NodeResolutionMode::Types,
|
||||
&mut PermissionsContainer::allow_all(),
|
||||
&PermissionsContainer::allow_all(),
|
||||
)
|
||||
.ok()
|
||||
.flatten(),
|
||||
|
@ -1553,19 +1586,15 @@ enum PendingEntry {
|
|||
/// Iterator that finds documents that can be preloaded into
|
||||
/// the LSP on startup.
|
||||
struct PreloadDocumentFinder {
|
||||
limit: u16,
|
||||
entry_count: u16,
|
||||
limit: usize,
|
||||
entry_count: usize,
|
||||
pending_entries: VecDeque<PendingEntry>,
|
||||
}
|
||||
|
||||
impl PreloadDocumentFinder {
|
||||
pub fn from_enabled_urls(enabled_urls: &Vec<Url>) -> Self {
|
||||
Self::from_enabled_urls_with_limit(enabled_urls, 1_000)
|
||||
}
|
||||
|
||||
pub fn from_enabled_urls_with_limit(
|
||||
enabled_urls: &Vec<Url>,
|
||||
limit: u16,
|
||||
limit: usize,
|
||||
) -> Self {
|
||||
fn is_allowed_root_dir(dir_path: &Path) -> bool {
|
||||
if dir_path.parent().is_none() {
|
||||
|
@ -1600,6 +1629,10 @@ impl PreloadDocumentFinder {
|
|||
finder
|
||||
}
|
||||
|
||||
pub fn hit_limit(&self) -> bool {
|
||||
self.entry_count >= self.limit
|
||||
}
|
||||
|
||||
fn get_valid_specifier(path: &Path) -> Option<ModuleSpecifier> {
|
||||
fn is_allowed_media_type(media_type: MediaType) -> bool {
|
||||
match media_type {
|
||||
|
@ -1694,15 +1727,7 @@ impl Iterator for PreloadDocumentFinder {
|
|||
while let Some(entry) = entries.next() {
|
||||
self.entry_count += 1;
|
||||
|
||||
if self.entry_count >= self.limit {
|
||||
lsp_warn!(
|
||||
concat!(
|
||||
"Hit the language server document preload limit of {} file system entries. ",
|
||||
"You may want to use the \"deno.enablePaths\" configuration setting to only have Deno ",
|
||||
"partially enable a workspace."
|
||||
),
|
||||
self.limit,
|
||||
);
|
||||
if self.hit_limit() {
|
||||
self.pending_entries.clear(); // stop searching
|
||||
return None;
|
||||
}
|
||||
|
@ -1764,7 +1789,7 @@ mod tests {
|
|||
|
||||
fn setup(temp_dir: &TempDir) -> (Documents, PathBuf) {
|
||||
let location = temp_dir.path().join("deps");
|
||||
let documents = Documents::new(&location, Default::default());
|
||||
let documents = Documents::new(&location);
|
||||
(documents, location)
|
||||
}
|
||||
|
||||
|
@ -1894,14 +1919,15 @@ console.log(b, "hello deno");
|
|||
.append("test".to_string(), "./file2.ts".to_string())
|
||||
.unwrap();
|
||||
|
||||
documents.update_config(
|
||||
vec![],
|
||||
Some(Arc::new(import_map)),
|
||||
None,
|
||||
None,
|
||||
npm_registry_api.clone(),
|
||||
npm_resolution.clone(),
|
||||
);
|
||||
documents.update_config(UpdateDocumentConfigOptions {
|
||||
enabled_urls: vec![],
|
||||
document_preload_limit: 1_000,
|
||||
maybe_import_map: Some(Arc::new(import_map)),
|
||||
maybe_config_file: None,
|
||||
maybe_package_json: None,
|
||||
npm_registry_api: npm_registry_api.clone(),
|
||||
npm_resolution: npm_resolution.clone(),
|
||||
});
|
||||
|
||||
// open the document
|
||||
let document = documents.open(
|
||||
|
@ -1934,14 +1960,15 @@ console.log(b, "hello deno");
|
|||
.append("test".to_string(), "./file3.ts".to_string())
|
||||
.unwrap();
|
||||
|
||||
documents.update_config(
|
||||
vec![],
|
||||
Some(Arc::new(import_map)),
|
||||
None,
|
||||
None,
|
||||
documents.update_config(UpdateDocumentConfigOptions {
|
||||
enabled_urls: vec![],
|
||||
document_preload_limit: 1_000,
|
||||
maybe_import_map: Some(Arc::new(import_map)),
|
||||
maybe_config_file: None,
|
||||
maybe_package_json: None,
|
||||
npm_registry_api,
|
||||
npm_resolution,
|
||||
);
|
||||
});
|
||||
|
||||
// check the document's dependencies
|
||||
let document = documents.get(&file1_specifier).unwrap();
|
||||
|
@ -1996,12 +2023,15 @@ console.log(b, "hello deno");
|
|||
temp_dir.create_dir_all("root3/");
|
||||
temp_dir.write("root3/mod.ts", ""); // no, not provided
|
||||
|
||||
let mut urls = PreloadDocumentFinder::from_enabled_urls(&vec![
|
||||
temp_dir.uri().join("root1/").unwrap(),
|
||||
temp_dir.uri().join("root2/file1.ts").unwrap(),
|
||||
temp_dir.uri().join("root2/main.min.ts").unwrap(),
|
||||
temp_dir.uri().join("root2/folder/").unwrap(),
|
||||
])
|
||||
let mut urls = PreloadDocumentFinder::from_enabled_urls_with_limit(
|
||||
&vec![
|
||||
temp_dir.uri().join("root1/").unwrap(),
|
||||
temp_dir.uri().join("root2/file1.ts").unwrap(),
|
||||
temp_dir.uri().join("root2/main.min.ts").unwrap(),
|
||||
temp_dir.uri().join("root2/folder/").unwrap(),
|
||||
],
|
||||
1_000,
|
||||
)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Ideally we would test for order here, which should be BFS, but
|
||||
|
@ -2043,18 +2073,18 @@ console.log(b, "hello deno");
|
|||
#[test]
|
||||
pub fn test_pre_load_document_finder_disallowed_dirs() {
|
||||
if cfg!(windows) {
|
||||
let paths = PreloadDocumentFinder::from_enabled_urls(&vec![Url::parse(
|
||||
"file:///c:/",
|
||||
let paths = PreloadDocumentFinder::from_enabled_urls_with_limit(
|
||||
&vec![Url::parse("file:///c:/").unwrap()],
|
||||
1_000,
|
||||
)
|
||||
.unwrap()])
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(paths, vec![]);
|
||||
} else {
|
||||
let paths =
|
||||
PreloadDocumentFinder::from_enabled_urls(&vec![
|
||||
Url::parse("file:///").unwrap()
|
||||
])
|
||||
.collect::<Vec<_>>();
|
||||
let paths = PreloadDocumentFinder::from_enabled_urls_with_limit(
|
||||
&vec![Url::parse("file:///").unwrap()],
|
||||
1_000,
|
||||
)
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(paths, vec![]);
|
||||
}
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -10,6 +10,8 @@ pub const TASK_REQUEST: &str = "deno/task";
|
|||
pub const RELOAD_IMPORT_REGISTRIES_REQUEST: &str =
|
||||
"deno/reloadImportRegistries";
|
||||
pub const VIRTUAL_TEXT_DOCUMENT: &str = "deno/virtualTextDocument";
|
||||
pub const LATEST_DIAGNOSTIC_BATCH_INDEX: &str =
|
||||
"deno/internalLatestDiagnosticBatchIndex";
|
||||
|
||||
// While lsp_types supports inlay hints currently, tower_lsp does not.
|
||||
pub const INLAY_HINT: &str = "textDocument/inlayHint";
|
||||
|
@ -44,3 +46,19 @@ impl lsp::notification::Notification for RegistryStateNotification {
|
|||
pub struct VirtualTextDocumentParams {
|
||||
pub text_document: lsp::TextDocumentIdentifier,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
pub struct DiagnosticBatchNotificationParams {
|
||||
pub batch_index: usize,
|
||||
pub messages_len: usize,
|
||||
}
|
||||
|
||||
/// This notification is only sent for testing purposes
|
||||
/// in order to know what the latest diagnostics are.
|
||||
pub enum DiagnosticBatchNotification {}
|
||||
|
||||
impl lsp::notification::Notification for DiagnosticBatchNotification {
|
||||
type Params = DiagnosticBatchNotificationParams;
|
||||
|
||||
const METHOD: &'static str = "deno/internalTestDiagnosticBatch";
|
||||
}
|
||||
|
|
|
@ -8,6 +8,8 @@ use crate::lsp::language_server::LanguageServer;
|
|||
pub use repl::ReplCompletionItem;
|
||||
pub use repl::ReplLanguageServer;
|
||||
|
||||
use self::diagnostics::should_send_diagnostic_batch_index_notifications;
|
||||
|
||||
mod analysis;
|
||||
mod cache;
|
||||
mod capabilities;
|
||||
|
@ -36,7 +38,7 @@ pub async fn start() -> Result<(), AnyError> {
|
|||
let stdin = tokio::io::stdin();
|
||||
let stdout = tokio::io::stdout();
|
||||
|
||||
let (service, socket) = LspService::build(|client| {
|
||||
let builder = LspService::build(|client| {
|
||||
language_server::LanguageServer::new(client::Client::from_tower(client))
|
||||
})
|
||||
.custom_method(lsp_custom::CACHE_REQUEST, LanguageServer::cache_request)
|
||||
|
@ -58,8 +60,18 @@ pub async fn start() -> Result<(), AnyError> {
|
|||
lsp_custom::VIRTUAL_TEXT_DOCUMENT,
|
||||
LanguageServer::virtual_text_document,
|
||||
)
|
||||
.custom_method(lsp_custom::INLAY_HINT, LanguageServer::inlay_hint)
|
||||
.finish();
|
||||
.custom_method(lsp_custom::INLAY_HINT, LanguageServer::inlay_hint);
|
||||
|
||||
let builder = if should_send_diagnostic_batch_index_notifications() {
|
||||
builder.custom_method(
|
||||
lsp_custom::LATEST_DIAGNOSTIC_BATCH_INDEX,
|
||||
LanguageServer::latest_diagnostic_batch_index_request,
|
||||
)
|
||||
} else {
|
||||
builder
|
||||
};
|
||||
|
||||
let (service, socket) = builder.finish();
|
||||
|
||||
Server::new(stdin, stdout, socket).serve(service).await;
|
||||
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use deno_core::task::spawn;
|
||||
use tokio::time::sleep;
|
||||
use tokio::time::Duration;
|
||||
|
||||
|
@ -7,7 +8,7 @@ use tokio::time::Duration;
|
|||
/// provided process id. Once that process no longer exists
|
||||
/// it will terminate the current process.
|
||||
pub fn start(parent_process_id: u32) {
|
||||
tokio::task::spawn(async move {
|
||||
spawn(async move {
|
||||
loop {
|
||||
sleep(Duration::from_secs(30)).await;
|
||||
|
||||
|
|
|
@ -35,6 +35,7 @@ use log::error;
|
|||
use once_cell::sync::Lazy;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use tower_lsp::lsp_types as lsp;
|
||||
|
||||
const CONFIG_PATH: &str = "/.well-known/deno-import-intellisense.json";
|
||||
|
@ -425,16 +426,13 @@ impl Default for ModuleRegistry {
|
|||
// custom root.
|
||||
let dir = DenoDir::new(None).unwrap();
|
||||
let location = dir.registries_folder_path();
|
||||
let http_client = HttpClient::new(None, None).unwrap();
|
||||
Self::new(&location, http_client).unwrap()
|
||||
let http_client = Arc::new(HttpClient::new(None, None));
|
||||
Self::new(&location, http_client)
|
||||
}
|
||||
}
|
||||
|
||||
impl ModuleRegistry {
|
||||
pub fn new(
|
||||
location: &Path,
|
||||
http_client: HttpClient,
|
||||
) -> Result<Self, AnyError> {
|
||||
pub fn new(location: &Path, http_client: Arc<HttpClient>) -> Self {
|
||||
let http_cache = HttpCache::new(location);
|
||||
let mut file_fetcher = FileFetcher::new(
|
||||
http_cache,
|
||||
|
@ -446,10 +444,10 @@ impl ModuleRegistry {
|
|||
);
|
||||
file_fetcher.set_download_log_level(super::logging::lsp_log_level());
|
||||
|
||||
Ok(Self {
|
||||
Self {
|
||||
origins: HashMap::new(),
|
||||
file_fetcher,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn complete_literal(
|
||||
|
@ -1251,8 +1249,7 @@ mod tests {
|
|||
let temp_dir = TempDir::new();
|
||||
let location = temp_dir.path().join("registries");
|
||||
let mut module_registry =
|
||||
ModuleRegistry::new(&location, HttpClient::new(None, None).unwrap())
|
||||
.unwrap();
|
||||
ModuleRegistry::new(&location, Arc::new(HttpClient::new(None, None)));
|
||||
module_registry
|
||||
.enable("http://localhost:4545/")
|
||||
.await
|
||||
|
@ -1313,8 +1310,7 @@ mod tests {
|
|||
let temp_dir = TempDir::new();
|
||||
let location = temp_dir.path().join("registries");
|
||||
let mut module_registry =
|
||||
ModuleRegistry::new(&location, HttpClient::new(None, None).unwrap())
|
||||
.unwrap();
|
||||
ModuleRegistry::new(&location, Arc::new(HttpClient::new(None, None)));
|
||||
module_registry
|
||||
.enable("http://localhost:4545/")
|
||||
.await
|
||||
|
@ -1537,8 +1533,7 @@ mod tests {
|
|||
let temp_dir = TempDir::new();
|
||||
let location = temp_dir.path().join("registries");
|
||||
let mut module_registry =
|
||||
ModuleRegistry::new(&location, HttpClient::new(None, None).unwrap())
|
||||
.unwrap();
|
||||
ModuleRegistry::new(&location, Arc::new(HttpClient::new(None, None)));
|
||||
module_registry
|
||||
.enable_custom("http://localhost:4545/lsp/registries/deno-import-intellisense-key-first.json")
|
||||
.await
|
||||
|
@ -1608,8 +1603,7 @@ mod tests {
|
|||
let temp_dir = TempDir::new();
|
||||
let location = temp_dir.path().join("registries");
|
||||
let mut module_registry =
|
||||
ModuleRegistry::new(&location, HttpClient::new(None, None).unwrap())
|
||||
.unwrap();
|
||||
ModuleRegistry::new(&location, Arc::new(HttpClient::new(None, None)));
|
||||
module_registry
|
||||
.enable_custom("http://localhost:4545/lsp/registries/deno-import-intellisense-complex.json")
|
||||
.await
|
||||
|
@ -1660,8 +1654,7 @@ mod tests {
|
|||
let temp_dir = TempDir::new();
|
||||
let location = temp_dir.path().join("registries");
|
||||
let module_registry =
|
||||
ModuleRegistry::new(&location, HttpClient::new(None, None).unwrap())
|
||||
.unwrap();
|
||||
ModuleRegistry::new(&location, Arc::new(HttpClient::new(None, None)));
|
||||
let result = module_registry.check_origin("http://localhost:4545").await;
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
@ -1672,8 +1665,7 @@ mod tests {
|
|||
let temp_dir = TempDir::new();
|
||||
let location = temp_dir.path().join("registries");
|
||||
let module_registry =
|
||||
ModuleRegistry::new(&location, HttpClient::new(None, None).unwrap())
|
||||
.unwrap();
|
||||
ModuleRegistry::new(&location, Arc::new(HttpClient::new(None, None)));
|
||||
let result = module_registry.check_origin("https://example.com").await;
|
||||
assert!(result.is_err());
|
||||
let err = result.unwrap_err().to_string();
|
||||
|
|
|
@ -294,6 +294,7 @@ pub fn get_repl_workspace_settings() -> WorkspaceSettings {
|
|||
inlay_hints: Default::default(),
|
||||
internal_debug: false,
|
||||
lint: false,
|
||||
document_preload_limit: 0, // don't pre-load any modules as it's expensive and not useful for the repl
|
||||
tls_certificate: None,
|
||||
unsafely_ignore_certificate_errors: None,
|
||||
unstable: false,
|
||||
|
|
|
@ -6,11 +6,11 @@ use super::lsp_custom;
|
|||
|
||||
use crate::args::flags_from_vec;
|
||||
use crate::args::DenoSubcommand;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::lsp::client::Client;
|
||||
use crate::lsp::client::TestingNotification;
|
||||
use crate::lsp::config;
|
||||
use crate::lsp::logging::lsp_log;
|
||||
use crate::proc_state;
|
||||
use crate::tools::test;
|
||||
use crate::tools::test::FailFastTracker;
|
||||
use crate::tools::test::TestEventSender;
|
||||
|
@ -24,9 +24,11 @@ use deno_core::futures::stream;
|
|||
use deno_core::futures::StreamExt;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::parking_lot::RwLock;
|
||||
use deno_core::task::spawn;
|
||||
use deno_core::task::spawn_blocking;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_runtime::permissions::Permissions;
|
||||
use deno_runtime::tokio_util::run_local;
|
||||
use deno_runtime::tokio_util::create_and_run_current_thread;
|
||||
use indexmap::IndexMap;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
|
@ -218,15 +220,16 @@ impl TestRun {
|
|||
let args = self.get_args();
|
||||
lsp_log!("Executing test run with arguments: {}", args.join(" "));
|
||||
let flags = flags_from_vec(args.into_iter().map(String::from).collect())?;
|
||||
let ps = proc_state::ProcState::from_flags(flags).await?;
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
// Various test files should not share the same permissions in terms of
|
||||
// `PermissionsContainer` - otherwise granting/revoking permissions in one
|
||||
// file would have impact on other files, which is undesirable.
|
||||
let permissions =
|
||||
Permissions::from_options(&ps.options.permissions_options())?;
|
||||
Permissions::from_options(&factory.cli_options().permissions_options())?;
|
||||
test::check_specifiers(
|
||||
&ps,
|
||||
permissions.clone(),
|
||||
factory.cli_options(),
|
||||
factory.file_fetcher()?,
|
||||
factory.module_load_preparer().await?,
|
||||
self
|
||||
.queue
|
||||
.iter()
|
||||
|
@ -235,18 +238,19 @@ impl TestRun {
|
|||
)
|
||||
.await?;
|
||||
|
||||
let (concurrent_jobs, fail_fast) =
|
||||
if let DenoSubcommand::Test(test_flags) = ps.options.sub_command() {
|
||||
(
|
||||
test_flags
|
||||
.concurrent_jobs
|
||||
.unwrap_or_else(|| NonZeroUsize::new(1).unwrap())
|
||||
.into(),
|
||||
test_flags.fail_fast,
|
||||
)
|
||||
} else {
|
||||
unreachable!("Should always be Test subcommand.");
|
||||
};
|
||||
let (concurrent_jobs, fail_fast) = if let DenoSubcommand::Test(test_flags) =
|
||||
factory.cli_options().sub_command()
|
||||
{
|
||||
(
|
||||
test_flags
|
||||
.concurrent_jobs
|
||||
.unwrap_or_else(|| NonZeroUsize::new(1).unwrap())
|
||||
.into(),
|
||||
test_flags.fail_fast,
|
||||
)
|
||||
} else {
|
||||
unreachable!("Should always be Test subcommand.");
|
||||
};
|
||||
|
||||
let (sender, mut receiver) = mpsc::unbounded_channel::<test::TestEvent>();
|
||||
let sender = TestEventSender::new(sender);
|
||||
|
@ -258,10 +262,12 @@ impl TestRun {
|
|||
let tests: Arc<RwLock<IndexMap<usize, test::TestDescription>>> =
|
||||
Arc::new(RwLock::new(IndexMap::new()));
|
||||
let mut test_steps = IndexMap::new();
|
||||
let worker_factory =
|
||||
Arc::new(factory.create_cli_main_worker_factory().await?);
|
||||
|
||||
let join_handles = queue.into_iter().map(move |specifier| {
|
||||
let specifier = specifier.clone();
|
||||
let ps = ps.clone();
|
||||
let worker_factory = worker_factory.clone();
|
||||
let permissions = permissions.clone();
|
||||
let mut sender = sender.clone();
|
||||
let fail_fast_tracker = fail_fast_tracker.clone();
|
||||
|
@ -280,7 +286,7 @@ impl TestRun {
|
|||
};
|
||||
let token = self.token.clone();
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
spawn_blocking(move || {
|
||||
if fail_fast_tracker.should_stop() {
|
||||
return Ok(());
|
||||
}
|
||||
|
@ -288,13 +294,17 @@ impl TestRun {
|
|||
let file_result = if token.is_cancelled() {
|
||||
Ok(())
|
||||
} else {
|
||||
run_local(test::test_specifier(
|
||||
&ps,
|
||||
create_and_run_current_thread(test::test_specifier(
|
||||
worker_factory,
|
||||
permissions,
|
||||
specifier,
|
||||
sender.clone(),
|
||||
fail_fast_tracker,
|
||||
filter,
|
||||
test::TestSpecifierOptions {
|
||||
filter,
|
||||
shuffle: None,
|
||||
trace_ops: false,
|
||||
},
|
||||
))
|
||||
};
|
||||
if let Err(error) = file_result {
|
||||
|
@ -323,7 +333,7 @@ impl TestRun {
|
|||
));
|
||||
|
||||
let handler = {
|
||||
tokio::task::spawn(async move {
|
||||
spawn(async move {
|
||||
let earlier = Instant::now();
|
||||
let mut summary = test::TestSummary::new();
|
||||
let mut used_only = false;
|
||||
|
|
448
cli/lsp/tsc.rs
448
cli/lsp/tsc.rs
|
@ -1,5 +1,6 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use super::analysis::CodeActionData;
|
||||
use super::code_lens;
|
||||
use super::config;
|
||||
use super::documents::AssetOrDocument;
|
||||
|
@ -53,6 +54,7 @@ use serde_repr::Serialize_repr;
|
|||
use std::cmp;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::ops::Range;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use std::thread;
|
||||
|
@ -118,7 +120,403 @@ impl TsServer {
|
|||
Self(tx)
|
||||
}
|
||||
|
||||
pub async fn request<R>(
|
||||
pub async fn get_diagnostics(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifiers: Vec<ModuleSpecifier>,
|
||||
token: CancellationToken,
|
||||
) -> Result<HashMap<String, Vec<crate::tsc::Diagnostic>>, AnyError> {
|
||||
let req = RequestMethod::GetDiagnostics(specifiers);
|
||||
self.request_with_cancellation(snapshot, req, token).await
|
||||
}
|
||||
|
||||
pub async fn find_references(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
position: u32,
|
||||
) -> Result<Option<Vec<ReferencedSymbol>>, LspError> {
|
||||
let req = RequestMethod::FindReferences {
|
||||
specifier,
|
||||
position,
|
||||
};
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Unable to get references from TypeScript: {}", err);
|
||||
LspError::internal_error()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_navigation_tree(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
) -> Result<NavigationTree, AnyError> {
|
||||
self
|
||||
.request(snapshot, RequestMethod::GetNavigationTree(specifier))
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn configure(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
tsconfig: TsConfig,
|
||||
) -> Result<bool, AnyError> {
|
||||
self
|
||||
.request(snapshot, RequestMethod::Configure(tsconfig))
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_supported_code_fixes(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
) -> Result<Vec<String>, LspError> {
|
||||
self
|
||||
.request(snapshot, RequestMethod::GetSupportedCodeFixes)
|
||||
.await
|
||||
.map_err(|err| {
|
||||
log::error!("Unable to get fixable diagnostics: {}", err);
|
||||
LspError::internal_error()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_quick_info(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
position: u32,
|
||||
) -> Result<Option<QuickInfo>, LspError> {
|
||||
let req = RequestMethod::GetQuickInfo((specifier, position));
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Unable to get quick info: {}", err);
|
||||
LspError::internal_error()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_code_fixes(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
range: Range<u32>,
|
||||
codes: Vec<String>,
|
||||
) -> Vec<CodeFixAction> {
|
||||
let req =
|
||||
RequestMethod::GetCodeFixes((specifier, range.start, range.end, codes));
|
||||
match self.request(snapshot, req).await {
|
||||
Ok(items) => items,
|
||||
Err(err) => {
|
||||
// sometimes tsc reports errors when retrieving code actions
|
||||
// because they don't reflect the current state of the document
|
||||
// so we will log them to the output, but we won't send an error
|
||||
// message back to the client.
|
||||
log::error!("Error getting actions from TypeScript: {}", err);
|
||||
Vec::new()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_applicable_refactors(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
range: Range<u32>,
|
||||
only: String,
|
||||
) -> Result<Vec<ApplicableRefactorInfo>, LspError> {
|
||||
let req = RequestMethod::GetApplicableRefactors((
|
||||
specifier.clone(),
|
||||
TextSpan {
|
||||
start: range.start,
|
||||
length: range.end - range.start,
|
||||
},
|
||||
only,
|
||||
));
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Failed to request to tsserver {}", err);
|
||||
LspError::invalid_request()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_combined_code_fix(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
code_action_data: &CodeActionData,
|
||||
) -> Result<CombinedCodeActions, LspError> {
|
||||
let req = RequestMethod::GetCombinedCodeFix((
|
||||
code_action_data.specifier.clone(),
|
||||
json!(code_action_data.fix_id.clone()),
|
||||
));
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Unable to get combined fix from TypeScript: {}", err);
|
||||
LspError::internal_error()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_edits_for_refactor(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
range: Range<u32>,
|
||||
refactor_name: String,
|
||||
action_name: String,
|
||||
) -> Result<RefactorEditInfo, LspError> {
|
||||
let req = RequestMethod::GetEditsForRefactor((
|
||||
specifier,
|
||||
TextSpan {
|
||||
start: range.start,
|
||||
length: range.end - range.start,
|
||||
},
|
||||
refactor_name,
|
||||
action_name,
|
||||
));
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Failed to request to tsserver {}", err);
|
||||
LspError::invalid_request()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_document_highlights(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
position: u32,
|
||||
files_to_search: Vec<ModuleSpecifier>,
|
||||
) -> Result<Option<Vec<DocumentHighlights>>, LspError> {
|
||||
let req = RequestMethod::GetDocumentHighlights((
|
||||
specifier,
|
||||
position,
|
||||
files_to_search,
|
||||
));
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Unable to get document highlights from TypeScript: {}", err);
|
||||
LspError::internal_error()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_definition(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
position: u32,
|
||||
) -> Result<Option<DefinitionInfoAndBoundSpan>, LspError> {
|
||||
let req = RequestMethod::GetDefinition((specifier, position));
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Unable to get definition from TypeScript: {}", err);
|
||||
LspError::internal_error()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_type_definition(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
position: u32,
|
||||
) -> Result<Option<Vec<DefinitionInfo>>, LspError> {
|
||||
let req = RequestMethod::GetTypeDefinition {
|
||||
specifier,
|
||||
position,
|
||||
};
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Unable to get type definition from TypeScript: {}", err);
|
||||
LspError::internal_error()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_completions(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
position: u32,
|
||||
options: GetCompletionsAtPositionOptions,
|
||||
) -> Option<CompletionInfo> {
|
||||
let req = RequestMethod::GetCompletions((specifier, position, options));
|
||||
match self.request(snapshot, req).await {
|
||||
Ok(maybe_info) => maybe_info,
|
||||
Err(err) => {
|
||||
log::error!("Unable to get completion info from TypeScript: {:#}", err);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_completion_details(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
args: GetCompletionDetailsArgs,
|
||||
) -> Result<Option<CompletionEntryDetails>, AnyError> {
|
||||
let req = RequestMethod::GetCompletionDetails(args);
|
||||
self.request(snapshot, req).await
|
||||
}
|
||||
|
||||
pub async fn get_implementations(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
position: u32,
|
||||
) -> Result<Option<Vec<ImplementationLocation>>, LspError> {
|
||||
let req = RequestMethod::GetImplementation((specifier, position));
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Failed to request to tsserver {}", err);
|
||||
LspError::invalid_request()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_outlining_spans(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
) -> Result<Vec<OutliningSpan>, LspError> {
|
||||
let req = RequestMethod::GetOutliningSpans(specifier);
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Failed to request to tsserver {}", err);
|
||||
LspError::invalid_request()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn provide_call_hierarchy_incoming_calls(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
position: u32,
|
||||
) -> Result<Vec<CallHierarchyIncomingCall>, LspError> {
|
||||
let req =
|
||||
RequestMethod::ProvideCallHierarchyIncomingCalls((specifier, position));
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Failed to request to tsserver {}", err);
|
||||
LspError::invalid_request()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn provide_call_hierarchy_outgoing_calls(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
position: u32,
|
||||
) -> Result<Vec<CallHierarchyOutgoingCall>, LspError> {
|
||||
let req =
|
||||
RequestMethod::ProvideCallHierarchyOutgoingCalls((specifier, position));
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Failed to request to tsserver {}", err);
|
||||
LspError::invalid_request()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn prepare_call_hierarchy(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
position: u32,
|
||||
) -> Result<Option<OneOrMany<CallHierarchyItem>>, LspError> {
|
||||
let req = RequestMethod::PrepareCallHierarchy((specifier, position));
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Failed to request to tsserver {}", err);
|
||||
LspError::invalid_request()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn find_rename_locations(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
position: u32,
|
||||
) -> Result<Option<Vec<RenameLocation>>, LspError> {
|
||||
let req = RequestMethod::FindRenameLocations {
|
||||
specifier,
|
||||
position,
|
||||
find_in_strings: false,
|
||||
find_in_comments: false,
|
||||
provide_prefix_and_suffix_text_for_rename: false,
|
||||
};
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Failed to request to tsserver {}", err);
|
||||
LspError::invalid_request()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_smart_selection_range(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
position: u32,
|
||||
) -> Result<SelectionRange, LspError> {
|
||||
let req = RequestMethod::GetSmartSelectionRange((specifier, position));
|
||||
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Failed to request to tsserver {}", err);
|
||||
LspError::invalid_request()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_encoded_semantic_classifications(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
range: Range<u32>,
|
||||
) -> Result<Classifications, LspError> {
|
||||
let req = RequestMethod::GetEncodedSemanticClassifications((
|
||||
specifier,
|
||||
TextSpan {
|
||||
start: range.start,
|
||||
length: range.end - range.start,
|
||||
},
|
||||
));
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Failed to request to tsserver {}", err);
|
||||
LspError::invalid_request()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_signature_help_items(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
position: u32,
|
||||
options: SignatureHelpItemsOptions,
|
||||
) -> Result<Option<SignatureHelpItems>, LspError> {
|
||||
let req =
|
||||
RequestMethod::GetSignatureHelpItems((specifier, position, options));
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Failed to request to tsserver: {}", err);
|
||||
LspError::invalid_request()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_navigate_to_items(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
args: GetNavigateToItemsArgs,
|
||||
) -> Result<Vec<NavigateToItem>, LspError> {
|
||||
let req = RequestMethod::GetNavigateToItems(args);
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Failed request to tsserver: {}", err);
|
||||
LspError::invalid_request()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn provide_inlay_hints(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: ModuleSpecifier,
|
||||
text_span: TextSpan,
|
||||
user_preferences: UserPreferences,
|
||||
) -> Result<Option<Vec<InlayHint>>, LspError> {
|
||||
let req = RequestMethod::ProvideInlayHints((
|
||||
specifier,
|
||||
text_span,
|
||||
user_preferences,
|
||||
));
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Unable to get inlay hints: {}", err);
|
||||
LspError::internal_error()
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn restart(&self, snapshot: Arc<StateSnapshot>) {
|
||||
let _: bool = self
|
||||
.request(snapshot, RequestMethod::Restart)
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
async fn request<R>(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
req: RequestMethod,
|
||||
|
@ -131,7 +529,7 @@ impl TsServer {
|
|||
.await
|
||||
}
|
||||
|
||||
pub async fn request_with_cancellation<R>(
|
||||
async fn request_with_cancellation<R>(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
req: RequestMethod,
|
||||
|
@ -147,26 +545,6 @@ impl TsServer {
|
|||
let value = rx.await??;
|
||||
Ok(serde_json::from_value::<R>(value)?)
|
||||
}
|
||||
|
||||
// todo(dsherret): refactor the rest of the request methods to have
|
||||
// methods to call on this struct, then make `RequestMethod` and
|
||||
// friends internal
|
||||
|
||||
pub async fn find_references(
|
||||
&self,
|
||||
snapshot: Arc<StateSnapshot>,
|
||||
specifier: &ModuleSpecifier,
|
||||
position: u32,
|
||||
) -> Result<Option<Vec<ReferencedSymbol>>, LspError> {
|
||||
let req = RequestMethod::FindReferences {
|
||||
specifier: specifier.clone(),
|
||||
position,
|
||||
};
|
||||
self.request(snapshot, req).await.map_err(|err| {
|
||||
log::error!("Unable to get references from TypeScript: {}", err);
|
||||
LspError::internal_error()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
@ -2883,9 +3261,6 @@ deno_core::extension!(deno_tsc,
|
|||
options.performance,
|
||||
));
|
||||
},
|
||||
customizer = |ext: &mut deno_core::ExtensionBuilder| {
|
||||
ext.force_op_registration();
|
||||
},
|
||||
);
|
||||
|
||||
/// Instruct a language server runtime to start the language server and provide
|
||||
|
@ -3161,9 +3536,16 @@ impl From<&CompletionItemData> for GetCompletionDetailsArgs {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct GetNavigateToItemsArgs {
|
||||
pub search: String,
|
||||
pub max_result_count: Option<u32>,
|
||||
pub file: Option<String>,
|
||||
}
|
||||
|
||||
/// Methods that are supported by the Language Service in the compiler isolate.
|
||||
#[derive(Debug)]
|
||||
pub enum RequestMethod {
|
||||
enum RequestMethod {
|
||||
/// Configure the compilation settings for the server.
|
||||
Configure(TsConfig),
|
||||
/// Get rename locations at a given position.
|
||||
|
@ -3198,11 +3580,7 @@ pub enum RequestMethod {
|
|||
/// Get implementation information for a specific position.
|
||||
GetImplementation((ModuleSpecifier, u32)),
|
||||
/// Get "navigate to" items, which are converted to workspace symbols
|
||||
GetNavigateToItems {
|
||||
search: String,
|
||||
max_result_count: Option<u32>,
|
||||
file: Option<String>,
|
||||
},
|
||||
GetNavigateToItems(GetNavigateToItemsArgs),
|
||||
/// Get a "navigation tree" for a specifier.
|
||||
GetNavigationTree(ModuleSpecifier),
|
||||
/// Get outlining spans for a specifier.
|
||||
|
@ -3356,11 +3734,11 @@ impl RequestMethod {
|
|||
"specifier": state.denormalize_specifier(specifier),
|
||||
"position": position,
|
||||
}),
|
||||
RequestMethod::GetNavigateToItems {
|
||||
RequestMethod::GetNavigateToItems(GetNavigateToItemsArgs {
|
||||
search,
|
||||
max_result_count,
|
||||
file,
|
||||
} => json!({
|
||||
}) => json!({
|
||||
"id": id,
|
||||
"method": "getNavigateToItems",
|
||||
"search": search,
|
||||
|
@ -3470,7 +3848,7 @@ impl RequestMethod {
|
|||
}
|
||||
|
||||
/// Send a request into a runtime and return the JSON value of the response.
|
||||
pub fn request(
|
||||
fn request(
|
||||
runtime: &mut JsRuntime,
|
||||
state_snapshot: Arc<StateSnapshot>,
|
||||
method: RequestMethod,
|
||||
|
@ -3525,7 +3903,7 @@ mod tests {
|
|||
fixtures: &[(&str, &str, i32, LanguageId)],
|
||||
location: &Path,
|
||||
) -> StateSnapshot {
|
||||
let mut documents = Documents::new(location, Default::default());
|
||||
let mut documents = Documents::new(location);
|
||||
for (specifier, source, version, language_id) in fixtures {
|
||||
let specifier =
|
||||
resolve_url(specifier).expect("failed to create specifier");
|
||||
|
|
243
cli/main.rs
243
cli/main.rs
|
@ -6,6 +6,7 @@ mod cache;
|
|||
mod deno_std;
|
||||
mod emit;
|
||||
mod errors;
|
||||
mod factory;
|
||||
mod file_fetcher;
|
||||
mod graph_util;
|
||||
mod http_util;
|
||||
|
@ -16,20 +17,18 @@ mod napi;
|
|||
mod node;
|
||||
mod npm;
|
||||
mod ops;
|
||||
mod proc_state;
|
||||
mod resolver;
|
||||
mod standalone;
|
||||
mod tools;
|
||||
mod tsc;
|
||||
mod util;
|
||||
mod version;
|
||||
mod watcher;
|
||||
mod worker;
|
||||
|
||||
use crate::args::flags_from_vec;
|
||||
use crate::args::DenoSubcommand;
|
||||
use crate::args::Flags;
|
||||
use crate::proc_state::ProcState;
|
||||
use crate::resolver::CliGraphResolver;
|
||||
use crate::util::display;
|
||||
use crate::util::v8::get_v8_flags_from_env;
|
||||
use crate::util::v8::init_v8_flags;
|
||||
|
@ -38,148 +37,175 @@ use args::CliOptions;
|
|||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::error::JsError;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_core::task::JoinHandle;
|
||||
use deno_runtime::colors;
|
||||
use deno_runtime::fmt_errors::format_js_error;
|
||||
use deno_runtime::tokio_util::run_local;
|
||||
use deno_runtime::tokio_util::create_and_run_current_thread;
|
||||
use factory::CliFactory;
|
||||
use std::env;
|
||||
use std::env::current_exe;
|
||||
use std::future::Future;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Ensures that all subcommands return an i32 exit code and an [`AnyError`] error type.
|
||||
trait SubcommandOutput {
|
||||
fn output(self) -> Result<i32, AnyError>;
|
||||
}
|
||||
|
||||
impl SubcommandOutput for Result<i32, AnyError> {
|
||||
fn output(self) -> Result<i32, AnyError> {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl SubcommandOutput for Result<(), AnyError> {
|
||||
fn output(self) -> Result<i32, AnyError> {
|
||||
self.map(|_| 0)
|
||||
}
|
||||
}
|
||||
|
||||
impl SubcommandOutput for Result<(), std::io::Error> {
|
||||
fn output(self) -> Result<i32, AnyError> {
|
||||
self.map(|_| 0).map_err(|e| e.into())
|
||||
}
|
||||
}
|
||||
|
||||
/// Ensure that the subcommand runs in a task, rather than being directly executed. Since some of these
|
||||
/// futures are very large, this prevents the stack from getting blown out from passing them by value up
|
||||
/// the callchain (especially in debug mode when Rust doesn't have a chance to elide copies!).
|
||||
#[inline(always)]
|
||||
fn spawn_subcommand<F: Future<Output = T> + 'static, T: SubcommandOutput>(
|
||||
f: F,
|
||||
) -> JoinHandle<Result<i32, AnyError>> {
|
||||
deno_core::task::spawn(f.map(|r| r.output()))
|
||||
}
|
||||
|
||||
async fn run_subcommand(flags: Flags) -> Result<i32, AnyError> {
|
||||
match flags.subcommand.clone() {
|
||||
DenoSubcommand::Bench(bench_flags) => {
|
||||
let handle = match flags.subcommand.clone() {
|
||||
DenoSubcommand::Bench(bench_flags) => spawn_subcommand(async {
|
||||
let cli_options = CliOptions::from_flags(flags)?;
|
||||
let bench_options = cli_options.resolve_bench_options(bench_flags)?;
|
||||
if cli_options.watch_paths().is_some() {
|
||||
tools::bench::run_benchmarks_with_watch(cli_options, bench_options)
|
||||
.await?;
|
||||
.await
|
||||
} else {
|
||||
tools::bench::run_benchmarks(cli_options, bench_options).await?;
|
||||
tools::bench::run_benchmarks(cli_options, bench_options).await
|
||||
}
|
||||
Ok(0)
|
||||
}
|
||||
DenoSubcommand::Bundle(bundle_flags) => {
|
||||
tools::bundle::bundle(flags, bundle_flags).await?;
|
||||
Ok(0)
|
||||
}
|
||||
}),
|
||||
DenoSubcommand::Bundle(bundle_flags) => spawn_subcommand(async {
|
||||
tools::bundle::bundle(flags, bundle_flags).await
|
||||
}),
|
||||
DenoSubcommand::Doc(doc_flags) => {
|
||||
tools::doc::print_docs(flags, doc_flags).await?;
|
||||
Ok(0)
|
||||
spawn_subcommand(async { tools::doc::print_docs(flags, doc_flags).await })
|
||||
}
|
||||
DenoSubcommand::Eval(eval_flags) => {
|
||||
DenoSubcommand::Eval(eval_flags) => spawn_subcommand(async {
|
||||
tools::run::eval_command(flags, eval_flags).await
|
||||
}
|
||||
DenoSubcommand::Cache(cache_flags) => {
|
||||
let ps = ProcState::from_flags(flags).await?;
|
||||
ps.module_load_preparer
|
||||
}),
|
||||
DenoSubcommand::Cache(cache_flags) => spawn_subcommand(async move {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let module_load_preparer = factory.module_load_preparer().await?;
|
||||
let emitter = factory.emitter()?;
|
||||
let graph_container = factory.graph_container();
|
||||
module_load_preparer
|
||||
.load_and_type_check_files(&cache_flags.files)
|
||||
.await?;
|
||||
ps.emitter.cache_module_emits(&ps.graph_container.graph())?;
|
||||
Ok(0)
|
||||
}
|
||||
DenoSubcommand::Check(check_flags) => {
|
||||
let ps = ProcState::from_flags(flags).await?;
|
||||
ps.module_load_preparer
|
||||
emitter.cache_module_emits(&graph_container.graph())
|
||||
}),
|
||||
DenoSubcommand::Check(check_flags) => spawn_subcommand(async move {
|
||||
let factory = CliFactory::from_flags(flags).await?;
|
||||
let module_load_preparer = factory.module_load_preparer().await?;
|
||||
module_load_preparer
|
||||
.load_and_type_check_files(&check_flags.files)
|
||||
.await?;
|
||||
Ok(0)
|
||||
}
|
||||
DenoSubcommand::Compile(compile_flags) => {
|
||||
tools::standalone::compile(flags, compile_flags).await?;
|
||||
Ok(0)
|
||||
}
|
||||
DenoSubcommand::Coverage(coverage_flags) => {
|
||||
tools::coverage::cover_files(flags, coverage_flags).await?;
|
||||
Ok(0)
|
||||
}
|
||||
DenoSubcommand::Fmt(fmt_flags) => {
|
||||
.await
|
||||
}),
|
||||
DenoSubcommand::Compile(compile_flags) => spawn_subcommand(async {
|
||||
tools::compile::compile(flags, compile_flags).await
|
||||
}),
|
||||
DenoSubcommand::Coverage(coverage_flags) => spawn_subcommand(async {
|
||||
tools::coverage::cover_files(flags, coverage_flags).await
|
||||
}),
|
||||
DenoSubcommand::Fmt(fmt_flags) => spawn_subcommand(async move {
|
||||
let cli_options = CliOptions::from_flags(flags.clone())?;
|
||||
let fmt_options = cli_options.resolve_fmt_options(fmt_flags)?;
|
||||
tools::fmt::format(cli_options, fmt_options).await?;
|
||||
Ok(0)
|
||||
}
|
||||
tools::fmt::format(cli_options, fmt_options).await
|
||||
}),
|
||||
DenoSubcommand::Init(init_flags) => {
|
||||
tools::init::init_project(init_flags).await?;
|
||||
Ok(0)
|
||||
spawn_subcommand(async { tools::init::init_project(init_flags).await })
|
||||
}
|
||||
DenoSubcommand::Info(info_flags) => {
|
||||
tools::info::info(flags, info_flags).await?;
|
||||
Ok(0)
|
||||
spawn_subcommand(async { tools::info::info(flags, info_flags).await })
|
||||
}
|
||||
DenoSubcommand::Install(install_flags) => {
|
||||
tools::installer::install_command(flags, install_flags).await?;
|
||||
Ok(0)
|
||||
}
|
||||
DenoSubcommand::Uninstall(uninstall_flags) => {
|
||||
tools::installer::uninstall(uninstall_flags.name, uninstall_flags.root)?;
|
||||
Ok(0)
|
||||
}
|
||||
DenoSubcommand::Lsp => {
|
||||
lsp::start().await?;
|
||||
Ok(0)
|
||||
}
|
||||
DenoSubcommand::Lint(lint_flags) => {
|
||||
DenoSubcommand::Install(install_flags) => spawn_subcommand(async {
|
||||
tools::installer::install_command(flags, install_flags).await
|
||||
}),
|
||||
DenoSubcommand::Uninstall(uninstall_flags) => spawn_subcommand(async {
|
||||
tools::installer::uninstall(uninstall_flags.name, uninstall_flags.root)
|
||||
}),
|
||||
DenoSubcommand::Lsp => spawn_subcommand(async { lsp::start().await }),
|
||||
DenoSubcommand::Lint(lint_flags) => spawn_subcommand(async {
|
||||
if lint_flags.rules {
|
||||
tools::lint::print_rules_list(lint_flags.json);
|
||||
Ok(())
|
||||
} else {
|
||||
let cli_options = CliOptions::from_flags(flags)?;
|
||||
let lint_options = cli_options.resolve_lint_options(lint_flags)?;
|
||||
tools::lint::lint(cli_options, lint_options).await?;
|
||||
tools::lint::lint(cli_options, lint_options).await
|
||||
}
|
||||
Ok(0)
|
||||
}
|
||||
}),
|
||||
DenoSubcommand::Repl(repl_flags) => {
|
||||
tools::repl::run(flags, repl_flags).await
|
||||
spawn_subcommand(async move { tools::repl::run(flags, repl_flags).await })
|
||||
}
|
||||
DenoSubcommand::Run(run_flags) => {
|
||||
DenoSubcommand::Run(run_flags) => spawn_subcommand(async move {
|
||||
if run_flags.is_stdin() {
|
||||
tools::run::run_from_stdin(flags).await
|
||||
} else {
|
||||
tools::run::run_script(flags).await
|
||||
}
|
||||
}
|
||||
DenoSubcommand::Task(task_flags) => {
|
||||
}),
|
||||
DenoSubcommand::Task(task_flags) => spawn_subcommand(async {
|
||||
tools::task::execute_script(flags, task_flags).await
|
||||
}
|
||||
}),
|
||||
DenoSubcommand::Test(test_flags) => {
|
||||
if let Some(ref coverage_dir) = flags.coverage_dir {
|
||||
std::fs::create_dir_all(coverage_dir)
|
||||
.with_context(|| format!("Failed creating: {coverage_dir}"))?;
|
||||
// this is set in order to ensure spawned processes use the same
|
||||
// coverage directory
|
||||
env::set_var(
|
||||
"DENO_UNSTABLE_COVERAGE_DIR",
|
||||
PathBuf::from(coverage_dir).canonicalize()?,
|
||||
);
|
||||
}
|
||||
let cli_options = CliOptions::from_flags(flags)?;
|
||||
let test_options = cli_options.resolve_test_options(test_flags)?;
|
||||
spawn_subcommand(async {
|
||||
if let Some(ref coverage_dir) = flags.coverage_dir {
|
||||
std::fs::create_dir_all(coverage_dir)
|
||||
.with_context(|| format!("Failed creating: {coverage_dir}"))?;
|
||||
// this is set in order to ensure spawned processes use the same
|
||||
// coverage directory
|
||||
env::set_var(
|
||||
"DENO_UNSTABLE_COVERAGE_DIR",
|
||||
PathBuf::from(coverage_dir).canonicalize()?,
|
||||
);
|
||||
}
|
||||
let cli_options = CliOptions::from_flags(flags)?;
|
||||
let test_options = cli_options.resolve_test_options(test_flags)?;
|
||||
|
||||
if cli_options.watch_paths().is_some() {
|
||||
tools::test::run_tests_with_watch(cli_options, test_options).await?;
|
||||
} else {
|
||||
tools::test::run_tests(cli_options, test_options).await?;
|
||||
}
|
||||
|
||||
Ok(0)
|
||||
if cli_options.watch_paths().is_some() {
|
||||
tools::test::run_tests_with_watch(cli_options, test_options).await
|
||||
} else {
|
||||
tools::test::run_tests(cli_options, test_options).await
|
||||
}
|
||||
})
|
||||
}
|
||||
DenoSubcommand::Completions(completions_flags) => {
|
||||
display::write_to_stdout_ignore_sigpipe(&completions_flags.buf)?;
|
||||
Ok(0)
|
||||
spawn_subcommand(async move {
|
||||
display::write_to_stdout_ignore_sigpipe(&completions_flags.buf)
|
||||
})
|
||||
}
|
||||
DenoSubcommand::Types => {
|
||||
DenoSubcommand::Types => spawn_subcommand(async move {
|
||||
let types = tsc::get_types_declaration_file_text(flags.unstable);
|
||||
display::write_to_stdout_ignore_sigpipe(types.as_bytes())?;
|
||||
Ok(0)
|
||||
}
|
||||
DenoSubcommand::Upgrade(upgrade_flags) => {
|
||||
tools::upgrade::upgrade(flags, upgrade_flags).await?;
|
||||
Ok(0)
|
||||
}
|
||||
DenoSubcommand::Vendor(vendor_flags) => {
|
||||
tools::vendor::vendor(flags, vendor_flags).await?;
|
||||
Ok(0)
|
||||
}
|
||||
}
|
||||
display::write_to_stdout_ignore_sigpipe(types.as_bytes())
|
||||
}),
|
||||
DenoSubcommand::Upgrade(upgrade_flags) => spawn_subcommand(async {
|
||||
tools::upgrade::upgrade(flags, upgrade_flags).await
|
||||
}),
|
||||
DenoSubcommand::Vendor(vendor_flags) => spawn_subcommand(async {
|
||||
tools::vendor::vendor(flags, vendor_flags).await
|
||||
}),
|
||||
};
|
||||
|
||||
handle.await?
|
||||
}
|
||||
|
||||
fn setup_panic_hook() {
|
||||
|
@ -245,8 +271,11 @@ pub fn main() {
|
|||
let args: Vec<String> = env::args().collect();
|
||||
|
||||
let future = async move {
|
||||
let current_exe_path = current_exe()?;
|
||||
let standalone_res =
|
||||
match standalone::extract_standalone(args.clone()).await {
|
||||
match standalone::extract_standalone(¤t_exe_path, args.clone())
|
||||
.await
|
||||
{
|
||||
Ok(Some((metadata, eszip))) => standalone::run(eszip, metadata).await,
|
||||
Ok(None) => Ok(()),
|
||||
Err(err) => Err(err),
|
||||
|
@ -266,14 +295,20 @@ pub fn main() {
|
|||
Err(err) => unwrap_or_exit(Err(AnyError::from(err))),
|
||||
};
|
||||
|
||||
init_v8_flags(&flags.v8_flags, get_v8_flags_from_env());
|
||||
let default_v8_flags = match flags.subcommand {
|
||||
// Using same default as VSCode:
|
||||
// https://github.com/microsoft/vscode/blob/48d4ba271686e8072fc6674137415bc80d936bc7/extensions/typescript-language-features/src/configuration/configuration.ts#L213-L214
|
||||
DenoSubcommand::Lsp => vec!["--max-old-space-size=3072".to_string()],
|
||||
_ => vec![],
|
||||
};
|
||||
init_v8_flags(&default_v8_flags, &flags.v8_flags, get_v8_flags_from_env());
|
||||
|
||||
util::logger::init(flags.log_level);
|
||||
|
||||
run_subcommand(flags).await
|
||||
};
|
||||
|
||||
let exit_code = unwrap_or_exit(run_local(future));
|
||||
let exit_code = unwrap_or_exit(create_and_run_current_thread(future));
|
||||
|
||||
std::process::exit(exit_code);
|
||||
}
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
use crate::args::CliOptions;
|
||||
use crate::args::DenoSubcommand;
|
||||
use crate::args::TsTypeLib;
|
||||
use crate::args::TypeCheckMode;
|
||||
use crate::cache::ParsedSourceCache;
|
||||
use crate::emit::Emitter;
|
||||
use crate::graph_util::graph_lock_or_exit;
|
||||
|
@ -11,18 +10,15 @@ use crate::graph_util::graph_valid_with_cli_options;
|
|||
use crate::graph_util::ModuleGraphBuilder;
|
||||
use crate::graph_util::ModuleGraphContainer;
|
||||
use crate::node;
|
||||
use crate::node::CliNodeResolver;
|
||||
use crate::node::NodeCodeTranslator;
|
||||
use crate::node::NodeResolution;
|
||||
use crate::proc_state::CjsResolutionStore;
|
||||
use crate::proc_state::FileWatcherReporter;
|
||||
use crate::proc_state::ProcState;
|
||||
use crate::node::CliNodeCodeTranslator;
|
||||
use crate::resolver::CliGraphResolver;
|
||||
use crate::tools::check;
|
||||
use crate::tools::check::TypeChecker;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
use crate::util::text_encoding::code_without_source_map;
|
||||
use crate::util::text_encoding::source_map_from_code;
|
||||
use crate::watcher::FileWatcherReporter;
|
||||
use crate::worker::ModuleLoaderFactory;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::anyhow::anyhow;
|
||||
|
@ -40,7 +36,6 @@ use deno_core::ModuleLoader;
|
|||
use deno_core::ModuleSource;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_core::ModuleType;
|
||||
use deno_core::OpState;
|
||||
use deno_core::ResolutionKind;
|
||||
use deno_core::SourceMapGetter;
|
||||
use deno_graph::source::Resolver;
|
||||
|
@ -49,11 +44,14 @@ use deno_graph::JsonModule;
|
|||
use deno_graph::Module;
|
||||
use deno_graph::Resolution;
|
||||
use deno_lockfile::Lockfile;
|
||||
use deno_runtime::deno_fs;
|
||||
use deno_runtime::deno_node::NodeResolution;
|
||||
use deno_runtime::deno_node::NodeResolutionMode;
|
||||
use deno_runtime::deno_node::NodeResolver;
|
||||
use deno_runtime::permissions::PermissionsContainer;
|
||||
use deno_semver::npm::NpmPackageNvReference;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use std::borrow::Cow;
|
||||
use std::cell::RefCell;
|
||||
use std::collections::HashSet;
|
||||
use std::pin::Pin;
|
||||
use std::rc::Rc;
|
||||
|
@ -108,15 +106,12 @@ impl ModuleLoadPreparer {
|
|||
roots: Vec<ModuleSpecifier>,
|
||||
is_dynamic: bool,
|
||||
lib: TsTypeLib,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
permissions: PermissionsContainer,
|
||||
) -> Result<(), AnyError> {
|
||||
log::debug!("Preparing module load.");
|
||||
let _pb_clear_guard = self.progress_bar.clear_guard();
|
||||
|
||||
let mut cache = self
|
||||
.module_graph_builder
|
||||
.create_fetch_cacher(root_permissions, dynamic_permissions);
|
||||
let mut cache = self.module_graph_builder.create_fetch_cacher(permissions);
|
||||
let maybe_imports = self.options.to_maybe_imports()?;
|
||||
let graph_resolver = self.resolver.as_graph_resolver();
|
||||
let graph_npm_resolver = self.resolver.as_graph_npm_resolver();
|
||||
|
@ -164,7 +159,7 @@ impl ModuleLoadPreparer {
|
|||
// validate the integrity of all the modules
|
||||
graph_lock_or_exit(graph, &mut lockfile);
|
||||
// update it with anything new
|
||||
lockfile.write()?;
|
||||
lockfile.write().context("Failed writing lockfile.")?;
|
||||
}
|
||||
|
||||
// save the graph and get a reference to the new graph
|
||||
|
@ -173,7 +168,7 @@ impl ModuleLoadPreparer {
|
|||
drop(_pb_clear_guard);
|
||||
|
||||
// type check if necessary
|
||||
if self.options.type_check_mode() != TypeCheckMode::None
|
||||
if self.options.type_check_mode().is_true()
|
||||
&& !self.graph_container.is_type_checked(&roots, lib)
|
||||
{
|
||||
let graph = Arc::new(graph.segment(&roots));
|
||||
|
@ -215,82 +210,25 @@ impl ModuleLoadPreparer {
|
|||
false,
|
||||
lib,
|
||||
PermissionsContainer::allow_all(),
|
||||
PermissionsContainer::allow_all(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
struct ModuleCodeSource {
|
||||
pub struct ModuleCodeSource {
|
||||
pub code: ModuleCode,
|
||||
pub found_url: ModuleSpecifier,
|
||||
pub media_type: MediaType,
|
||||
}
|
||||
|
||||
pub struct CliModuleLoader {
|
||||
lib: TsTypeLib,
|
||||
/// The initial set of permissions used to resolve the static imports in the
|
||||
/// worker. These are "allow all" for main worker, and parent thread
|
||||
/// permissions for Web Worker.
|
||||
root_permissions: PermissionsContainer,
|
||||
/// Permissions used to resolve dynamic imports, these get passed as
|
||||
/// "root permissions" for Web Worker.
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
cli_options: Arc<CliOptions>,
|
||||
cjs_resolutions: Arc<CjsResolutionStore>,
|
||||
struct PreparedModuleLoader {
|
||||
emitter: Arc<Emitter>,
|
||||
graph_container: Arc<ModuleGraphContainer>,
|
||||
module_load_preparer: Arc<ModuleLoadPreparer>,
|
||||
node_code_translator: Arc<NodeCodeTranslator>,
|
||||
node_resolver: Arc<CliNodeResolver>,
|
||||
parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
resolver: Arc<CliGraphResolver>,
|
||||
}
|
||||
|
||||
impl CliModuleLoader {
|
||||
pub fn new(
|
||||
ps: ProcState,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
) -> Rc<Self> {
|
||||
Rc::new(CliModuleLoader {
|
||||
lib: ps.options.ts_type_lib_window(),
|
||||
root_permissions,
|
||||
dynamic_permissions,
|
||||
cli_options: ps.options.clone(),
|
||||
cjs_resolutions: ps.cjs_resolutions.clone(),
|
||||
emitter: ps.emitter.clone(),
|
||||
graph_container: ps.graph_container.clone(),
|
||||
module_load_preparer: ps.module_load_preparer.clone(),
|
||||
node_code_translator: ps.node_code_translator.clone(),
|
||||
node_resolver: ps.node_resolver.clone(),
|
||||
parsed_source_cache: ps.parsed_source_cache.clone(),
|
||||
resolver: ps.resolver.clone(),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn new_for_worker(
|
||||
ps: ProcState,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
) -> Rc<Self> {
|
||||
Rc::new(CliModuleLoader {
|
||||
lib: ps.options.ts_type_lib_worker(),
|
||||
root_permissions,
|
||||
dynamic_permissions,
|
||||
cli_options: ps.options.clone(),
|
||||
cjs_resolutions: ps.cjs_resolutions.clone(),
|
||||
emitter: ps.emitter.clone(),
|
||||
graph_container: ps.graph_container.clone(),
|
||||
module_load_preparer: ps.module_load_preparer.clone(),
|
||||
node_code_translator: ps.node_code_translator.clone(),
|
||||
node_resolver: ps.node_resolver.clone(),
|
||||
parsed_source_cache: ps.parsed_source_cache.clone(),
|
||||
resolver: ps.resolver.clone(),
|
||||
})
|
||||
}
|
||||
|
||||
fn load_prepared_module(
|
||||
impl PreparedModuleLoader {
|
||||
pub fn load_prepared_module(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
maybe_referrer: Option<&ModuleSpecifier>,
|
||||
|
@ -359,53 +297,137 @@ impl CliModuleLoader {
|
|||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct SharedCliModuleLoaderState {
|
||||
lib_window: TsTypeLib,
|
||||
lib_worker: TsTypeLib,
|
||||
is_inspecting: bool,
|
||||
is_repl: bool,
|
||||
graph_container: Arc<ModuleGraphContainer>,
|
||||
module_load_preparer: Arc<ModuleLoadPreparer>,
|
||||
prepared_module_loader: PreparedModuleLoader,
|
||||
resolver: Arc<CliGraphResolver>,
|
||||
npm_module_loader: NpmModuleLoader,
|
||||
}
|
||||
|
||||
pub struct CliModuleLoaderFactory {
|
||||
shared: Arc<SharedCliModuleLoaderState>,
|
||||
}
|
||||
|
||||
impl CliModuleLoaderFactory {
|
||||
pub fn new(
|
||||
options: &CliOptions,
|
||||
emitter: Arc<Emitter>,
|
||||
graph_container: Arc<ModuleGraphContainer>,
|
||||
module_load_preparer: Arc<ModuleLoadPreparer>,
|
||||
parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
resolver: Arc<CliGraphResolver>,
|
||||
npm_module_loader: NpmModuleLoader,
|
||||
) -> Self {
|
||||
Self {
|
||||
shared: Arc::new(SharedCliModuleLoaderState {
|
||||
lib_window: options.ts_type_lib_window(),
|
||||
lib_worker: options.ts_type_lib_worker(),
|
||||
is_inspecting: options.is_inspecting(),
|
||||
is_repl: matches!(options.sub_command(), DenoSubcommand::Repl(_)),
|
||||
prepared_module_loader: PreparedModuleLoader {
|
||||
emitter,
|
||||
graph_container: graph_container.clone(),
|
||||
parsed_source_cache,
|
||||
},
|
||||
graph_container,
|
||||
module_load_preparer,
|
||||
resolver,
|
||||
npm_module_loader,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn create_with_lib(
|
||||
&self,
|
||||
lib: TsTypeLib,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
) -> Rc<dyn ModuleLoader> {
|
||||
Rc::new(CliModuleLoader {
|
||||
lib,
|
||||
root_permissions,
|
||||
dynamic_permissions,
|
||||
shared: self.shared.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ModuleLoaderFactory for CliModuleLoaderFactory {
|
||||
fn create_for_main(
|
||||
&self,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
) -> Rc<dyn ModuleLoader> {
|
||||
self.create_with_lib(
|
||||
self.shared.lib_window,
|
||||
root_permissions,
|
||||
dynamic_permissions,
|
||||
)
|
||||
}
|
||||
|
||||
fn create_for_worker(
|
||||
&self,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
) -> Rc<dyn ModuleLoader> {
|
||||
self.create_with_lib(
|
||||
self.shared.lib_worker,
|
||||
root_permissions,
|
||||
dynamic_permissions,
|
||||
)
|
||||
}
|
||||
|
||||
fn create_source_map_getter(&self) -> Option<Box<dyn SourceMapGetter>> {
|
||||
Some(Box::new(CliSourceMapGetter {
|
||||
shared: self.shared.clone(),
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
struct CliModuleLoader {
|
||||
lib: TsTypeLib,
|
||||
/// The initial set of permissions used to resolve the static imports in the
|
||||
/// worker. These are "allow all" for main worker, and parent thread
|
||||
/// permissions for Web Worker.
|
||||
root_permissions: PermissionsContainer,
|
||||
/// Permissions used to resolve dynamic imports, these get passed as
|
||||
/// "root permissions" for Web Worker.
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
shared: Arc<SharedCliModuleLoaderState>,
|
||||
}
|
||||
|
||||
impl CliModuleLoader {
|
||||
fn load_sync(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
maybe_referrer: Option<&ModuleSpecifier>,
|
||||
is_dynamic: bool,
|
||||
) -> Result<ModuleSource, AnyError> {
|
||||
let code_source = if self.node_resolver.in_npm_package(specifier) {
|
||||
let file_path = specifier.to_file_path().unwrap();
|
||||
let code = std::fs::read_to_string(&file_path).with_context(|| {
|
||||
let mut msg = "Unable to load ".to_string();
|
||||
msg.push_str(&file_path.to_string_lossy());
|
||||
if let Some(referrer) = &maybe_referrer {
|
||||
msg.push_str(" imported from ");
|
||||
msg.push_str(referrer.as_str());
|
||||
}
|
||||
msg
|
||||
})?;
|
||||
|
||||
let code = if self.cjs_resolutions.contains(specifier) {
|
||||
let mut permissions = if is_dynamic {
|
||||
self.dynamic_permissions.clone()
|
||||
} else {
|
||||
self.root_permissions.clone()
|
||||
};
|
||||
// translate cjs to esm if it's cjs and inject node globals
|
||||
self.node_code_translator.translate_cjs_to_esm(
|
||||
specifier,
|
||||
code,
|
||||
MediaType::Cjs,
|
||||
&mut permissions,
|
||||
)?
|
||||
} else {
|
||||
// only inject node globals for esm
|
||||
self
|
||||
.node_code_translator
|
||||
.esm_code_with_node_globals(specifier, code)?
|
||||
};
|
||||
ModuleCodeSource {
|
||||
code: code.into(),
|
||||
found_url: specifier.clone(),
|
||||
media_type: MediaType::from_specifier(specifier),
|
||||
}
|
||||
let permissions = if is_dynamic {
|
||||
&self.dynamic_permissions
|
||||
} else {
|
||||
self.load_prepared_module(specifier, maybe_referrer)?
|
||||
&self.root_permissions
|
||||
};
|
||||
let code = if self.cli_options.is_inspecting() {
|
||||
let code_source = if let Some(result) = self
|
||||
.shared
|
||||
.npm_module_loader
|
||||
.load_sync_if_in_npm_package(specifier, maybe_referrer, permissions)
|
||||
{
|
||||
result?
|
||||
} else {
|
||||
self
|
||||
.shared
|
||||
.prepared_module_loader
|
||||
.load_prepared_module(specifier, maybe_referrer)?
|
||||
};
|
||||
let code = if self.shared.is_inspecting {
|
||||
// we need the code with the source map in order for
|
||||
// it to work with --inspect or --inspect-brk
|
||||
code_source.code
|
||||
|
@ -424,23 +446,6 @@ impl CliModuleLoader {
|
|||
&code_source.found_url,
|
||||
))
|
||||
}
|
||||
|
||||
fn handle_node_resolve_result(
|
||||
&self,
|
||||
result: Result<Option<node::NodeResolution>, AnyError>,
|
||||
) -> Result<ModuleSpecifier, AnyError> {
|
||||
let response = match result? {
|
||||
Some(response) => response,
|
||||
None => return Err(generic_error("not found")),
|
||||
};
|
||||
if let NodeResolution::CommonJs(specifier) = &response {
|
||||
// remember that this was a common js resolution
|
||||
self.cjs_resolutions.insert(specifier.clone());
|
||||
} else if let NodeResolution::BuiltIn(specifier) = &response {
|
||||
return node::resolve_builtin_node_module(specifier);
|
||||
}
|
||||
Ok(response.into_url())
|
||||
}
|
||||
}
|
||||
|
||||
impl ModuleLoader for CliModuleLoader {
|
||||
|
@ -450,10 +455,10 @@ impl ModuleLoader for CliModuleLoader {
|
|||
referrer: &str,
|
||||
kind: ResolutionKind,
|
||||
) -> Result<ModuleSpecifier, AnyError> {
|
||||
let mut permissions = if matches!(kind, ResolutionKind::DynamicImport) {
|
||||
self.dynamic_permissions.clone()
|
||||
let permissions = if matches!(kind, ResolutionKind::DynamicImport) {
|
||||
&self.dynamic_permissions
|
||||
} else {
|
||||
self.root_permissions.clone()
|
||||
&self.root_permissions
|
||||
};
|
||||
|
||||
// TODO(bartlomieju): ideally we shouldn't need to call `current_dir()` on each
|
||||
|
@ -462,21 +467,15 @@ impl ModuleLoader for CliModuleLoader {
|
|||
let referrer_result = deno_core::resolve_url_or_path(referrer, &cwd);
|
||||
|
||||
if let Ok(referrer) = referrer_result.as_ref() {
|
||||
if self.node_resolver.in_npm_package(referrer) {
|
||||
// we're in an npm package, so use node resolution
|
||||
return self
|
||||
.handle_node_resolve_result(self.node_resolver.resolve(
|
||||
specifier,
|
||||
referrer,
|
||||
NodeResolutionMode::Execution,
|
||||
&mut permissions,
|
||||
))
|
||||
.with_context(|| {
|
||||
format!("Could not resolve '{specifier}' from '{referrer}'.")
|
||||
});
|
||||
if let Some(result) = self
|
||||
.shared
|
||||
.npm_module_loader
|
||||
.resolve_if_in_npm_package(specifier, referrer, permissions)
|
||||
{
|
||||
return result;
|
||||
}
|
||||
|
||||
let graph = self.graph_container.graph();
|
||||
let graph = self.shared.graph_container.graph();
|
||||
let maybe_resolved = match graph.get(referrer) {
|
||||
Some(Module::Esm(module)) => {
|
||||
module.dependencies.get(specifier).map(|d| &d.maybe_code)
|
||||
|
@ -490,19 +489,10 @@ impl ModuleLoader for CliModuleLoader {
|
|||
|
||||
return match graph.get(specifier) {
|
||||
Some(Module::Npm(module)) => self
|
||||
.handle_node_resolve_result(
|
||||
self.node_resolver.resolve_npm_reference(
|
||||
&module.nv_reference,
|
||||
NodeResolutionMode::Execution,
|
||||
&mut permissions,
|
||||
),
|
||||
)
|
||||
.with_context(|| {
|
||||
format!("Could not resolve '{}'.", module.nv_reference)
|
||||
}),
|
||||
Some(Module::Node(module)) => {
|
||||
node::resolve_builtin_node_module(&module.module_name)
|
||||
}
|
||||
.shared
|
||||
.npm_module_loader
|
||||
.resolve_nv_ref(&module.nv_reference, permissions),
|
||||
Some(Module::Node(module)) => Ok(module.specifier.clone()),
|
||||
Some(Module::Esm(module)) => Ok(module.specifier.clone()),
|
||||
Some(Module::Json(module)) => Ok(module.specifier.clone()),
|
||||
Some(Module::External(module)) => {
|
||||
|
@ -521,17 +511,10 @@ impl ModuleLoader for CliModuleLoader {
|
|||
}
|
||||
}
|
||||
|
||||
// Built-in Node modules
|
||||
if let Some(module_name) = specifier.strip_prefix("node:") {
|
||||
return node::resolve_builtin_node_module(module_name);
|
||||
}
|
||||
|
||||
// FIXME(bartlomieju): this is a hacky way to provide compatibility with REPL
|
||||
// and `Deno.core.evalContext` API. Ideally we should always have a referrer filled
|
||||
// but sadly that's not the case due to missing APIs in V8.
|
||||
let is_repl =
|
||||
matches!(self.cli_options.sub_command(), DenoSubcommand::Repl(_));
|
||||
let referrer = if referrer.is_empty() && is_repl {
|
||||
let referrer = if referrer.is_empty() && self.shared.is_repl {
|
||||
deno_core::resolve_path("./$deno$repl.ts", &cwd)?
|
||||
} else {
|
||||
referrer_result?
|
||||
|
@ -539,9 +522,9 @@ impl ModuleLoader for CliModuleLoader {
|
|||
|
||||
// FIXME(bartlomieju): this is another hack way to provide NPM specifier
|
||||
// support in REPL. This should be fixed.
|
||||
let resolution = self.resolver.resolve(specifier, &referrer);
|
||||
let resolution = self.shared.resolver.resolve(specifier, &referrer);
|
||||
|
||||
if is_repl {
|
||||
if self.shared.is_repl {
|
||||
let specifier = resolution
|
||||
.as_ref()
|
||||
.ok()
|
||||
|
@ -552,14 +535,9 @@ impl ModuleLoader for CliModuleLoader {
|
|||
NpmPackageReqReference::from_specifier(&specifier)
|
||||
{
|
||||
return self
|
||||
.handle_node_resolve_result(
|
||||
self.node_resolver.resolve_npm_req_reference(
|
||||
&reference,
|
||||
deno_runtime::deno_node::NodeResolutionMode::Execution,
|
||||
&mut permissions,
|
||||
),
|
||||
)
|
||||
.with_context(|| format!("Could not resolve '{reference}'."));
|
||||
.shared
|
||||
.npm_module_loader
|
||||
.resolve_req_reference(&reference, permissions);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -585,20 +563,19 @@ impl ModuleLoader for CliModuleLoader {
|
|||
|
||||
fn prepare_load(
|
||||
&self,
|
||||
_op_state: Rc<RefCell<OpState>>,
|
||||
specifier: &ModuleSpecifier,
|
||||
_maybe_referrer: Option<String>,
|
||||
is_dynamic: bool,
|
||||
) -> Pin<Box<dyn Future<Output = Result<(), AnyError>>>> {
|
||||
if self.node_resolver.in_npm_package(specifier) {
|
||||
// nothing to prepare
|
||||
return Box::pin(deno_core::futures::future::ready(Ok(())));
|
||||
if let Some(result) =
|
||||
self.shared.npm_module_loader.maybe_prepare_load(specifier)
|
||||
{
|
||||
return Box::pin(deno_core::futures::future::ready(result));
|
||||
}
|
||||
|
||||
let specifier = specifier.clone();
|
||||
let module_load_preparer = self.module_load_preparer.clone();
|
||||
let module_load_preparer = self.shared.module_load_preparer.clone();
|
||||
|
||||
let dynamic_permissions = self.dynamic_permissions.clone();
|
||||
let root_permissions = if is_dynamic {
|
||||
self.dynamic_permissions.clone()
|
||||
} else {
|
||||
|
@ -608,20 +585,18 @@ impl ModuleLoader for CliModuleLoader {
|
|||
|
||||
async move {
|
||||
module_load_preparer
|
||||
.prepare_module_load(
|
||||
vec![specifier],
|
||||
is_dynamic,
|
||||
lib,
|
||||
root_permissions,
|
||||
dynamic_permissions,
|
||||
)
|
||||
.prepare_module_load(vec![specifier], is_dynamic, lib, root_permissions)
|
||||
.await
|
||||
}
|
||||
.boxed_local()
|
||||
}
|
||||
}
|
||||
|
||||
impl SourceMapGetter for CliModuleLoader {
|
||||
struct CliSourceMapGetter {
|
||||
shared: Arc<SharedCliModuleLoaderState>,
|
||||
}
|
||||
|
||||
impl SourceMapGetter for CliSourceMapGetter {
|
||||
fn get_source_map(&self, file_name: &str) -> Option<Vec<u8>> {
|
||||
let specifier = resolve_url(file_name).ok()?;
|
||||
match specifier.scheme() {
|
||||
|
@ -630,7 +605,11 @@ impl SourceMapGetter for CliModuleLoader {
|
|||
"wasm" | "file" | "http" | "https" | "data" | "blob" => (),
|
||||
_ => return None,
|
||||
}
|
||||
let source = self.load_prepared_module(&specifier, None).ok()?;
|
||||
let source = self
|
||||
.shared
|
||||
.prepared_module_loader
|
||||
.load_prepared_module(&specifier, None)
|
||||
.ok()?;
|
||||
source_map_from_code(&source.code)
|
||||
}
|
||||
|
||||
|
@ -639,7 +618,7 @@ impl SourceMapGetter for CliModuleLoader {
|
|||
file_name: &str,
|
||||
line_number: usize,
|
||||
) -> Option<String> {
|
||||
let graph = self.graph_container.graph();
|
||||
let graph = self.shared.graph_container.graph();
|
||||
let code = match graph.get(&resolve_url(file_name).ok()?) {
|
||||
Some(deno_graph::Module::Esm(module)) => &module.source,
|
||||
Some(deno_graph::Module::Json(module)) => &module.source,
|
||||
|
@ -658,3 +637,178 @@ impl SourceMapGetter for CliModuleLoader {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct NpmModuleLoader {
|
||||
cjs_resolutions: Arc<CjsResolutionStore>,
|
||||
node_code_translator: Arc<CliNodeCodeTranslator>,
|
||||
fs: Arc<dyn deno_fs::FileSystem>,
|
||||
node_resolver: Arc<NodeResolver>,
|
||||
}
|
||||
|
||||
impl NpmModuleLoader {
|
||||
pub fn new(
|
||||
cjs_resolutions: Arc<CjsResolutionStore>,
|
||||
node_code_translator: Arc<CliNodeCodeTranslator>,
|
||||
fs: Arc<dyn deno_fs::FileSystem>,
|
||||
node_resolver: Arc<NodeResolver>,
|
||||
) -> Self {
|
||||
Self {
|
||||
cjs_resolutions,
|
||||
node_code_translator,
|
||||
fs,
|
||||
node_resolver,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_if_in_npm_package(
|
||||
&self,
|
||||
specifier: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
permissions: &PermissionsContainer,
|
||||
) -> Option<Result<ModuleSpecifier, AnyError>> {
|
||||
if self.node_resolver.in_npm_package(referrer) {
|
||||
// we're in an npm package, so use node resolution
|
||||
Some(
|
||||
self
|
||||
.handle_node_resolve_result(self.node_resolver.resolve(
|
||||
specifier,
|
||||
referrer,
|
||||
NodeResolutionMode::Execution,
|
||||
permissions,
|
||||
))
|
||||
.with_context(|| {
|
||||
format!("Could not resolve '{specifier}' from '{referrer}'.")
|
||||
}),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_nv_ref(
|
||||
&self,
|
||||
nv_ref: &NpmPackageNvReference,
|
||||
permissions: &PermissionsContainer,
|
||||
) -> Result<ModuleSpecifier, AnyError> {
|
||||
self
|
||||
.handle_node_resolve_result(self.node_resolver.resolve_npm_reference(
|
||||
nv_ref,
|
||||
NodeResolutionMode::Execution,
|
||||
permissions,
|
||||
))
|
||||
.with_context(|| format!("Could not resolve '{}'.", nv_ref))
|
||||
}
|
||||
|
||||
pub fn resolve_req_reference(
|
||||
&self,
|
||||
reference: &NpmPackageReqReference,
|
||||
permissions: &PermissionsContainer,
|
||||
) -> Result<ModuleSpecifier, AnyError> {
|
||||
self
|
||||
.handle_node_resolve_result(self.node_resolver.resolve_npm_req_reference(
|
||||
reference,
|
||||
NodeResolutionMode::Execution,
|
||||
permissions,
|
||||
))
|
||||
.with_context(|| format!("Could not resolve '{reference}'."))
|
||||
}
|
||||
|
||||
pub fn maybe_prepare_load(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Option<Result<(), AnyError>> {
|
||||
if self.node_resolver.in_npm_package(specifier) {
|
||||
// nothing to prepare
|
||||
Some(Ok(()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn load_sync_if_in_npm_package(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
maybe_referrer: Option<&ModuleSpecifier>,
|
||||
permissions: &PermissionsContainer,
|
||||
) -> Option<Result<ModuleCodeSource, AnyError>> {
|
||||
if self.node_resolver.in_npm_package(specifier) {
|
||||
Some(self.load_sync(specifier, maybe_referrer, permissions))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn load_sync(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
maybe_referrer: Option<&ModuleSpecifier>,
|
||||
permissions: &PermissionsContainer,
|
||||
) -> Result<ModuleCodeSource, AnyError> {
|
||||
let file_path = specifier.to_file_path().unwrap();
|
||||
let code = self
|
||||
.fs
|
||||
.read_to_string(&file_path)
|
||||
.map_err(AnyError::from)
|
||||
.with_context(|| {
|
||||
let mut msg = "Unable to load ".to_string();
|
||||
msg.push_str(&file_path.to_string_lossy());
|
||||
if let Some(referrer) = &maybe_referrer {
|
||||
msg.push_str(" imported from ");
|
||||
msg.push_str(referrer.as_str());
|
||||
}
|
||||
msg
|
||||
})?;
|
||||
|
||||
let code = if self.cjs_resolutions.contains(specifier) {
|
||||
// translate cjs to esm if it's cjs and inject node globals
|
||||
self.node_code_translator.translate_cjs_to_esm(
|
||||
specifier,
|
||||
&code,
|
||||
permissions,
|
||||
)?
|
||||
} else {
|
||||
// only inject node globals for esm
|
||||
self
|
||||
.node_code_translator
|
||||
.esm_code_with_node_globals(specifier, &code)?
|
||||
};
|
||||
Ok(ModuleCodeSource {
|
||||
code: code.into(),
|
||||
found_url: specifier.clone(),
|
||||
media_type: MediaType::from_specifier(specifier),
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_node_resolve_result(
|
||||
&self,
|
||||
result: Result<Option<NodeResolution>, AnyError>,
|
||||
) -> Result<ModuleSpecifier, AnyError> {
|
||||
let response = match result? {
|
||||
Some(response) => response,
|
||||
None => return Err(generic_error("not found")),
|
||||
};
|
||||
if let NodeResolution::CommonJs(specifier) = &response {
|
||||
// remember that this was a common js resolution
|
||||
self.cjs_resolutions.insert(specifier.clone());
|
||||
}
|
||||
Ok(response.into_url())
|
||||
}
|
||||
}
|
||||
|
||||
/// Keeps track of what module specifiers were resolved as CJS.
|
||||
#[derive(Default)]
|
||||
pub struct CjsResolutionStore(Mutex<HashSet<ModuleSpecifier>>);
|
||||
|
||||
impl CjsResolutionStore {
|
||||
pub fn clear(&self) {
|
||||
self.0.lock().clear();
|
||||
}
|
||||
|
||||
pub fn contains(&self, specifier: &ModuleSpecifier) -> bool {
|
||||
self.0.lock().contains(specifier)
|
||||
}
|
||||
|
||||
pub fn insert(&self, specifier: ModuleSpecifier) {
|
||||
self.0.lock().insert(specifier);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,6 +2,8 @@
|
|||
|
||||
use deno_runtime::deno_napi::*;
|
||||
|
||||
use crate::check_env;
|
||||
|
||||
#[repr(C)]
|
||||
pub struct AsyncWork {
|
||||
pub data: *mut c_void,
|
||||
|
@ -18,37 +20,46 @@ fn napi_create_async_work(
|
|||
complete: napi_async_complete_callback,
|
||||
data: *mut c_void,
|
||||
result: *mut napi_async_work,
|
||||
) -> Result {
|
||||
) -> napi_status {
|
||||
let mut work = AsyncWork {
|
||||
data,
|
||||
execute,
|
||||
complete,
|
||||
};
|
||||
*result = transmute::<Box<AsyncWork>, _>(Box::new(work));
|
||||
Ok(())
|
||||
let work_box = Box::new(work);
|
||||
*result = transmute::<*mut AsyncWork, _>(Box::into_raw(work_box));
|
||||
napi_ok
|
||||
}
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
fn napi_cancel_async_work(
|
||||
_env: &mut Env,
|
||||
_async_work: napi_async_work,
|
||||
) -> Result {
|
||||
Ok(())
|
||||
) -> napi_status {
|
||||
napi_ok
|
||||
}
|
||||
|
||||
/// Frees a previously allocated work object.
|
||||
#[napi_sym::napi_sym]
|
||||
fn napi_delete_async_work(_env: &mut Env, work: napi_async_work) -> Result {
|
||||
fn napi_delete_async_work(
|
||||
_env: &mut Env,
|
||||
work: napi_async_work,
|
||||
) -> napi_status {
|
||||
let work = Box::from_raw(work as *mut AsyncWork);
|
||||
drop(work);
|
||||
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
fn napi_queue_async_work(env_ptr: *mut Env, work: napi_async_work) -> Result {
|
||||
fn napi_queue_async_work(
|
||||
env_ptr: *mut Env,
|
||||
work: napi_async_work,
|
||||
) -> napi_status {
|
||||
let work: &AsyncWork = &*(work as *const AsyncWork);
|
||||
let env: &mut Env = env_ptr.as_mut().ok_or(Error::InvalidArg)?;
|
||||
let Some(env) = env_ptr.as_mut() else {
|
||||
return napi_invalid_arg;
|
||||
};
|
||||
|
||||
let fut = Box::new(move || {
|
||||
(work.execute)(env_ptr as napi_env, work.data);
|
||||
|
@ -57,22 +68,25 @@ fn napi_queue_async_work(env_ptr: *mut Env, work: napi_async_work) -> Result {
|
|||
});
|
||||
env.add_async_work(fut);
|
||||
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
// TODO: Custom async operations.
|
||||
|
||||
// NOTE: we don't support "async_hooks::AsyncContext" so these APIs are noops.
|
||||
#[napi_sym::napi_sym]
|
||||
fn napi_async_init(
|
||||
_env: *mut Env,
|
||||
env: *mut Env,
|
||||
_async_resource: napi_value,
|
||||
_async_resource_name: napi_value,
|
||||
_result: *mut *mut (),
|
||||
) -> Result {
|
||||
todo!()
|
||||
result: *mut *mut (),
|
||||
) -> napi_status {
|
||||
check_env!(env);
|
||||
*result = ptr::null_mut();
|
||||
napi_ok
|
||||
}
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
fn napi_async_destroy(_env: *mut Env, _async_context: *mut ()) -> Result {
|
||||
todo!()
|
||||
fn napi_async_destroy(env: *mut Env, async_context: *mut ()) -> napi_status {
|
||||
check_env!(env);
|
||||
assert!(async_context.is_null());
|
||||
napi_ok
|
||||
}
|
||||
|
|
|
@ -41,8 +41,10 @@ pub unsafe extern "C" fn napi_fatal_error(
|
|||
// napi-3
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
fn napi_fatal_exception(env: *mut Env, value: napi_value) -> Result {
|
||||
let env: &mut Env = env.as_mut().ok_or(Error::InvalidArg)?;
|
||||
fn napi_fatal_exception(env: *mut Env, value: napi_value) -> napi_status {
|
||||
let Some(env) = env.as_mut() else {
|
||||
return napi_invalid_arg;
|
||||
};
|
||||
let value = transmute::<napi_value, v8::Local<v8::Value>>(value);
|
||||
let error = value.to_rust_string_lossy(&mut env.scope());
|
||||
panic!("Fatal exception triggered by napi_fatal_exception!\n{error}");
|
||||
|
@ -53,8 +55,10 @@ fn napi_add_env_cleanup_hook(
|
|||
env: *mut Env,
|
||||
hook: extern "C" fn(*const c_void),
|
||||
data: *const c_void,
|
||||
) -> Result {
|
||||
let env: &mut Env = env.as_mut().ok_or(Error::InvalidArg)?;
|
||||
) -> napi_status {
|
||||
let Some(env) = env.as_mut() else {
|
||||
return napi_invalid_arg;
|
||||
};
|
||||
|
||||
{
|
||||
let mut env_cleanup_hooks = env.cleanup_hooks.borrow_mut();
|
||||
|
@ -66,7 +70,7 @@ fn napi_add_env_cleanup_hook(
|
|||
}
|
||||
env_cleanup_hooks.push((hook, data));
|
||||
}
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
|
@ -74,8 +78,10 @@ fn napi_remove_env_cleanup_hook(
|
|||
env: *mut Env,
|
||||
hook: extern "C" fn(*const c_void),
|
||||
data: *const c_void,
|
||||
) -> Result {
|
||||
let env: &mut Env = env.as_mut().ok_or(Error::InvalidArg)?;
|
||||
) -> napi_status {
|
||||
let Some(env) = env.as_mut() else {
|
||||
return napi_invalid_arg;
|
||||
};
|
||||
|
||||
{
|
||||
let mut env_cleanup_hooks = env.cleanup_hooks.borrow_mut();
|
||||
|
@ -91,7 +97,7 @@ fn napi_remove_env_cleanup_hook(
|
|||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
|
@ -100,46 +106,52 @@ fn napi_open_callback_scope(
|
|||
_resource_object: napi_value,
|
||||
_context: napi_value,
|
||||
_result: *mut napi_callback_scope,
|
||||
) -> Result {
|
||||
) -> napi_status {
|
||||
// we open scope automatically when it's needed
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
fn napi_close_callback_scope(
|
||||
_env: *mut Env,
|
||||
_scope: napi_callback_scope,
|
||||
) -> Result {
|
||||
) -> napi_status {
|
||||
// we close scope automatically when it's needed
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
fn node_api_get_module_file_name(
|
||||
env: *mut Env,
|
||||
result: *mut *const c_char,
|
||||
) -> Result {
|
||||
let env: &mut Env = env.as_mut().ok_or(Error::InvalidArg)?;
|
||||
) -> napi_status {
|
||||
let Some(env) = env.as_mut() else {
|
||||
return napi_invalid_arg;
|
||||
};
|
||||
|
||||
let shared = env.shared();
|
||||
*result = shared.filename;
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
fn napi_module_register(module: *const NapiModule) -> Result {
|
||||
MODULE.with(|cell| {
|
||||
fn napi_module_register(module: *const NapiModule) -> napi_status {
|
||||
MODULE_TO_REGISTER.with(|cell| {
|
||||
let mut slot = cell.borrow_mut();
|
||||
slot.replace(module);
|
||||
let prev = slot.replace(module);
|
||||
assert!(prev.is_none());
|
||||
});
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
fn napi_get_uv_event_loop(_env: *mut Env, uv_loop: *mut *mut ()) -> Result {
|
||||
fn napi_get_uv_event_loop(
|
||||
_env: *mut Env,
|
||||
uv_loop: *mut *mut (),
|
||||
) -> napi_status {
|
||||
// There is no uv_loop in Deno
|
||||
*uv_loop = std::ptr::null_mut();
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
const NODE_VERSION: napi_node_version = napi_node_version {
|
||||
|
@ -153,10 +165,10 @@ const NODE_VERSION: napi_node_version = napi_node_version {
|
|||
fn napi_get_node_version(
|
||||
env: *mut Env,
|
||||
result: *mut *const napi_node_version,
|
||||
) -> Result {
|
||||
) -> napi_status {
|
||||
crate::check_env!(env);
|
||||
crate::check_arg!(env, result);
|
||||
|
||||
*result = &NODE_VERSION as *const napi_node_version;
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "napi_sym"
|
||||
version = "0.29.0"
|
||||
version = "0.37.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -25,22 +25,12 @@ pub fn napi_sym(_attr: TokenStream, item: TokenStream) -> TokenStream {
|
|||
|
||||
let block = &func.block;
|
||||
let inputs = &func.sig.inputs;
|
||||
let output = &func.sig.output;
|
||||
let generics = &func.sig.generics;
|
||||
let ret_ty = match output {
|
||||
syn::ReturnType::Default => panic!("expected a return type"),
|
||||
syn::ReturnType::Type(_, ty) => quote! { #ty },
|
||||
};
|
||||
TokenStream::from(quote! {
|
||||
// SAFETY: it's an NAPI function.
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn #name #generics (#inputs) -> napi_status {
|
||||
let mut inner = || -> #ret_ty {
|
||||
#block
|
||||
};
|
||||
inner()
|
||||
.map(|_| napi_ok)
|
||||
.unwrap_or_else(|e| e.into())
|
||||
#block
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
|
@ -18,6 +18,8 @@ pub struct TsFn {
|
|||
pub context: *mut c_void,
|
||||
pub thread_counter: usize,
|
||||
pub ref_counter: Arc<AtomicUsize>,
|
||||
finalizer: Option<napi_finalize>,
|
||||
finalizer_data: *mut c_void,
|
||||
sender: mpsc::UnboundedSender<PendingNapiAsyncWork>,
|
||||
tsfn_sender: mpsc::UnboundedSender<ThreadSafeFunctionStatus>,
|
||||
}
|
||||
|
@ -25,38 +27,46 @@ pub struct TsFn {
|
|||
impl Drop for TsFn {
|
||||
fn drop(&mut self) {
|
||||
let env = unsafe { self.env.as_mut().unwrap() };
|
||||
env.remove_threadsafe_function_ref_counter(self.id)
|
||||
env.remove_threadsafe_function_ref_counter(self.id);
|
||||
if let Some(finalizer) = self.finalizer {
|
||||
unsafe {
|
||||
(finalizer)(self.env as _, self.finalizer_data, ptr::null_mut());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TsFn {
|
||||
pub fn acquire(&mut self) -> Result {
|
||||
pub fn acquire(&mut self) -> napi_status {
|
||||
self.thread_counter += 1;
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
pub fn release(mut self) -> Result {
|
||||
pub fn release(mut self) -> napi_status {
|
||||
self.thread_counter -= 1;
|
||||
if self.thread_counter == 0 {
|
||||
self
|
||||
if self
|
||||
.tsfn_sender
|
||||
.unbounded_send(ThreadSafeFunctionStatus::Dead)
|
||||
.map_err(|_| Error::GenericFailure)?;
|
||||
.is_err()
|
||||
{
|
||||
return napi_generic_failure;
|
||||
}
|
||||
drop(self);
|
||||
} else {
|
||||
forget(self);
|
||||
}
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
pub fn ref_(&mut self) -> Result {
|
||||
pub fn ref_(&mut self) -> napi_status {
|
||||
self
|
||||
.ref_counter
|
||||
.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
pub fn unref(&mut self) -> Result {
|
||||
pub fn unref(&mut self) -> napi_status {
|
||||
let _ = self.ref_counter.fetch_update(
|
||||
std::sync::atomic::Ordering::SeqCst,
|
||||
std::sync::atomic::Ordering::SeqCst,
|
||||
|
@ -69,7 +79,7 @@ impl TsFn {
|
|||
},
|
||||
);
|
||||
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
pub fn call(&self, data: *mut c_void, is_blocking: bool) {
|
||||
|
@ -126,23 +136,27 @@ fn napi_create_threadsafe_function(
|
|||
_async_resource_name: napi_value,
|
||||
_max_queue_size: usize,
|
||||
initial_thread_count: usize,
|
||||
_thread_finialize_data: *mut c_void,
|
||||
_thread_finalize_cb: napi_finalize,
|
||||
thread_finialize_data: *mut c_void,
|
||||
thread_finalize_cb: Option<napi_finalize>,
|
||||
context: *mut c_void,
|
||||
maybe_call_js_cb: Option<napi_threadsafe_function_call_js>,
|
||||
result: *mut napi_threadsafe_function,
|
||||
) -> Result {
|
||||
let env_ref = env.as_mut().ok_or(Error::GenericFailure)?;
|
||||
) -> napi_status {
|
||||
let Some(env_ref) = env.as_mut() else {
|
||||
return napi_generic_failure;
|
||||
};
|
||||
if initial_thread_count == 0 {
|
||||
return Err(Error::InvalidArg);
|
||||
return napi_invalid_arg;
|
||||
}
|
||||
|
||||
let mut maybe_func = None;
|
||||
|
||||
if let Some(value) = *func {
|
||||
let Ok(func) = v8::Local::<v8::Function>::try_from(value) else {
|
||||
return napi_function_expected;
|
||||
};
|
||||
maybe_func = Some(v8::Global::new(&mut env_ref.scope(), func));
|
||||
}
|
||||
let maybe_func = func
|
||||
.map(|value| {
|
||||
let func = v8::Local::<v8::Function>::try_from(value)
|
||||
.map_err(|_| Error::FunctionExpected)?;
|
||||
Ok(v8::Global::new(&mut env_ref.scope(), func))
|
||||
})
|
||||
.transpose()?;
|
||||
|
||||
let id = TS_FN_ID_COUNTER.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
|
||||
|
||||
|
@ -153,42 +167,44 @@ fn napi_create_threadsafe_function(
|
|||
context,
|
||||
thread_counter: initial_thread_count,
|
||||
sender: env_ref.async_work_sender.clone(),
|
||||
finalizer: thread_finalize_cb,
|
||||
finalizer_data: thread_finialize_data,
|
||||
tsfn_sender: env_ref.threadsafe_function_sender.clone(),
|
||||
ref_counter: Arc::new(AtomicUsize::new(1)),
|
||||
env,
|
||||
};
|
||||
|
||||
env_ref
|
||||
.add_threadsafe_function_ref_counter(tsfn.id, tsfn.ref_counter.clone());
|
||||
|
||||
env_ref
|
||||
if env_ref
|
||||
.threadsafe_function_sender
|
||||
.unbounded_send(ThreadSafeFunctionStatus::Alive)
|
||||
.map_err(|_| Error::GenericFailure)?;
|
||||
.is_err()
|
||||
{
|
||||
return napi_generic_failure;
|
||||
}
|
||||
*result = transmute::<Box<TsFn>, _>(Box::new(tsfn));
|
||||
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
fn napi_acquire_threadsafe_function(
|
||||
tsfn: napi_threadsafe_function,
|
||||
_mode: napi_threadsafe_function_release_mode,
|
||||
) -> Result {
|
||||
) -> napi_status {
|
||||
let tsfn: &mut TsFn = &mut *(tsfn as *mut TsFn);
|
||||
tsfn.acquire()?;
|
||||
|
||||
Ok(())
|
||||
tsfn.acquire()
|
||||
}
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
fn napi_unref_threadsafe_function(
|
||||
_env: &mut Env,
|
||||
tsfn: napi_threadsafe_function,
|
||||
) -> Result {
|
||||
) -> napi_status {
|
||||
let tsfn: &mut TsFn = &mut *(tsfn as *mut TsFn);
|
||||
tsfn.unref()?;
|
||||
|
||||
Ok(())
|
||||
tsfn.unref()
|
||||
}
|
||||
|
||||
/// Maybe called from any thread.
|
||||
|
@ -196,10 +212,10 @@ fn napi_unref_threadsafe_function(
|
|||
pub fn napi_get_threadsafe_function_context(
|
||||
func: napi_threadsafe_function,
|
||||
result: *mut *const c_void,
|
||||
) -> Result {
|
||||
) -> napi_status {
|
||||
let tsfn: &TsFn = &*(func as *const TsFn);
|
||||
*result = tsfn.context;
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
|
@ -207,29 +223,26 @@ fn napi_call_threadsafe_function(
|
|||
func: napi_threadsafe_function,
|
||||
data: *mut c_void,
|
||||
is_blocking: napi_threadsafe_function_call_mode,
|
||||
) -> Result {
|
||||
) -> napi_status {
|
||||
let tsfn: &TsFn = &*(func as *const TsFn);
|
||||
tsfn.call(data, is_blocking != 0);
|
||||
Ok(())
|
||||
napi_ok
|
||||
}
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
fn napi_ref_threadsafe_function(
|
||||
_env: &mut Env,
|
||||
func: napi_threadsafe_function,
|
||||
) -> Result {
|
||||
) -> napi_status {
|
||||
let tsfn: &mut TsFn = &mut *(func as *mut TsFn);
|
||||
tsfn.ref_()?;
|
||||
Ok(())
|
||||
tsfn.ref_()
|
||||
}
|
||||
|
||||
#[napi_sym::napi_sym]
|
||||
fn napi_release_threadsafe_function(
|
||||
tsfn: napi_threadsafe_function,
|
||||
_mode: napi_threadsafe_function_release_mode,
|
||||
) -> Result {
|
||||
) -> napi_status {
|
||||
let tsfn: Box<TsFn> = Box::from_raw(tsfn as *mut TsFn);
|
||||
tsfn.release()?;
|
||||
|
||||
Ok(())
|
||||
tsfn.release()
|
||||
}
|
||||
|
|
201
cli/node.rs
Normal file
201
cli/node.rs
Normal file
|
@ -0,0 +1,201 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::collections::HashSet;
|
||||
|
||||
use deno_ast::swc::common::SyntaxContext;
|
||||
use deno_ast::view::Node;
|
||||
use deno_ast::view::NodeTrait;
|
||||
use deno_ast::CjsAnalysis;
|
||||
use deno_ast::MediaType;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_ast::ParsedSource;
|
||||
use deno_ast::SourceRanged;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_runtime::deno_node::analyze::CjsAnalysis as ExtNodeCjsAnalysis;
|
||||
use deno_runtime::deno_node::analyze::CjsEsmCodeAnalyzer;
|
||||
use deno_runtime::deno_node::analyze::NodeCodeTranslator;
|
||||
|
||||
use crate::cache::NodeAnalysisCache;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists;
|
||||
|
||||
pub type CliNodeCodeTranslator = NodeCodeTranslator<CliCjsEsmCodeAnalyzer>;
|
||||
|
||||
/// Resolves a specifier that is pointing into a node_modules folder.
|
||||
///
|
||||
/// Note: This should be called whenever getting the specifier from
|
||||
/// a Module::External(module) reference because that module might
|
||||
/// not be fully resolved at the time deno_graph is analyzing it
|
||||
/// because the node_modules folder might not exist at that time.
|
||||
pub fn resolve_specifier_into_node_modules(
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> ModuleSpecifier {
|
||||
specifier
|
||||
.to_file_path()
|
||||
.ok()
|
||||
// this path might not exist at the time the graph is being created
|
||||
// because the node_modules folder might not yet exist
|
||||
.and_then(|path| canonicalize_path_maybe_not_exists(&path).ok())
|
||||
.and_then(|path| ModuleSpecifier::from_file_path(path).ok())
|
||||
.unwrap_or_else(|| specifier.clone())
|
||||
}
|
||||
|
||||
pub struct CliCjsEsmCodeAnalyzer {
|
||||
cache: NodeAnalysisCache,
|
||||
}
|
||||
|
||||
impl CliCjsEsmCodeAnalyzer {
|
||||
pub fn new(cache: NodeAnalysisCache) -> Self {
|
||||
Self { cache }
|
||||
}
|
||||
|
||||
fn inner_cjs_analysis(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
source: &str,
|
||||
) -> Result<CjsAnalysis, AnyError> {
|
||||
let source_hash = NodeAnalysisCache::compute_source_hash(source);
|
||||
if let Some(analysis) = self
|
||||
.cache
|
||||
.get_cjs_analysis(specifier.as_str(), &source_hash)
|
||||
{
|
||||
return Ok(analysis);
|
||||
}
|
||||
|
||||
let media_type = MediaType::from_specifier(specifier);
|
||||
if media_type == MediaType::Json {
|
||||
return Ok(CjsAnalysis {
|
||||
exports: vec![],
|
||||
reexports: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
let parsed_source = deno_ast::parse_script(deno_ast::ParseParams {
|
||||
specifier: specifier.to_string(),
|
||||
text_info: deno_ast::SourceTextInfo::new(source.into()),
|
||||
media_type,
|
||||
capture_tokens: true,
|
||||
scope_analysis: false,
|
||||
maybe_syntax: None,
|
||||
})?;
|
||||
let analysis = parsed_source.analyze_cjs();
|
||||
self
|
||||
.cache
|
||||
.set_cjs_analysis(specifier.as_str(), &source_hash, &analysis);
|
||||
|
||||
Ok(analysis)
|
||||
}
|
||||
}
|
||||
|
||||
impl CjsEsmCodeAnalyzer for CliCjsEsmCodeAnalyzer {
|
||||
fn analyze_cjs(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
source: &str,
|
||||
) -> Result<ExtNodeCjsAnalysis, AnyError> {
|
||||
let analysis = self.inner_cjs_analysis(specifier, source)?;
|
||||
Ok(ExtNodeCjsAnalysis {
|
||||
exports: analysis.exports,
|
||||
reexports: analysis.reexports,
|
||||
})
|
||||
}
|
||||
|
||||
fn analyze_esm_top_level_decls(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
source: &str,
|
||||
) -> Result<HashSet<String>, AnyError> {
|
||||
// TODO(dsherret): this code is way more inefficient than it needs to be.
|
||||
//
|
||||
// In the future, we should disable capturing tokens & scope analysis
|
||||
// and instead only use swc's APIs to go through the portions of the tree
|
||||
// that we know will affect the global scope while still ensuring that
|
||||
// `var` decls are taken into consideration.
|
||||
let source_hash = NodeAnalysisCache::compute_source_hash(source);
|
||||
if let Some(decls) = self
|
||||
.cache
|
||||
.get_esm_analysis(specifier.as_str(), &source_hash)
|
||||
{
|
||||
Ok(HashSet::from_iter(decls))
|
||||
} else {
|
||||
let parsed_source = deno_ast::parse_program(deno_ast::ParseParams {
|
||||
specifier: specifier.to_string(),
|
||||
text_info: deno_ast::SourceTextInfo::from_string(source.to_string()),
|
||||
media_type: deno_ast::MediaType::from_specifier(specifier),
|
||||
capture_tokens: true,
|
||||
scope_analysis: true,
|
||||
maybe_syntax: None,
|
||||
})?;
|
||||
let top_level_decls = analyze_top_level_decls(&parsed_source)?;
|
||||
self.cache.set_esm_analysis(
|
||||
specifier.as_str(),
|
||||
&source_hash,
|
||||
&top_level_decls.clone().into_iter().collect::<Vec<_>>(),
|
||||
);
|
||||
Ok(top_level_decls)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn analyze_top_level_decls(
|
||||
parsed_source: &ParsedSource,
|
||||
) -> Result<HashSet<String>, AnyError> {
|
||||
fn visit_children(
|
||||
node: Node,
|
||||
top_level_context: SyntaxContext,
|
||||
results: &mut HashSet<String>,
|
||||
) {
|
||||
if let Node::Ident(ident) = node {
|
||||
if ident.ctxt() == top_level_context && is_local_declaration_ident(node) {
|
||||
results.insert(ident.sym().to_string());
|
||||
}
|
||||
}
|
||||
|
||||
for child in node.children() {
|
||||
visit_children(child, top_level_context, results);
|
||||
}
|
||||
}
|
||||
|
||||
let top_level_context = parsed_source.top_level_context();
|
||||
|
||||
parsed_source.with_view(|program| {
|
||||
let mut results = HashSet::new();
|
||||
visit_children(program.into(), top_level_context, &mut results);
|
||||
Ok(results)
|
||||
})
|
||||
}
|
||||
|
||||
fn is_local_declaration_ident(node: Node) -> bool {
|
||||
if let Some(parent) = node.parent() {
|
||||
match parent {
|
||||
Node::BindingIdent(decl) => decl.id.range().contains(&node.range()),
|
||||
Node::ClassDecl(decl) => decl.ident.range().contains(&node.range()),
|
||||
Node::ClassExpr(decl) => decl
|
||||
.ident
|
||||
.as_ref()
|
||||
.map(|i| i.range().contains(&node.range()))
|
||||
.unwrap_or(false),
|
||||
Node::TsInterfaceDecl(decl) => decl.id.range().contains(&node.range()),
|
||||
Node::FnDecl(decl) => decl.ident.range().contains(&node.range()),
|
||||
Node::FnExpr(decl) => decl
|
||||
.ident
|
||||
.as_ref()
|
||||
.map(|i| i.range().contains(&node.range()))
|
||||
.unwrap_or(false),
|
||||
Node::TsModuleDecl(decl) => decl.id.range().contains(&node.range()),
|
||||
Node::TsNamespaceDecl(decl) => decl.id.range().contains(&node.range()),
|
||||
Node::VarDeclarator(decl) => decl.name.range().contains(&node.range()),
|
||||
Node::ImportNamedSpecifier(decl) => {
|
||||
decl.local.range().contains(&node.range())
|
||||
}
|
||||
Node::ImportDefaultSpecifier(decl) => {
|
||||
decl.local.range().contains(&node.range())
|
||||
}
|
||||
Node::ImportStarAsSpecifier(decl) => decl.range().contains(&node.range()),
|
||||
Node::KeyValuePatProp(decl) => decl.key.range().contains(&node.range()),
|
||||
Node::AssignPatProp(decl) => decl.key.range().contains(&node.range()),
|
||||
_ => false,
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
735
cli/node/mod.rs
735
cli/node/mod.rs
|
@ -1,735 +0,0 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json::Value;
|
||||
use deno_core::url::Url;
|
||||
use deno_runtime::deno_node;
|
||||
use deno_runtime::deno_node::errors;
|
||||
use deno_runtime::deno_node::find_builtin_node_module;
|
||||
use deno_runtime::deno_node::get_closest_package_json;
|
||||
use deno_runtime::deno_node::legacy_main_resolve;
|
||||
use deno_runtime::deno_node::package_exports_resolve;
|
||||
use deno_runtime::deno_node::package_imports_resolve;
|
||||
use deno_runtime::deno_node::package_resolve;
|
||||
use deno_runtime::deno_node::path_to_declaration_path;
|
||||
use deno_runtime::deno_node::NodeModuleKind;
|
||||
use deno_runtime::deno_node::NodePermissions;
|
||||
use deno_runtime::deno_node::NodeResolutionMode;
|
||||
use deno_runtime::deno_node::PackageJson;
|
||||
use deno_runtime::deno_node::RealFs;
|
||||
use deno_runtime::deno_node::RequireNpmResolver;
|
||||
use deno_runtime::deno_node::DEFAULT_CONDITIONS;
|
||||
use deno_runtime::permissions::PermissionsContainer;
|
||||
use deno_semver::npm::NpmPackageNv;
|
||||
use deno_semver::npm::NpmPackageNvReference;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
|
||||
use crate::npm::NpmPackageResolver;
|
||||
use crate::npm::NpmResolution;
|
||||
use crate::npm::RequireNpmPackageResolver;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists;
|
||||
|
||||
mod analyze;
|
||||
|
||||
pub use analyze::NodeCodeTranslator;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum NodeResolution {
|
||||
Esm(ModuleSpecifier),
|
||||
CommonJs(ModuleSpecifier),
|
||||
BuiltIn(String),
|
||||
}
|
||||
|
||||
impl NodeResolution {
  /// Converts the resolution into a URL. Built-in module names are
  /// normalized to the "node:" scheme (e.g. "fs" becomes "node:fs").
  pub fn into_url(self) -> ModuleSpecifier {
    match self {
      Self::Esm(u) => u,
      Self::CommonJs(u) => u,
      Self::BuiltIn(specifier) => {
        if specifier.starts_with("node:") {
          // Already prefixed; parse as-is.
          ModuleSpecifier::parse(&specifier).unwrap()
        } else {
          ModuleSpecifier::parse(&format!("node:{specifier}")).unwrap()
        }
      }
    }
  }

  /// Maps an optional resolution to a `(specifier, MediaType)` pair,
  /// adjusting ambiguous media types to their module-kind-specific
  /// variants (e.g. a `.js` file resolved as CommonJS is typed `Cjs`).
  /// A `None` resolution yields a placeholder "missing dependency"
  /// specifier typed as `Dts`.
  pub fn into_specifier_and_media_type(
    resolution: Option<Self>,
  ) -> (ModuleSpecifier, MediaType) {
    match resolution {
      Some(NodeResolution::CommonJs(specifier)) => {
        let media_type = MediaType::from_specifier(&specifier);
        (
          specifier,
          match media_type {
            MediaType::JavaScript | MediaType::Jsx => MediaType::Cjs,
            MediaType::TypeScript | MediaType::Tsx => MediaType::Cts,
            MediaType::Dts => MediaType::Dcts,
            _ => media_type,
          },
        )
      }
      Some(NodeResolution::Esm(specifier)) => {
        let media_type = MediaType::from_specifier(&specifier);
        (
          specifier,
          match media_type {
            MediaType::JavaScript | MediaType::Jsx => MediaType::Mjs,
            MediaType::TypeScript | MediaType::Tsx => MediaType::Mts,
            MediaType::Dts => MediaType::Dmts,
            _ => media_type,
          },
        )
      }
      // Built-in modules are surfaced to the type checker as declarations.
      Some(resolution) => (resolution.into_url(), MediaType::Dts),
      None => (
        ModuleSpecifier::parse("internal:///missing_dependency.d.ts").unwrap(),
        MediaType::Dts,
      ),
    }
  }
}
|
||||
|
||||
// TODO(bartlomieju): seems super wasteful to parse specified each time
|
||||
pub fn resolve_builtin_node_module(module_name: &str) -> Result<Url, AnyError> {
|
||||
if let Some(module) = find_builtin_node_module(module_name) {
|
||||
return Ok(ModuleSpecifier::parse(module.specifier).unwrap());
|
||||
}
|
||||
|
||||
Err(generic_error(format!(
|
||||
"Unknown built-in \"node:\" module: {module_name}"
|
||||
)))
|
||||
}
|
||||
|
||||
/// CLI implementation of a Node.js-compatible module resolver backed by the
/// npm resolution snapshot and the local npm package resolver.
#[derive(Debug)]
pub struct CliNodeResolver {
  // Maps package requirements (e.g. "chalk@^5") to resolved name/versions.
  npm_resolution: Arc<NpmResolution>,
  // Resolves package folders on disk for resolved packages.
  npm_resolver: Arc<NpmPackageResolver>,
  // Adapter over `npm_resolver` used by the deno_runtime Node resolution
  // helpers (package.json loading, "exports"/"imports" resolution, etc.).
  require_npm_resolver: RequireNpmPackageResolver,
}
|
||||
|
||||
impl CliNodeResolver {
|
||||
pub fn new(
|
||||
npm_resolution: Arc<NpmResolution>,
|
||||
npm_package_resolver: Arc<NpmPackageResolver>,
|
||||
) -> Self {
|
||||
Self {
|
||||
npm_resolution,
|
||||
require_npm_resolver: npm_package_resolver.as_require_npm_resolver(),
|
||||
npm_resolver: npm_package_resolver,
|
||||
}
|
||||
}
|
||||
|
||||
  /// Returns true if the specifier points inside a managed npm package.
  pub fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
    self.npm_resolver.in_npm_package(specifier)
  }
|
||||
|
||||
  /// This function is an implementation of `defaultResolve` in
  /// `lib/internal/modules/esm/resolve.js` from Node.
  ///
  /// Handles, in order: built-in module names, "data:" URLs, "node:" URLs,
  /// and finally regular module resolution via `module_resolve`. In types
  /// mode the resolved file is mapped to its closest declaration file;
  /// returns `Ok(None)` when nothing can be resolved.
  pub fn resolve(
    &self,
    specifier: &str,
    referrer: &ModuleSpecifier,
    mode: NodeResolutionMode,
    permissions: &mut dyn NodePermissions,
  ) -> Result<Option<NodeResolution>, AnyError> {
    // Note: if we are here, then the referrer is an esm module
    // TODO(bartlomieju): skipped "policy" part as we don't plan to support it

    if deno_node::is_builtin_node_module(specifier) {
      return Ok(Some(NodeResolution::BuiltIn(specifier.to_string())));
    }

    if let Ok(url) = Url::parse(specifier) {
      if url.scheme() == "data" {
        return Ok(Some(NodeResolution::Esm(url)));
      }

      let protocol = url.scheme();

      if protocol == "node" {
        // Strip the "node:" scheme to get the built-in module name.
        // NOTE(review): `skip(1).collect` concatenates any remaining
        // segments, so additional ':' characters would be dropped —
        // presumably harmless since built-in names contain no ':', but
        // confirm.
        let split_specifier = url.as_str().split(':');
        let specifier = split_specifier.skip(1).collect::<String>();

        if deno_node::is_builtin_node_module(&specifier) {
          return Ok(Some(NodeResolution::BuiltIn(specifier)));
        }
      }

      if protocol != "file" && protocol != "data" {
        return Err(errors::err_unsupported_esm_url_scheme(&url));
      }

      // todo(dsherret): this seems wrong
      if referrer.scheme() == "data" {
        let url = referrer.join(specifier).map_err(AnyError::from)?;
        return Ok(Some(NodeResolution::Esm(url)));
      }
    }

    let url = self.module_resolve(
      specifier,
      referrer,
      DEFAULT_CONDITIONS,
      mode,
      permissions,
    )?;
    let url = match url {
      Some(url) => url,
      None => return Ok(None),
    };
    // In types mode, swap the resolved file for its declaration file;
    // bail out with `None` when no declaration exists.
    let url = match mode {
      NodeResolutionMode::Execution => url,
      NodeResolutionMode::Types => {
        let path = url.to_file_path().unwrap();
        // todo(16370): the module kind is not correct here. I think we need
        // typescript to tell us if the referrer is esm or cjs
        let path =
          match path_to_declaration_path::<RealFs>(path, NodeModuleKind::Esm) {
            Some(path) => path,
            None => return Ok(None),
          };
        ModuleSpecifier::from_file_path(path).unwrap()
      }
    };

    let resolve_response = self.url_to_node_resolution(url)?;
    // TODO(bartlomieju): skipped checking errors for commonJS resolution and
    // "preserveSymlinksMain"/"preserveSymlinks" options.
    Ok(Some(resolve_response))
  }
|
||||
|
||||
  /// Resolves a specifier relative to an esm referrer following Node's ESM
  /// algorithm: relative/absolute paths, "#" package imports, absolute
  /// URLs, then bare package specifiers — finishing with Node's
  /// finalization checks. In types mode, file paths are mapped to their
  /// closest declaration file; returns `Ok(None)` when none is found.
  fn module_resolve(
    &self,
    specifier: &str,
    referrer: &ModuleSpecifier,
    conditions: &[&str],
    mode: NodeResolutionMode,
    permissions: &mut dyn NodePermissions,
  ) -> Result<Option<ModuleSpecifier>, AnyError> {
    // note: if we're here, the referrer is an esm module
    let url = if should_be_treated_as_relative_or_absolute_path(specifier) {
      let resolved_specifier = referrer.join(specifier)?;
      if mode.is_types() {
        let file_path = to_file_path(&resolved_specifier);
        // todo(dsherret): the node module kind is not correct and we
        // should use the value provided by typescript instead
        let declaration_path =
          path_to_declaration_path::<RealFs>(file_path, NodeModuleKind::Esm);
        declaration_path.map(|declaration_path| {
          ModuleSpecifier::from_file_path(declaration_path).unwrap()
        })
      } else {
        Some(resolved_specifier)
      }
    } else if specifier.starts_with('#') {
      // "#..." specifiers resolve through the "imports" field of the
      // referrer's package.json.
      Some(
        package_imports_resolve::<RealFs>(
          specifier,
          referrer,
          NodeModuleKind::Esm,
          conditions,
          mode,
          &self.require_npm_resolver,
          permissions,
        )
        .map(|p| ModuleSpecifier::from_file_path(p).unwrap())?,
      )
    } else if let Ok(resolved) = Url::parse(specifier) {
      // Already an absolute URL.
      Some(resolved)
    } else {
      // Bare specifier: resolve through the package's "exports"/main.
      package_resolve::<RealFs>(
        specifier,
        referrer,
        NodeModuleKind::Esm,
        conditions,
        mode,
        &self.require_npm_resolver,
        permissions,
      )?
      .map(|p| ModuleSpecifier::from_file_path(p).unwrap())
    };
    Ok(match url {
      // Verify the resolved target exists and is a file (not a directory).
      Some(url) => Some(finalize_resolution(url, referrer)?),
      None => None,
    })
  }
|
||||
|
||||
  /// Resolves an `npm:` requirement reference (e.g. `npm:chalk@5/sub`) by
  /// first mapping the requirement to a resolved name/version, then
  /// delegating to `resolve_npm_reference`.
  pub fn resolve_npm_req_reference(
    &self,
    reference: &NpmPackageReqReference,
    mode: NodeResolutionMode,
    permissions: &mut dyn NodePermissions,
  ) -> Result<Option<NodeResolution>, AnyError> {
    let reference = self.npm_resolution.pkg_req_ref_to_nv_ref(reference)?;
    self.resolve_npm_reference(&reference, mode, permissions)
  }
|
||||
|
||||
  /// Resolves a name/version npm reference (e.g. `chalk@5.0.0/sub`) to a
  /// file within the installed package folder via its package.json
  /// configuration. In types mode the resolved path is further mapped to a
  /// declaration file; returns `Ok(None)` when nothing resolves.
  pub fn resolve_npm_reference(
    &self,
    reference: &NpmPackageNvReference,
    mode: NodeResolutionMode,
    permissions: &mut dyn NodePermissions,
  ) -> Result<Option<NodeResolution>, AnyError> {
    let package_folder = self
      .npm_resolver
      .resolve_package_folder_from_deno_module(&reference.nv)?;
    // NOTE(review): the referrer kind is assumed to be esm — the same
    // assumption flagged by todo(16370) in `resolve`; confirm.
    let node_module_kind = NodeModuleKind::Esm;
    let maybe_resolved_path = package_config_resolve(
      // A sub path "sub/path" becomes the package subpath "./sub/path";
      // no sub path means the package root ".".
      &reference
        .sub_path
        .as_ref()
        .map(|s| format!("./{s}"))
        .unwrap_or_else(|| ".".to_string()),
      &package_folder,
      node_module_kind,
      DEFAULT_CONDITIONS,
      mode,
      &self.require_npm_resolver,
      permissions,
    )
    .with_context(|| {
      format!("Error resolving package config for '{reference}'")
    })?;
    let resolved_path = match maybe_resolved_path {
      Some(resolved_path) => resolved_path,
      None => return Ok(None),
    };
    let resolved_path = match mode {
      NodeResolutionMode::Execution => resolved_path,
      NodeResolutionMode::Types => {
        match path_to_declaration_path::<RealFs>(
          resolved_path,
          node_module_kind,
        ) {
          Some(path) => path,
          None => return Ok(None),
        }
      }
    };
    let url = ModuleSpecifier::from_file_path(resolved_path).unwrap();
    let resolve_response = self.url_to_node_resolution(url)?;
    // TODO(bartlomieju): skipped checking errors for commonJS resolution and
    // "preserveSymlinksMain"/"preserveSymlinks" options.
    Ok(Some(resolve_response))
  }
|
||||
|
||||
  /// Returns the names of the binary commands a package exposes through
  /// the "bin" field of its package.json: the package name itself for a
  /// string value, the object's keys for an object value, and an empty
  /// list otherwise.
  pub fn resolve_binary_commands(
    &self,
    pkg_nv: &NpmPackageNv,
  ) -> Result<Vec<String>, AnyError> {
    let package_folder = self
      .npm_resolver
      .resolve_package_folder_from_deno_module(pkg_nv)?;
    let package_json_path = package_folder.join("package.json");
    let package_json = PackageJson::load::<RealFs>(
      &self.require_npm_resolver,
      &mut PermissionsContainer::allow_all(),
      package_json_path,
    )?;

    Ok(match package_json.bin {
      Some(Value::String(_)) => vec![pkg_nv.name.to_string()],
      Some(Value::Object(o)) => {
        o.into_iter().map(|(key, _)| key).collect::<Vec<_>>()
      }
      _ => Vec::new(),
    })
  }
|
||||
|
||||
  /// Resolves an `npm:` reference to the file backing one of the package's
  /// "bin" entries (selected by the reference's sub path, or defaulted by
  /// `resolve_bin_entry_value`). Errors when the package has no "bin"
  /// field or the requested entry does not exist.
  pub fn resolve_binary_export(
    &self,
    pkg_ref: &NpmPackageReqReference,
  ) -> Result<NodeResolution, AnyError> {
    let pkg_nv = self
      .npm_resolution
      .resolve_pkg_id_from_pkg_req(&pkg_ref.req)?
      .nv;
    // The sub path selects which bin entry to run (e.g. npm:pkg/cli).
    let bin_name = pkg_ref.sub_path.as_deref();
    let package_folder = self
      .npm_resolver
      .resolve_package_folder_from_deno_module(&pkg_nv)?;
    let package_json_path = package_folder.join("package.json");
    let package_json = PackageJson::load::<RealFs>(
      &self.require_npm_resolver,
      &mut PermissionsContainer::allow_all(),
      package_json_path,
    )?;
    let bin = match &package_json.bin {
      Some(bin) => bin,
      None => bail!(
        "package '{}' did not have a bin property in its package.json",
        &pkg_nv.name,
      ),
    };
    let bin_entry = resolve_bin_entry_value(&pkg_nv, bin_name, bin)?;
    let url =
      ModuleSpecifier::from_file_path(package_folder.join(bin_entry)).unwrap();

    let resolve_response = self.url_to_node_resolution(url)?;
    // TODO(bartlomieju): skipped checking errors for commonJS resolution and
    // "preserveSymlinksMain"/"preserveSymlinks" options.
    Ok(resolve_response)
  }
|
||||
|
||||
  /// Classifies a resolved URL as ESM or CommonJS. `.js`/`.d.ts` files are
  /// classified by the closest package.json's "type" field; `.mjs` and
  /// `.d.mts` are always ESM; `.ts` files inside npm packages are
  /// rejected; everything else defaults to CommonJS.
  pub fn url_to_node_resolution(
    &self,
    url: ModuleSpecifier,
  ) -> Result<NodeResolution, AnyError> {
    // Extension checks below are case-insensitive.
    let url_str = url.as_str().to_lowercase();
    if url_str.starts_with("http") {
      Ok(NodeResolution::Esm(url))
    } else if url_str.ends_with(".js") || url_str.ends_with(".d.ts") {
      let package_config = get_closest_package_json::<RealFs>(
        &url,
        &self.require_npm_resolver,
        &mut PermissionsContainer::allow_all(),
      )?;
      if package_config.typ == "module" {
        Ok(NodeResolution::Esm(url))
      } else {
        Ok(NodeResolution::CommonJs(url))
      }
    } else if url_str.ends_with(".mjs") || url_str.ends_with(".d.mts") {
      Ok(NodeResolution::Esm(url))
    } else if url_str.ends_with(".ts") {
      Err(generic_error(format!(
        "TypeScript files are not supported in npm packages: {url}"
      )))
    } else {
      Ok(NodeResolution::CommonJs(url))
    }
  }
|
||||
}
|
||||
|
||||
/// Resolves a specifier that is pointing into a node_modules folder.
|
||||
///
|
||||
/// Note: This should be called whenever getting the specifier from
|
||||
/// a Module::External(module) reference because that module might
|
||||
/// not be fully resolved at the time deno_graph is analyzing it
|
||||
/// because the node_modules folder might not exist at that time.
|
||||
pub fn resolve_specifier_into_node_modules(
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> ModuleSpecifier {
|
||||
specifier
|
||||
.to_file_path()
|
||||
.ok()
|
||||
// this path might not exist at the time the graph is being created
|
||||
// because the node_modules folder might not yet exist
|
||||
.and_then(|path| canonicalize_path_maybe_not_exists(&path).ok())
|
||||
.and_then(|path| ModuleSpecifier::from_file_path(path).ok())
|
||||
.unwrap_or_else(|| specifier.clone())
|
||||
}
|
||||
|
||||
/// Selects a "bin" entry from a package.json `bin` value. For a string
/// `bin`, the entry is used only when no name (or the package's own name)
/// was requested. For an object `bin`, the requested name is looked up;
/// with no requested name, a single entry (or multiple identical entries)
/// is used, otherwise the entry matching the package name. Errors list
/// the available `npm:<pkg>/<name>` possibilities.
fn resolve_bin_entry_value<'a>(
  pkg_nv: &NpmPackageNv,
  bin_name: Option<&str>,
  bin: &'a Value,
) -> Result<&'a str, AnyError> {
  let bin_entry = match bin {
    Value::String(_) => {
      // A string "bin" implicitly names the command after the package.
      if bin_name.is_some() && bin_name.unwrap() != pkg_nv.name {
        None
      } else {
        Some(bin)
      }
    }
    Value::Object(o) => {
      if let Some(bin_name) = bin_name {
        o.get(bin_name)
      } else if o.len() == 1 || o.len() > 1 && o.values().all(|v| v == o.values().next().unwrap()) {
        // Unambiguous: one entry, or every entry points at the same value.
        o.values().next()
      } else {
        o.get(&pkg_nv.name)
      }
    },
    _ => bail!("package '{}' did not have a bin property with a string or object value in its package.json", pkg_nv),
  };
  let bin_entry = match bin_entry {
    Some(e) => e,
    None => {
      // No entry matched: build a helpful list of those that do exist.
      let keys = bin
        .as_object()
        .map(|o| {
          o.keys()
            .map(|k| format!(" * npm:{pkg_nv}/{k}"))
            .collect::<Vec<_>>()
        })
        .unwrap_or_default();
      bail!(
        "package '{}' did not have a bin entry for '{}' in its package.json{}",
        pkg_nv,
        bin_name.unwrap_or(&pkg_nv.name),
        if keys.is_empty() {
          "".to_string()
        } else {
          format!("\n\nPossibilities:\n{}", keys.join("\n"))
        }
      )
    }
  };
  match bin_entry {
    Value::String(s) => Ok(s),
    _ => bail!(
      "package '{}' had a non-string sub property of bin in its package.json",
      pkg_nv,
    ),
  }
}
|
||||
|
||||
/// Resolves a subpath (e.g. "." or "./sub/path") within a package
/// directory using its package.json: the "exports" field first (falling
/// back to legacy main resolution for "." in types mode when exports
/// resolution fails), then legacy main resolution for ".", and finally
/// the raw subpath joined onto the package directory.
fn package_config_resolve(
  package_subpath: &str,
  package_dir: &Path,
  referrer_kind: NodeModuleKind,
  conditions: &[&str],
  mode: NodeResolutionMode,
  npm_resolver: &dyn RequireNpmResolver,
  permissions: &mut dyn NodePermissions,
) -> Result<Option<PathBuf>, AnyError> {
  let package_json_path = package_dir.join("package.json");
  let referrer = ModuleSpecifier::from_directory_path(package_dir).unwrap();
  let package_config = PackageJson::load::<RealFs>(
    npm_resolver,
    permissions,
    package_json_path.clone(),
  )?;
  if let Some(exports) = &package_config.exports {
    let result = package_exports_resolve::<RealFs>(
      &package_json_path,
      package_subpath.to_string(),
      exports,
      &referrer,
      referrer_kind,
      conditions,
      mode,
      npm_resolver,
      permissions,
    );
    match result {
      Ok(found) => return Ok(Some(found)),
      Err(exports_err) => {
        // When type-checking the package root, prefer falling back to
        // legacy main resolution over surfacing the exports error, since
        // the types may only be discoverable via "main"/"types".
        if mode.is_types() && package_subpath == "." {
          if let Ok(Some(path)) =
            legacy_main_resolve::<RealFs>(&package_config, referrer_kind, mode)
          {
            return Ok(Some(path));
          } else {
            return Ok(None);
          }
        }
        return Err(exports_err);
      }
    }
  }
  if package_subpath == "." {
    return legacy_main_resolve::<RealFs>(&package_config, referrer_kind, mode);
  }

  Ok(Some(package_dir.join(package_subpath)))
}
|
||||
|
||||
fn finalize_resolution(
|
||||
resolved: ModuleSpecifier,
|
||||
base: &ModuleSpecifier,
|
||||
) -> Result<ModuleSpecifier, AnyError> {
|
||||
let encoded_sep_re = lazy_regex::regex!(r"%2F|%2C");
|
||||
|
||||
if encoded_sep_re.is_match(resolved.path()) {
|
||||
return Err(errors::err_invalid_module_specifier(
|
||||
resolved.path(),
|
||||
"must not include encoded \"/\" or \"\\\\\" characters",
|
||||
Some(to_file_path_string(base)),
|
||||
));
|
||||
}
|
||||
|
||||
let path = to_file_path(&resolved);
|
||||
|
||||
// TODO(bartlomieju): currently not supported
|
||||
// if (getOptionValue('--experimental-specifier-resolution') === 'node') {
|
||||
// ...
|
||||
// }
|
||||
|
||||
let p_str = path.to_str().unwrap();
|
||||
let p = if p_str.ends_with('/') {
|
||||
p_str[p_str.len() - 1..].to_string()
|
||||
} else {
|
||||
p_str.to_string()
|
||||
};
|
||||
|
||||
let (is_dir, is_file) = if let Ok(stats) = std::fs::metadata(p) {
|
||||
(stats.is_dir(), stats.is_file())
|
||||
} else {
|
||||
(false, false)
|
||||
};
|
||||
if is_dir {
|
||||
return Err(errors::err_unsupported_dir_import(
|
||||
resolved.as_str(),
|
||||
base.as_str(),
|
||||
));
|
||||
} else if !is_file {
|
||||
return Err(errors::err_module_not_found(
|
||||
resolved.as_str(),
|
||||
base.as_str(),
|
||||
"module",
|
||||
));
|
||||
}
|
||||
|
||||
Ok(resolved)
|
||||
}
|
||||
|
||||
fn to_file_path(url: &ModuleSpecifier) -> PathBuf {
|
||||
url
|
||||
.to_file_path()
|
||||
.unwrap_or_else(|_| panic!("Provided URL was not file:// URL: {url}"))
|
||||
}
|
||||
|
||||
fn to_file_path_string(url: &ModuleSpecifier) -> String {
|
||||
to_file_path(url).display().to_string()
|
||||
}
|
||||
|
||||
fn should_be_treated_as_relative_or_absolute_path(specifier: &str) -> bool {
|
||||
if specifier.is_empty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
if specifier.starts_with('/') {
|
||||
return true;
|
||||
}
|
||||
|
||||
is_relative_specifier(specifier)
|
||||
}
|
||||
|
||||
// TODO(ry) We very likely have this utility function elsewhere in Deno.
/// Returns true for ".", "..", and any specifier starting with "./" or
/// "../".
fn is_relative_specifier(specifier: &str) -> bool {
  specifier == "."
    || specifier == ".."
    || specifier.starts_with("./")
    || specifier.starts_with("../")
}
|
||||
|
||||
#[cfg(test)]
mod tests {
  use deno_core::serde_json::json;

  use super::*;

  /// Covers the "bin" entry selection rules of `resolve_bin_entry_value`:
  /// explicit names, defaulting by package name, ambiguity errors (with
  /// the "Possibilities" listing), identical-value shortcuts, and string
  /// `bin` values.
  #[test]
  fn test_resolve_bin_entry_value() {
    // should resolve the specified value
    let value = json!({
      "bin1": "./value1",
      "bin2": "./value2",
      "test": "./value3",
    });
    assert_eq!(
      resolve_bin_entry_value(
        &NpmPackageNv::from_str("test@1.1.1").unwrap(),
        Some("bin1"),
        &value
      )
      .unwrap(),
      "./value1"
    );

    // should resolve the value with the same name when not specified
    assert_eq!(
      resolve_bin_entry_value(
        &NpmPackageNv::from_str("test@1.1.1").unwrap(),
        None,
        &value
      )
      .unwrap(),
      "./value3"
    );

    // should not resolve when specified value does not exist
    assert_eq!(
      resolve_bin_entry_value(
        &NpmPackageNv::from_str("test@1.1.1").unwrap(),
        Some("other"),
        &value
      )
      .err()
      .unwrap()
      .to_string(),
      concat!(
        "package 'test@1.1.1' did not have a bin entry for 'other' in its package.json\n",
        "\n",
        "Possibilities:\n",
        " * npm:test@1.1.1/bin1\n",
        " * npm:test@1.1.1/bin2\n",
        " * npm:test@1.1.1/test"
      )
    );

    // should not resolve when default value can't be determined
    assert_eq!(
      resolve_bin_entry_value(
        &NpmPackageNv::from_str("asdf@1.2.3").unwrap(),
        None,
        &value
      )
      .err()
      .unwrap()
      .to_string(),
      concat!(
        "package 'asdf@1.2.3' did not have a bin entry for 'asdf' in its package.json\n",
        "\n",
        "Possibilities:\n",
        " * npm:asdf@1.2.3/bin1\n",
        " * npm:asdf@1.2.3/bin2\n",
        " * npm:asdf@1.2.3/test"
      )
    );

    // should resolve since all the values are the same
    let value = json!({
      "bin1": "./value",
      "bin2": "./value",
    });
    assert_eq!(
      resolve_bin_entry_value(
        &NpmPackageNv::from_str("test@1.2.3").unwrap(),
        None,
        &value
      )
      .unwrap(),
      "./value"
    );

    // should not resolve when specified and is a string
    let value = json!("./value");
    assert_eq!(
      resolve_bin_entry_value(
        &NpmPackageNv::from_str("test@1.2.3").unwrap(),
        Some("path"),
        &value
      )
      .err()
      .unwrap()
      .to_string(),
      "package 'test@1.2.3' did not have a bin entry for 'path' in its package.json"
    );
  }
}
|
|
@ -4,6 +4,7 @@ use std::collections::HashSet;
|
|||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow::bail;
|
||||
|
@ -19,7 +20,6 @@ use deno_semver::Version;
|
|||
use once_cell::sync::Lazy;
|
||||
|
||||
use crate::args::CacheSetting;
|
||||
use crate::cache::DenoDir;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::util::fs::canonicalize_path;
|
||||
use crate::util::fs::hard_link_dir_recursive;
|
||||
|
@ -119,20 +119,6 @@ pub struct ReadonlyNpmCache {
|
|||
root_dir_url: Url,
|
||||
}
|
||||
|
||||
// todo(dsherret): implementing Default for this is error prone because someone
|
||||
// might accidentally use the default implementation instead of getting the
|
||||
// correct location of the deno dir, which might be provided via a CLI argument.
|
||||
// That said, the rest of the LSP code does this at the moment and so this code
|
||||
// copies that.
|
||||
impl Default for ReadonlyNpmCache {
|
||||
fn default() -> Self {
|
||||
// This only gets used when creating the tsc runtime and for testing, and so
|
||||
// it shouldn't ever actually access the DenoDir, so it doesn't support a
|
||||
// custom root.
|
||||
Self::from_deno_dir(&DenoDir::new(None).unwrap())
|
||||
}
|
||||
}
|
||||
|
||||
impl ReadonlyNpmCache {
|
||||
pub fn new(root_dir: PathBuf) -> Self {
|
||||
fn try_get_canonicalized_root_dir(
|
||||
|
@ -155,10 +141,6 @@ impl ReadonlyNpmCache {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn from_deno_dir(dir: &DenoDir) -> Self {
|
||||
Self::new(dir.npm_folder_path())
|
||||
}
|
||||
|
||||
pub fn root_dir_url(&self) -> &Url {
|
||||
&self.root_dir_url
|
||||
}
|
||||
|
@ -299,21 +281,21 @@ impl ReadonlyNpmCache {
|
|||
pub struct NpmCache {
|
||||
readonly: ReadonlyNpmCache,
|
||||
cache_setting: CacheSetting,
|
||||
http_client: HttpClient,
|
||||
http_client: Arc<HttpClient>,
|
||||
progress_bar: ProgressBar,
|
||||
/// ensures a package is only downloaded once per run
|
||||
previously_reloaded_packages: Mutex<HashSet<NpmPackageNv>>,
|
||||
}
|
||||
|
||||
impl NpmCache {
|
||||
pub fn from_deno_dir(
|
||||
dir: &DenoDir,
|
||||
pub fn new(
|
||||
cache_dir_path: PathBuf,
|
||||
cache_setting: CacheSetting,
|
||||
http_client: HttpClient,
|
||||
http_client: Arc<HttpClient>,
|
||||
progress_bar: ProgressBar,
|
||||
) -> Self {
|
||||
Self {
|
||||
readonly: ReadonlyNpmCache::from_deno_dir(dir),
|
||||
readonly: ReadonlyNpmCache::new(cache_dir_path),
|
||||
cache_setting,
|
||||
http_client,
|
||||
progress_bar,
|
||||
|
|
|
@ -10,7 +10,7 @@ use deno_npm::registry::NpmRegistryApi;
|
|||
use deno_npm::registry::NpmRegistryPackageInfoLoadError;
|
||||
use deno_semver::npm::NpmPackageReq;
|
||||
|
||||
use crate::args::package_json::PackageJsonDeps;
|
||||
use crate::args::PackageJsonDepsProvider;
|
||||
use crate::util::sync::AtomicFlag;
|
||||
|
||||
use super::CliNpmRegistryApi;
|
||||
|
@ -18,40 +18,29 @@ use super::NpmResolution;
|
|||
|
||||
#[derive(Debug)]
|
||||
struct PackageJsonDepsInstallerInner {
|
||||
deps_provider: Arc<PackageJsonDepsProvider>,
|
||||
has_installed_flag: AtomicFlag,
|
||||
npm_registry_api: Arc<CliNpmRegistryApi>,
|
||||
npm_resolution: Arc<NpmResolution>,
|
||||
package_deps: PackageJsonDeps,
|
||||
}
|
||||
|
||||
impl PackageJsonDepsInstallerInner {
|
||||
pub fn reqs(&self) -> Vec<&NpmPackageReq> {
|
||||
let mut package_reqs = self
|
||||
.package_deps
|
||||
.values()
|
||||
.filter_map(|r| r.as_ref().ok())
|
||||
.collect::<Vec<_>>();
|
||||
package_reqs.sort(); // deterministic resolution
|
||||
package_reqs
|
||||
}
|
||||
|
||||
pub fn reqs_with_info_futures(
|
||||
pub fn reqs_with_info_futures<'a>(
|
||||
&self,
|
||||
reqs: &'a [&'a NpmPackageReq],
|
||||
) -> FuturesOrdered<
|
||||
impl Future<
|
||||
Output = Result<
|
||||
(&NpmPackageReq, Arc<deno_npm::registry::NpmPackageInfo>),
|
||||
(&'a NpmPackageReq, Arc<deno_npm::registry::NpmPackageInfo>),
|
||||
NpmRegistryPackageInfoLoadError,
|
||||
>,
|
||||
>,
|
||||
> {
|
||||
let package_reqs = self.reqs();
|
||||
|
||||
FuturesOrdered::from_iter(package_reqs.into_iter().map(|req| {
|
||||
FuturesOrdered::from_iter(reqs.iter().map(|req| {
|
||||
let api = self.npm_registry_api.clone();
|
||||
async move {
|
||||
let info = api.package_info(&req.name).await?;
|
||||
Ok::<_, NpmRegistryPackageInfoLoadError>((req, info))
|
||||
Ok::<_, NpmRegistryPackageInfoLoadError>((*req, info))
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
@ -63,20 +52,22 @@ pub struct PackageJsonDepsInstaller(Option<PackageJsonDepsInstallerInner>);
|
|||
|
||||
impl PackageJsonDepsInstaller {
|
||||
pub fn new(
|
||||
deps_provider: Arc<PackageJsonDepsProvider>,
|
||||
npm_registry_api: Arc<CliNpmRegistryApi>,
|
||||
npm_resolution: Arc<NpmResolution>,
|
||||
deps: Option<PackageJsonDeps>,
|
||||
) -> Self {
|
||||
Self(deps.map(|package_deps| PackageJsonDepsInstallerInner {
|
||||
Self(Some(PackageJsonDepsInstallerInner {
|
||||
deps_provider,
|
||||
has_installed_flag: Default::default(),
|
||||
npm_registry_api,
|
||||
npm_resolution,
|
||||
package_deps,
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn package_deps(&self) -> Option<&PackageJsonDeps> {
|
||||
self.0.as_ref().map(|inner| &inner.package_deps)
|
||||
/// Creates an installer that never installs local packages during
|
||||
/// resolution. A top level install will be a no-op.
|
||||
pub fn no_op() -> Self {
|
||||
Self(None)
|
||||
}
|
||||
|
||||
/// Installs the top level dependencies in the package.json file
|
||||
|
@ -91,7 +82,24 @@ impl PackageJsonDepsInstaller {
|
|||
return Ok(()); // already installed by something else
|
||||
}
|
||||
|
||||
let mut reqs_with_info_futures = inner.reqs_with_info_futures();
|
||||
let package_reqs = inner.deps_provider.reqs();
|
||||
|
||||
// check if something needs resolving before bothering to load all
|
||||
// the package information (which is slow)
|
||||
if package_reqs.iter().all(|req| {
|
||||
inner
|
||||
.npm_resolution
|
||||
.resolve_pkg_id_from_pkg_req(req)
|
||||
.is_ok()
|
||||
}) {
|
||||
log::debug!(
|
||||
"All package.json deps resolvable. Skipping top level install."
|
||||
);
|
||||
return Ok(()); // everything is already resolvable
|
||||
}
|
||||
|
||||
let mut reqs_with_info_futures =
|
||||
inner.reqs_with_info_futures(&package_reqs);
|
||||
|
||||
while let Some(result) = reqs_with_info_futures.next().await {
|
||||
let (req, info) = result?;
|
||||
|
@ -102,7 +110,7 @@ impl PackageJsonDepsInstaller {
|
|||
if inner.npm_registry_api.mark_force_reload() {
|
||||
log::debug!("Failed to resolve package. Retrying. Error: {err:#}");
|
||||
// re-initialize
|
||||
reqs_with_info_futures = inner.reqs_with_info_futures();
|
||||
reqs_with_info_futures = inner.reqs_with_info_futures(&package_reqs);
|
||||
} else {
|
||||
return Err(err.into());
|
||||
}
|
||||
|
|
|
@ -13,6 +13,6 @@ pub use installer::PackageJsonDepsInstaller;
|
|||
pub use registry::CliNpmRegistryApi;
|
||||
pub use resolution::NpmResolution;
|
||||
pub use resolvers::create_npm_fs_resolver;
|
||||
pub use resolvers::NpmPackageResolver;
|
||||
pub use resolvers::CliNpmResolver;
|
||||
pub use resolvers::NpmPackageFsResolver;
|
||||
pub use resolvers::NpmProcessState;
|
||||
pub use resolvers::RequireNpmPackageResolver;
|
||||
|
|
|
@ -63,7 +63,7 @@ impl CliNpmRegistryApi {
|
|||
pub fn new(
|
||||
base_url: Url,
|
||||
cache: Arc<NpmCache>,
|
||||
http_client: HttpClient,
|
||||
http_client: Arc<HttpClient>,
|
||||
progress_bar: ProgressBar,
|
||||
) -> Self {
|
||||
Self(Some(Arc::new(CliNpmRegistryApiInner {
|
||||
|
@ -172,7 +172,7 @@ struct CliNpmRegistryApiInner {
|
|||
force_reload_flag: AtomicFlag,
|
||||
mem_cache: Mutex<HashMap<String, CacheItem>>,
|
||||
previously_reloaded_packages: Mutex<HashSet<String>>,
|
||||
http_client: HttpClient,
|
||||
http_client: Arc<HttpClient>,
|
||||
progress_bar: ProgressBar,
|
||||
}
|
||||
|
||||
|
|
|
@ -23,6 +23,7 @@ use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
|||
use deno_npm::NpmPackageCacheFolderId;
|
||||
use deno_npm::NpmPackageId;
|
||||
use deno_npm::NpmResolutionPackage;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_semver::npm::NpmPackageNv;
|
||||
use deno_semver::npm::NpmPackageNvReference;
|
||||
use deno_semver::npm::NpmPackageReq;
|
||||
|
@ -88,7 +89,7 @@ impl NpmResolution {
|
|||
|
||||
pub async fn add_package_reqs(
|
||||
&self,
|
||||
package_reqs: Vec<NpmPackageReq>,
|
||||
package_reqs: &[NpmPackageReq],
|
||||
) -> Result<(), AnyError> {
|
||||
// only allow one thread in here at a time
|
||||
let _permit = self.update_queue.acquire().await;
|
||||
|
@ -106,12 +107,12 @@ impl NpmResolution {
|
|||
|
||||
pub async fn set_package_reqs(
|
||||
&self,
|
||||
package_reqs: Vec<NpmPackageReq>,
|
||||
package_reqs: &[NpmPackageReq],
|
||||
) -> Result<(), AnyError> {
|
||||
// only allow one thread in here at a time
|
||||
let _permit = self.update_queue.acquire().await;
|
||||
|
||||
let reqs_set = package_reqs.iter().cloned().collect::<HashSet<_>>();
|
||||
let reqs_set = package_reqs.iter().collect::<HashSet<_>>();
|
||||
let snapshot = add_package_reqs_to_snapshot(
|
||||
&self.api,
|
||||
package_reqs,
|
||||
|
@ -143,7 +144,7 @@ impl NpmResolution {
|
|||
|
||||
let snapshot = add_package_reqs_to_snapshot(
|
||||
&self.api,
|
||||
Vec::new(),
|
||||
&Vec::new(),
|
||||
self.maybe_lockfile.clone(),
|
||||
|| self.snapshot.read().clone(),
|
||||
)
|
||||
|
@ -154,7 +155,7 @@ impl NpmResolution {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub fn pkg_req_ref_to_nv_ref(
|
||||
pub fn resolve_nv_ref_from_pkg_req_ref(
|
||||
&self,
|
||||
req_ref: &NpmPackageReqReference,
|
||||
) -> Result<NpmPackageNvReference, PackageReqNotFoundError> {
|
||||
|
@ -197,7 +198,7 @@ impl NpmResolution {
|
|||
.snapshot
|
||||
.read()
|
||||
.resolve_pkg_from_pkg_req(req)
|
||||
.map(|pkg| pkg.pkg_id.clone())
|
||||
.map(|pkg| pkg.id.clone())
|
||||
}
|
||||
|
||||
pub fn resolve_pkg_id_from_deno_module(
|
||||
|
@ -208,7 +209,7 @@ impl NpmResolution {
|
|||
.snapshot
|
||||
.read()
|
||||
.resolve_package_from_deno_module(id)
|
||||
.map(|pkg| pkg.pkg_id.clone())
|
||||
.map(|pkg| pkg.id.clone())
|
||||
}
|
||||
|
||||
/// Resolves a package requirement for deno graph. This should only be
|
||||
|
@ -237,8 +238,21 @@ impl NpmResolution {
|
|||
Ok(nv)
|
||||
}
|
||||
|
||||
pub fn all_packages_partitioned(&self) -> NpmPackagesPartitioned {
|
||||
self.snapshot.read().all_packages_partitioned()
|
||||
pub fn all_system_packages(
|
||||
&self,
|
||||
system_info: &NpmSystemInfo,
|
||||
) -> Vec<NpmResolutionPackage> {
|
||||
self.snapshot.read().all_system_packages(system_info)
|
||||
}
|
||||
|
||||
pub fn all_system_packages_partitioned(
|
||||
&self,
|
||||
system_info: &NpmSystemInfo,
|
||||
) -> NpmPackagesPartitioned {
|
||||
self
|
||||
.snapshot
|
||||
.read()
|
||||
.all_system_packages_partitioned(system_info)
|
||||
}
|
||||
|
||||
pub fn has_packages(&self) -> bool {
|
||||
|
@ -261,10 +275,7 @@ impl NpmResolution {
|
|||
|
||||
async fn add_package_reqs_to_snapshot(
|
||||
api: &CliNpmRegistryApi,
|
||||
// todo(18079): it should be possible to pass &[NpmPackageReq] in here
|
||||
// and avoid all these clones, but the LSP complains because of its
|
||||
// `Send` requirement
|
||||
package_reqs: Vec<NpmPackageReq>,
|
||||
package_reqs: &[NpmPackageReq],
|
||||
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
get_new_snapshot: impl Fn() -> NpmResolutionSnapshot,
|
||||
) -> Result<NpmResolutionSnapshot, AnyError> {
|
||||
|
@ -274,10 +285,11 @@ async fn add_package_reqs_to_snapshot(
|
|||
.iter()
|
||||
.all(|req| snapshot.package_reqs().contains_key(req))
|
||||
{
|
||||
return Ok(snapshot); // already up to date
|
||||
log::debug!("Snapshot already up to date. Skipping pending resolution.");
|
||||
return Ok(snapshot);
|
||||
}
|
||||
|
||||
let result = snapshot.resolve_pending(package_reqs.clone()).await;
|
||||
let result = snapshot.resolve_pending(package_reqs).await;
|
||||
api.clear_memory_cache();
|
||||
let snapshot = match result {
|
||||
Ok(snapshot) => snapshot,
|
||||
|
@ -314,11 +326,11 @@ fn populate_lockfile_from_snapshot(
|
|||
snapshot
|
||||
.resolve_package_from_deno_module(nv)
|
||||
.unwrap()
|
||||
.pkg_id
|
||||
.id
|
||||
.as_serialized(),
|
||||
);
|
||||
}
|
||||
for package in snapshot.all_packages() {
|
||||
for package in snapshot.all_packages_for_every_system() {
|
||||
lockfile
|
||||
.check_or_insert_npm_package(npm_package_to_lockfile_info(package))?;
|
||||
}
|
||||
|
@ -326,20 +338,20 @@ fn populate_lockfile_from_snapshot(
|
|||
}
|
||||
|
||||
fn npm_package_to_lockfile_info(
|
||||
pkg: NpmResolutionPackage,
|
||||
pkg: &NpmResolutionPackage,
|
||||
) -> NpmPackageLockfileInfo {
|
||||
let dependencies = pkg
|
||||
.dependencies
|
||||
.into_iter()
|
||||
.iter()
|
||||
.map(|(name, id)| NpmPackageDependencyLockfileInfo {
|
||||
name,
|
||||
name: name.clone(),
|
||||
id: id.as_serialized(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
NpmPackageLockfileInfo {
|
||||
display_id: pkg.pkg_id.nv.to_string(),
|
||||
serialized_id: pkg.pkg_id.as_serialized(),
|
||||
display_id: pkg.id.nv.to_string(),
|
||||
serialized_id: pkg.id.as_serialized(),
|
||||
integrity: pkg.dist.integrity().to_string(),
|
||||
dependencies,
|
||||
}
|
||||
|
|
|
@ -9,9 +9,11 @@ use async_trait::async_trait;
|
|||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures;
|
||||
use deno_core::task::spawn;
|
||||
use deno_core::url::Url;
|
||||
use deno_npm::NpmPackageId;
|
||||
use deno_npm::NpmResolutionPackage;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node::NodePermissions;
|
||||
use deno_runtime::deno_node::NodeResolutionMode;
|
||||
|
||||
|
@ -47,7 +49,7 @@ pub trait NpmPackageFsResolver: Send + Sync {
|
|||
|
||||
fn ensure_read_permission(
|
||||
&self,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
permissions: &dyn NodePermissions,
|
||||
path: &Path,
|
||||
) -> Result<(), AnyError>;
|
||||
}
|
||||
|
@ -62,17 +64,16 @@ pub async fn cache_packages(
|
|||
if sync_download {
|
||||
// we're running the tests not with --quiet
|
||||
// and we want the output to be deterministic
|
||||
packages.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));
|
||||
packages.sort_by(|a, b| a.id.cmp(&b.id));
|
||||
}
|
||||
|
||||
let mut handles = Vec::with_capacity(packages.len());
|
||||
for package in packages {
|
||||
assert_eq!(package.copy_index, 0); // the caller should not provide any of these
|
||||
let cache = cache.clone();
|
||||
let registry_url = registry_url.clone();
|
||||
let handle = tokio::task::spawn(async move {
|
||||
let handle = spawn(async move {
|
||||
cache
|
||||
.ensure_package(&package.pkg_id.nv, &package.dist, ®istry_url)
|
||||
.ensure_package(&package.id.nv, &package.dist, ®istry_url)
|
||||
.await
|
||||
});
|
||||
if sync_download {
|
||||
|
@ -90,7 +91,8 @@ pub async fn cache_packages(
|
|||
}
|
||||
|
||||
pub fn ensure_registry_read_permission(
|
||||
permissions: &mut dyn NodePermissions,
|
||||
fs: &Arc<dyn FileSystem>,
|
||||
permissions: &dyn NodePermissions,
|
||||
registry_path: &Path,
|
||||
path: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
|
@ -101,8 +103,8 @@ pub fn ensure_registry_read_permission(
|
|||
.all(|c| !matches!(c, std::path::Component::ParentDir))
|
||||
{
|
||||
// todo(dsherret): cache this?
|
||||
if let Ok(registry_path) = std::fs::canonicalize(registry_path) {
|
||||
match std::fs::canonicalize(path) {
|
||||
if let Ok(registry_path) = fs.realpath_sync(registry_path) {
|
||||
match fs.realpath_sync(path) {
|
||||
Ok(path) if path.starts_with(registry_path) => {
|
||||
return Ok(());
|
||||
}
|
||||
|
|
|
@ -14,6 +14,8 @@ use deno_npm::resolution::PackageNotFoundFromReferrerError;
|
|||
use deno_npm::NpmPackageCacheFolderId;
|
||||
use deno_npm::NpmPackageId;
|
||||
use deno_npm::NpmResolutionPackage;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node::NodePermissions;
|
||||
use deno_runtime::deno_node::NodeResolutionMode;
|
||||
|
||||
|
@ -28,21 +30,27 @@ use super::common::NpmPackageFsResolver;
|
|||
/// Resolves packages from the global npm cache.
|
||||
#[derive(Debug)]
|
||||
pub struct GlobalNpmPackageResolver {
|
||||
fs: Arc<dyn FileSystem>,
|
||||
cache: Arc<NpmCache>,
|
||||
resolution: Arc<NpmResolution>,
|
||||
registry_url: Url,
|
||||
system_info: NpmSystemInfo,
|
||||
}
|
||||
|
||||
impl GlobalNpmPackageResolver {
|
||||
pub fn new(
|
||||
fs: Arc<dyn FileSystem>,
|
||||
cache: Arc<NpmCache>,
|
||||
registry_url: Url,
|
||||
resolution: Arc<NpmResolution>,
|
||||
system_info: NpmSystemInfo,
|
||||
) -> Self {
|
||||
Self {
|
||||
fs,
|
||||
cache,
|
||||
resolution,
|
||||
registry_url,
|
||||
system_info,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -102,7 +110,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
|
|||
.resolution
|
||||
.resolve_package_from_package(name, &referrer_pkg_id)?
|
||||
};
|
||||
self.package_folder(&pkg.pkg_id)
|
||||
self.package_folder(&pkg.id)
|
||||
}
|
||||
|
||||
fn resolve_package_folder_from_specifier(
|
||||
|
@ -121,38 +129,34 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
|
|||
}
|
||||
|
||||
async fn cache_packages(&self) -> Result<(), AnyError> {
|
||||
cache_packages_in_resolver(self).await
|
||||
let package_partitions = self
|
||||
.resolution
|
||||
.all_system_packages_partitioned(&self.system_info);
|
||||
|
||||
cache_packages(
|
||||
package_partitions.packages,
|
||||
&self.cache,
|
||||
&self.registry_url,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// create the copy package folders
|
||||
for copy in package_partitions.copy_packages {
|
||||
self.cache.ensure_copy_package(
|
||||
©.get_package_cache_folder_id(),
|
||||
&self.registry_url,
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn ensure_read_permission(
|
||||
&self,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
permissions: &dyn NodePermissions,
|
||||
path: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
let registry_path = self.cache.registry_folder(&self.registry_url);
|
||||
ensure_registry_read_permission(permissions, ®istry_path, path)
|
||||
ensure_registry_read_permission(&self.fs, permissions, ®istry_path, path)
|
||||
}
|
||||
}
|
||||
|
||||
async fn cache_packages_in_resolver(
|
||||
resolver: &GlobalNpmPackageResolver,
|
||||
) -> Result<(), AnyError> {
|
||||
let package_partitions = resolver.resolution.all_packages_partitioned();
|
||||
|
||||
cache_packages(
|
||||
package_partitions.packages,
|
||||
&resolver.cache,
|
||||
&resolver.registry_url,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// create the copy package folders
|
||||
for copy in package_partitions.copy_packages {
|
||||
resolver.cache.ensure_copy_package(
|
||||
©.get_package_cache_folder_id(),
|
||||
&resolver.registry_url,
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -3,8 +3,9 @@
|
|||
//! Code for local node_modules resolution.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::cmp::Ordering;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::collections::VecDeque;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
@ -19,15 +20,19 @@ use deno_ast::ModuleSpecifier;
|
|||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::task::spawn;
|
||||
use deno_core::task::JoinHandle;
|
||||
use deno_core::url::Url;
|
||||
use deno_npm::resolution::NpmResolutionSnapshot;
|
||||
use deno_npm::NpmPackageCacheFolderId;
|
||||
use deno_npm::NpmPackageId;
|
||||
use deno_npm::NpmResolutionPackage;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_runtime::deno_core::futures;
|
||||
use deno_runtime::deno_fs;
|
||||
use deno_runtime::deno_node::NodePermissions;
|
||||
use deno_runtime::deno_node::NodeResolutionMode;
|
||||
use deno_runtime::deno_node::PackageJson;
|
||||
use tokio::task::JoinHandle;
|
||||
|
||||
use crate::npm::cache::mixed_case_package_name_encode;
|
||||
use crate::npm::cache::should_sync_download;
|
||||
|
@ -44,23 +49,28 @@ use super::common::NpmPackageFsResolver;
|
|||
/// and resolves packages from it.
|
||||
#[derive(Debug)]
|
||||
pub struct LocalNpmPackageResolver {
|
||||
fs: Arc<dyn deno_fs::FileSystem>,
|
||||
cache: Arc<NpmCache>,
|
||||
progress_bar: ProgressBar,
|
||||
resolution: Arc<NpmResolution>,
|
||||
registry_url: Url,
|
||||
root_node_modules_path: PathBuf,
|
||||
root_node_modules_url: Url,
|
||||
system_info: NpmSystemInfo,
|
||||
}
|
||||
|
||||
impl LocalNpmPackageResolver {
|
||||
pub fn new(
|
||||
fs: Arc<dyn deno_fs::FileSystem>,
|
||||
cache: Arc<NpmCache>,
|
||||
progress_bar: ProgressBar,
|
||||
registry_url: Url,
|
||||
node_modules_folder: PathBuf,
|
||||
resolution: Arc<NpmResolution>,
|
||||
system_info: NpmSystemInfo,
|
||||
) -> Self {
|
||||
Self {
|
||||
fs,
|
||||
cache,
|
||||
progress_bar,
|
||||
resolution,
|
||||
|
@ -68,6 +78,7 @@ impl LocalNpmPackageResolver {
|
|||
root_node_modules_url: Url::from_directory_path(&node_modules_folder)
|
||||
.unwrap(),
|
||||
root_node_modules_path: node_modules_folder,
|
||||
system_info,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -88,7 +99,11 @@ impl LocalNpmPackageResolver {
|
|||
specifier: &ModuleSpecifier,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
match self.maybe_resolve_folder_for_specifier(specifier) {
|
||||
Some(path) => Ok(path),
|
||||
// Canonicalize the path so it's not pointing to the symlinked directory
|
||||
// in `node_modules` directory of the referrer.
|
||||
Some(path) => {
|
||||
Ok(deno_core::strip_unc_prefix(self.fs.realpath_sync(&path)?))
|
||||
}
|
||||
None => bail!("could not find npm package for '{}'", specifier),
|
||||
}
|
||||
}
|
||||
|
@ -145,14 +160,20 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
let package_root_path = self.resolve_package_root(&local_path);
|
||||
let mut current_folder = package_root_path.as_path();
|
||||
loop {
|
||||
current_folder = get_next_node_modules_ancestor(current_folder);
|
||||
let sub_dir = join_package_name(current_folder, name);
|
||||
if sub_dir.is_dir() {
|
||||
current_folder = current_folder.parent().unwrap();
|
||||
let node_modules_folder = if current_folder.ends_with("node_modules") {
|
||||
Cow::Borrowed(current_folder)
|
||||
} else {
|
||||
Cow::Owned(current_folder.join("node_modules"))
|
||||
};
|
||||
let sub_dir = join_package_name(&node_modules_folder, name);
|
||||
if self.fs.is_dir(&sub_dir) {
|
||||
// if doing types resolution, only resolve the package if it specifies a types property
|
||||
if mode.is_types() && !name.starts_with("@types/") {
|
||||
let package_json = PackageJson::load_skip_read_permission::<
|
||||
deno_runtime::deno_node::RealFs,
|
||||
>(sub_dir.join("package.json"))?;
|
||||
let package_json = PackageJson::load_skip_read_permission(
|
||||
&*self.fs,
|
||||
sub_dir.join("package.json"),
|
||||
)?;
|
||||
if package_json.types.is_some() {
|
||||
return Ok(sub_dir);
|
||||
}
|
||||
|
@ -164,8 +185,8 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
// if doing type resolution, check for the existence of a @types package
|
||||
if mode.is_types() && !name.starts_with("@types/") {
|
||||
let sub_dir =
|
||||
join_package_name(current_folder, &types_package_name(name));
|
||||
if sub_dir.is_dir() {
|
||||
join_package_name(&node_modules_folder, &types_package_name(name));
|
||||
if self.fs.is_dir(&sub_dir) {
|
||||
return Ok(sub_dir);
|
||||
}
|
||||
}
|
||||
|
@ -196,16 +217,18 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
&self.progress_bar,
|
||||
&self.registry_url,
|
||||
&self.root_node_modules_path,
|
||||
&self.system_info,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
fn ensure_read_permission(
|
||||
&self,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
permissions: &dyn NodePermissions,
|
||||
path: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
ensure_registry_read_permission(
|
||||
&self.fs,
|
||||
permissions,
|
||||
&self.root_node_modules_path,
|
||||
path,
|
||||
|
@ -220,13 +243,15 @@ async fn sync_resolution_with_fs(
|
|||
progress_bar: &ProgressBar,
|
||||
registry_url: &Url,
|
||||
root_node_modules_dir_path: &Path,
|
||||
system_info: &NpmSystemInfo,
|
||||
) -> Result<(), AnyError> {
|
||||
if snapshot.is_empty() {
|
||||
return Ok(()); // don't create the directory
|
||||
}
|
||||
|
||||
let deno_local_registry_dir = root_node_modules_dir_path.join(".deno");
|
||||
fs::create_dir_all(&deno_local_registry_dir).with_context(|| {
|
||||
let deno_node_modules_dir = deno_local_registry_dir.join("node_modules");
|
||||
fs::create_dir_all(&deno_node_modules_dir).with_context(|| {
|
||||
format!("Creating '{}'", deno_local_registry_dir.display())
|
||||
})?;
|
||||
|
||||
|
@ -244,47 +269,56 @@ async fn sync_resolution_with_fs(
|
|||
// Copy (hardlink in future) <global_registry_cache>/<package_id>/ to
|
||||
// node_modules/.deno/<package_folder_id_folder_name>/node_modules/<package_name>
|
||||
let sync_download = should_sync_download();
|
||||
let mut package_partitions = snapshot.all_packages_partitioned();
|
||||
let mut package_partitions =
|
||||
snapshot.all_system_packages_partitioned(system_info);
|
||||
if sync_download {
|
||||
// we're running the tests not with --quiet
|
||||
// and we want the output to be deterministic
|
||||
package_partitions
|
||||
.packages
|
||||
.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id));
|
||||
package_partitions.packages.sort_by(|a, b| a.id.cmp(&b.id));
|
||||
}
|
||||
let mut handles: Vec<JoinHandle<Result<(), AnyError>>> =
|
||||
Vec::with_capacity(package_partitions.packages.len());
|
||||
let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> =
|
||||
HashMap::with_capacity(package_partitions.packages.len());
|
||||
for package in &package_partitions.packages {
|
||||
if let Some(current_pkg) =
|
||||
newest_packages_by_name.get_mut(&package.id.nv.name)
|
||||
{
|
||||
if current_pkg.id.nv.cmp(&package.id.nv) == Ordering::Less {
|
||||
*current_pkg = package;
|
||||
}
|
||||
} else {
|
||||
newest_packages_by_name.insert(&package.id.nv.name, package);
|
||||
};
|
||||
|
||||
let folder_name =
|
||||
get_package_folder_id_folder_name(&package.get_package_cache_folder_id());
|
||||
let folder_path = deno_local_registry_dir.join(&folder_name);
|
||||
let initialized_file = folder_path.join(".initialized");
|
||||
if !cache
|
||||
.cache_setting()
|
||||
.should_use_for_npm_package(&package.pkg_id.nv.name)
|
||||
.should_use_for_npm_package(&package.id.nv.name)
|
||||
|| !initialized_file.exists()
|
||||
{
|
||||
let pb = progress_bar.clone();
|
||||
let cache = cache.clone();
|
||||
let registry_url = registry_url.clone();
|
||||
let package = package.clone();
|
||||
let handle = tokio::task::spawn(async move {
|
||||
let handle = spawn(async move {
|
||||
cache
|
||||
.ensure_package(&package.pkg_id.nv, &package.dist, ®istry_url)
|
||||
.ensure_package(&package.id.nv, &package.dist, ®istry_url)
|
||||
.await?;
|
||||
let pb_guard = pb.update_with_prompt(
|
||||
ProgressMessagePrompt::Initialize,
|
||||
&package.pkg_id.nv.to_string(),
|
||||
&package.id.nv.to_string(),
|
||||
);
|
||||
let sub_node_modules = folder_path.join("node_modules");
|
||||
let package_path =
|
||||
join_package_name(&sub_node_modules, &package.pkg_id.nv.name);
|
||||
join_package_name(&sub_node_modules, &package.id.nv.name);
|
||||
fs::create_dir_all(&package_path)
|
||||
.with_context(|| format!("Creating '{}'", folder_path.display()))?;
|
||||
let cache_folder = cache.package_folder_for_name_and_version(
|
||||
&package.pkg_id.nv,
|
||||
®istry_url,
|
||||
);
|
||||
let cache_folder = cache
|
||||
.package_folder_for_name_and_version(&package.id.nv, ®istry_url);
|
||||
// for now copy, but in the future consider hard linking
|
||||
copy_dir_recursive(&cache_folder, &package_path)?;
|
||||
// write out a file that indicates this folder has been initialized
|
||||
|
@ -315,7 +349,7 @@ async fn sync_resolution_with_fs(
|
|||
if !initialized_file.exists() {
|
||||
let sub_node_modules = destination_path.join("node_modules");
|
||||
let package_path =
|
||||
join_package_name(&sub_node_modules, &package.pkg_id.nv.name);
|
||||
join_package_name(&sub_node_modules, &package.id.nv.name);
|
||||
fs::create_dir_all(&package_path).with_context(|| {
|
||||
format!("Creating '{}'", destination_path.display())
|
||||
})?;
|
||||
|
@ -325,7 +359,7 @@ async fn sync_resolution_with_fs(
|
|||
&package_cache_folder_id.with_no_count(),
|
||||
))
|
||||
.join("node_modules"),
|
||||
&package.pkg_id.nv.name,
|
||||
&package.id.nv.name,
|
||||
);
|
||||
hard_link_dir_recursive(&source_path, &package_path)?;
|
||||
// write out a file that indicates this folder has been initialized
|
||||
|
@ -333,13 +367,11 @@ async fn sync_resolution_with_fs(
|
|||
}
|
||||
}
|
||||
|
||||
let all_packages = package_partitions.into_all();
|
||||
|
||||
// 3. Symlink all the dependencies into the .deno directory.
|
||||
//
|
||||
// Symlink node_modules/.deno/<package_id>/node_modules/<dep_name> to
|
||||
// node_modules/.deno/<dep_id>/node_modules/<dep_package_name>
|
||||
for package in &all_packages {
|
||||
for package in package_partitions.iter_all() {
|
||||
let sub_node_modules = deno_local_registry_dir
|
||||
.join(get_package_folder_id_folder_name(
|
||||
&package.get_package_cache_folder_id(),
|
||||
|
@ -365,21 +397,17 @@ async fn sync_resolution_with_fs(
|
|||
}
|
||||
}
|
||||
|
||||
// 4. Create all the packages in the node_modules folder, which are symlinks.
|
||||
// 4. Create all the top level packages in the node_modules folder, which are symlinks.
|
||||
//
|
||||
// Symlink node_modules/<package_name> to
|
||||
// node_modules/.deno/<package_id>/node_modules/<package_name>
|
||||
let mut found_names = HashSet::new();
|
||||
let mut pending_packages = VecDeque::new();
|
||||
pending_packages.extend(snapshot.top_level_packages().map(|id| (id, true)));
|
||||
while let Some((id, is_top_level)) = pending_packages.pop_front() {
|
||||
let root_folder_name = if found_names.insert(id.nv.name.clone()) {
|
||||
id.nv.name.clone()
|
||||
} else if is_top_level {
|
||||
id.nv.to_string()
|
||||
} else {
|
||||
let mut ids = snapshot.top_level_packages().collect::<Vec<_>>();
|
||||
ids.sort_by(|a, b| b.cmp(a)); // create determinism and only include the latest version
|
||||
for id in ids {
|
||||
if !found_names.insert(&id.nv.name) {
|
||||
continue; // skip, already handled
|
||||
};
|
||||
}
|
||||
let package = snapshot.package_from_id(id).unwrap();
|
||||
let local_registry_package_path = join_package_name(
|
||||
&deno_local_registry_dir
|
||||
|
@ -392,11 +420,30 @@ async fn sync_resolution_with_fs(
|
|||
|
||||
symlink_package_dir(
|
||||
&local_registry_package_path,
|
||||
&join_package_name(root_node_modules_dir_path, &root_folder_name),
|
||||
&join_package_name(root_node_modules_dir_path, &id.nv.name),
|
||||
)?;
|
||||
for id in package.dependencies.values() {
|
||||
pending_packages.push_back((id, false));
|
||||
}
|
||||
|
||||
// 5. Create a node_modules/.deno/node_modules/<package-name> directory with
|
||||
// the remaining packages
|
||||
for package in newest_packages_by_name.values() {
|
||||
if !found_names.insert(&package.id.nv.name) {
|
||||
continue; // skip, already handled
|
||||
}
|
||||
|
||||
let local_registry_package_path = join_package_name(
|
||||
&deno_local_registry_dir
|
||||
.join(get_package_folder_id_folder_name(
|
||||
&package.get_package_cache_folder_id(),
|
||||
))
|
||||
.join("node_modules"),
|
||||
&package.id.nv.name,
|
||||
);
|
||||
|
||||
symlink_package_dir(
|
||||
&local_registry_package_path,
|
||||
&join_package_name(&deno_node_modules_dir, &package.id.nv.name),
|
||||
)?;
|
||||
}
|
||||
|
||||
drop(single_process_lock);
|
||||
|
@ -482,13 +529,3 @@ fn join_package_name(path: &Path, package_name: &str) -> PathBuf {
|
|||
}
|
||||
path
|
||||
}
|
||||
|
||||
fn get_next_node_modules_ancestor(mut path: &Path) -> &Path {
|
||||
loop {
|
||||
path = path.parent().unwrap();
|
||||
let file_name = path.file_name().unwrap().to_string_lossy();
|
||||
if file_name == "node_modules" {
|
||||
return path;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,25 +18,30 @@ use deno_npm::resolution::NpmResolutionSnapshot;
|
|||
use deno_npm::resolution::PackageReqNotFoundError;
|
||||
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
|
||||
use deno_npm::NpmPackageId;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node::NodePermissions;
|
||||
use deno_runtime::deno_node::NodeResolutionMode;
|
||||
use deno_runtime::deno_node::NpmResolver;
|
||||
use deno_runtime::deno_node::PathClean;
|
||||
use deno_runtime::deno_node::RequireNpmResolver;
|
||||
use deno_semver::npm::NpmPackageNv;
|
||||
use deno_semver::npm::NpmPackageNvReference;
|
||||
use deno_semver::npm::NpmPackageReq;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use global::GlobalNpmPackageResolver;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::args::Lockfile;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
|
||||
use self::common::NpmPackageFsResolver;
|
||||
use self::local::LocalNpmPackageResolver;
|
||||
use super::resolution::NpmResolution;
|
||||
use super::NpmCache;
|
||||
|
||||
pub use self::common::NpmPackageFsResolver;
|
||||
|
||||
/// State provided to the process via an environment variable.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct NpmProcessState {
|
||||
|
@ -45,15 +50,17 @@ pub struct NpmProcessState {
|
|||
}
|
||||
|
||||
/// Brings together the npm resolution with the file system.
|
||||
pub struct NpmPackageResolver {
|
||||
pub struct CliNpmResolver {
|
||||
fs: Arc<dyn FileSystem>,
|
||||
fs_resolver: Arc<dyn NpmPackageFsResolver>,
|
||||
resolution: Arc<NpmResolution>,
|
||||
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for NpmPackageResolver {
|
||||
impl std::fmt::Debug for CliNpmResolver {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("NpmPackageResolver")
|
||||
.field("fs", &"<omitted>")
|
||||
.field("fs_resolver", &"<omitted>")
|
||||
.field("resolution", &"<omitted>")
|
||||
.field("maybe_lockfile", &"<omitted>")
|
||||
|
@ -61,13 +68,15 @@ impl std::fmt::Debug for NpmPackageResolver {
|
|||
}
|
||||
}
|
||||
|
||||
impl NpmPackageResolver {
|
||||
impl CliNpmResolver {
|
||||
pub fn new(
|
||||
fs: Arc<dyn FileSystem>,
|
||||
resolution: Arc<NpmResolution>,
|
||||
fs_resolver: Arc<dyn NpmPackageFsResolver>,
|
||||
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
) -> Self {
|
||||
Self {
|
||||
fs,
|
||||
fs_resolver,
|
||||
resolution,
|
||||
maybe_lockfile,
|
||||
|
@ -78,6 +87,20 @@ impl NpmPackageResolver {
|
|||
self.fs_resolver.root_dir_url()
|
||||
}
|
||||
|
||||
pub fn node_modules_path(&self) -> Option<PathBuf> {
|
||||
self.fs_resolver.node_modules_path()
|
||||
}
|
||||
|
||||
/// Checks if the provided package req's folder is cached.
|
||||
pub fn is_pkg_req_folder_cached(&self, req: &NpmPackageReq) -> bool {
|
||||
self
|
||||
.resolve_pkg_id_from_pkg_req(req)
|
||||
.ok()
|
||||
.and_then(|id| self.fs_resolver.package_folder(&id).ok())
|
||||
.map(|folder| folder.exists())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
pub fn resolve_pkg_id_from_pkg_req(
|
||||
&self,
|
||||
req: &NpmPackageReq,
|
||||
|
@ -85,21 +108,17 @@ impl NpmPackageResolver {
|
|||
self.resolution.resolve_pkg_id_from_pkg_req(req)
|
||||
}
|
||||
|
||||
/// Resolves an npm package folder path from a Deno module.
|
||||
pub fn resolve_package_folder_from_deno_module(
|
||||
&self,
|
||||
pkg_nv: &NpmPackageNv,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
let pkg_id = self.resolution.resolve_pkg_id_from_deno_module(pkg_nv)?;
|
||||
self.resolve_pkg_folder_from_deno_module_at_pkg_id(&pkg_id)
|
||||
}
|
||||
|
||||
fn resolve_pkg_folder_from_deno_module_at_pkg_id(
|
||||
pub fn resolve_pkg_folder_from_pkg_id(
|
||||
&self,
|
||||
pkg_id: &NpmPackageId,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
let path = self.fs_resolver.package_folder(pkg_id)?;
|
||||
let path = canonicalize_path_maybe_not_exists(&path)?;
|
||||
let path = canonicalize_path_maybe_not_exists_with_fs(&path, |path| {
|
||||
self
|
||||
.fs
|
||||
.realpath_sync(path)
|
||||
.map_err(|err| err.into_io_error())
|
||||
})?;
|
||||
log::debug!(
|
||||
"Resolved package folder of {} to {}",
|
||||
pkg_id.as_serialized(),
|
||||
|
@ -108,20 +127,6 @@ impl NpmPackageResolver {
|
|||
Ok(path)
|
||||
}
|
||||
|
||||
/// Resolves an npm package folder path from an npm package referrer.
|
||||
pub fn resolve_package_folder_from_package(
|
||||
&self,
|
||||
name: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
mode: NodeResolutionMode,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
let path = self
|
||||
.fs_resolver
|
||||
.resolve_package_folder_from_package(name, referrer, mode)?;
|
||||
log::debug!("Resolved {} from {} to {}", name, referrer, path.display());
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
/// Resolve the root folder of the package the provided specifier is in.
|
||||
///
|
||||
/// This will error when the provided specifier is not in an npm package.
|
||||
|
@ -164,7 +169,7 @@ impl NpmPackageResolver {
|
|||
/// Adds package requirements to the resolver and ensures everything is setup.
|
||||
pub async fn add_package_reqs(
|
||||
&self,
|
||||
packages: Vec<NpmPackageReq>,
|
||||
packages: &[NpmPackageReq],
|
||||
) -> Result<(), AnyError> {
|
||||
if packages.is_empty() {
|
||||
return Ok(());
|
||||
|
@ -187,7 +192,7 @@ impl NpmPackageResolver {
|
|||
/// This will retrieve and resolve package information, but not cache any package files.
|
||||
pub async fn set_package_reqs(
|
||||
&self,
|
||||
packages: Vec<NpmPackageReq>,
|
||||
packages: &[NpmPackageReq],
|
||||
) -> Result<(), AnyError> {
|
||||
self.resolution.set_package_reqs(packages).await
|
||||
}
|
||||
|
@ -217,7 +222,7 @@ impl NpmPackageResolver {
|
|||
) -> Result<(), AnyError> {
|
||||
// add and ensure this isn't added to the lockfile
|
||||
let package_reqs = vec![NpmPackageReq::from_str("@types/node").unwrap()];
|
||||
self.resolution.add_package_reqs(package_reqs).await?;
|
||||
self.resolution.add_package_reqs(&package_reqs).await?;
|
||||
self.fs_resolver.cache_packages().await?;
|
||||
|
||||
Ok(())
|
||||
|
@ -228,28 +233,20 @@ impl NpmPackageResolver {
|
|||
self.fs_resolver.cache_packages().await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn as_require_npm_resolver(
|
||||
self: &Arc<Self>,
|
||||
) -> RequireNpmPackageResolver {
|
||||
RequireNpmPackageResolver(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct RequireNpmPackageResolver(Arc<NpmPackageResolver>);
|
||||
|
||||
impl RequireNpmResolver for RequireNpmPackageResolver {
|
||||
impl NpmResolver for CliNpmResolver {
|
||||
fn resolve_package_folder_from_package(
|
||||
&self,
|
||||
specifier: &str,
|
||||
referrer: &std::path::Path,
|
||||
name: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
mode: NodeResolutionMode,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
let referrer = path_to_specifier(referrer)?;
|
||||
self
|
||||
.0
|
||||
.resolve_package_folder_from_package(specifier, &referrer, mode)
|
||||
let path = self
|
||||
.fs_resolver
|
||||
.resolve_package_folder_from_package(name, referrer, mode)?;
|
||||
log::debug!("Resolved {} from {} to {}", name, referrer, path.display());
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
fn resolve_package_folder_from_path(
|
||||
|
@ -257,49 +254,71 @@ impl RequireNpmResolver for RequireNpmPackageResolver {
|
|||
path: &Path,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
let specifier = path_to_specifier(path)?;
|
||||
self.0.resolve_package_folder_from_specifier(&specifier)
|
||||
self.resolve_package_folder_from_specifier(&specifier)
|
||||
}
|
||||
|
||||
fn in_npm_package(&self, path: &Path) -> bool {
|
||||
let specifier =
|
||||
match ModuleSpecifier::from_file_path(path.to_path_buf().clean()) {
|
||||
Ok(p) => p,
|
||||
Err(_) => return false,
|
||||
};
|
||||
fn resolve_package_folder_from_deno_module(
|
||||
&self,
|
||||
pkg_nv: &NpmPackageNv,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
let pkg_id = self.resolution.resolve_pkg_id_from_deno_module(pkg_nv)?;
|
||||
self.resolve_pkg_folder_from_pkg_id(&pkg_id)
|
||||
}
|
||||
|
||||
fn resolve_pkg_id_from_pkg_req(
|
||||
&self,
|
||||
req: &NpmPackageReq,
|
||||
) -> Result<NpmPackageId, PackageReqNotFoundError> {
|
||||
self.resolution.resolve_pkg_id_from_pkg_req(req)
|
||||
}
|
||||
|
||||
fn resolve_nv_ref_from_pkg_req_ref(
|
||||
&self,
|
||||
req_ref: &NpmPackageReqReference,
|
||||
) -> Result<NpmPackageNvReference, PackageReqNotFoundError> {
|
||||
self.resolution.resolve_nv_ref_from_pkg_req_ref(req_ref)
|
||||
}
|
||||
|
||||
fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
|
||||
self
|
||||
.0
|
||||
.resolve_package_folder_from_specifier(&specifier)
|
||||
.resolve_package_folder_from_specifier(specifier)
|
||||
.is_ok()
|
||||
}
|
||||
|
||||
fn ensure_read_permission(
|
||||
&self,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
permissions: &dyn NodePermissions,
|
||||
path: &Path,
|
||||
) -> Result<(), AnyError> {
|
||||
self.0.fs_resolver.ensure_read_permission(permissions, path)
|
||||
self.fs_resolver.ensure_read_permission(permissions, path)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_npm_fs_resolver(
|
||||
fs: Arc<dyn FileSystem>,
|
||||
cache: Arc<NpmCache>,
|
||||
progress_bar: &ProgressBar,
|
||||
registry_url: Url,
|
||||
resolution: Arc<NpmResolution>,
|
||||
maybe_node_modules_path: Option<PathBuf>,
|
||||
system_info: NpmSystemInfo,
|
||||
) -> Arc<dyn NpmPackageFsResolver> {
|
||||
match maybe_node_modules_path {
|
||||
Some(node_modules_folder) => Arc::new(LocalNpmPackageResolver::new(
|
||||
fs,
|
||||
cache,
|
||||
progress_bar.clone(),
|
||||
registry_url,
|
||||
node_modules_folder,
|
||||
resolution,
|
||||
system_info,
|
||||
)),
|
||||
None => Arc::new(GlobalNpmPackageResolver::new(
|
||||
fs,
|
||||
cache,
|
||||
registry_url,
|
||||
resolution,
|
||||
system_info,
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -42,9 +42,6 @@ deno_core::extension!(deno_bench,
|
|||
state.put(options.sender);
|
||||
state.put(BenchContainer::default());
|
||||
},
|
||||
customizer = |ext: &mut deno_core::ExtensionBuilder| {
|
||||
ext.force_op_registration();
|
||||
},
|
||||
);
|
||||
|
||||
#[derive(Clone)]
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::npm::NpmPackageResolver;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::op;
|
||||
use deno_core::Extension;
|
||||
|
@ -11,25 +11,22 @@ use deno_core::OpState;
|
|||
pub mod bench;
|
||||
pub mod testing;
|
||||
|
||||
pub fn cli_exts(npm_resolver: Arc<NpmPackageResolver>) -> Vec<Extension> {
|
||||
pub fn cli_exts(npm_resolver: Arc<CliNpmResolver>) -> Vec<Extension> {
|
||||
vec![deno_cli::init_ops(npm_resolver)]
|
||||
}
|
||||
|
||||
deno_core::extension!(deno_cli,
|
||||
ops = [op_npm_process_state],
|
||||
options = {
|
||||
npm_resolver: Arc<NpmPackageResolver>,
|
||||
npm_resolver: Arc<CliNpmResolver>,
|
||||
},
|
||||
state = |state, options| {
|
||||
state.put(options.npm_resolver);
|
||||
},
|
||||
customizer = |ext: &mut deno_core::ExtensionBuilder| {
|
||||
ext.force_op_registration();
|
||||
},
|
||||
);
|
||||
|
||||
#[op]
|
||||
fn op_npm_process_state(state: &mut OpState) -> Result<String, AnyError> {
|
||||
let npm_resolver = state.borrow_mut::<Arc<NpmPackageResolver>>();
|
||||
let npm_resolver = state.borrow_mut::<Arc<CliNpmResolver>>();
|
||||
Ok(npm_resolver.get_npm_process_state())
|
||||
}
|
||||
|
|
|
@ -43,9 +43,6 @@ deno_core::extension!(deno_test,
|
|||
state.put(options.sender);
|
||||
state.put(TestContainer::default());
|
||||
},
|
||||
customizer = |ext: &mut deno_core::ExtensionBuilder| {
|
||||
ext.force_op_registration();
|
||||
},
|
||||
);
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -100,7 +97,9 @@ struct TestInfo<'s> {
|
|||
#[serde(rename = "fn")]
|
||||
function: serde_v8::Value<'s>,
|
||||
name: String,
|
||||
#[serde(default)]
|
||||
ignore: bool,
|
||||
#[serde(default)]
|
||||
only: bool,
|
||||
location: TestLocation,
|
||||
}
|
||||
|
|
|
@ -1,421 +0,0 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::args::CliOptions;
|
||||
use crate::args::DenoSubcommand;
|
||||
use crate::args::Flags;
|
||||
use crate::args::Lockfile;
|
||||
use crate::args::TsConfigType;
|
||||
use crate::cache::Caches;
|
||||
use crate::cache::DenoDir;
|
||||
use crate::cache::EmitCache;
|
||||
use crate::cache::HttpCache;
|
||||
use crate::cache::NodeAnalysisCache;
|
||||
use crate::cache::ParsedSourceCache;
|
||||
use crate::emit::Emitter;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::graph_util::ModuleGraphBuilder;
|
||||
use crate::graph_util::ModuleGraphContainer;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::module_loader::ModuleLoadPreparer;
|
||||
use crate::node::CliNodeResolver;
|
||||
use crate::node::NodeCodeTranslator;
|
||||
use crate::npm::create_npm_fs_resolver;
|
||||
use crate::npm::CliNpmRegistryApi;
|
||||
use crate::npm::NpmCache;
|
||||
use crate::npm::NpmPackageResolver;
|
||||
use crate::npm::NpmResolution;
|
||||
use crate::npm::PackageJsonDepsInstaller;
|
||||
use crate::resolver::CliGraphResolver;
|
||||
use crate::tools::check::TypeChecker;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
use crate::util::progress_bar::ProgressBarStyle;
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::CompiledWasmModuleStore;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_core::SharedArrayBufferStore;
|
||||
|
||||
use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel;
|
||||
use deno_runtime::deno_tls::rustls::RootCertStore;
|
||||
use deno_runtime::deno_web::BlobStore;
|
||||
use deno_runtime::inspector_server::InspectorServer;
|
||||
use import_map::ImportMap;
|
||||
use log::warn;
|
||||
use std::collections::HashSet;
|
||||
use std::ops::Deref;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
/// This structure represents state of single "deno" program.
|
||||
///
|
||||
/// It is shared by all created workers (thus V8 isolates).
|
||||
#[derive(Clone)]
|
||||
pub struct ProcState(Arc<Inner>);
|
||||
|
||||
pub struct Inner {
|
||||
pub dir: DenoDir,
|
||||
pub caches: Arc<Caches>,
|
||||
pub file_fetcher: Arc<FileFetcher>,
|
||||
pub http_client: HttpClient,
|
||||
pub options: Arc<CliOptions>,
|
||||
pub emit_cache: EmitCache,
|
||||
pub emitter: Arc<Emitter>,
|
||||
pub graph_container: Arc<ModuleGraphContainer>,
|
||||
pub lockfile: Option<Arc<Mutex<Lockfile>>>,
|
||||
pub maybe_import_map: Option<Arc<ImportMap>>,
|
||||
pub maybe_inspector_server: Option<Arc<InspectorServer>>,
|
||||
pub root_cert_store: RootCertStore,
|
||||
pub blob_store: BlobStore,
|
||||
pub broadcast_channel: InMemoryBroadcastChannel,
|
||||
pub shared_array_buffer_store: SharedArrayBufferStore,
|
||||
pub compiled_wasm_module_store: CompiledWasmModuleStore,
|
||||
pub parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
pub resolver: Arc<CliGraphResolver>,
|
||||
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
|
||||
pub module_graph_builder: Arc<ModuleGraphBuilder>,
|
||||
pub module_load_preparer: Arc<ModuleLoadPreparer>,
|
||||
pub node_code_translator: Arc<NodeCodeTranslator>,
|
||||
pub node_resolver: Arc<CliNodeResolver>,
|
||||
pub npm_api: Arc<CliNpmRegistryApi>,
|
||||
pub npm_cache: Arc<NpmCache>,
|
||||
pub npm_resolver: Arc<NpmPackageResolver>,
|
||||
pub npm_resolution: Arc<NpmResolution>,
|
||||
pub package_json_deps_installer: Arc<PackageJsonDepsInstaller>,
|
||||
pub cjs_resolutions: Arc<CjsResolutionStore>,
|
||||
progress_bar: ProgressBar,
|
||||
}
|
||||
|
||||
impl Deref for ProcState {
|
||||
type Target = Arc<Inner>;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl ProcState {
|
||||
pub async fn from_cli_options(
|
||||
options: Arc<CliOptions>,
|
||||
) -> Result<Self, AnyError> {
|
||||
Self::build_with_sender(options, None).await
|
||||
}
|
||||
|
||||
pub async fn from_flags(flags: Flags) -> Result<Self, AnyError> {
|
||||
Self::from_cli_options(Arc::new(CliOptions::from_flags(flags)?)).await
|
||||
}
|
||||
|
||||
pub async fn from_flags_for_file_watcher(
|
||||
flags: Flags,
|
||||
files_to_watch_sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>,
|
||||
) -> Result<Self, AnyError> {
|
||||
// resolve the config each time
|
||||
let cli_options = Arc::new(CliOptions::from_flags(flags)?);
|
||||
let ps =
|
||||
Self::build_with_sender(cli_options, Some(files_to_watch_sender.clone()))
|
||||
.await?;
|
||||
ps.init_watcher();
|
||||
Ok(ps)
|
||||
}
|
||||
|
||||
/// Reset all runtime state to its default. This should be used on file
|
||||
/// watcher restarts.
|
||||
pub fn reset_for_file_watcher(&mut self) {
|
||||
self.cjs_resolutions.clear();
|
||||
self.parsed_source_cache.clear();
|
||||
self.graph_container.clear();
|
||||
|
||||
self.0 = Arc::new(Inner {
|
||||
dir: self.dir.clone(),
|
||||
caches: self.caches.clone(),
|
||||
options: self.options.clone(),
|
||||
emit_cache: self.emit_cache.clone(),
|
||||
emitter: self.emitter.clone(),
|
||||
file_fetcher: self.file_fetcher.clone(),
|
||||
http_client: self.http_client.clone(),
|
||||
graph_container: self.graph_container.clone(),
|
||||
lockfile: self.lockfile.clone(),
|
||||
maybe_import_map: self.maybe_import_map.clone(),
|
||||
maybe_inspector_server: self.maybe_inspector_server.clone(),
|
||||
root_cert_store: self.root_cert_store.clone(),
|
||||
blob_store: self.blob_store.clone(),
|
||||
broadcast_channel: Default::default(),
|
||||
shared_array_buffer_store: Default::default(),
|
||||
compiled_wasm_module_store: Default::default(),
|
||||
parsed_source_cache: self.parsed_source_cache.clone(),
|
||||
resolver: self.resolver.clone(),
|
||||
maybe_file_watcher_reporter: self.maybe_file_watcher_reporter.clone(),
|
||||
module_graph_builder: self.module_graph_builder.clone(),
|
||||
module_load_preparer: self.module_load_preparer.clone(),
|
||||
node_code_translator: self.node_code_translator.clone(),
|
||||
node_resolver: self.node_resolver.clone(),
|
||||
npm_api: self.npm_api.clone(),
|
||||
npm_cache: self.npm_cache.clone(),
|
||||
npm_resolver: self.npm_resolver.clone(),
|
||||
npm_resolution: self.npm_resolution.clone(),
|
||||
package_json_deps_installer: self.package_json_deps_installer.clone(),
|
||||
cjs_resolutions: self.cjs_resolutions.clone(),
|
||||
progress_bar: self.progress_bar.clone(),
|
||||
});
|
||||
self.init_watcher();
|
||||
}
|
||||
|
||||
// Add invariant files like the import map and explicit watch flag list to
|
||||
// the watcher. Dedup for build_for_file_watcher and reset_for_file_watcher.
|
||||
fn init_watcher(&self) {
|
||||
let files_to_watch_sender = match &self.0.maybe_file_watcher_reporter {
|
||||
Some(reporter) => &reporter.sender,
|
||||
None => return,
|
||||
};
|
||||
if let Some(watch_paths) = self.options.watch_paths() {
|
||||
files_to_watch_sender.send(watch_paths.clone()).unwrap();
|
||||
}
|
||||
if let Ok(Some(import_map_path)) = self
|
||||
.options
|
||||
.resolve_import_map_specifier()
|
||||
.map(|ms| ms.and_then(|ref s| s.to_file_path().ok()))
|
||||
{
|
||||
files_to_watch_sender.send(vec![import_map_path]).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
async fn build_with_sender(
|
||||
cli_options: Arc<CliOptions>,
|
||||
maybe_sender: Option<tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>>,
|
||||
) -> Result<Self, AnyError> {
|
||||
let dir = cli_options.resolve_deno_dir()?;
|
||||
let caches = Arc::new(Caches::default());
|
||||
// Warm up the caches we know we'll likely need based on the CLI mode
|
||||
match cli_options.sub_command() {
|
||||
DenoSubcommand::Run(_) => {
|
||||
_ = caches.dep_analysis_db(&dir);
|
||||
_ = caches.node_analysis_db(&dir);
|
||||
}
|
||||
DenoSubcommand::Check(_) => {
|
||||
_ = caches.dep_analysis_db(&dir);
|
||||
_ = caches.node_analysis_db(&dir);
|
||||
_ = caches.type_checking_cache_db(&dir);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
let blob_store = BlobStore::default();
|
||||
let broadcast_channel = InMemoryBroadcastChannel::default();
|
||||
let shared_array_buffer_store = SharedArrayBufferStore::default();
|
||||
let compiled_wasm_module_store = CompiledWasmModuleStore::default();
|
||||
let deps_cache_location = dir.deps_folder_path();
|
||||
let http_cache = HttpCache::new(&deps_cache_location);
|
||||
let root_cert_store = cli_options.resolve_root_cert_store()?;
|
||||
let cache_usage = cli_options.cache_setting();
|
||||
let progress_bar = ProgressBar::new(ProgressBarStyle::TextOnly);
|
||||
let http_client = HttpClient::new(
|
||||
Some(root_cert_store.clone()),
|
||||
cli_options.unsafely_ignore_certificate_errors().clone(),
|
||||
)?;
|
||||
let file_fetcher = FileFetcher::new(
|
||||
http_cache,
|
||||
cache_usage,
|
||||
!cli_options.no_remote(),
|
||||
http_client.clone(),
|
||||
blob_store.clone(),
|
||||
Some(progress_bar.clone()),
|
||||
);
|
||||
|
||||
let lockfile = cli_options.maybe_lock_file();
|
||||
|
||||
let npm_registry_url = CliNpmRegistryApi::default_url().to_owned();
|
||||
let npm_cache = Arc::new(NpmCache::from_deno_dir(
|
||||
&dir,
|
||||
cli_options.cache_setting(),
|
||||
http_client.clone(),
|
||||
progress_bar.clone(),
|
||||
));
|
||||
let npm_api = Arc::new(CliNpmRegistryApi::new(
|
||||
npm_registry_url.clone(),
|
||||
npm_cache.clone(),
|
||||
http_client.clone(),
|
||||
progress_bar.clone(),
|
||||
));
|
||||
let npm_snapshot = cli_options
|
||||
.resolve_npm_resolution_snapshot(&npm_api)
|
||||
.await?;
|
||||
let npm_resolution = Arc::new(NpmResolution::from_serialized(
|
||||
npm_api.clone(),
|
||||
npm_snapshot,
|
||||
lockfile.as_ref().cloned(),
|
||||
));
|
||||
let npm_fs_resolver = create_npm_fs_resolver(
|
||||
npm_cache,
|
||||
&progress_bar,
|
||||
npm_registry_url,
|
||||
npm_resolution.clone(),
|
||||
cli_options.node_modules_dir_path(),
|
||||
);
|
||||
let npm_resolver = Arc::new(NpmPackageResolver::new(
|
||||
npm_resolution.clone(),
|
||||
npm_fs_resolver,
|
||||
lockfile.as_ref().cloned(),
|
||||
));
|
||||
let package_json_deps_installer = Arc::new(PackageJsonDepsInstaller::new(
|
||||
npm_api.clone(),
|
||||
npm_resolution.clone(),
|
||||
cli_options.maybe_package_json_deps(),
|
||||
));
|
||||
let maybe_import_map = cli_options
|
||||
.resolve_import_map(&file_fetcher)
|
||||
.await?
|
||||
.map(Arc::new);
|
||||
let maybe_inspector_server =
|
||||
cli_options.resolve_inspector_server().map(Arc::new);
|
||||
|
||||
let resolver = Arc::new(CliGraphResolver::new(
|
||||
cli_options.to_maybe_jsx_import_source_config(),
|
||||
maybe_import_map.clone(),
|
||||
cli_options.no_npm(),
|
||||
npm_api.clone(),
|
||||
npm_resolution.clone(),
|
||||
package_json_deps_installer.clone(),
|
||||
));
|
||||
|
||||
let maybe_file_watcher_reporter =
|
||||
maybe_sender.map(|sender| FileWatcherReporter {
|
||||
sender,
|
||||
file_paths: Arc::new(Mutex::new(vec![])),
|
||||
});
|
||||
|
||||
let ts_config_result =
|
||||
cli_options.resolve_ts_config_for_emit(TsConfigType::Emit)?;
|
||||
if let Some(ignored_options) = ts_config_result.maybe_ignored_options {
|
||||
warn!("{}", ignored_options);
|
||||
}
|
||||
let emit_cache = EmitCache::new(dir.gen_cache.clone());
|
||||
let parsed_source_cache =
|
||||
Arc::new(ParsedSourceCache::new(caches.dep_analysis_db(&dir)));
|
||||
let emit_options: deno_ast::EmitOptions = ts_config_result.ts_config.into();
|
||||
let emitter = Arc::new(Emitter::new(
|
||||
emit_cache.clone(),
|
||||
parsed_source_cache.clone(),
|
||||
emit_options,
|
||||
));
|
||||
let npm_cache = Arc::new(NpmCache::from_deno_dir(
|
||||
&dir,
|
||||
cli_options.cache_setting(),
|
||||
http_client.clone(),
|
||||
progress_bar.clone(),
|
||||
));
|
||||
let file_fetcher = Arc::new(file_fetcher);
|
||||
let node_analysis_cache =
|
||||
NodeAnalysisCache::new(caches.node_analysis_db(&dir));
|
||||
let node_code_translator = Arc::new(NodeCodeTranslator::new(
|
||||
node_analysis_cache,
|
||||
file_fetcher.clone(),
|
||||
npm_resolver.clone(),
|
||||
));
|
||||
let node_resolver = Arc::new(CliNodeResolver::new(
|
||||
npm_resolution.clone(),
|
||||
npm_resolver.clone(),
|
||||
));
|
||||
let type_checker = Arc::new(TypeChecker::new(
|
||||
dir.clone(),
|
||||
caches.clone(),
|
||||
cli_options.clone(),
|
||||
node_resolver.clone(),
|
||||
npm_resolver.clone(),
|
||||
));
|
||||
let module_graph_builder = Arc::new(ModuleGraphBuilder::new(
|
||||
cli_options.clone(),
|
||||
resolver.clone(),
|
||||
npm_resolver.clone(),
|
||||
parsed_source_cache.clone(),
|
||||
lockfile.clone(),
|
||||
emit_cache.clone(),
|
||||
file_fetcher.clone(),
|
||||
type_checker.clone(),
|
||||
));
|
||||
let graph_container: Arc<ModuleGraphContainer> = Default::default();
|
||||
let module_load_preparer = Arc::new(ModuleLoadPreparer::new(
|
||||
cli_options.clone(),
|
||||
graph_container.clone(),
|
||||
lockfile.clone(),
|
||||
maybe_file_watcher_reporter.clone(),
|
||||
module_graph_builder.clone(),
|
||||
parsed_source_cache.clone(),
|
||||
progress_bar.clone(),
|
||||
resolver.clone(),
|
||||
type_checker,
|
||||
));
|
||||
|
||||
Ok(ProcState(Arc::new(Inner {
|
||||
dir,
|
||||
caches,
|
||||
options: cli_options,
|
||||
emit_cache,
|
||||
emitter,
|
||||
file_fetcher,
|
||||
http_client,
|
||||
graph_container,
|
||||
lockfile,
|
||||
maybe_import_map,
|
||||
maybe_inspector_server,
|
||||
root_cert_store,
|
||||
blob_store,
|
||||
broadcast_channel,
|
||||
shared_array_buffer_store,
|
||||
compiled_wasm_module_store,
|
||||
parsed_source_cache,
|
||||
resolver,
|
||||
maybe_file_watcher_reporter,
|
||||
module_graph_builder,
|
||||
node_code_translator,
|
||||
node_resolver,
|
||||
npm_api,
|
||||
npm_cache,
|
||||
npm_resolver,
|
||||
npm_resolution,
|
||||
package_json_deps_installer,
|
||||
cjs_resolutions: Default::default(),
|
||||
module_load_preparer,
|
||||
progress_bar,
|
||||
})))
|
||||
}
|
||||
}
|
||||
|
||||
/// Keeps track of what module specifiers were resolved as CJS.
|
||||
#[derive(Default)]
|
||||
pub struct CjsResolutionStore(Mutex<HashSet<ModuleSpecifier>>);
|
||||
|
||||
impl CjsResolutionStore {
|
||||
pub fn clear(&self) {
|
||||
self.0.lock().clear();
|
||||
}
|
||||
|
||||
pub fn contains(&self, specifier: &ModuleSpecifier) -> bool {
|
||||
self.0.lock().contains(specifier)
|
||||
}
|
||||
|
||||
pub fn insert(&self, specifier: ModuleSpecifier) {
|
||||
self.0.lock().insert(specifier);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct FileWatcherReporter {
|
||||
sender: tokio::sync::mpsc::UnboundedSender<Vec<PathBuf>>,
|
||||
file_paths: Arc<Mutex<Vec<PathBuf>>>,
|
||||
}
|
||||
|
||||
impl deno_graph::source::Reporter for FileWatcherReporter {
|
||||
fn on_load(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
modules_done: usize,
|
||||
modules_total: usize,
|
||||
) {
|
||||
let mut file_paths = self.file_paths.lock();
|
||||
if specifier.scheme() == "file" {
|
||||
file_paths.push(specifier.to_file_path().unwrap());
|
||||
}
|
||||
|
||||
if modules_done == modules_total {
|
||||
self.sender.send(file_paths.drain(..).collect()).unwrap();
|
||||
}
|
||||
}
|
||||
}
|
130
cli/resolver.rs
130
cli/resolver.rs
|
@ -20,16 +20,86 @@ use std::sync::Arc;
|
|||
|
||||
use crate::args::package_json::PackageJsonDeps;
|
||||
use crate::args::JsxImportSourceConfig;
|
||||
use crate::args::PackageJsonDepsProvider;
|
||||
use crate::npm::CliNpmRegistryApi;
|
||||
use crate::npm::NpmResolution;
|
||||
use crate::npm::PackageJsonDepsInstaller;
|
||||
use crate::util::sync::AtomicFlag;
|
||||
|
||||
/// Result of checking if a specifier is mapped via
|
||||
/// an import map or package.json.
|
||||
pub enum MappedResolution {
|
||||
None,
|
||||
PackageJson(ModuleSpecifier),
|
||||
ImportMap(ModuleSpecifier),
|
||||
}
|
||||
|
||||
impl MappedResolution {
|
||||
pub fn into_specifier(self) -> Option<ModuleSpecifier> {
|
||||
match self {
|
||||
MappedResolution::None => Option::None,
|
||||
MappedResolution::PackageJson(specifier) => Some(specifier),
|
||||
MappedResolution::ImportMap(specifier) => Some(specifier),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolver for specifiers that could be mapped via an
|
||||
/// import map or package.json.
|
||||
#[derive(Debug)]
|
||||
pub struct MappedSpecifierResolver {
|
||||
maybe_import_map: Option<Arc<ImportMap>>,
|
||||
package_json_deps_provider: Arc<PackageJsonDepsProvider>,
|
||||
}
|
||||
|
||||
impl MappedSpecifierResolver {
|
||||
pub fn new(
|
||||
maybe_import_map: Option<Arc<ImportMap>>,
|
||||
package_json_deps_provider: Arc<PackageJsonDepsProvider>,
|
||||
) -> Self {
|
||||
Self {
|
||||
maybe_import_map,
|
||||
package_json_deps_provider,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve(
|
||||
&self,
|
||||
specifier: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
) -> Result<MappedResolution, AnyError> {
|
||||
// attempt to resolve with the import map first
|
||||
let maybe_import_map_err = match self
|
||||
.maybe_import_map
|
||||
.as_ref()
|
||||
.map(|import_map| import_map.resolve(specifier, referrer))
|
||||
{
|
||||
Some(Ok(value)) => return Ok(MappedResolution::ImportMap(value)),
|
||||
Some(Err(err)) => Some(err),
|
||||
None => None,
|
||||
};
|
||||
|
||||
// then with package.json
|
||||
if let Some(deps) = self.package_json_deps_provider.deps() {
|
||||
if let Some(specifier) = resolve_package_json_dep(specifier, deps)? {
|
||||
return Ok(MappedResolution::PackageJson(specifier));
|
||||
}
|
||||
}
|
||||
|
||||
// otherwise, surface the import map error or try resolving when has no import map
|
||||
if let Some(err) = maybe_import_map_err {
|
||||
Err(err.into())
|
||||
} else {
|
||||
Ok(MappedResolution::None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A resolver that takes care of resolution, taking into account loaded
|
||||
/// import map, JSX settings.
|
||||
#[derive(Debug)]
|
||||
pub struct CliGraphResolver {
|
||||
maybe_import_map: Option<Arc<ImportMap>>,
|
||||
mapped_specifier_resolver: MappedSpecifierResolver,
|
||||
maybe_default_jsx_import_source: Option<String>,
|
||||
maybe_jsx_import_source_module: Option<String>,
|
||||
no_npm: bool,
|
||||
|
@ -51,7 +121,10 @@ impl Default for CliGraphResolver {
|
|||
None,
|
||||
));
|
||||
Self {
|
||||
maybe_import_map: Default::default(),
|
||||
mapped_specifier_resolver: MappedSpecifierResolver {
|
||||
maybe_import_map: Default::default(),
|
||||
package_json_deps_provider: Default::default(),
|
||||
},
|
||||
maybe_default_jsx_import_source: Default::default(),
|
||||
maybe_jsx_import_source_module: Default::default(),
|
||||
no_npm: false,
|
||||
|
@ -71,10 +144,14 @@ impl CliGraphResolver {
|
|||
no_npm: bool,
|
||||
npm_registry_api: Arc<CliNpmRegistryApi>,
|
||||
npm_resolution: Arc<NpmResolution>,
|
||||
package_json_deps_provider: Arc<PackageJsonDepsProvider>,
|
||||
package_json_deps_installer: Arc<PackageJsonDepsInstaller>,
|
||||
) -> Self {
|
||||
Self {
|
||||
maybe_import_map,
|
||||
mapped_specifier_resolver: MappedSpecifierResolver {
|
||||
maybe_import_map,
|
||||
package_json_deps_provider,
|
||||
},
|
||||
maybe_default_jsx_import_source: maybe_jsx_import_source_config
|
||||
.as_ref()
|
||||
.and_then(|c| c.default_specifier.clone()),
|
||||
|
@ -105,14 +182,20 @@ impl CliGraphResolver {
|
|||
self
|
||||
}
|
||||
|
||||
pub async fn force_top_level_package_json_install(
|
||||
&self,
|
||||
) -> Result<(), AnyError> {
|
||||
self
|
||||
.package_json_deps_installer
|
||||
.ensure_top_level_install()
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn top_level_package_json_install_if_necessary(
|
||||
&self,
|
||||
) -> Result<(), AnyError> {
|
||||
if self.found_package_json_dep_flag.is_raised() {
|
||||
self
|
||||
.package_json_deps_installer
|
||||
.ensure_top_level_install()
|
||||
.await?;
|
||||
self.force_top_level_package_json_install().await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -135,31 +218,20 @@ impl Resolver for CliGraphResolver {
|
|||
specifier: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
) -> Result<ModuleSpecifier, AnyError> {
|
||||
// attempt to resolve with the import map first
|
||||
let maybe_import_map_err = match self
|
||||
.maybe_import_map
|
||||
.as_ref()
|
||||
.map(|import_map| import_map.resolve(specifier, referrer))
|
||||
use MappedResolution::*;
|
||||
match self
|
||||
.mapped_specifier_resolver
|
||||
.resolve(specifier, referrer)?
|
||||
{
|
||||
Some(Ok(value)) => return Ok(value),
|
||||
Some(Err(err)) => Some(err),
|
||||
None => None,
|
||||
};
|
||||
|
||||
// then with package.json
|
||||
if let Some(deps) = self.package_json_deps_installer.package_deps().as_ref()
|
||||
{
|
||||
if let Some(specifier) = resolve_package_json_dep(specifier, deps)? {
|
||||
ImportMap(specifier) => Ok(specifier),
|
||||
PackageJson(specifier) => {
|
||||
// found a specifier in the package.json, so mark that
|
||||
// we need to do an "npm install" later
|
||||
self.found_package_json_dep_flag.raise();
|
||||
return Ok(specifier);
|
||||
Ok(specifier)
|
||||
}
|
||||
}
|
||||
|
||||
// otherwise, surface the import map error or try resolving when has no import map
|
||||
if let Some(err) = maybe_import_map_err {
|
||||
Err(err.into())
|
||||
} else {
|
||||
deno_graph::resolve_import(specifier, referrer).map_err(|err| err.into())
|
||||
None => deno_graph::resolve_import(specifier, referrer)
|
||||
.map_err(|err| err.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -223,23 +223,44 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"description": "List of files, directories or globs that will be ignored by all other configurations. Requires Deno 1.34 or later.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"lint": {
|
||||
"description": "Configuration for linter",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"include": {
|
||||
"type": "array",
|
||||
"description": "List of files, directories or globs that will be linted.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"description": "List of files, directories or globs that will not be linted.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"files": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"include": {
|
||||
"type": "array",
|
||||
"description": "List of files or directories that will be linted.",
|
||||
"description": "List of files, directories or globs that will be linted.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"description": "List of files or directories that will not be linted.",
|
||||
"description": "List of files, directories or globs that will not be linted.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
|
@ -293,25 +314,73 @@
|
|||
"description": "Configuration for formatter",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"include": {
|
||||
"type": "array",
|
||||
"description": "List of files, directories or globs that will be formatted.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"description": "List of files, directories or globs that will not be formatted.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"files": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"include": {
|
||||
"type": "array",
|
||||
"description": "List of files or directories that will be formatted.",
|
||||
"description": "List of files, directories or globs that will be formatted.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"description": "List of files or directories that will not be formatted.",
|
||||
"description": "List of files, directories or globs that will not be formatted.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"useTabs": {
|
||||
"description": "Whether to use tabs (true) or spaces (false) for indentation.",
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"lineWidth": {
|
||||
"description": "The width of a line the printer will try to stay under. Note that the printer may exceed this width in certain cases.",
|
||||
"type": "number",
|
||||
"default": 80
|
||||
},
|
||||
"indentWidth": {
|
||||
"description": "The number of characters for an indent.",
|
||||
"type": "number",
|
||||
"default": 2
|
||||
},
|
||||
"singleQuote": {
|
||||
"type": "boolean",
|
||||
"description": "Whether to use single quote (true) or double quote (false) for quotation.",
|
||||
"default": false
|
||||
},
|
||||
"proseWrap": {
|
||||
"description": "Define how prose should be wrapped in Markdown files.",
|
||||
"default": "always",
|
||||
"enum": [
|
||||
"always",
|
||||
"never",
|
||||
"preserve"
|
||||
]
|
||||
},
|
||||
"semiColons": {
|
||||
"description": "Whether to prefer using semicolons.",
|
||||
"type": "boolean",
|
||||
"default": true
|
||||
},
|
||||
"options": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
|
@ -353,6 +422,10 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"nodeModulesDir": {
|
||||
"description": "Enables or disables the use of a local node_modules folder for npm packages. Alternatively, use the `--node-modules-dir` or `--node-modules-dir=false` flag. Requires Deno 1.34 or later.",
|
||||
"type": "boolean"
|
||||
},
|
||||
"tasks": {
|
||||
"description": "Configuration for deno task",
|
||||
"type": "object",
|
||||
|
@ -368,19 +441,33 @@
|
|||
"description": "Configuration for deno test",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"include": {
|
||||
"type": "array",
|
||||
"description": "List of files, directories or globs that will be searched for tests.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"description": "List of files, directories or globs that will not be searched for tests.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"files": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"include": {
|
||||
"type": "array",
|
||||
"description": "List of files or directories that will be searched for tests.",
|
||||
"description": "List of files, directories or globs that will be searched for tests.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"description": "List of files or directories that will not be searched for tests.",
|
||||
"description": "List of files, directories or globs that will not be searched for tests.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
|
@ -393,19 +480,33 @@
|
|||
"description": "Configuration for deno bench",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"include": {
|
||||
"type": "array",
|
||||
"description": "List of files, directories or globs that will be searched for benchmarks.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"description": "List of files, directories or globs that will not be searched for benchmarks.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"files": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"include": {
|
||||
"type": "array",
|
||||
"description": "List of files or directories that will be searched for benchmarks.",
|
||||
"description": "List of files, directories or globs that will be searched for benchmarks.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"description": "List of files or directories that will not be searched for benchmarks.",
|
||||
"description": "List of files, directories or globs that will not be searched for benchmarks.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
|
|
|
@ -1,409 +0,0 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::args::CaData;
|
||||
use crate::args::Flags;
|
||||
use crate::colors;
|
||||
use crate::file_fetcher::get_source_from_data_url;
|
||||
use crate::ops;
|
||||
use crate::proc_state::ProcState;
|
||||
use crate::util::v8::construct_v8_flags;
|
||||
use crate::version;
|
||||
use crate::CliGraphResolver;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::type_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::io::AllowStdIo;
|
||||
use deno_core::futures::task::LocalFutureObj;
|
||||
use deno_core::futures::AsyncReadExt;
|
||||
use deno_core::futures::AsyncSeekExt;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_core::located_script_name;
|
||||
use deno_core::serde::Deserialize;
|
||||
use deno_core::serde::Serialize;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::url::Url;
|
||||
use deno_core::v8_set_flags;
|
||||
use deno_core::ModuleLoader;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_core::ModuleType;
|
||||
use deno_core::ResolutionKind;
|
||||
use deno_graph::source::Resolver;
|
||||
use deno_runtime::fmt_errors::format_js_error;
|
||||
use deno_runtime::ops::worker_host::CreateWebWorkerCb;
|
||||
use deno_runtime::ops::worker_host::WorkerEventCb;
|
||||
use deno_runtime::permissions::Permissions;
|
||||
use deno_runtime::permissions::PermissionsContainer;
|
||||
use deno_runtime::permissions::PermissionsOptions;
|
||||
use deno_runtime::web_worker::WebWorker;
|
||||
use deno_runtime::web_worker::WebWorkerOptions;
|
||||
use deno_runtime::worker::MainWorker;
|
||||
use deno_runtime::worker::WorkerOptions;
|
||||
use deno_runtime::BootstrapOptions;
|
||||
use import_map::parse_from_json;
|
||||
use log::Level;
|
||||
use std::env::current_exe;
|
||||
use std::io::SeekFrom;
|
||||
use std::pin::Pin;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub struct Metadata {
|
||||
pub argv: Vec<String>,
|
||||
pub unstable: bool,
|
||||
pub seed: Option<u64>,
|
||||
pub permissions: PermissionsOptions,
|
||||
pub location: Option<Url>,
|
||||
pub v8_flags: Vec<String>,
|
||||
pub log_level: Option<Level>,
|
||||
pub ca_stores: Option<Vec<String>>,
|
||||
pub ca_data: Option<Vec<u8>>,
|
||||
pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
|
||||
pub maybe_import_map: Option<(Url, String)>,
|
||||
pub entrypoint: ModuleSpecifier,
|
||||
}
|
||||
|
||||
pub const MAGIC_TRAILER: &[u8; 8] = b"d3n0l4nd";
|
||||
|
||||
/// This function will try to run this binary as a standalone binary
|
||||
/// produced by `deno compile`. It determines if this is a standalone
|
||||
/// binary by checking for the magic trailer string `d3n0l4nd` at EOF-24.
|
||||
/// The magic trailer is followed by:
|
||||
/// - a u64 pointer to the JS bundle embedded in the binary
|
||||
/// - a u64 pointer to JSON metadata (serialized flags) embedded in the binary
|
||||
/// These are dereferenced, and the bundle is executed under the configuration
|
||||
/// specified by the metadata. If no magic trailer is present, this function
|
||||
/// exits with `Ok(None)`.
|
||||
pub async fn extract_standalone(
|
||||
args: Vec<String>,
|
||||
) -> Result<Option<(Metadata, eszip::EszipV2)>, AnyError> {
|
||||
let current_exe_path = current_exe()?;
|
||||
|
||||
let file = std::fs::File::open(current_exe_path)?;
|
||||
|
||||
let mut bufreader =
|
||||
deno_core::futures::io::BufReader::new(AllowStdIo::new(file));
|
||||
|
||||
let trailer_pos = bufreader.seek(SeekFrom::End(-24)).await?;
|
||||
let mut trailer = [0; 24];
|
||||
bufreader.read_exact(&mut trailer).await?;
|
||||
let (magic_trailer, rest) = trailer.split_at(8);
|
||||
if magic_trailer != MAGIC_TRAILER {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let (eszip_archive_pos, rest) = rest.split_at(8);
|
||||
let metadata_pos = rest;
|
||||
let eszip_archive_pos = u64_from_bytes(eszip_archive_pos)?;
|
||||
let metadata_pos = u64_from_bytes(metadata_pos)?;
|
||||
let metadata_len = trailer_pos - metadata_pos;
|
||||
|
||||
bufreader.seek(SeekFrom::Start(eszip_archive_pos)).await?;
|
||||
|
||||
let (eszip, loader) = eszip::EszipV2::parse(bufreader)
|
||||
.await
|
||||
.context("Failed to parse eszip header")?;
|
||||
|
||||
let mut bufreader = loader.await.context("Failed to parse eszip archive")?;
|
||||
|
||||
bufreader.seek(SeekFrom::Start(metadata_pos)).await?;
|
||||
|
||||
let mut metadata = String::new();
|
||||
|
||||
bufreader
|
||||
.take(metadata_len)
|
||||
.read_to_string(&mut metadata)
|
||||
.await
|
||||
.context("Failed to read metadata from the current executable")?;
|
||||
|
||||
let mut metadata: Metadata = serde_json::from_str(&metadata).unwrap();
|
||||
metadata.argv.append(&mut args[1..].to_vec());
|
||||
|
||||
Ok(Some((metadata, eszip)))
|
||||
}
|
||||
|
||||
fn u64_from_bytes(arr: &[u8]) -> Result<u64, AnyError> {
|
||||
let fixed_arr: &[u8; 8] = arr
|
||||
.try_into()
|
||||
.context("Failed to convert the buffer into a fixed-size array")?;
|
||||
Ok(u64::from_be_bytes(*fixed_arr))
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct EmbeddedModuleLoader {
|
||||
eszip: Arc<eszip::EszipV2>,
|
||||
maybe_import_map_resolver: Option<Arc<CliGraphResolver>>,
|
||||
}
|
||||
|
||||
impl ModuleLoader for EmbeddedModuleLoader {
|
||||
fn resolve(
|
||||
&self,
|
||||
specifier: &str,
|
||||
referrer: &str,
|
||||
_kind: ResolutionKind,
|
||||
) -> Result<ModuleSpecifier, AnyError> {
|
||||
// Try to follow redirects when resolving.
|
||||
let referrer = match self.eszip.get_module(referrer) {
|
||||
Some(eszip::Module { ref specifier, .. }) => {
|
||||
ModuleSpecifier::parse(specifier)?
|
||||
}
|
||||
None => {
|
||||
let cwd = std::env::current_dir().context("Unable to get CWD")?;
|
||||
deno_core::resolve_url_or_path(referrer, &cwd)?
|
||||
}
|
||||
};
|
||||
|
||||
self
|
||||
.maybe_import_map_resolver
|
||||
.as_ref()
|
||||
.map(|r| r.resolve(specifier, &referrer))
|
||||
.unwrap_or_else(|| {
|
||||
deno_core::resolve_import(specifier, referrer.as_str())
|
||||
.map_err(|err| err.into())
|
||||
})
|
||||
}
|
||||
|
||||
fn load(
|
||||
&self,
|
||||
module_specifier: &ModuleSpecifier,
|
||||
_maybe_referrer: Option<&ModuleSpecifier>,
|
||||
_is_dynamic: bool,
|
||||
) -> Pin<Box<deno_core::ModuleSourceFuture>> {
|
||||
let is_data_uri = get_source_from_data_url(module_specifier).ok();
|
||||
let module = self
|
||||
.eszip
|
||||
.get_module(module_specifier.as_str())
|
||||
.ok_or_else(|| type_error("Module not found"));
|
||||
// TODO(mmastrac): This clone can probably be removed in the future if ModuleSpecifier is no longer a full-fledged URL
|
||||
let module_specifier = module_specifier.clone();
|
||||
|
||||
async move {
|
||||
if let Some((source, _)) = is_data_uri {
|
||||
return Ok(deno_core::ModuleSource::new(
|
||||
deno_core::ModuleType::JavaScript,
|
||||
source.into(),
|
||||
&module_specifier,
|
||||
));
|
||||
}
|
||||
|
||||
let module = module?;
|
||||
let code = module.source().await.unwrap_or_default();
|
||||
let code = std::str::from_utf8(&code)
|
||||
.map_err(|_| type_error("Module source is not utf-8"))?
|
||||
.to_owned()
|
||||
.into();
|
||||
|
||||
Ok(deno_core::ModuleSource::new(
|
||||
match module.kind {
|
||||
eszip::ModuleKind::JavaScript => ModuleType::JavaScript,
|
||||
eszip::ModuleKind::Json => ModuleType::Json,
|
||||
},
|
||||
code,
|
||||
&module_specifier,
|
||||
))
|
||||
}
|
||||
.boxed_local()
|
||||
}
|
||||
}
|
||||
|
||||
fn metadata_to_flags(metadata: &Metadata) -> Flags {
|
||||
let permissions = metadata.permissions.clone();
|
||||
Flags {
|
||||
argv: metadata.argv.clone(),
|
||||
unstable: metadata.unstable,
|
||||
seed: metadata.seed,
|
||||
location: metadata.location.clone(),
|
||||
allow_env: permissions.allow_env,
|
||||
allow_hrtime: permissions.allow_hrtime,
|
||||
allow_net: permissions.allow_net,
|
||||
allow_ffi: permissions.allow_ffi,
|
||||
allow_read: permissions.allow_read,
|
||||
allow_run: permissions.allow_run,
|
||||
allow_write: permissions.allow_write,
|
||||
v8_flags: metadata.v8_flags.clone(),
|
||||
log_level: metadata.log_level,
|
||||
ca_stores: metadata.ca_stores.clone(),
|
||||
ca_data: metadata.ca_data.clone().map(CaData::Bytes),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
fn web_worker_callback() -> Arc<WorkerEventCb> {
|
||||
Arc::new(|worker| {
|
||||
let fut = async move { Ok(worker) };
|
||||
LocalFutureObj::new(Box::new(fut))
|
||||
})
|
||||
}
|
||||
|
||||
fn create_web_worker_callback(
|
||||
ps: &ProcState,
|
||||
module_loader: &Rc<EmbeddedModuleLoader>,
|
||||
) -> Arc<CreateWebWorkerCb> {
|
||||
let ps = ps.clone();
|
||||
let module_loader = module_loader.as_ref().clone();
|
||||
Arc::new(move |args| {
|
||||
let module_loader = Rc::new(module_loader.clone());
|
||||
|
||||
let create_web_worker_cb = create_web_worker_callback(&ps, &module_loader);
|
||||
let web_worker_cb = web_worker_callback();
|
||||
|
||||
let options = WebWorkerOptions {
|
||||
bootstrap: BootstrapOptions {
|
||||
args: ps.options.argv().clone(),
|
||||
cpu_count: std::thread::available_parallelism()
|
||||
.map(|p| p.get())
|
||||
.unwrap_or(1),
|
||||
debug_flag: ps.options.log_level().map_or(false, |l| l == Level::Debug),
|
||||
enable_testing_features: false,
|
||||
locale: deno_core::v8::icu::get_language_tag(),
|
||||
location: Some(args.main_module.clone()),
|
||||
no_color: !colors::use_color(),
|
||||
is_tty: colors::is_tty(),
|
||||
runtime_version: version::deno().to_string(),
|
||||
ts_version: version::TYPESCRIPT.to_string(),
|
||||
unstable: ps.options.unstable(),
|
||||
user_agent: version::get_user_agent().to_string(),
|
||||
inspect: ps.options.is_inspecting(),
|
||||
},
|
||||
extensions: ops::cli_exts(ps.npm_resolver.clone()),
|
||||
startup_snapshot: Some(crate::js::deno_isolate_init()),
|
||||
unsafely_ignore_certificate_errors: ps
|
||||
.options
|
||||
.unsafely_ignore_certificate_errors()
|
||||
.clone(),
|
||||
root_cert_store: Some(ps.root_cert_store.clone()),
|
||||
seed: ps.options.seed(),
|
||||
module_loader,
|
||||
npm_resolver: None, // not currently supported
|
||||
create_web_worker_cb,
|
||||
preload_module_cb: web_worker_cb.clone(),
|
||||
pre_execute_module_cb: web_worker_cb,
|
||||
format_js_error_fn: Some(Arc::new(format_js_error)),
|
||||
source_map_getter: None,
|
||||
worker_type: args.worker_type,
|
||||
maybe_inspector_server: None,
|
||||
get_error_class_fn: Some(&get_error_class_name),
|
||||
blob_store: ps.blob_store.clone(),
|
||||
broadcast_channel: ps.broadcast_channel.clone(),
|
||||
shared_array_buffer_store: Some(ps.shared_array_buffer_store.clone()),
|
||||
compiled_wasm_module_store: Some(ps.compiled_wasm_module_store.clone()),
|
||||
cache_storage_dir: None,
|
||||
stdio: Default::default(),
|
||||
};
|
||||
|
||||
WebWorker::bootstrap_from_options(
|
||||
args.name,
|
||||
args.permissions,
|
||||
args.main_module,
|
||||
args.worker_id,
|
||||
options,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn run(
|
||||
eszip: eszip::EszipV2,
|
||||
metadata: Metadata,
|
||||
) -> Result<(), AnyError> {
|
||||
let flags = metadata_to_flags(&metadata);
|
||||
let main_module = &metadata.entrypoint;
|
||||
let ps = ProcState::from_flags(flags).await?;
|
||||
let permissions = PermissionsContainer::new(Permissions::from_options(
|
||||
&metadata.permissions,
|
||||
)?);
|
||||
let module_loader = Rc::new(EmbeddedModuleLoader {
|
||||
eszip: Arc::new(eszip),
|
||||
maybe_import_map_resolver: metadata.maybe_import_map.map(
|
||||
|(base, source)| {
|
||||
Arc::new(CliGraphResolver::new(
|
||||
None,
|
||||
Some(Arc::new(
|
||||
parse_from_json(&base, &source).unwrap().import_map,
|
||||
)),
|
||||
false,
|
||||
ps.npm_api.clone(),
|
||||
ps.npm_resolution.clone(),
|
||||
ps.package_json_deps_installer.clone(),
|
||||
))
|
||||
},
|
||||
),
|
||||
});
|
||||
let create_web_worker_cb = create_web_worker_callback(&ps, &module_loader);
|
||||
let web_worker_cb = web_worker_callback();
|
||||
|
||||
v8_set_flags(construct_v8_flags(&metadata.v8_flags, vec![]));
|
||||
|
||||
let options = WorkerOptions {
|
||||
bootstrap: BootstrapOptions {
|
||||
args: metadata.argv,
|
||||
cpu_count: std::thread::available_parallelism()
|
||||
.map(|p| p.get())
|
||||
.unwrap_or(1),
|
||||
debug_flag: metadata
|
||||
.log_level
|
||||
.map(|l| l == Level::Debug)
|
||||
.unwrap_or(false),
|
||||
enable_testing_features: false,
|
||||
locale: deno_core::v8::icu::get_language_tag(),
|
||||
location: metadata.location,
|
||||
no_color: !colors::use_color(),
|
||||
is_tty: colors::is_tty(),
|
||||
runtime_version: version::deno().to_string(),
|
||||
ts_version: version::TYPESCRIPT.to_string(),
|
||||
unstable: metadata.unstable,
|
||||
user_agent: version::get_user_agent().to_string(),
|
||||
inspect: ps.options.is_inspecting(),
|
||||
},
|
||||
extensions: ops::cli_exts(ps.npm_resolver.clone()),
|
||||
startup_snapshot: Some(crate::js::deno_isolate_init()),
|
||||
unsafely_ignore_certificate_errors: metadata
|
||||
.unsafely_ignore_certificate_errors,
|
||||
root_cert_store: Some(ps.root_cert_store.clone()),
|
||||
seed: metadata.seed,
|
||||
source_map_getter: None,
|
||||
format_js_error_fn: Some(Arc::new(format_js_error)),
|
||||
create_web_worker_cb,
|
||||
web_worker_preload_module_cb: web_worker_cb.clone(),
|
||||
web_worker_pre_execute_module_cb: web_worker_cb,
|
||||
maybe_inspector_server: None,
|
||||
should_break_on_first_statement: false,
|
||||
should_wait_for_inspector_session: false,
|
||||
module_loader,
|
||||
npm_resolver: None, // not currently supported
|
||||
get_error_class_fn: Some(&get_error_class_name),
|
||||
cache_storage_dir: None,
|
||||
origin_storage_dir: None,
|
||||
blob_store: ps.blob_store.clone(),
|
||||
broadcast_channel: ps.broadcast_channel.clone(),
|
||||
shared_array_buffer_store: Some(ps.shared_array_buffer_store.clone()),
|
||||
compiled_wasm_module_store: Some(ps.compiled_wasm_module_store.clone()),
|
||||
stdio: Default::default(),
|
||||
};
|
||||
let mut worker = MainWorker::bootstrap_from_options(
|
||||
main_module.clone(),
|
||||
permissions,
|
||||
options,
|
||||
);
|
||||
worker.execute_main_module(main_module).await?;
|
||||
worker.dispatch_load_event(located_script_name!())?;
|
||||
|
||||
loop {
|
||||
worker.run_event_loop(false).await?;
|
||||
if !worker.dispatch_beforeunload_event(located_script_name!())? {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
worker.dispatch_unload_event(located_script_name!())?;
|
||||
std::process::exit(0);
|
||||
}
|
||||
|
||||
fn get_error_class_name(e: &AnyError) -> &'static str {
|
||||
deno_runtime::errors::get_error_class_name(e).unwrap_or_else(|| {
|
||||
panic!(
|
||||
"Error '{}' contains boxed error of unsupported type:{}",
|
||||
e,
|
||||
e.chain().map(|e| format!("\n {e:?}")).collect::<String>()
|
||||
);
|
||||
})
|
||||
}
|
562
cli/standalone/binary.rs
Normal file
562
cli/standalone/binary.rs
Normal file
|
@ -0,0 +1,562 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
use std::env::current_exe;
|
||||
use std::io::Read;
|
||||
use std::io::Seek;
|
||||
use std::io::SeekFrom;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::io::AllowStdIo;
|
||||
use deno_core::futures::AsyncReadExt;
|
||||
use deno_core::futures::AsyncSeekExt;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::url::Url;
|
||||
use deno_npm::registry::PackageDepNpmSchemeValueParseError;
|
||||
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_runtime::permissions::PermissionsOptions;
|
||||
use deno_semver::npm::NpmPackageReq;
|
||||
use deno_semver::npm::NpmVersionReqSpecifierParseError;
|
||||
use log::Level;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::args::package_json::PackageJsonDepValueParseError;
|
||||
use crate::args::package_json::PackageJsonDeps;
|
||||
use crate::args::CaData;
|
||||
use crate::args::CliOptions;
|
||||
use crate::args::CompileFlags;
|
||||
use crate::args::PackageJsonDepsProvider;
|
||||
use crate::cache::DenoDir;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::npm::CliNpmRegistryApi;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::npm::NpmCache;
|
||||
use crate::npm::NpmResolution;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
use crate::util::progress_bar::ProgressBarStyle;
|
||||
|
||||
use super::virtual_fs::FileBackedVfs;
|
||||
use super::virtual_fs::VfsBuilder;
|
||||
use super::virtual_fs::VfsRoot;
|
||||
use super::virtual_fs::VirtualDirectory;
|
||||
|
||||
const MAGIC_TRAILER: &[u8; 8] = b"d3n0l4nd";
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
enum SerializablePackageJsonDepValueParseError {
|
||||
SchemeValue(String),
|
||||
Specifier(String),
|
||||
Unsupported { scheme: String },
|
||||
}
|
||||
|
||||
impl SerializablePackageJsonDepValueParseError {
|
||||
pub fn from_err(err: PackageJsonDepValueParseError) -> Self {
|
||||
match err {
|
||||
PackageJsonDepValueParseError::SchemeValue(err) => {
|
||||
Self::SchemeValue(err.value)
|
||||
}
|
||||
PackageJsonDepValueParseError::Specifier(err) => {
|
||||
Self::Specifier(err.source.to_string())
|
||||
}
|
||||
PackageJsonDepValueParseError::Unsupported { scheme } => {
|
||||
Self::Unsupported { scheme }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_err(self) -> PackageJsonDepValueParseError {
|
||||
match self {
|
||||
SerializablePackageJsonDepValueParseError::SchemeValue(value) => {
|
||||
PackageJsonDepValueParseError::SchemeValue(
|
||||
PackageDepNpmSchemeValueParseError { value },
|
||||
)
|
||||
}
|
||||
SerializablePackageJsonDepValueParseError::Specifier(source) => {
|
||||
PackageJsonDepValueParseError::Specifier(
|
||||
NpmVersionReqSpecifierParseError {
|
||||
source: monch::ParseErrorFailureError::new(source),
|
||||
},
|
||||
)
|
||||
}
|
||||
SerializablePackageJsonDepValueParseError::Unsupported { scheme } => {
|
||||
PackageJsonDepValueParseError::Unsupported { scheme }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct SerializablePackageJsonDeps(
|
||||
BTreeMap<
|
||||
String,
|
||||
Result<NpmPackageReq, SerializablePackageJsonDepValueParseError>,
|
||||
>,
|
||||
);
|
||||
|
||||
impl SerializablePackageJsonDeps {
|
||||
pub fn from_deps(deps: PackageJsonDeps) -> Self {
|
||||
Self(
|
||||
deps
|
||||
.into_iter()
|
||||
.map(|(name, req)| {
|
||||
let res =
|
||||
req.map_err(SerializablePackageJsonDepValueParseError::from_err);
|
||||
(name, res)
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn into_deps(self) -> PackageJsonDeps {
|
||||
self
|
||||
.0
|
||||
.into_iter()
|
||||
.map(|(name, res)| (name, res.map_err(|err| err.into_err())))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub struct Metadata {
|
||||
pub argv: Vec<String>,
|
||||
pub unstable: bool,
|
||||
pub seed: Option<u64>,
|
||||
pub permissions: PermissionsOptions,
|
||||
pub location: Option<Url>,
|
||||
pub v8_flags: Vec<String>,
|
||||
pub log_level: Option<Level>,
|
||||
pub ca_stores: Option<Vec<String>>,
|
||||
pub ca_data: Option<Vec<u8>>,
|
||||
pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
|
||||
pub maybe_import_map: Option<(Url, String)>,
|
||||
pub entrypoint: ModuleSpecifier,
|
||||
/// Whether this uses a node_modules directory (true) or the global cache (false).
|
||||
pub node_modules_dir: bool,
|
||||
pub npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
|
||||
pub package_json_deps: Option<SerializablePackageJsonDeps>,
|
||||
}
|
||||
|
||||
pub fn load_npm_vfs(root_dir_path: PathBuf) -> Result<FileBackedVfs, AnyError> {
|
||||
let file_path = current_exe().unwrap();
|
||||
let mut file = std::fs::File::open(file_path)?;
|
||||
file.seek(SeekFrom::End(-(TRAILER_SIZE as i64)))?;
|
||||
let mut trailer = [0; TRAILER_SIZE];
|
||||
file.read_exact(&mut trailer)?;
|
||||
let trailer = Trailer::parse(&trailer)?.unwrap();
|
||||
file.seek(SeekFrom::Start(trailer.npm_vfs_pos))?;
|
||||
let mut vfs_data = vec![0; trailer.npm_vfs_len() as usize];
|
||||
file.read_exact(&mut vfs_data)?;
|
||||
let mut dir: VirtualDirectory = serde_json::from_slice(&vfs_data)?;
|
||||
|
||||
// align the name of the directory with the root dir
|
||||
dir.name = root_dir_path
|
||||
.file_name()
|
||||
.unwrap()
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
|
||||
let fs_root = VfsRoot {
|
||||
dir,
|
||||
root_path: root_dir_path,
|
||||
start_file_offset: trailer.npm_files_pos,
|
||||
};
|
||||
Ok(FileBackedVfs::new(file, fs_root))
|
||||
}
|
||||
|
||||
fn write_binary_bytes(
|
||||
writer: &mut impl Write,
|
||||
original_bin: Vec<u8>,
|
||||
metadata: &Metadata,
|
||||
eszip: eszip::EszipV2,
|
||||
npm_vfs: Option<&VirtualDirectory>,
|
||||
npm_files: &Vec<Vec<u8>>,
|
||||
) -> Result<(), AnyError> {
|
||||
let metadata = serde_json::to_string(metadata)?.as_bytes().to_vec();
|
||||
let npm_vfs = serde_json::to_string(&npm_vfs)?.as_bytes().to_vec();
|
||||
let eszip_archive = eszip.into_bytes();
|
||||
|
||||
writer.write_all(&original_bin)?;
|
||||
writer.write_all(&eszip_archive)?;
|
||||
writer.write_all(&metadata)?;
|
||||
writer.write_all(&npm_vfs)?;
|
||||
for file in npm_files {
|
||||
writer.write_all(file)?;
|
||||
}
|
||||
|
||||
// write the trailer, which includes the positions
|
||||
// of the data blocks in the file
|
||||
writer.write_all(&{
|
||||
let eszip_pos = original_bin.len() as u64;
|
||||
let metadata_pos = eszip_pos + (eszip_archive.len() as u64);
|
||||
let npm_vfs_pos = metadata_pos + (metadata.len() as u64);
|
||||
let npm_files_pos = npm_vfs_pos + (npm_vfs.len() as u64);
|
||||
Trailer {
|
||||
eszip_pos,
|
||||
metadata_pos,
|
||||
npm_vfs_pos,
|
||||
npm_files_pos,
|
||||
}
|
||||
.as_bytes()
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn is_standalone_binary(exe_path: &Path) -> bool {
|
||||
let Ok(mut output_file) = std::fs::File::open(exe_path) else {
|
||||
return false;
|
||||
};
|
||||
if output_file
|
||||
.seek(SeekFrom::End(-(TRAILER_SIZE as i64)))
|
||||
.is_err()
|
||||
{
|
||||
// This seek may fail because the file is too small to possibly be
|
||||
// `deno compile` output.
|
||||
return false;
|
||||
}
|
||||
let mut trailer = [0; TRAILER_SIZE];
|
||||
if output_file.read_exact(&mut trailer).is_err() {
|
||||
return false;
|
||||
};
|
||||
let (magic_trailer, _) = trailer.split_at(8);
|
||||
magic_trailer == MAGIC_TRAILER
|
||||
}
|
||||
|
||||
/// This function will try to run this binary as a standalone binary
|
||||
/// produced by `deno compile`. It determines if this is a standalone
|
||||
/// binary by skipping over the trailer width at the end of the file,
|
||||
/// then checking for the magic trailer string `d3n0l4nd`. If found,
|
||||
/// the bundle is executed. If not, this function exits with `Ok(None)`.
|
||||
pub async fn extract_standalone(
|
||||
exe_path: &Path,
|
||||
cli_args: Vec<String>,
|
||||
) -> Result<Option<(Metadata, eszip::EszipV2)>, AnyError> {
|
||||
let file = std::fs::File::open(exe_path)?;
|
||||
|
||||
let mut bufreader =
|
||||
deno_core::futures::io::BufReader::new(AllowStdIo::new(file));
|
||||
|
||||
let _trailer_pos = bufreader
|
||||
.seek(SeekFrom::End(-(TRAILER_SIZE as i64)))
|
||||
.await?;
|
||||
let mut trailer = [0; TRAILER_SIZE];
|
||||
bufreader.read_exact(&mut trailer).await?;
|
||||
let trailer = match Trailer::parse(&trailer)? {
|
||||
None => return Ok(None),
|
||||
Some(trailer) => trailer,
|
||||
};
|
||||
|
||||
bufreader.seek(SeekFrom::Start(trailer.eszip_pos)).await?;
|
||||
|
||||
let (eszip, loader) = eszip::EszipV2::parse(bufreader)
|
||||
.await
|
||||
.context("Failed to parse eszip header")?;
|
||||
|
||||
let mut bufreader = loader.await.context("Failed to parse eszip archive")?;
|
||||
|
||||
bufreader
|
||||
.seek(SeekFrom::Start(trailer.metadata_pos))
|
||||
.await?;
|
||||
|
||||
let mut metadata = String::new();
|
||||
|
||||
bufreader
|
||||
.take(trailer.metadata_len())
|
||||
.read_to_string(&mut metadata)
|
||||
.await
|
||||
.context("Failed to read metadata from the current executable")?;
|
||||
|
||||
let mut metadata: Metadata = serde_json::from_str(&metadata).unwrap();
|
||||
metadata.argv.append(&mut cli_args[1..].to_vec());
|
||||
|
||||
Ok(Some((metadata, eszip)))
|
||||
}
|
||||
|
||||
const TRAILER_SIZE: usize = std::mem::size_of::<Trailer>() + 8; // 8 bytes for the magic trailer string
|
||||
|
||||
struct Trailer {
|
||||
eszip_pos: u64,
|
||||
metadata_pos: u64,
|
||||
npm_vfs_pos: u64,
|
||||
npm_files_pos: u64,
|
||||
}
|
||||
|
||||
impl Trailer {
|
||||
pub fn parse(trailer: &[u8]) -> Result<Option<Trailer>, AnyError> {
|
||||
let (magic_trailer, rest) = trailer.split_at(8);
|
||||
if magic_trailer != MAGIC_TRAILER {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let (eszip_archive_pos, rest) = rest.split_at(8);
|
||||
let (metadata_pos, rest) = rest.split_at(8);
|
||||
let (npm_vfs_pos, npm_files_pos) = rest.split_at(8);
|
||||
let eszip_archive_pos = u64_from_bytes(eszip_archive_pos)?;
|
||||
let metadata_pos = u64_from_bytes(metadata_pos)?;
|
||||
let npm_vfs_pos = u64_from_bytes(npm_vfs_pos)?;
|
||||
let npm_files_pos = u64_from_bytes(npm_files_pos)?;
|
||||
Ok(Some(Trailer {
|
||||
eszip_pos: eszip_archive_pos,
|
||||
metadata_pos,
|
||||
npm_vfs_pos,
|
||||
npm_files_pos,
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn metadata_len(&self) -> u64 {
|
||||
self.npm_vfs_pos - self.metadata_pos
|
||||
}
|
||||
|
||||
pub fn npm_vfs_len(&self) -> u64 {
|
||||
self.npm_files_pos - self.npm_vfs_pos
|
||||
}
|
||||
|
||||
pub fn as_bytes(&self) -> Vec<u8> {
|
||||
let mut trailer = MAGIC_TRAILER.to_vec();
|
||||
trailer.write_all(&self.eszip_pos.to_be_bytes()).unwrap();
|
||||
trailer.write_all(&self.metadata_pos.to_be_bytes()).unwrap();
|
||||
trailer.write_all(&self.npm_vfs_pos.to_be_bytes()).unwrap();
|
||||
trailer
|
||||
.write_all(&self.npm_files_pos.to_be_bytes())
|
||||
.unwrap();
|
||||
trailer
|
||||
}
|
||||
}
|
||||
|
||||
fn u64_from_bytes(arr: &[u8]) -> Result<u64, AnyError> {
|
||||
let fixed_arr: &[u8; 8] = arr
|
||||
.try_into()
|
||||
.context("Failed to convert the buffer into a fixed-size array")?;
|
||||
Ok(u64::from_be_bytes(*fixed_arr))
|
||||
}
|
||||
|
||||
pub struct DenoCompileBinaryWriter<'a> {
|
||||
file_fetcher: &'a FileFetcher,
|
||||
client: &'a HttpClient,
|
||||
deno_dir: &'a DenoDir,
|
||||
npm_api: &'a CliNpmRegistryApi,
|
||||
npm_cache: &'a NpmCache,
|
||||
npm_resolution: &'a NpmResolution,
|
||||
npm_resolver: &'a CliNpmResolver,
|
||||
npm_system_info: NpmSystemInfo,
|
||||
package_json_deps_provider: &'a PackageJsonDepsProvider,
|
||||
}
|
||||
|
||||
impl<'a> DenoCompileBinaryWriter<'a> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
file_fetcher: &'a FileFetcher,
|
||||
client: &'a HttpClient,
|
||||
deno_dir: &'a DenoDir,
|
||||
npm_api: &'a CliNpmRegistryApi,
|
||||
npm_cache: &'a NpmCache,
|
||||
npm_resolution: &'a NpmResolution,
|
||||
npm_resolver: &'a CliNpmResolver,
|
||||
npm_system_info: NpmSystemInfo,
|
||||
package_json_deps_provider: &'a PackageJsonDepsProvider,
|
||||
) -> Self {
|
||||
Self {
|
||||
file_fetcher,
|
||||
client,
|
||||
deno_dir,
|
||||
npm_api,
|
||||
npm_cache,
|
||||
npm_resolver,
|
||||
npm_system_info,
|
||||
npm_resolution,
|
||||
package_json_deps_provider,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn write_bin(
|
||||
&self,
|
||||
writer: &mut impl Write,
|
||||
eszip: eszip::EszipV2,
|
||||
module_specifier: &ModuleSpecifier,
|
||||
compile_flags: &CompileFlags,
|
||||
cli_options: &CliOptions,
|
||||
) -> Result<(), AnyError> {
|
||||
// Select base binary based on target
|
||||
let original_binary =
|
||||
self.get_base_binary(compile_flags.target.clone()).await?;
|
||||
|
||||
self
|
||||
.write_standalone_binary(
|
||||
writer,
|
||||
original_binary,
|
||||
eszip,
|
||||
module_specifier,
|
||||
cli_options,
|
||||
compile_flags,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn get_base_binary(
|
||||
&self,
|
||||
target: Option<String>,
|
||||
) -> Result<Vec<u8>, AnyError> {
|
||||
if target.is_none() {
|
||||
let path = std::env::current_exe()?;
|
||||
return Ok(std::fs::read(path)?);
|
||||
}
|
||||
|
||||
let target = target.unwrap_or_else(|| env!("TARGET").to_string());
|
||||
let binary_name = format!("deno-{target}.zip");
|
||||
|
||||
let binary_path_suffix = if crate::version::is_canary() {
|
||||
format!("canary/{}/{}", crate::version::GIT_COMMIT_HASH, binary_name)
|
||||
} else {
|
||||
format!("release/v{}/{}", env!("CARGO_PKG_VERSION"), binary_name)
|
||||
};
|
||||
|
||||
let download_directory = self.deno_dir.dl_folder_path();
|
||||
let binary_path = download_directory.join(&binary_path_suffix);
|
||||
|
||||
if !binary_path.exists() {
|
||||
self
|
||||
.download_base_binary(&download_directory, &binary_path_suffix)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let archive_data = std::fs::read(binary_path)?;
|
||||
let temp_dir = tempfile::TempDir::new()?;
|
||||
let base_binary_path = crate::tools::upgrade::unpack_into_dir(
|
||||
archive_data,
|
||||
target.contains("windows"),
|
||||
&temp_dir,
|
||||
)?;
|
||||
let base_binary = std::fs::read(base_binary_path)?;
|
||||
drop(temp_dir); // delete the temp dir
|
||||
Ok(base_binary)
|
||||
}
|
||||
|
||||
async fn download_base_binary(
|
||||
&self,
|
||||
output_directory: &Path,
|
||||
binary_path_suffix: &str,
|
||||
) -> Result<(), AnyError> {
|
||||
let download_url = format!("https://dl.deno.land/{binary_path_suffix}");
|
||||
let maybe_bytes = {
|
||||
let progress_bars = ProgressBar::new(ProgressBarStyle::DownloadBars);
|
||||
let progress = progress_bars.update(&download_url);
|
||||
|
||||
self
|
||||
.client
|
||||
.download_with_progress(download_url, &progress)
|
||||
.await?
|
||||
};
|
||||
let bytes = match maybe_bytes {
|
||||
Some(bytes) => bytes,
|
||||
None => {
|
||||
log::info!("Download could not be found, aborting");
|
||||
std::process::exit(1)
|
||||
}
|
||||
};
|
||||
|
||||
std::fs::create_dir_all(output_directory)?;
|
||||
let output_path = output_directory.join(binary_path_suffix);
|
||||
std::fs::create_dir_all(output_path.parent().unwrap())?;
|
||||
tokio::fs::write(output_path, bytes).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// This functions creates a standalone deno binary by appending a bundle
|
||||
/// and magic trailer to the currently executing binary.
|
||||
async fn write_standalone_binary(
|
||||
&self,
|
||||
writer: &mut impl Write,
|
||||
original_bin: Vec<u8>,
|
||||
eszip: eszip::EszipV2,
|
||||
entrypoint: &ModuleSpecifier,
|
||||
cli_options: &CliOptions,
|
||||
compile_flags: &CompileFlags,
|
||||
) -> Result<(), AnyError> {
|
||||
let ca_data = match cli_options.ca_data() {
|
||||
Some(CaData::File(ca_file)) => Some(
|
||||
std::fs::read(ca_file)
|
||||
.with_context(|| format!("Reading: {ca_file}"))?,
|
||||
),
|
||||
Some(CaData::Bytes(bytes)) => Some(bytes.clone()),
|
||||
None => None,
|
||||
};
|
||||
let maybe_import_map = cli_options
|
||||
.resolve_import_map(self.file_fetcher)
|
||||
.await?
|
||||
.map(|import_map| (import_map.base_url().clone(), import_map.to_json()));
|
||||
let (npm_snapshot, npm_vfs, npm_files) =
|
||||
if self.npm_resolution.has_packages() {
|
||||
let (root_dir, files) = self.build_vfs()?.into_dir_and_files();
|
||||
let snapshot = self.npm_resolution.serialized_snapshot();
|
||||
(Some(snapshot), Some(root_dir), files)
|
||||
} else {
|
||||
(None, None, Vec::new())
|
||||
};
|
||||
|
||||
let metadata = Metadata {
|
||||
argv: compile_flags.args.clone(),
|
||||
unstable: cli_options.unstable(),
|
||||
seed: cli_options.seed(),
|
||||
location: cli_options.location_flag().clone(),
|
||||
permissions: cli_options.permissions_options(),
|
||||
v8_flags: cli_options.v8_flags().clone(),
|
||||
unsafely_ignore_certificate_errors: cli_options
|
||||
.unsafely_ignore_certificate_errors()
|
||||
.clone(),
|
||||
log_level: cli_options.log_level(),
|
||||
ca_stores: cli_options.ca_stores().clone(),
|
||||
ca_data,
|
||||
entrypoint: entrypoint.clone(),
|
||||
maybe_import_map,
|
||||
node_modules_dir: self.npm_resolver.node_modules_path().is_some(),
|
||||
npm_snapshot,
|
||||
package_json_deps: self
|
||||
.package_json_deps_provider
|
||||
.deps()
|
||||
.map(|deps| SerializablePackageJsonDeps::from_deps(deps.clone())),
|
||||
};
|
||||
|
||||
write_binary_bytes(
|
||||
writer,
|
||||
original_bin,
|
||||
&metadata,
|
||||
eszip,
|
||||
npm_vfs.as_ref(),
|
||||
&npm_files,
|
||||
)
|
||||
}
|
||||
|
||||
fn build_vfs(&self) -> Result<VfsBuilder, AnyError> {
|
||||
if let Some(node_modules_path) = self.npm_resolver.node_modules_path() {
|
||||
let mut builder = VfsBuilder::new(node_modules_path.clone())?;
|
||||
builder.add_dir_recursive(&node_modules_path)?;
|
||||
Ok(builder)
|
||||
} else {
|
||||
// DO NOT include the user's registry url as it may contain credentials,
|
||||
// but also don't make this dependent on the registry url
|
||||
let registry_url = self.npm_api.base_url();
|
||||
let root_path = self.npm_cache.registry_folder(registry_url);
|
||||
let mut builder = VfsBuilder::new(root_path)?;
|
||||
for package in self
|
||||
.npm_resolution
|
||||
.all_system_packages(&self.npm_system_info)
|
||||
{
|
||||
let folder = self
|
||||
.npm_resolver
|
||||
.resolve_pkg_folder_from_pkg_id(&package.id)?;
|
||||
builder.add_dir_recursive(&folder)?;
|
||||
}
|
||||
// overwrite the root directory's name to obscure the user's registry url
|
||||
builder.set_root_dir_name("node_modules".to_string());
|
||||
Ok(builder)
|
||||
}
|
||||
}
|
||||
}
|
337
cli/standalone/file_system.rs
Normal file
337
cli/standalone/file_system.rs
Normal file
|
@ -0,0 +1,337 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_fs::FsDirEntry;
|
||||
use deno_runtime::deno_fs::FsFileType;
|
||||
use deno_runtime::deno_fs::OpenOptions;
|
||||
use deno_runtime::deno_fs::RealFs;
|
||||
use deno_runtime::deno_io::fs::File;
|
||||
use deno_runtime::deno_io::fs::FsError;
|
||||
use deno_runtime::deno_io::fs::FsResult;
|
||||
use deno_runtime::deno_io::fs::FsStat;
|
||||
|
||||
use super::virtual_fs::FileBackedVfs;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DenoCompileFileSystem(Arc<FileBackedVfs>);
|
||||
|
||||
impl DenoCompileFileSystem {
|
||||
pub fn new(vfs: FileBackedVfs) -> Self {
|
||||
Self(Arc::new(vfs))
|
||||
}
|
||||
|
||||
fn error_if_in_vfs(&self, path: &Path) -> FsResult<()> {
|
||||
if self.0.is_path_within(path) {
|
||||
Err(FsError::NotSupported)
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn copy_to_real_path(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
|
||||
let old_file = self.0.file_entry(oldpath)?;
|
||||
let old_file_bytes = self.0.read_file_all(old_file)?;
|
||||
RealFs.write_file_sync(
|
||||
newpath,
|
||||
OpenOptions {
|
||||
read: false,
|
||||
write: true,
|
||||
create: true,
|
||||
truncate: true,
|
||||
append: false,
|
||||
create_new: false,
|
||||
mode: None,
|
||||
},
|
||||
&old_file_bytes,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait(?Send)]
|
||||
impl FileSystem for DenoCompileFileSystem {
|
||||
fn cwd(&self) -> FsResult<PathBuf> {
|
||||
RealFs.cwd()
|
||||
}
|
||||
|
||||
fn tmp_dir(&self) -> FsResult<PathBuf> {
|
||||
RealFs.tmp_dir()
|
||||
}
|
||||
|
||||
fn chdir(&self, path: &Path) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.chdir(path)
|
||||
}
|
||||
|
||||
fn umask(&self, mask: Option<u32>) -> FsResult<u32> {
|
||||
RealFs.umask(mask)
|
||||
}
|
||||
|
||||
fn open_sync(
|
||||
&self,
|
||||
path: &Path,
|
||||
options: OpenOptions,
|
||||
) -> FsResult<Rc<dyn File>> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(self.0.open_file(path)?)
|
||||
} else {
|
||||
RealFs.open_sync(path, options)
|
||||
}
|
||||
}
|
||||
async fn open_async(
|
||||
&self,
|
||||
path: PathBuf,
|
||||
options: OpenOptions,
|
||||
) -> FsResult<Rc<dyn File>> {
|
||||
if self.0.is_path_within(&path) {
|
||||
Ok(self.0.open_file(&path)?)
|
||||
} else {
|
||||
RealFs.open_async(path, options).await
|
||||
}
|
||||
}
|
||||
|
||||
fn mkdir_sync(
|
||||
&self,
|
||||
path: &Path,
|
||||
recursive: bool,
|
||||
mode: u32,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.mkdir_sync(path, recursive, mode)
|
||||
}
|
||||
async fn mkdir_async(
|
||||
&self,
|
||||
path: PathBuf,
|
||||
recursive: bool,
|
||||
mode: u32,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs.mkdir_async(path, recursive, mode).await
|
||||
}
|
||||
|
||||
fn chmod_sync(&self, path: &Path, mode: u32) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.chmod_sync(path, mode)
|
||||
}
|
||||
async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs.chmod_async(path, mode).await
|
||||
}
|
||||
|
||||
fn chown_sync(
|
||||
&self,
|
||||
path: &Path,
|
||||
uid: Option<u32>,
|
||||
gid: Option<u32>,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.chown_sync(path, uid, gid)
|
||||
}
|
||||
async fn chown_async(
|
||||
&self,
|
||||
path: PathBuf,
|
||||
uid: Option<u32>,
|
||||
gid: Option<u32>,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs.chown_async(path, uid, gid).await
|
||||
}
|
||||
|
||||
fn remove_sync(&self, path: &Path, recursive: bool) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.remove_sync(path, recursive)
|
||||
}
|
||||
async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs.remove_async(path, recursive).await
|
||||
}
|
||||
|
||||
fn copy_file_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
|
||||
self.error_if_in_vfs(newpath)?;
|
||||
if self.0.is_path_within(oldpath) {
|
||||
self.copy_to_real_path(oldpath, newpath)
|
||||
} else {
|
||||
RealFs.copy_file_sync(oldpath, newpath)
|
||||
}
|
||||
}
|
||||
async fn copy_file_async(
|
||||
&self,
|
||||
oldpath: PathBuf,
|
||||
newpath: PathBuf,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&newpath)?;
|
||||
if self.0.is_path_within(&oldpath) {
|
||||
let fs = self.clone();
|
||||
tokio::task::spawn_blocking(move || {
|
||||
fs.copy_to_real_path(&oldpath, &newpath)
|
||||
})
|
||||
.await?
|
||||
} else {
|
||||
RealFs.copy_file_async(oldpath, newpath).await
|
||||
}
|
||||
}
|
||||
|
||||
fn stat_sync(&self, path: &Path) -> FsResult<FsStat> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(self.0.stat(path)?)
|
||||
} else {
|
||||
RealFs.stat_sync(path)
|
||||
}
|
||||
}
|
||||
async fn stat_async(&self, path: PathBuf) -> FsResult<FsStat> {
|
||||
if self.0.is_path_within(&path) {
|
||||
Ok(self.0.stat(&path)?)
|
||||
} else {
|
||||
RealFs.stat_async(path).await
|
||||
}
|
||||
}
|
||||
|
||||
fn lstat_sync(&self, path: &Path) -> FsResult<FsStat> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(self.0.lstat(path)?)
|
||||
} else {
|
||||
RealFs.lstat_sync(path)
|
||||
}
|
||||
}
|
||||
async fn lstat_async(&self, path: PathBuf) -> FsResult<FsStat> {
|
||||
if self.0.is_path_within(&path) {
|
||||
Ok(self.0.lstat(&path)?)
|
||||
} else {
|
||||
RealFs.lstat_async(path).await
|
||||
}
|
||||
}
|
||||
|
||||
fn realpath_sync(&self, path: &Path) -> FsResult<PathBuf> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(self.0.canonicalize(path)?)
|
||||
} else {
|
||||
RealFs.realpath_sync(path)
|
||||
}
|
||||
}
|
||||
async fn realpath_async(&self, path: PathBuf) -> FsResult<PathBuf> {
|
||||
if self.0.is_path_within(&path) {
|
||||
Ok(self.0.canonicalize(&path)?)
|
||||
} else {
|
||||
RealFs.realpath_async(path).await
|
||||
}
|
||||
}
|
||||
|
||||
fn read_dir_sync(&self, path: &Path) -> FsResult<Vec<FsDirEntry>> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(self.0.read_dir(path)?)
|
||||
} else {
|
||||
RealFs.read_dir_sync(path)
|
||||
}
|
||||
}
|
||||
async fn read_dir_async(&self, path: PathBuf) -> FsResult<Vec<FsDirEntry>> {
|
||||
if self.0.is_path_within(&path) {
|
||||
Ok(self.0.read_dir(&path)?)
|
||||
} else {
|
||||
RealFs.read_dir_async(path).await
|
||||
}
|
||||
}
|
||||
|
||||
fn rename_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
|
||||
self.error_if_in_vfs(oldpath)?;
|
||||
self.error_if_in_vfs(newpath)?;
|
||||
RealFs.rename_sync(oldpath, newpath)
|
||||
}
|
||||
async fn rename_async(
|
||||
&self,
|
||||
oldpath: PathBuf,
|
||||
newpath: PathBuf,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&oldpath)?;
|
||||
self.error_if_in_vfs(&newpath)?;
|
||||
RealFs.rename_async(oldpath, newpath).await
|
||||
}
|
||||
|
||||
fn link_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> {
|
||||
self.error_if_in_vfs(oldpath)?;
|
||||
self.error_if_in_vfs(newpath)?;
|
||||
RealFs.link_sync(oldpath, newpath)
|
||||
}
|
||||
async fn link_async(
|
||||
&self,
|
||||
oldpath: PathBuf,
|
||||
newpath: PathBuf,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&oldpath)?;
|
||||
self.error_if_in_vfs(&newpath)?;
|
||||
RealFs.link_async(oldpath, newpath).await
|
||||
}
|
||||
|
||||
fn symlink_sync(
|
||||
&self,
|
||||
oldpath: &Path,
|
||||
newpath: &Path,
|
||||
file_type: Option<FsFileType>,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(oldpath)?;
|
||||
self.error_if_in_vfs(newpath)?;
|
||||
RealFs.symlink_sync(oldpath, newpath, file_type)
|
||||
}
|
||||
async fn symlink_async(
|
||||
&self,
|
||||
oldpath: PathBuf,
|
||||
newpath: PathBuf,
|
||||
file_type: Option<FsFileType>,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&oldpath)?;
|
||||
self.error_if_in_vfs(&newpath)?;
|
||||
RealFs.symlink_async(oldpath, newpath, file_type).await
|
||||
}
|
||||
|
||||
fn read_link_sync(&self, path: &Path) -> FsResult<PathBuf> {
|
||||
if self.0.is_path_within(path) {
|
||||
Ok(self.0.read_link(path)?)
|
||||
} else {
|
||||
RealFs.read_link_sync(path)
|
||||
}
|
||||
}
|
||||
async fn read_link_async(&self, path: PathBuf) -> FsResult<PathBuf> {
|
||||
if self.0.is_path_within(&path) {
|
||||
Ok(self.0.read_link(&path)?)
|
||||
} else {
|
||||
RealFs.read_link_async(path).await
|
||||
}
|
||||
}
|
||||
|
||||
fn truncate_sync(&self, path: &Path, len: u64) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.truncate_sync(path, len)
|
||||
}
|
||||
async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs.truncate_async(path, len).await
|
||||
}
|
||||
|
||||
fn utime_sync(
|
||||
&self,
|
||||
path: &Path,
|
||||
atime_secs: i64,
|
||||
atime_nanos: u32,
|
||||
mtime_secs: i64,
|
||||
mtime_nanos: u32,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.utime_sync(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
|
||||
}
|
||||
async fn utime_async(
|
||||
&self,
|
||||
path: PathBuf,
|
||||
atime_secs: i64,
|
||||
atime_nanos: u32,
|
||||
mtime_secs: i64,
|
||||
mtime_nanos: u32,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs
|
||||
.utime_async(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
|
||||
.await
|
||||
}
|
||||
}
|
460
cli/standalone/mod.rs
Normal file
460
cli/standalone/mod.rs
Normal file
|
@ -0,0 +1,460 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::args::get_root_cert_store;
|
||||
use crate::args::npm_pkg_req_ref_to_binary_command;
|
||||
use crate::args::CaData;
|
||||
use crate::args::CacheSetting;
|
||||
use crate::args::PackageJsonDepsProvider;
|
||||
use crate::args::StorageKeyResolver;
|
||||
use crate::cache::Caches;
|
||||
use crate::cache::DenoDirProvider;
|
||||
use crate::cache::NodeAnalysisCache;
|
||||
use crate::file_fetcher::get_source_from_data_url;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::module_loader::CjsResolutionStore;
|
||||
use crate::module_loader::NpmModuleLoader;
|
||||
use crate::node::CliCjsEsmCodeAnalyzer;
|
||||
use crate::npm::create_npm_fs_resolver;
|
||||
use crate::npm::CliNpmRegistryApi;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::npm::NpmCache;
|
||||
use crate::npm::NpmResolution;
|
||||
use crate::resolver::MappedSpecifierResolver;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
use crate::util::progress_bar::ProgressBarStyle;
|
||||
use crate::util::v8::construct_v8_flags;
|
||||
use crate::worker::CliMainWorkerFactory;
|
||||
use crate::worker::CliMainWorkerOptions;
|
||||
use crate::worker::HasNodeSpecifierChecker;
|
||||
use crate::worker::ModuleLoaderFactory;
|
||||
use deno_ast::MediaType;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::type_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::FutureExt;
|
||||
use deno_core::v8_set_flags;
|
||||
use deno_core::ModuleLoader;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_core::ModuleType;
|
||||
use deno_core::ResolutionKind;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_runtime::deno_fs;
|
||||
use deno_runtime::deno_node::analyze::NodeCodeTranslator;
|
||||
use deno_runtime::deno_node::NodeResolver;
|
||||
use deno_runtime::deno_tls::rustls::RootCertStore;
|
||||
use deno_runtime::deno_tls::RootCertStoreProvider;
|
||||
use deno_runtime::deno_web::BlobStore;
|
||||
use deno_runtime::permissions::Permissions;
|
||||
use deno_runtime::permissions::PermissionsContainer;
|
||||
use deno_runtime::WorkerLogLevel;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use import_map::parse_from_json;
|
||||
use std::pin::Pin;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
mod binary;
|
||||
mod file_system;
|
||||
mod virtual_fs;
|
||||
|
||||
pub use binary::extract_standalone;
|
||||
pub use binary::is_standalone_binary;
|
||||
pub use binary::DenoCompileBinaryWriter;
|
||||
|
||||
use self::binary::load_npm_vfs;
|
||||
use self::binary::Metadata;
|
||||
use self::file_system::DenoCompileFileSystem;
|
||||
|
||||
struct SharedModuleLoaderState {
|
||||
eszip: eszip::EszipV2,
|
||||
mapped_specifier_resolver: MappedSpecifierResolver,
|
||||
npm_module_loader: Arc<NpmModuleLoader>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct EmbeddedModuleLoader {
|
||||
shared: Arc<SharedModuleLoaderState>,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
}
|
||||
|
||||
impl ModuleLoader for EmbeddedModuleLoader {
|
||||
fn resolve(
|
||||
&self,
|
||||
specifier: &str,
|
||||
referrer: &str,
|
||||
kind: ResolutionKind,
|
||||
) -> Result<ModuleSpecifier, AnyError> {
|
||||
// Try to follow redirects when resolving.
|
||||
let referrer = match self.shared.eszip.get_module(referrer) {
|
||||
Some(eszip::Module { ref specifier, .. }) => {
|
||||
ModuleSpecifier::parse(specifier)?
|
||||
}
|
||||
None => {
|
||||
let cwd = std::env::current_dir().context("Unable to get CWD")?;
|
||||
deno_core::resolve_url_or_path(referrer, &cwd)?
|
||||
}
|
||||
};
|
||||
|
||||
let permissions = if matches!(kind, ResolutionKind::DynamicImport) {
|
||||
&self.dynamic_permissions
|
||||
} else {
|
||||
&self.root_permissions
|
||||
};
|
||||
|
||||
if let Some(result) = self
|
||||
.shared
|
||||
.npm_module_loader
|
||||
.resolve_if_in_npm_package(specifier, &referrer, permissions)
|
||||
{
|
||||
return result;
|
||||
}
|
||||
|
||||
let maybe_mapped = self
|
||||
.shared
|
||||
.mapped_specifier_resolver
|
||||
.resolve(specifier, &referrer)?
|
||||
.into_specifier();
|
||||
|
||||
// npm specifier
|
||||
let specifier_text = maybe_mapped
|
||||
.as_ref()
|
||||
.map(|r| r.as_str())
|
||||
.unwrap_or(specifier);
|
||||
if let Ok(reference) = NpmPackageReqReference::from_str(specifier_text) {
|
||||
return self
|
||||
.shared
|
||||
.npm_module_loader
|
||||
.resolve_req_reference(&reference, permissions);
|
||||
}
|
||||
|
||||
match maybe_mapped {
|
||||
Some(resolved) => Ok(resolved),
|
||||
None => deno_core::resolve_import(specifier, referrer.as_str())
|
||||
.map_err(|err| err.into()),
|
||||
}
|
||||
}
|
||||
|
||||
fn load(
|
||||
&self,
|
||||
module_specifier: &ModuleSpecifier,
|
||||
maybe_referrer: Option<&ModuleSpecifier>,
|
||||
is_dynamic: bool,
|
||||
) -> Pin<Box<deno_core::ModuleSourceFuture>> {
|
||||
let is_data_uri = get_source_from_data_url(module_specifier).ok();
|
||||
let permissions = if is_dynamic {
|
||||
&self.dynamic_permissions
|
||||
} else {
|
||||
&self.root_permissions
|
||||
};
|
||||
|
||||
if let Some(result) =
|
||||
self.shared.npm_module_loader.load_sync_if_in_npm_package(
|
||||
module_specifier,
|
||||
maybe_referrer,
|
||||
permissions,
|
||||
)
|
||||
{
|
||||
return match result {
|
||||
Ok(code_source) => Box::pin(deno_core::futures::future::ready(Ok(
|
||||
deno_core::ModuleSource::new_with_redirect(
|
||||
match code_source.media_type {
|
||||
MediaType::Json => ModuleType::Json,
|
||||
_ => ModuleType::JavaScript,
|
||||
},
|
||||
code_source.code,
|
||||
module_specifier,
|
||||
&code_source.found_url,
|
||||
),
|
||||
))),
|
||||
Err(err) => Box::pin(deno_core::futures::future::ready(Err(err))),
|
||||
};
|
||||
}
|
||||
|
||||
let module = self
|
||||
.shared
|
||||
.eszip
|
||||
.get_module(module_specifier.as_str())
|
||||
.ok_or_else(|| {
|
||||
type_error(format!("Module not found: {}", module_specifier))
|
||||
});
|
||||
// TODO(mmastrac): This clone can probably be removed in the future if ModuleSpecifier is no longer a full-fledged URL
|
||||
let module_specifier = module_specifier.clone();
|
||||
|
||||
async move {
|
||||
if let Some((source, _)) = is_data_uri {
|
||||
return Ok(deno_core::ModuleSource::new(
|
||||
deno_core::ModuleType::JavaScript,
|
||||
source.into(),
|
||||
&module_specifier,
|
||||
));
|
||||
}
|
||||
|
||||
let module = module?;
|
||||
let code = module.source().await.unwrap_or_default();
|
||||
let code = std::str::from_utf8(&code)
|
||||
.map_err(|_| type_error("Module source is not utf-8"))?
|
||||
.to_owned()
|
||||
.into();
|
||||
|
||||
Ok(deno_core::ModuleSource::new(
|
||||
match module.kind {
|
||||
eszip::ModuleKind::JavaScript => ModuleType::JavaScript,
|
||||
eszip::ModuleKind::Json => ModuleType::Json,
|
||||
eszip::ModuleKind::Jsonc => {
|
||||
return Err(type_error("jsonc modules not supported"))
|
||||
}
|
||||
},
|
||||
code,
|
||||
&module_specifier,
|
||||
))
|
||||
}
|
||||
.boxed_local()
|
||||
}
|
||||
}
|
||||
|
||||
struct StandaloneModuleLoaderFactory {
|
||||
shared: Arc<SharedModuleLoaderState>,
|
||||
}
|
||||
|
||||
impl ModuleLoaderFactory for StandaloneModuleLoaderFactory {
|
||||
fn create_for_main(
|
||||
&self,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
) -> Rc<dyn ModuleLoader> {
|
||||
Rc::new(EmbeddedModuleLoader {
|
||||
shared: self.shared.clone(),
|
||||
root_permissions,
|
||||
dynamic_permissions,
|
||||
})
|
||||
}
|
||||
|
||||
fn create_for_worker(
|
||||
&self,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
) -> Rc<dyn ModuleLoader> {
|
||||
Rc::new(EmbeddedModuleLoader {
|
||||
shared: self.shared.clone(),
|
||||
root_permissions,
|
||||
dynamic_permissions,
|
||||
})
|
||||
}
|
||||
|
||||
fn create_source_map_getter(
|
||||
&self,
|
||||
) -> Option<Box<dyn deno_core::SourceMapGetter>> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
struct StandaloneHasNodeSpecifierChecker;
|
||||
|
||||
impl HasNodeSpecifierChecker for StandaloneHasNodeSpecifierChecker {
|
||||
fn has_node_specifier(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
struct StandaloneRootCertStoreProvider {
|
||||
ca_stores: Option<Vec<String>>,
|
||||
ca_data: Option<CaData>,
|
||||
cell: once_cell::sync::OnceCell<RootCertStore>,
|
||||
}
|
||||
|
||||
impl RootCertStoreProvider for StandaloneRootCertStoreProvider {
|
||||
fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> {
|
||||
self.cell.get_or_try_init(|| {
|
||||
get_root_cert_store(None, self.ca_stores.clone(), self.ca_data.clone())
|
||||
.map_err(|err| err.into())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn run(
|
||||
eszip: eszip::EszipV2,
|
||||
metadata: Metadata,
|
||||
) -> Result<(), AnyError> {
|
||||
let main_module = &metadata.entrypoint;
|
||||
let current_exe_path = std::env::current_exe().unwrap();
|
||||
let current_exe_name =
|
||||
current_exe_path.file_name().unwrap().to_string_lossy();
|
||||
let deno_dir_provider = Arc::new(DenoDirProvider::new(None));
|
||||
let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider {
|
||||
ca_stores: metadata.ca_stores,
|
||||
ca_data: metadata.ca_data.map(CaData::Bytes),
|
||||
cell: Default::default(),
|
||||
});
|
||||
let progress_bar = ProgressBar::new(ProgressBarStyle::TextOnly);
|
||||
let http_client = Arc::new(HttpClient::new(
|
||||
Some(root_cert_store_provider.clone()),
|
||||
metadata.unsafely_ignore_certificate_errors.clone(),
|
||||
));
|
||||
// use a dummy npm registry url
|
||||
let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap();
|
||||
let root_path = std::env::temp_dir()
|
||||
.join(format!("deno-compile-{}", current_exe_name))
|
||||
.join("node_modules");
|
||||
|
||||
let npm_cache = Arc::new(NpmCache::new(
|
||||
root_path.clone(),
|
||||
CacheSetting::Use,
|
||||
http_client.clone(),
|
||||
progress_bar.clone(),
|
||||
));
|
||||
let npm_api = Arc::new(CliNpmRegistryApi::new(
|
||||
npm_registry_url.clone(),
|
||||
npm_cache.clone(),
|
||||
http_client.clone(),
|
||||
progress_bar.clone(),
|
||||
));
|
||||
let (fs, vfs_root, node_modules_path, snapshot) = if let Some(snapshot) =
|
||||
metadata.npm_snapshot
|
||||
{
|
||||
let vfs_root_dir_path = if metadata.node_modules_dir {
|
||||
root_path
|
||||
} else {
|
||||
npm_cache.registry_folder(&npm_registry_url)
|
||||
};
|
||||
let vfs = load_npm_vfs(vfs_root_dir_path.clone())
|
||||
.context("Failed to load npm vfs.")?;
|
||||
let node_modules_path = if metadata.node_modules_dir {
|
||||
Some(vfs.root().to_path_buf())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
(
|
||||
Arc::new(DenoCompileFileSystem::new(vfs)) as Arc<dyn deno_fs::FileSystem>,
|
||||
Some(vfs_root_dir_path),
|
||||
node_modules_path,
|
||||
Some(snapshot.into_valid()?),
|
||||
)
|
||||
} else {
|
||||
(
|
||||
Arc::new(deno_fs::RealFs) as Arc<dyn deno_fs::FileSystem>,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
)
|
||||
};
|
||||
let npm_resolution = Arc::new(NpmResolution::from_serialized(
|
||||
npm_api.clone(),
|
||||
snapshot,
|
||||
None,
|
||||
));
|
||||
let has_node_modules_dir = node_modules_path.is_some();
|
||||
let npm_fs_resolver = create_npm_fs_resolver(
|
||||
fs.clone(),
|
||||
npm_cache,
|
||||
&progress_bar,
|
||||
npm_registry_url,
|
||||
npm_resolution.clone(),
|
||||
node_modules_path,
|
||||
NpmSystemInfo::default(),
|
||||
);
|
||||
let npm_resolver = Arc::new(CliNpmResolver::new(
|
||||
fs.clone(),
|
||||
npm_resolution.clone(),
|
||||
npm_fs_resolver,
|
||||
None,
|
||||
));
|
||||
let node_resolver =
|
||||
Arc::new(NodeResolver::new(fs.clone(), npm_resolver.clone()));
|
||||
let cjs_resolutions = Arc::new(CjsResolutionStore::default());
|
||||
let cache_db = Caches::new(deno_dir_provider.clone());
|
||||
let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db());
|
||||
let cjs_esm_code_analyzer = CliCjsEsmCodeAnalyzer::new(node_analysis_cache);
|
||||
let node_code_translator = Arc::new(NodeCodeTranslator::new(
|
||||
cjs_esm_code_analyzer,
|
||||
fs.clone(),
|
||||
node_resolver.clone(),
|
||||
npm_resolver.clone(),
|
||||
));
|
||||
let package_json_deps_provider = Arc::new(PackageJsonDepsProvider::new(
|
||||
metadata
|
||||
.package_json_deps
|
||||
.map(|serialized| serialized.into_deps()),
|
||||
));
|
||||
let maybe_import_map = metadata.maybe_import_map.map(|(base, source)| {
|
||||
Arc::new(parse_from_json(&base, &source).unwrap().import_map)
|
||||
});
|
||||
let module_loader_factory = StandaloneModuleLoaderFactory {
|
||||
shared: Arc::new(SharedModuleLoaderState {
|
||||
eszip,
|
||||
mapped_specifier_resolver: MappedSpecifierResolver::new(
|
||||
maybe_import_map.clone(),
|
||||
package_json_deps_provider.clone(),
|
||||
),
|
||||
npm_module_loader: Arc::new(NpmModuleLoader::new(
|
||||
cjs_resolutions,
|
||||
node_code_translator,
|
||||
fs.clone(),
|
||||
node_resolver.clone(),
|
||||
)),
|
||||
}),
|
||||
};
|
||||
|
||||
let permissions = {
|
||||
let mut permissions = metadata.permissions;
|
||||
// if running with an npm vfs, grant read access to it
|
||||
if let Some(vfs_root) = vfs_root {
|
||||
match &mut permissions.allow_read {
|
||||
Some(vec) if vec.is_empty() => {
|
||||
// do nothing, already granted
|
||||
}
|
||||
Some(vec) => {
|
||||
vec.push(vfs_root);
|
||||
}
|
||||
None => {
|
||||
permissions.allow_read = Some(vec![vfs_root]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
PermissionsContainer::new(Permissions::from_options(&permissions)?)
|
||||
};
|
||||
let worker_factory = CliMainWorkerFactory::new(
|
||||
StorageKeyResolver::empty(),
|
||||
npm_resolver.clone(),
|
||||
node_resolver,
|
||||
Box::new(StandaloneHasNodeSpecifierChecker),
|
||||
BlobStore::default(),
|
||||
Box::new(module_loader_factory),
|
||||
root_cert_store_provider,
|
||||
fs,
|
||||
None,
|
||||
None,
|
||||
CliMainWorkerOptions {
|
||||
argv: metadata.argv,
|
||||
log_level: WorkerLogLevel::Info,
|
||||
coverage_dir: None,
|
||||
enable_testing_features: false,
|
||||
has_node_modules_dir,
|
||||
inspect_brk: false,
|
||||
inspect_wait: false,
|
||||
is_inspecting: false,
|
||||
is_npm_main: main_module.scheme() == "npm",
|
||||
location: metadata.location,
|
||||
maybe_binary_npm_command_name: NpmPackageReqReference::from_specifier(
|
||||
main_module,
|
||||
)
|
||||
.ok()
|
||||
.map(|req_ref| npm_pkg_req_ref_to_binary_command(&req_ref)),
|
||||
origin_data_folder_path: None,
|
||||
seed: metadata.seed,
|
||||
unsafely_ignore_certificate_errors: metadata
|
||||
.unsafely_ignore_certificate_errors,
|
||||
unstable: metadata.unstable,
|
||||
},
|
||||
);
|
||||
|
||||
v8_set_flags(construct_v8_flags(&[], &metadata.v8_flags, vec![]));
|
||||
|
||||
let mut worker = worker_factory
|
||||
.create_main_worker(main_module.clone(), permissions)
|
||||
.await?;
|
||||
|
||||
let exit_code = worker.run().await?;
|
||||
std::process::exit(exit_code)
|
||||
}
|
1096
cli/standalone/virtual_fs.rs
Normal file
1096
cli/standalone/virtual_fs.rs
Normal file
File diff suppressed because it is too large
Load diff
|
@ -3,6 +3,7 @@
|
|||
use deno_core::url::Url;
|
||||
use test_util as util;
|
||||
use util::assert_contains;
|
||||
use util::assert_not_contains;
|
||||
use util::env_vars_for_npm_tests;
|
||||
use util::TestContext;
|
||||
|
||||
|
@ -42,6 +43,12 @@ itest!(fail {
|
|||
output: "bench/fail.out",
|
||||
});
|
||||
|
||||
itest!(bench_formatting {
|
||||
args: "bench bench/bench_formatting.ts",
|
||||
exit_code: 0,
|
||||
output: "bench/bench_formatting.out",
|
||||
});
|
||||
|
||||
itest!(collect {
|
||||
args: "bench --ignore=bench/collect/ignore bench/collect",
|
||||
exit_code: 0,
|
||||
|
@ -114,6 +121,11 @@ itest!(finally_timeout {
|
|||
output: "bench/finally_timeout.out",
|
||||
});
|
||||
|
||||
itest!(before_unload_prevent_default {
|
||||
args: "bench --quiet bench/before_unload_prevent_default.ts",
|
||||
output: "bench/before_unload_prevent_default.out",
|
||||
});
|
||||
|
||||
itest!(group_baseline {
|
||||
args: "bench bench/group_baseline.ts",
|
||||
exit_code: 0,
|
||||
|
@ -245,3 +257,18 @@ itest!(bench_no_lock {
|
|||
cwd: Some("lockfile/basic"),
|
||||
output: "lockfile/basic/bench.nolock.out",
|
||||
});
|
||||
|
||||
#[test]
|
||||
fn conditionally_loads_type_graph() {
|
||||
let context = TestContext::default();
|
||||
let output = context
|
||||
.new_command()
|
||||
.args("bench --reload -L debug run/type_directives_js_main.js")
|
||||
.run();
|
||||
output.assert_matches_text("[WILDCARD] - FileFetcher::fetch() - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]");
|
||||
let output = context
|
||||
.new_command()
|
||||
.args("bench --reload -L debug --no-check run/type_directives_js_main.js")
|
||||
.run();
|
||||
assert_not_contains!(output.combined_output(), "type_reference.d.ts");
|
||||
}
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use test_util::env_vars_for_npm_tests;
|
||||
use test_util::TestContext;
|
||||
use test_util::TestContextBuilder;
|
||||
|
||||
itest!(_036_import_map_fetch {
|
||||
|
@ -181,3 +182,12 @@ fn cache_put_overwrite() {
|
|||
output.assert_matches_text("res1\n");
|
||||
output.assert_exit_code(0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn loads_type_graph() {
|
||||
let output = TestContext::default()
|
||||
.new_command()
|
||||
.args("cache --reload -L debug run/type_directives_js_main.js")
|
||||
.run();
|
||||
output.assert_matches_text("[WILDCARD] - FileFetcher::fetch() - specifier: file:///[WILDCARD]/subdir/type_reference.d.ts[WILDCARD]");
|
||||
}
|
||||
|
|
|
@ -11,7 +11,6 @@ use std::process::Command;
|
|||
use std::sync::Arc;
|
||||
use test_util as util;
|
||||
use test_util::TempDir;
|
||||
use tokio::task::LocalSet;
|
||||
use util::TestContext;
|
||||
|
||||
itest_flaky!(cafile_url_imports {
|
||||
|
@ -219,113 +218,99 @@ fn cafile_bundle_remote_exports() {
|
|||
|
||||
#[tokio::test]
|
||||
async fn listen_tls_alpn() {
|
||||
// TLS streams require the presence of an ambient local task set to gracefully
|
||||
// close dropped connections in the background.
|
||||
LocalSet::new()
|
||||
.run_until(async {
|
||||
let mut child = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("run")
|
||||
.arg("--unstable")
|
||||
.arg("--quiet")
|
||||
.arg("--allow-net")
|
||||
.arg("--allow-read")
|
||||
.arg("./cert/listen_tls_alpn.ts")
|
||||
.arg("4504")
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.spawn()
|
||||
.unwrap();
|
||||
let stdout = child.stdout.as_mut().unwrap();
|
||||
let mut msg = [0; 5];
|
||||
let read = stdout.read(&mut msg).unwrap();
|
||||
assert_eq!(read, 5);
|
||||
assert_eq!(&msg, b"READY");
|
||||
let mut child = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("run")
|
||||
.arg("--unstable")
|
||||
.arg("--quiet")
|
||||
.arg("--allow-net")
|
||||
.arg("--allow-read")
|
||||
.arg("./cert/listen_tls_alpn.ts")
|
||||
.arg("4504")
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.spawn()
|
||||
.unwrap();
|
||||
let stdout = child.stdout.as_mut().unwrap();
|
||||
let mut msg = [0; 5];
|
||||
let read = stdout.read(&mut msg).unwrap();
|
||||
assert_eq!(read, 5);
|
||||
assert_eq!(&msg, b"READY");
|
||||
|
||||
let mut reader = &mut BufReader::new(Cursor::new(include_bytes!(
|
||||
"../testdata/tls/RootCA.crt"
|
||||
)));
|
||||
let certs = rustls_pemfile::certs(&mut reader).unwrap();
|
||||
let mut root_store = rustls::RootCertStore::empty();
|
||||
root_store.add_parsable_certificates(&certs);
|
||||
let mut cfg = rustls::ClientConfig::builder()
|
||||
.with_safe_defaults()
|
||||
.with_root_certificates(root_store)
|
||||
.with_no_client_auth();
|
||||
cfg.alpn_protocols.push(b"foobar".to_vec());
|
||||
let cfg = Arc::new(cfg);
|
||||
let mut reader = &mut BufReader::new(Cursor::new(include_bytes!(
|
||||
"../testdata/tls/RootCA.crt"
|
||||
)));
|
||||
let certs = rustls_pemfile::certs(&mut reader).unwrap();
|
||||
let mut root_store = rustls::RootCertStore::empty();
|
||||
root_store.add_parsable_certificates(&certs);
|
||||
let mut cfg = rustls::ClientConfig::builder()
|
||||
.with_safe_defaults()
|
||||
.with_root_certificates(root_store)
|
||||
.with_no_client_auth();
|
||||
cfg.alpn_protocols.push(b"foobar".to_vec());
|
||||
let cfg = Arc::new(cfg);
|
||||
|
||||
let hostname = rustls::ServerName::try_from("localhost").unwrap();
|
||||
let hostname = rustls::ServerName::try_from("localhost").unwrap();
|
||||
|
||||
let tcp_stream = tokio::net::TcpStream::connect("localhost:4504")
|
||||
.await
|
||||
.unwrap();
|
||||
let mut tls_stream =
|
||||
TlsStream::new_client_side(tcp_stream, cfg, hostname);
|
||||
let tcp_stream = tokio::net::TcpStream::connect("localhost:4504")
|
||||
.await
|
||||
.unwrap();
|
||||
let mut tls_stream = TlsStream::new_client_side(tcp_stream, cfg, hostname);
|
||||
|
||||
tls_stream.handshake().await.unwrap();
|
||||
tls_stream.handshake().await.unwrap();
|
||||
|
||||
let (_, rustls_connection) = tls_stream.get_ref();
|
||||
let alpn = rustls_connection.alpn_protocol().unwrap();
|
||||
assert_eq!(alpn, b"foobar");
|
||||
let (_, rustls_connection) = tls_stream.get_ref();
|
||||
let alpn = rustls_connection.alpn_protocol().unwrap();
|
||||
assert_eq!(alpn, b"foobar");
|
||||
|
||||
let status = child.wait().unwrap();
|
||||
assert!(status.success());
|
||||
})
|
||||
.await;
|
||||
let status = child.wait().unwrap();
|
||||
assert!(status.success());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn listen_tls_alpn_fail() {
|
||||
// TLS streams require the presence of an ambient local task set to gracefully
|
||||
// close dropped connections in the background.
|
||||
LocalSet::new()
|
||||
.run_until(async {
|
||||
let mut child = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("run")
|
||||
.arg("--unstable")
|
||||
.arg("--quiet")
|
||||
.arg("--allow-net")
|
||||
.arg("--allow-read")
|
||||
.arg("./cert/listen_tls_alpn_fail.ts")
|
||||
.arg("4505")
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.spawn()
|
||||
.unwrap();
|
||||
let stdout = child.stdout.as_mut().unwrap();
|
||||
let mut msg = [0; 5];
|
||||
let read = stdout.read(&mut msg).unwrap();
|
||||
assert_eq!(read, 5);
|
||||
assert_eq!(&msg, b"READY");
|
||||
let mut child = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("run")
|
||||
.arg("--unstable")
|
||||
.arg("--quiet")
|
||||
.arg("--allow-net")
|
||||
.arg("--allow-read")
|
||||
.arg("./cert/listen_tls_alpn_fail.ts")
|
||||
.arg("4505")
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.spawn()
|
||||
.unwrap();
|
||||
let stdout = child.stdout.as_mut().unwrap();
|
||||
let mut msg = [0; 5];
|
||||
let read = stdout.read(&mut msg).unwrap();
|
||||
assert_eq!(read, 5);
|
||||
assert_eq!(&msg, b"READY");
|
||||
|
||||
let mut reader = &mut BufReader::new(Cursor::new(include_bytes!(
|
||||
"../testdata/tls/RootCA.crt"
|
||||
)));
|
||||
let certs = rustls_pemfile::certs(&mut reader).unwrap();
|
||||
let mut root_store = rustls::RootCertStore::empty();
|
||||
root_store.add_parsable_certificates(&certs);
|
||||
let mut cfg = rustls::ClientConfig::builder()
|
||||
.with_safe_defaults()
|
||||
.with_root_certificates(root_store)
|
||||
.with_no_client_auth();
|
||||
cfg.alpn_protocols.push(b"boofar".to_vec());
|
||||
let cfg = Arc::new(cfg);
|
||||
let mut reader = &mut BufReader::new(Cursor::new(include_bytes!(
|
||||
"../testdata/tls/RootCA.crt"
|
||||
)));
|
||||
let certs = rustls_pemfile::certs(&mut reader).unwrap();
|
||||
let mut root_store = rustls::RootCertStore::empty();
|
||||
root_store.add_parsable_certificates(&certs);
|
||||
let mut cfg = rustls::ClientConfig::builder()
|
||||
.with_safe_defaults()
|
||||
.with_root_certificates(root_store)
|
||||
.with_no_client_auth();
|
||||
cfg.alpn_protocols.push(b"boofar".to_vec());
|
||||
let cfg = Arc::new(cfg);
|
||||
|
||||
let hostname = rustls::ServerName::try_from("localhost").unwrap();
|
||||
let hostname = rustls::ServerName::try_from("localhost").unwrap();
|
||||
|
||||
let tcp_stream = tokio::net::TcpStream::connect("localhost:4505")
|
||||
.await
|
||||
.unwrap();
|
||||
let mut tls_stream =
|
||||
TlsStream::new_client_side(tcp_stream, cfg, hostname);
|
||||
let tcp_stream = tokio::net::TcpStream::connect("localhost:4505")
|
||||
.await
|
||||
.unwrap();
|
||||
let mut tls_stream = TlsStream::new_client_side(tcp_stream, cfg, hostname);
|
||||
|
||||
tls_stream.handshake().await.unwrap_err();
|
||||
tls_stream.handshake().await.unwrap_err();
|
||||
|
||||
let (_, rustls_connection) = tls_stream.get_ref();
|
||||
assert!(rustls_connection.alpn_protocol().is_none());
|
||||
let (_, rustls_connection) = tls_stream.get_ref();
|
||||
assert!(rustls_connection.alpn_protocol().is_none());
|
||||
|
||||
let status = child.wait().unwrap();
|
||||
assert!(status.success());
|
||||
})
|
||||
.await;
|
||||
let status = child.wait().unwrap();
|
||||
assert!(status.success());
|
||||
}
|
||||
|
|
|
@ -84,6 +84,17 @@ itest!(check_no_error_truncation {
|
|||
exit_code: 1,
|
||||
});
|
||||
|
||||
itest!(check_broadcast_channel_stable {
|
||||
args: "check --quiet check/broadcast_channel.ts",
|
||||
output: "check/broadcast_channel.ts.error.out",
|
||||
exit_code: 1,
|
||||
});
|
||||
|
||||
itest!(check_broadcast_channel_unstable {
|
||||
args: "check --quiet --unstable check/broadcast_channel.ts",
|
||||
exit_code: 0,
|
||||
});
|
||||
|
||||
#[test]
|
||||
fn cache_switching_config_then_no_config() {
|
||||
let context = TestContext::default();
|
||||
|
|
|
@ -1,13 +1,17 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::fs::File;
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
use test_util as util;
|
||||
use test_util::TempDir;
|
||||
use util::assert_contains;
|
||||
use util::TestContextBuilder;
|
||||
|
||||
#[test]
|
||||
fn compile() {
|
||||
let dir = TempDir::new();
|
||||
fn compile_basic() {
|
||||
let context = TestContextBuilder::new().build();
|
||||
let dir = context.temp_dir();
|
||||
let exe = if cfg!(windows) {
|
||||
dir.path().join("welcome.exe")
|
||||
} else {
|
||||
|
@ -15,27 +19,45 @@ fn compile() {
|
|||
};
|
||||
// try this twice to ensure it works with the cache
|
||||
for _ in 0..2 {
|
||||
let output = util::deno_cmd_with_deno_dir(&dir)
|
||||
.current_dir(util::root_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--output")
|
||||
.arg(&exe)
|
||||
.arg("./test_util/std/examples/welcome.ts")
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.spawn()
|
||||
.unwrap()
|
||||
.wait_with_output()
|
||||
.unwrap();
|
||||
assert!(output.status.success());
|
||||
let output = Command::new(&exe)
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.spawn()
|
||||
.unwrap()
|
||||
.wait_with_output()
|
||||
.unwrap();
|
||||
assert!(output.status.success());
|
||||
assert_eq!(output.stdout, "Welcome to Deno!\n".as_bytes());
|
||||
let output = context
|
||||
.new_command()
|
||||
.args_vec([
|
||||
"compile",
|
||||
"--output",
|
||||
&exe.to_string_lossy(),
|
||||
"../../../test_util/std/examples/welcome.ts",
|
||||
])
|
||||
.run();
|
||||
output.assert_exit_code(0);
|
||||
output.skip_output_check();
|
||||
let output = context
|
||||
.new_command()
|
||||
.command_name(exe.to_string_lossy())
|
||||
.run();
|
||||
output.assert_matches_text("Welcome to Deno!\n");
|
||||
}
|
||||
|
||||
// now ensure this works when the deno_dir is readonly
|
||||
let readonly_dir = dir.path().join("readonly");
|
||||
make_dir_readonly(&readonly_dir);
|
||||
let readonly_sub_dir = readonly_dir.join("sub");
|
||||
|
||||
let output = context
|
||||
.new_command()
|
||||
// it should fail creating this, but still work
|
||||
.env("DENO_DIR", readonly_sub_dir.to_string_lossy())
|
||||
.command_name(exe.to_string_lossy())
|
||||
.run();
|
||||
output.assert_matches_text("Welcome to Deno!\n");
|
||||
}
|
||||
|
||||
fn make_dir_readonly(dir: &Path) {
|
||||
std::fs::create_dir_all(dir).unwrap();
|
||||
eprintln!("DIR: {}", dir.display());
|
||||
if cfg!(windows) {
|
||||
Command::new("attrib").arg("+r").arg(dir).output().unwrap();
|
||||
} else if cfg!(unix) {
|
||||
Command::new("chmod").arg("555").arg(dir).output().unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -50,7 +72,6 @@ fn standalone_args() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--output")
|
||||
.arg(&exe)
|
||||
.arg("./compile/args.ts")
|
||||
|
@ -87,7 +108,6 @@ fn standalone_error() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(&testdata_path)
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--output")
|
||||
.arg(&exe)
|
||||
.arg("./compile/standalone_error.ts")
|
||||
|
@ -111,13 +131,13 @@ fn standalone_error() {
|
|||
let stderr = util::strip_ansi_codes(&stderr).to_string();
|
||||
// On Windows, we cannot assert the file path (because '\').
|
||||
// Instead we just check for relevant output.
|
||||
assert!(stderr.contains("error: Uncaught Error: boom!"));
|
||||
assert!(stderr.contains("throw new Error(\"boom!\");"));
|
||||
assert!(stderr.contains("\n at boom (file://"));
|
||||
assert!(stderr.contains("standalone_error.ts:2:11"));
|
||||
assert!(stderr.contains("at foo (file://"));
|
||||
assert!(stderr.contains("standalone_error.ts:5:5"));
|
||||
assert!(stderr.contains("standalone_error.ts:7:1"));
|
||||
assert_contains!(stderr, "error: Uncaught Error: boom!");
|
||||
assert_contains!(stderr, "throw new Error(\"boom!\");");
|
||||
assert_contains!(stderr, "\n at boom (file://");
|
||||
assert_contains!(stderr, "standalone_error.ts:2:11");
|
||||
assert_contains!(stderr, "at foo (file://");
|
||||
assert_contains!(stderr, "standalone_error.ts:5:5");
|
||||
assert_contains!(stderr, "standalone_error.ts:7:1");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -132,7 +152,6 @@ fn standalone_error_module_with_imports() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(&testdata_path)
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--output")
|
||||
.arg(&exe)
|
||||
.arg("./compile/standalone_error_module_with_imports_1.ts")
|
||||
|
@ -156,10 +175,10 @@ fn standalone_error_module_with_imports() {
|
|||
let stderr = util::strip_ansi_codes(&stderr).to_string();
|
||||
// On Windows, we cannot assert the file path (because '\').
|
||||
// Instead we just check for relevant output.
|
||||
assert!(stderr.contains("error: Uncaught Error: boom!"));
|
||||
assert!(stderr.contains("throw new Error(\"boom!\");"));
|
||||
assert!(stderr.contains("\n at file://"));
|
||||
assert!(stderr.contains("standalone_error_module_with_imports_2.ts:2:7"));
|
||||
assert_contains!(stderr, "error: Uncaught Error: boom!");
|
||||
assert_contains!(stderr, "throw new Error(\"boom!\");");
|
||||
assert_contains!(stderr, "\n at file://");
|
||||
assert_contains!(stderr, "standalone_error_module_with_imports_2.ts:2:7");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -173,7 +192,6 @@ fn standalone_load_datauri() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--output")
|
||||
.arg(&exe)
|
||||
.arg("./compile/standalone_import_datauri.ts")
|
||||
|
@ -206,7 +224,6 @@ fn standalone_follow_redirects() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--output")
|
||||
.arg(&exe)
|
||||
.arg("./compile/standalone_follow_redirects.ts")
|
||||
|
@ -240,7 +257,6 @@ fn compile_with_file_exists_error() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--output")
|
||||
.arg(&output_path)
|
||||
.arg("./compile/args.ts")
|
||||
|
@ -259,7 +275,7 @@ fn compile_with_file_exists_error() {
|
|||
file_path.display(),
|
||||
);
|
||||
let stderr = String::from_utf8(output.stderr).unwrap();
|
||||
assert!(stderr.contains(&expected_stderr));
|
||||
assert_contains!(stderr, &expected_stderr);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -274,7 +290,6 @@ fn compile_with_directory_exists_error() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--output")
|
||||
.arg(&exe)
|
||||
.arg("./compile/args.ts")
|
||||
|
@ -293,7 +308,7 @@ fn compile_with_directory_exists_error() {
|
|||
exe.display()
|
||||
);
|
||||
let stderr = String::from_utf8(output.stderr).unwrap();
|
||||
assert!(stderr.contains(&expected_stderr));
|
||||
assert_contains!(stderr, &expected_stderr);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -308,7 +323,6 @@ fn compile_with_conflict_file_exists_error() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--output")
|
||||
.arg(&exe)
|
||||
.arg("./compile/args.ts")
|
||||
|
@ -322,13 +336,12 @@ fn compile_with_conflict_file_exists_error() {
|
|||
concat!(
|
||||
"Could not compile to file '{}' because the file already exists ",
|
||||
"and cannot be overwritten. Please delete the existing file or ",
|
||||
"use the `--output <file-path` flag to provide an alternative name."
|
||||
"use the `--output <file-path>` flag to provide an alternative name."
|
||||
),
|
||||
exe.display()
|
||||
);
|
||||
let stderr = String::from_utf8(output.stderr).unwrap();
|
||||
dbg!(&stderr);
|
||||
assert!(stderr.contains(&expected_stderr));
|
||||
assert_contains!(stderr, &expected_stderr);
|
||||
assert!(std::fs::read(&exe)
|
||||
.unwrap()
|
||||
.eq(b"SHOULD NOT BE OVERWRITTEN"));
|
||||
|
@ -345,7 +358,6 @@ fn compile_and_overwrite_file() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--output")
|
||||
.arg(&exe)
|
||||
.arg("./compile/args.ts")
|
||||
|
@ -360,7 +372,6 @@ fn compile_and_overwrite_file() {
|
|||
let recompile_output = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--output")
|
||||
.arg(&exe)
|
||||
.arg("./compile/args.ts")
|
||||
|
@ -383,7 +394,6 @@ fn standalone_runtime_flags() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--allow-read")
|
||||
.arg("--seed")
|
||||
.arg("1")
|
||||
|
@ -407,8 +417,10 @@ fn standalone_runtime_flags() {
|
|||
let stdout_str = String::from_utf8(output.stdout).unwrap();
|
||||
assert_eq!(util::strip_ansi_codes(&stdout_str), "0.147205063401058\n");
|
||||
let stderr_str = String::from_utf8(output.stderr).unwrap();
|
||||
assert!(util::strip_ansi_codes(&stderr_str)
|
||||
.contains("PermissionDenied: Requires write access"));
|
||||
assert_contains!(
|
||||
util::strip_ansi_codes(&stderr_str),
|
||||
"PermissionDenied: Requires write access"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -422,7 +434,6 @@ fn standalone_ext_flag_ts() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--ext")
|
||||
.arg("ts")
|
||||
.arg("--output")
|
||||
|
@ -460,7 +471,6 @@ fn standalone_ext_flag_js() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--ext")
|
||||
.arg("js")
|
||||
.arg("--output")
|
||||
|
@ -498,7 +508,6 @@ fn standalone_import_map() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--allow-read")
|
||||
.arg("--import-map")
|
||||
.arg("compile/standalone_import_map.json")
|
||||
|
@ -532,7 +541,6 @@ fn standalone_import_map_config_file() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--allow-read")
|
||||
.arg("--config")
|
||||
.arg("compile/standalone_import_map_config.json")
|
||||
|
@ -567,7 +575,6 @@ fn skip_rebundle() {
|
|||
let output = util::deno_cmd()
|
||||
.current_dir(util::testdata_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--output")
|
||||
.arg(&exe)
|
||||
.arg("./run/001_hello.js")
|
||||
|
@ -604,7 +611,6 @@ fn check_local_by_default() {
|
|||
let status = util::deno_cmd()
|
||||
.current_dir(util::root_path())
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--output")
|
||||
.arg(&exe)
|
||||
.arg(util::testdata_path().join("./compile/check_local_by_default.ts"))
|
||||
|
@ -626,7 +632,6 @@ fn check_local_by_default2() {
|
|||
.current_dir(util::root_path())
|
||||
.env("NO_COLOR", "1")
|
||||
.arg("compile")
|
||||
.arg("--unstable")
|
||||
.arg("--output")
|
||||
.arg(&exe)
|
||||
.arg(util::testdata_path().join("./compile/check_local_by_default2.ts"))
|
||||
|
@ -636,9 +641,10 @@ fn check_local_by_default2() {
|
|||
let stdout = String::from_utf8(output.stdout).unwrap();
|
||||
let stderr = String::from_utf8(output.stderr).unwrap();
|
||||
assert!(stdout.is_empty());
|
||||
assert!(stderr.contains(
|
||||
assert_contains!(
|
||||
stderr,
|
||||
r#"error: TS2322 [ERROR]: Type '12' is not assignable to type '"b"'."#
|
||||
));
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -672,30 +678,40 @@ fn workers_basic() {
|
|||
|
||||
#[test]
|
||||
fn workers_not_in_module_map() {
|
||||
let _guard = util::http_server();
|
||||
let dir = TempDir::new();
|
||||
let context = TestContextBuilder::for_npm()
|
||||
.use_http_server()
|
||||
.use_temp_cwd()
|
||||
.build();
|
||||
let temp_dir = context.temp_dir();
|
||||
let exe = if cfg!(windows) {
|
||||
dir.path().join("not_in_module_map.exe")
|
||||
temp_dir.path().join("not_in_module_map.exe")
|
||||
} else {
|
||||
dir.path().join("not_in_module_map")
|
||||
temp_dir.path().join("not_in_module_map")
|
||||
};
|
||||
let output = util::deno_cmd()
|
||||
.current_dir(util::root_path())
|
||||
.arg("compile")
|
||||
.arg("--output")
|
||||
.arg(&exe)
|
||||
.arg(util::testdata_path().join("./compile/workers/not_in_module_map.ts"))
|
||||
.output()
|
||||
.unwrap();
|
||||
assert!(output.status.success());
|
||||
let main_path =
|
||||
util::testdata_path().join("./compile/workers/not_in_module_map.ts");
|
||||
let output = context
|
||||
.new_command()
|
||||
.args_vec([
|
||||
"compile",
|
||||
"--output",
|
||||
&exe.to_string_lossy(),
|
||||
&main_path.to_string_lossy(),
|
||||
])
|
||||
.run();
|
||||
output.assert_exit_code(0);
|
||||
output.skip_output_check();
|
||||
|
||||
let output = Command::new(&exe).env("NO_COLOR", "").output().unwrap();
|
||||
assert!(!output.status.success());
|
||||
let stderr = String::from_utf8(output.stderr).unwrap();
|
||||
assert!(stderr.starts_with(concat!(
|
||||
"error: Uncaught (in worker \"\") Module not found\n",
|
||||
"error: Uncaught (in promise) Error: Unhandled error in child worker.\n"
|
||||
)));
|
||||
let output = context
|
||||
.new_command()
|
||||
.command_name(exe.to_string_lossy())
|
||||
.env("NO_COLOR", "")
|
||||
.run();
|
||||
output.assert_exit_code(1);
|
||||
output.assert_matches_text(concat!(
|
||||
"error: Uncaught (in worker \"\") Module not found: [WILDCARD]",
|
||||
"error: Uncaught (in promise) Error: Unhandled error in child worker.\n[WILDCARD]"
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -787,3 +803,347 @@ fn dynamic_import_unanalyzable() {
|
|||
.unwrap();
|
||||
assert_eq!(String::from_utf8(output.stdout).unwrap(), expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compile_npm_specifiers() {
|
||||
let context = TestContextBuilder::for_npm()
|
||||
.use_sync_npm_download()
|
||||
.use_temp_cwd()
|
||||
.build();
|
||||
|
||||
let temp_dir = context.temp_dir();
|
||||
temp_dir.write(
|
||||
"main.ts",
|
||||
concat!(
|
||||
"import path from 'node:path';\n",
|
||||
"import { getValue, setValue } from 'npm:@denotest/esm-basic';\n",
|
||||
"import getValueDefault from 'npm:@denotest/esm-import-cjs-default';\n",
|
||||
"setValue(2);\n",
|
||||
"console.log(path.join('testing', 'this'));",
|
||||
"console.log(getValue());",
|
||||
"console.log(getValueDefault());",
|
||||
),
|
||||
);
|
||||
|
||||
let binary_path = if cfg!(windows) {
|
||||
temp_dir.path().join("binary.exe")
|
||||
} else {
|
||||
temp_dir.path().join("binary")
|
||||
};
|
||||
|
||||
// try with and without --node-modules-dir
|
||||
let compile_commands = &[
|
||||
"compile --output binary main.ts",
|
||||
"compile --node-modules-dir --output binary main.ts",
|
||||
];
|
||||
|
||||
for compile_command in compile_commands {
|
||||
let output = context.new_command().args(compile_command).run();
|
||||
output.assert_exit_code(0);
|
||||
output.skip_output_check();
|
||||
|
||||
let output = context
|
||||
.new_command()
|
||||
.command_name(binary_path.to_string_lossy())
|
||||
.run();
|
||||
output.assert_matches_text(
|
||||
r#"Node esm importing node cjs
|
||||
===========================
|
||||
{
|
||||
default: [Function (anonymous)],
|
||||
named: [Function (anonymous)],
|
||||
MyClass: [class MyClass]
|
||||
}
|
||||
{ default: [Function (anonymous)], named: [Function (anonymous)] }
|
||||
[Module: null prototype] {
|
||||
MyClass: [class MyClass],
|
||||
__esModule: true,
|
||||
default: {
|
||||
default: [Function (anonymous)],
|
||||
named: [Function (anonymous)],
|
||||
MyClass: [class MyClass]
|
||||
},
|
||||
named: [Function (anonymous)]
|
||||
}
|
||||
[Module: null prototype] {
|
||||
__esModule: true,
|
||||
default: { default: [Function (anonymous)], named: [Function (anonymous)] },
|
||||
named: [Function (anonymous)]
|
||||
}
|
||||
===========================
|
||||
static method
|
||||
testing[WILDCARD]this
|
||||
2
|
||||
5
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
// try with a package.json
|
||||
temp_dir.remove_dir_all("node_modules");
|
||||
temp_dir.write(
|
||||
"main.ts",
|
||||
concat!(
|
||||
"import { getValue, setValue } from '@denotest/esm-basic';\n",
|
||||
"setValue(2);\n",
|
||||
"console.log(getValue());",
|
||||
),
|
||||
);
|
||||
temp_dir.write(
|
||||
"package.json",
|
||||
r#"{ "dependencies": { "@denotest/esm-basic": "1" } }"#,
|
||||
);
|
||||
|
||||
let output = context
|
||||
.new_command()
|
||||
.args("compile --output binary main.ts")
|
||||
.run();
|
||||
output.assert_exit_code(0);
|
||||
output.skip_output_check();
|
||||
|
||||
let output = context
|
||||
.new_command()
|
||||
.command_name(binary_path.to_string_lossy())
|
||||
.run();
|
||||
output.assert_matches_text("2\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compile_npm_file_system() {
|
||||
run_npm_bin_compile_test(RunNpmBinCompileOptions {
|
||||
input_specifier: "compile/npm_fs/main.ts",
|
||||
compile_args: vec!["-A"],
|
||||
run_args: vec![],
|
||||
output_file: "compile/npm_fs/main.out",
|
||||
node_modules_dir: true,
|
||||
input_name: Some("binary"),
|
||||
expected_name: "binary",
|
||||
exit_code: 0,
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compile_npm_bin_esm() {
|
||||
run_npm_bin_compile_test(RunNpmBinCompileOptions {
|
||||
input_specifier: "npm:@denotest/bin/cli-esm",
|
||||
compile_args: vec![],
|
||||
run_args: vec!["this", "is", "a", "test"],
|
||||
output_file: "npm/deno_run_esm.out",
|
||||
node_modules_dir: false,
|
||||
input_name: None,
|
||||
expected_name: "cli-esm",
|
||||
exit_code: 0,
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compile_npm_bin_cjs() {
|
||||
run_npm_bin_compile_test(RunNpmBinCompileOptions {
|
||||
input_specifier: "npm:@denotest/bin/cli-cjs",
|
||||
compile_args: vec![],
|
||||
run_args: vec!["this", "is", "a", "test"],
|
||||
output_file: "npm/deno_run_cjs.out",
|
||||
node_modules_dir: false,
|
||||
input_name: None,
|
||||
expected_name: "cli-cjs",
|
||||
exit_code: 0,
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compile_npm_cowsay_main() {
|
||||
run_npm_bin_compile_test(RunNpmBinCompileOptions {
|
||||
input_specifier: "npm:cowsay@1.5.0",
|
||||
compile_args: vec!["--allow-read"],
|
||||
run_args: vec!["Hello"],
|
||||
output_file: "npm/deno_run_cowsay.out",
|
||||
node_modules_dir: false,
|
||||
input_name: None,
|
||||
expected_name: "cowsay",
|
||||
exit_code: 0,
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compile_npm_vfs_implicit_read_permissions() {
|
||||
run_npm_bin_compile_test(RunNpmBinCompileOptions {
|
||||
input_specifier: "compile/vfs_implicit_read_permission/main.ts",
|
||||
compile_args: vec![],
|
||||
run_args: vec![],
|
||||
output_file: "compile/vfs_implicit_read_permission/main.out",
|
||||
node_modules_dir: false,
|
||||
input_name: Some("binary"),
|
||||
expected_name: "binary",
|
||||
exit_code: 0,
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compile_npm_no_permissions() {
|
||||
run_npm_bin_compile_test(RunNpmBinCompileOptions {
|
||||
input_specifier: "npm:cowsay@1.5.0",
|
||||
compile_args: vec![],
|
||||
run_args: vec!["Hello"],
|
||||
output_file: "npm/deno_run_cowsay_no_permissions.out",
|
||||
node_modules_dir: false,
|
||||
input_name: None,
|
||||
expected_name: "cowsay",
|
||||
exit_code: 1,
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compile_npm_cowsay_explicit() {
|
||||
run_npm_bin_compile_test(RunNpmBinCompileOptions {
|
||||
input_specifier: "npm:cowsay@1.5.0/cowsay",
|
||||
compile_args: vec!["--allow-read"],
|
||||
run_args: vec!["Hello"],
|
||||
output_file: "npm/deno_run_cowsay.out",
|
||||
node_modules_dir: false,
|
||||
input_name: None,
|
||||
expected_name: "cowsay",
|
||||
exit_code: 0,
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compile_npm_cowthink() {
|
||||
run_npm_bin_compile_test(RunNpmBinCompileOptions {
|
||||
input_specifier: "npm:cowsay@1.5.0/cowthink",
|
||||
compile_args: vec!["--allow-read"],
|
||||
run_args: vec!["Hello"],
|
||||
output_file: "npm/deno_run_cowthink.out",
|
||||
node_modules_dir: false,
|
||||
input_name: None,
|
||||
expected_name: "cowthink",
|
||||
exit_code: 0,
|
||||
});
|
||||
}
|
||||
|
||||
struct RunNpmBinCompileOptions<'a> {
|
||||
input_specifier: &'a str,
|
||||
node_modules_dir: bool,
|
||||
output_file: &'a str,
|
||||
input_name: Option<&'a str>,
|
||||
expected_name: &'a str,
|
||||
run_args: Vec<&'a str>,
|
||||
compile_args: Vec<&'a str>,
|
||||
exit_code: i32,
|
||||
}
|
||||
|
||||
fn run_npm_bin_compile_test(opts: RunNpmBinCompileOptions) {
|
||||
let context = TestContextBuilder::for_npm()
|
||||
.use_sync_npm_download()
|
||||
.use_temp_cwd()
|
||||
.build();
|
||||
|
||||
let temp_dir = context.temp_dir();
|
||||
let testdata_path = context.testdata_path();
|
||||
let main_specifier = if opts.input_specifier.starts_with("npm:") {
|
||||
opts.input_specifier.to_string()
|
||||
} else {
|
||||
testdata_path
|
||||
.join(opts.input_specifier)
|
||||
.to_string_lossy()
|
||||
.to_string()
|
||||
};
|
||||
|
||||
let mut args = vec!["compile".to_string()];
|
||||
|
||||
args.extend(opts.compile_args.iter().map(|s| s.to_string()));
|
||||
|
||||
if opts.node_modules_dir {
|
||||
args.push("--node-modules-dir".to_string());
|
||||
}
|
||||
|
||||
if let Some(bin_name) = opts.input_name {
|
||||
args.push("--output".to_string());
|
||||
args.push(bin_name.to_string());
|
||||
}
|
||||
|
||||
args.push(main_specifier);
|
||||
|
||||
// compile
|
||||
let output = context.new_command().args_vec(args).run();
|
||||
output.assert_exit_code(0);
|
||||
output.skip_output_check();
|
||||
|
||||
// run
|
||||
let binary_path = if cfg!(windows) {
|
||||
temp_dir.path().join(format!("{}.exe", opts.expected_name))
|
||||
} else {
|
||||
temp_dir.path().join(opts.expected_name)
|
||||
};
|
||||
let output = context
|
||||
.new_command()
|
||||
.command_name(binary_path.to_string_lossy())
|
||||
.args_vec(opts.run_args)
|
||||
.run();
|
||||
output.assert_matches_file(opts.output_file);
|
||||
output.assert_exit_code(opts.exit_code);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compile_node_modules_symlink_outside() {
|
||||
let context = TestContextBuilder::for_npm()
|
||||
.use_sync_npm_download()
|
||||
.use_copy_temp_dir("compile/node_modules_symlink_outside")
|
||||
.cwd("compile/node_modules_symlink_outside")
|
||||
.build();
|
||||
|
||||
let temp_dir = context.temp_dir();
|
||||
let project_dir = temp_dir
|
||||
.path()
|
||||
.join("compile")
|
||||
.join("node_modules_symlink_outside");
|
||||
temp_dir.create_dir_all(project_dir.join("node_modules"));
|
||||
temp_dir.create_dir_all(project_dir.join("some_folder"));
|
||||
temp_dir.write(project_dir.join("test.txt"), "5");
|
||||
|
||||
// create a symlink in the node_modules directory that points to a folder in the cwd
|
||||
temp_dir.symlink_dir(
|
||||
project_dir.join("some_folder"),
|
||||
project_dir.join("node_modules").join("some_folder"),
|
||||
);
|
||||
// compile folder
|
||||
let output = context
|
||||
.new_command()
|
||||
.args("compile --allow-read --node-modules-dir --output bin main.ts")
|
||||
.run();
|
||||
output.assert_exit_code(0);
|
||||
output.assert_matches_file(
|
||||
"compile/node_modules_symlink_outside/main_compile_folder.out",
|
||||
);
|
||||
assert!(project_dir.join("node_modules/some_folder").exists());
|
||||
|
||||
// Cleanup and remove the folder. The folder test is done separately from
|
||||
// the file symlink test because different systems would traverse
|
||||
// the directory items in different order.
|
||||
temp_dir.remove_dir_all(project_dir.join("node_modules/some_folder"));
|
||||
|
||||
// create a symlink in the node_modules directory that points to a file in the cwd
|
||||
temp_dir.symlink_file(
|
||||
project_dir.join("test.txt"),
|
||||
project_dir.join("node_modules").join("test.txt"),
|
||||
);
|
||||
assert!(project_dir.join("node_modules/test.txt").exists());
|
||||
|
||||
// compile
|
||||
let output = context
|
||||
.new_command()
|
||||
.args("compile --allow-read --node-modules-dir --output bin main.ts")
|
||||
.run();
|
||||
output.assert_exit_code(0);
|
||||
output.assert_matches_file(
|
||||
"compile/node_modules_symlink_outside/main_compile_file.out",
|
||||
);
|
||||
|
||||
// run
|
||||
let binary_path =
|
||||
project_dir.join(if cfg!(windows) { "bin.exe" } else { "bin" });
|
||||
let output = context
|
||||
.new_command()
|
||||
.command_name(binary_path.to_string_lossy())
|
||||
.run();
|
||||
output.assert_matches_file("compile/node_modules_symlink_outside/main.out");
|
||||
}
|
||||
|
|
|
@ -26,6 +26,13 @@ fn no_snaps() {
|
|||
no_snaps_included("no_snaps_included", "ts");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_tests() {
|
||||
no_tests_included("foo", "mts");
|
||||
no_tests_included("foo", "ts");
|
||||
no_tests_included("foo", "js");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_if_invalid_cache() {
|
||||
let context = TestContextBuilder::new().use_temp_cwd().build();
|
||||
|
@ -277,6 +284,53 @@ fn no_snaps_included(test_name: &str, extension: &str) {
|
|||
output.assert_exit_code(0);
|
||||
}
|
||||
|
||||
fn no_tests_included(test_name: &str, extension: &str) {
|
||||
let context = TestContext::default();
|
||||
let tempdir = context.deno_dir();
|
||||
let tempdir = tempdir.path().join("cov");
|
||||
|
||||
let output = context
|
||||
.new_command()
|
||||
.args_vec(vec![
|
||||
"test".to_string(),
|
||||
"--quiet".to_string(),
|
||||
"--allow-read".to_string(),
|
||||
format!("--coverage={}", tempdir.to_str().unwrap()),
|
||||
format!("coverage/no_tests_included/{test_name}.test.{extension}"),
|
||||
])
|
||||
.run();
|
||||
|
||||
output.assert_exit_code(0);
|
||||
output.skip_output_check();
|
||||
|
||||
let output = context
|
||||
.new_command()
|
||||
.args_vec(vec![
|
||||
"coverage".to_string(),
|
||||
format!("{}/", tempdir.to_str().unwrap()),
|
||||
])
|
||||
.split_output()
|
||||
.run();
|
||||
|
||||
// Verify there's no "Check" being printed
|
||||
assert!(output.stderr().is_empty());
|
||||
|
||||
let actual = util::strip_ansi_codes(output.stdout()).to_string();
|
||||
|
||||
let expected = fs::read_to_string(
|
||||
util::testdata_path().join("coverage/no_tests_included/expected.out"),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
if !util::wildcard_match(&expected, &actual) {
|
||||
println!("OUTPUT\n{actual}\nOUTPUT");
|
||||
println!("EXPECTED\n{expected}\nEXPECTED");
|
||||
panic!("pattern match failed");
|
||||
}
|
||||
|
||||
output.assert_exit_code(0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_npm_cache_coverage() {
|
||||
let context = TestContext::default();
|
||||
|
|
|
@ -2,7 +2,9 @@
|
|||
|
||||
use test_util as util;
|
||||
use test_util::TempDir;
|
||||
use util::assert_contains;
|
||||
use util::TestContext;
|
||||
use util::TestContextBuilder;
|
||||
|
||||
#[test]
|
||||
fn fmt_test() {
|
||||
|
@ -229,6 +231,12 @@ itest!(fmt_with_config {
|
|||
output: "fmt/fmt_with_config.out",
|
||||
});
|
||||
|
||||
itest!(fmt_with_deprecated_config {
|
||||
args:
|
||||
"fmt --config fmt/with_config/deno.deprecated.jsonc fmt/with_config/subdir",
|
||||
output: "fmt/fmt_with_deprecated_config.out",
|
||||
});
|
||||
|
||||
itest!(fmt_with_config_default {
|
||||
args: "fmt fmt/with_config/subdir",
|
||||
output: "fmt/fmt_with_config.out",
|
||||
|
@ -251,3 +259,93 @@ itest!(fmt_with_malformed_config2 {
|
|||
output: "fmt/fmt_with_malformed_config2.out",
|
||||
exit_code: 1,
|
||||
});
|
||||
|
||||
#[test]
|
||||
fn fmt_with_glob_config() {
|
||||
let context = TestContextBuilder::new().cwd("fmt").build();
|
||||
|
||||
let cmd_output = context
|
||||
.new_command()
|
||||
.args("fmt --check --config deno.glob.json")
|
||||
.run();
|
||||
|
||||
cmd_output.assert_exit_code(1);
|
||||
|
||||
let output = cmd_output.combined_output();
|
||||
if cfg!(windows) {
|
||||
assert_contains!(output, r#"glob\nested\fizz\fizz.ts"#);
|
||||
assert_contains!(output, r#"glob\pages\[id].ts"#);
|
||||
assert_contains!(output, r#"glob\nested\fizz\bar.ts"#);
|
||||
assert_contains!(output, r#"glob\nested\foo\foo.ts"#);
|
||||
assert_contains!(output, r#"glob\data\test1.js"#);
|
||||
assert_contains!(output, r#"glob\nested\foo\bar.ts"#);
|
||||
assert_contains!(output, r#"glob\nested\foo\fizz.ts"#);
|
||||
assert_contains!(output, r#"glob\nested\fizz\foo.ts"#);
|
||||
assert_contains!(output, r#"glob\data\test1.ts"#);
|
||||
} else {
|
||||
assert_contains!(output, "glob/nested/fizz/fizz.ts");
|
||||
assert_contains!(output, "glob/pages/[id].ts");
|
||||
assert_contains!(output, "glob/nested/fizz/bar.ts");
|
||||
assert_contains!(output, "glob/nested/foo/foo.ts");
|
||||
assert_contains!(output, "glob/data/test1.js");
|
||||
assert_contains!(output, "glob/nested/foo/bar.ts");
|
||||
assert_contains!(output, "glob/nested/foo/fizz.ts");
|
||||
assert_contains!(output, "glob/nested/fizz/foo.ts");
|
||||
assert_contains!(output, "glob/data/test1.ts");
|
||||
}
|
||||
|
||||
assert_contains!(output, "Found 9 not formatted files in 9 files");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fmt_with_glob_config_and_flags() {
|
||||
let context = TestContextBuilder::new().cwd("fmt").build();
|
||||
|
||||
let cmd_output = context
|
||||
.new_command()
|
||||
.args("fmt --check --config deno.glob.json --ignore=glob/nested/**/bar.ts")
|
||||
.run();
|
||||
|
||||
cmd_output.assert_exit_code(1);
|
||||
|
||||
let output = cmd_output.combined_output();
|
||||
if cfg!(windows) {
|
||||
assert_contains!(output, r#"glob\nested\fizz\fizz.ts"#);
|
||||
assert_contains!(output, r#"glob\pages\[id].ts"#);
|
||||
assert_contains!(output, r#"glob\nested\fizz\bazz.ts"#);
|
||||
assert_contains!(output, r#"glob\nested\foo\foo.ts"#);
|
||||
assert_contains!(output, r#"glob\data\test1.js"#);
|
||||
assert_contains!(output, r#"glob\nested\foo\bazz.ts"#);
|
||||
assert_contains!(output, r#"glob\nested\foo\fizz.ts"#);
|
||||
assert_contains!(output, r#"glob\nested\fizz\foo.ts"#);
|
||||
assert_contains!(output, r#"glob\data\test1.ts"#);
|
||||
} else {
|
||||
assert_contains!(output, "glob/nested/fizz/fizz.ts");
|
||||
assert_contains!(output, "glob/pages/[id].ts");
|
||||
assert_contains!(output, "glob/nested/fizz/bazz.ts");
|
||||
assert_contains!(output, "glob/nested/foo/foo.ts");
|
||||
assert_contains!(output, "glob/data/test1.js");
|
||||
assert_contains!(output, "glob/nested/foo/bazz.ts");
|
||||
assert_contains!(output, "glob/nested/foo/fizz.ts");
|
||||
assert_contains!(output, "glob/nested/fizz/foo.ts");
|
||||
assert_contains!(output, "glob/data/test1.ts");
|
||||
}
|
||||
assert_contains!(output, "Found 9 not formatted files in 9 files");
|
||||
let cmd_output = context
|
||||
.new_command()
|
||||
.args("fmt --check --config deno.glob.json glob/data/test1.?s")
|
||||
.run();
|
||||
|
||||
cmd_output.assert_exit_code(1);
|
||||
|
||||
let output = cmd_output.combined_output();
|
||||
if cfg!(windows) {
|
||||
assert_contains!(output, r#"glob\data\test1.js"#);
|
||||
assert_contains!(output, r#"glob\data\test1.ts"#);
|
||||
} else {
|
||||
assert_contains!(output, "glob/data/test1.js");
|
||||
assert_contains!(output, "glob/data/test1.ts");
|
||||
}
|
||||
|
||||
assert_contains!(output, "Found 2 not formatted files in 2 files");
|
||||
}
|
||||
|
|
|
@ -1,34 +1,72 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use deno_core::anyhow::anyhow;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::prelude::*;
|
||||
use deno_core::futures::stream::SplitSink;
|
||||
use deno_core::futures::stream::SplitStream;
|
||||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::json;
|
||||
use deno_core::url;
|
||||
use deno_runtime::deno_fetch::reqwest;
|
||||
use deno_runtime::deno_websocket::tokio_tungstenite;
|
||||
use deno_runtime::deno_websocket::tokio_tungstenite::tungstenite;
|
||||
use fastwebsockets::FragmentCollector;
|
||||
use fastwebsockets::Frame;
|
||||
use fastwebsockets::WebSocket;
|
||||
use hyper::upgrade::Upgraded;
|
||||
use hyper::Body;
|
||||
use hyper::Request;
|
||||
use hyper::Response;
|
||||
use std::io::BufRead;
|
||||
use test_util as util;
|
||||
use test_util::TempDir;
|
||||
use tokio::net::TcpStream;
|
||||
use url::Url;
|
||||
use util::assert_starts_with;
|
||||
use util::http_server;
|
||||
use util::DenoChild;
|
||||
|
||||
struct SpawnExecutor;
|
||||
|
||||
impl<Fut> hyper::rt::Executor<Fut> for SpawnExecutor
|
||||
where
|
||||
Fut: std::future::Future + Send + 'static,
|
||||
Fut::Output: Send + 'static,
|
||||
{
|
||||
fn execute(&self, fut: Fut) {
|
||||
deno_core::task::spawn(fut);
|
||||
}
|
||||
}
|
||||
|
||||
async fn connect_to_ws(uri: Url) -> (WebSocket<Upgraded>, Response<Body>) {
|
||||
let domain = &uri.host().unwrap().to_string();
|
||||
let port = &uri.port().unwrap_or(match uri.scheme() {
|
||||
"wss" | "https" => 443,
|
||||
_ => 80,
|
||||
});
|
||||
let addr = format!("{domain}:{port}");
|
||||
|
||||
let stream = TcpStream::connect(addr).await.unwrap();
|
||||
|
||||
let host = uri.host_str().unwrap();
|
||||
|
||||
let req = Request::builder()
|
||||
.method("GET")
|
||||
.uri(uri.path())
|
||||
.header("Host", host)
|
||||
.header(hyper::header::UPGRADE, "websocket")
|
||||
.header(hyper::header::CONNECTION, "Upgrade")
|
||||
.header(
|
||||
"Sec-WebSocket-Key",
|
||||
fastwebsockets::handshake::generate_key(),
|
||||
)
|
||||
.header("Sec-WebSocket-Version", "13")
|
||||
.body(hyper::Body::empty())
|
||||
.unwrap();
|
||||
|
||||
fastwebsockets::handshake::client(&SpawnExecutor, req, stream)
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
struct InspectorTester {
|
||||
socket_tx: SplitSink<
|
||||
tokio_tungstenite::WebSocketStream<
|
||||
tokio_tungstenite::MaybeTlsStream<TcpStream>,
|
||||
>,
|
||||
tungstenite::Message,
|
||||
>,
|
||||
socket_rx: SplitStream<
|
||||
tokio_tungstenite::WebSocketStream<
|
||||
tokio_tungstenite::MaybeTlsStream<TcpStream>,
|
||||
>,
|
||||
>,
|
||||
socket: FragmentCollector<Upgraded>,
|
||||
notification_filter: Box<dyn FnMut(&str) -> bool + 'static>,
|
||||
child: DenoChild,
|
||||
stderr_lines: Box<dyn Iterator<Item = String>>,
|
||||
|
@ -52,17 +90,14 @@ impl InspectorTester {
|
|||
let mut stderr_lines =
|
||||
std::io::BufReader::new(stderr).lines().map(|r| r.unwrap());
|
||||
|
||||
let ws_url = extract_ws_url_from_stderr(&mut stderr_lines);
|
||||
let uri = extract_ws_url_from_stderr(&mut stderr_lines);
|
||||
|
||||
let (socket, response) = connect_to_ws(uri).await;
|
||||
|
||||
let (socket, response) =
|
||||
tokio_tungstenite::connect_async(ws_url).await.unwrap();
|
||||
assert_eq!(response.status(), 101); // Switching protocols.
|
||||
|
||||
let (socket_tx, socket_rx) = socket.split();
|
||||
|
||||
Self {
|
||||
socket_tx,
|
||||
socket_rx,
|
||||
socket: FragmentCollector::new(socket),
|
||||
notification_filter: Box::new(notification_filter),
|
||||
child,
|
||||
stderr_lines: Box::new(stderr_lines),
|
||||
|
@ -74,10 +109,10 @@ impl InspectorTester {
|
|||
// TODO(bartlomieju): add graceful error handling
|
||||
for msg in messages {
|
||||
let result = self
|
||||
.socket_tx
|
||||
.send(msg.to_string().into())
|
||||
.socket
|
||||
.write_frame(Frame::text(msg.to_string().into_bytes()))
|
||||
.await
|
||||
.map_err(|e| e.into());
|
||||
.map_err(|e| anyhow!(e));
|
||||
self.handle_error(result);
|
||||
}
|
||||
}
|
||||
|
@ -111,8 +146,9 @@ impl InspectorTester {
|
|||
|
||||
async fn recv(&mut self) -> String {
|
||||
loop {
|
||||
let result = self.socket_rx.next().await.unwrap().map_err(|e| e.into());
|
||||
let message = self.handle_error(result).to_string();
|
||||
let result = self.socket.read_frame().await.map_err(|e| anyhow!(e));
|
||||
let message =
|
||||
String::from_utf8(self.handle_error(result).payload).unwrap();
|
||||
if (self.notification_filter)(&message) {
|
||||
return message;
|
||||
}
|
||||
|
@ -182,15 +218,6 @@ impl InspectorTester {
|
|||
}
|
||||
}
|
||||
|
||||
macro_rules! assert_starts_with {
|
||||
($string:expr, $($test:expr),+) => {
|
||||
let string = $string; // This might be a function call or something
|
||||
if !($(string.starts_with($test))||+) {
|
||||
panic!("{:?} does not start with {:?}", string, [$($test),+]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn assert_stderr(
|
||||
stderr_lines: &mut impl std::iter::Iterator<Item = String>,
|
||||
expected_lines: &[&str],
|
||||
|
@ -236,7 +263,7 @@ fn skip_check_line(
|
|||
let mut line = stderr_lines.next().unwrap();
|
||||
line = util::strip_ansi_codes(&line).to_string();
|
||||
|
||||
if line.starts_with("Check") {
|
||||
if line.starts_with("Check") || line.starts_with("Download") {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -260,10 +287,7 @@ async fn inspector_connect() {
|
|||
std::io::BufReader::new(stderr).lines().map(|r| r.unwrap());
|
||||
let ws_url = extract_ws_url_from_stderr(&mut stderr_lines);
|
||||
|
||||
// We use tokio_tungstenite as a websocket client because warp (which is
|
||||
// a dependency of Deno) uses it.
|
||||
let (_socket, response) =
|
||||
tokio_tungstenite::connect_async(ws_url).await.unwrap();
|
||||
let (_socket, response) = connect_to_ws(ws_url).await;
|
||||
assert_eq!("101 Switching Protocols", response.status().to_string());
|
||||
child.kill().unwrap();
|
||||
child.wait().unwrap();
|
||||
|
@ -514,8 +538,11 @@ async fn inspector_does_not_hang() {
|
|||
}
|
||||
|
||||
// Check that we can gracefully close the websocket connection.
|
||||
tester.socket_tx.close().await.unwrap();
|
||||
tester.socket_rx.for_each(|_| async {}).await;
|
||||
tester
|
||||
.socket
|
||||
.write_frame(Frame::close_raw(vec![]))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(&tester.stdout_lines.next().unwrap(), "done");
|
||||
assert!(tester.child.wait().unwrap().success());
|
||||
|
|
|
@ -1,27 +1,113 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::io::BufRead;
|
||||
use std::io::BufReader;
|
||||
use std::process::Stdio;
|
||||
use std::time::Duration;
|
||||
use std::time::Instant;
|
||||
use test_util as util;
|
||||
|
||||
#[test]
|
||||
fn js_unit_tests_lint() {
|
||||
let status = util::deno_cmd()
|
||||
.arg("lint")
|
||||
.arg("--unstable")
|
||||
.arg(util::tests_path().join("unit"))
|
||||
.spawn()
|
||||
.unwrap()
|
||||
.wait()
|
||||
.unwrap();
|
||||
assert!(status.success());
|
||||
}
|
||||
util::unit_test_factory!(
|
||||
js_unit_test,
|
||||
"tests/unit",
|
||||
"*.ts",
|
||||
[
|
||||
abort_controller_test,
|
||||
blob_test,
|
||||
body_test,
|
||||
broadcast_channel_test,
|
||||
buffer_test,
|
||||
build_test,
|
||||
cache_api_test,
|
||||
chmod_test,
|
||||
chown_test,
|
||||
command_test,
|
||||
console_test,
|
||||
copy_file_test,
|
||||
custom_event_test,
|
||||
dir_test,
|
||||
dom_exception_test,
|
||||
error_stack_test,
|
||||
error_test,
|
||||
esnext_test,
|
||||
event_target_test,
|
||||
event_test,
|
||||
fetch_test,
|
||||
ffi_test,
|
||||
file_test,
|
||||
filereader_test,
|
||||
files_test,
|
||||
flock_test,
|
||||
fs_events_test,
|
||||
get_random_values_test,
|
||||
globals_test,
|
||||
headers_test,
|
||||
http_test,
|
||||
internals_test,
|
||||
intl_test,
|
||||
io_test,
|
||||
kv_test,
|
||||
link_test,
|
||||
make_temp_test,
|
||||
message_channel_test,
|
||||
metrics_test,
|
||||
mkdir_test,
|
||||
navigator_test,
|
||||
net_test,
|
||||
network_interfaces_test,
|
||||
opcall_test,
|
||||
os_test,
|
||||
path_from_url_test,
|
||||
performance_test,
|
||||
permissions_test,
|
||||
process_test,
|
||||
progressevent_test,
|
||||
promise_hooks_test,
|
||||
read_dir_test,
|
||||
read_file_test,
|
||||
read_link_test,
|
||||
read_text_file_test,
|
||||
real_path_test,
|
||||
ref_unref_test,
|
||||
remove_test,
|
||||
rename_test,
|
||||
request_test,
|
||||
resources_test,
|
||||
response_test,
|
||||
serve_test,
|
||||
signal_test,
|
||||
stat_test,
|
||||
stdio_test,
|
||||
structured_clone_test,
|
||||
symlink_test,
|
||||
sync_test,
|
||||
test_util,
|
||||
testing_test,
|
||||
text_encoding_test,
|
||||
timers_test,
|
||||
tls_test,
|
||||
truncate_test,
|
||||
tty_color_test,
|
||||
tty_test,
|
||||
umask_test,
|
||||
url_search_params_test,
|
||||
url_test,
|
||||
urlpattern_test,
|
||||
utime_test,
|
||||
version_test,
|
||||
wasm_test,
|
||||
webcrypto_test,
|
||||
websocket_test,
|
||||
webstorage_test,
|
||||
worker_permissions_test,
|
||||
worker_types,
|
||||
write_file_test,
|
||||
write_text_file_test,
|
||||
]
|
||||
);
|
||||
|
||||
#[test]
|
||||
fn js_unit_tests() {
|
||||
fn js_unit_test(test: String) {
|
||||
let _g = util::http_server();
|
||||
|
||||
// Note that the unit tests are not safe for concurrency and must be run with a concurrency limit
|
||||
// of one because there are some chdir tests in there.
|
||||
// TODO(caspervonb) split these tests into two groups: parallel and serial.
|
||||
let mut deno = util::deno_cmd()
|
||||
.current_dir(util::root_path())
|
||||
.arg("test")
|
||||
|
@ -29,11 +115,58 @@ fn js_unit_tests() {
|
|||
.arg("--location=http://js-unit-tests/foo/bar")
|
||||
.arg("--no-prompt")
|
||||
.arg("-A")
|
||||
.arg(util::tests_path().join("unit"))
|
||||
.arg(util::tests_path().join("unit").join(format!("{test}.ts")))
|
||||
.stderr(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.spawn()
|
||||
.expect("failed to spawn script");
|
||||
|
||||
let status = deno.wait().expect("failed to wait for the child process");
|
||||
let now = Instant::now();
|
||||
let stdout = deno.stdout.take().unwrap();
|
||||
let test_name = test.clone();
|
||||
let stdout = std::thread::spawn(move || {
|
||||
let reader = BufReader::new(stdout);
|
||||
for line in reader.lines() {
|
||||
if let Ok(line) = line {
|
||||
println!("[{test_name} {:0>6.2}] {line}", now.elapsed().as_secs_f32());
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let now = Instant::now();
|
||||
let stderr = deno.stderr.take().unwrap();
|
||||
let test_name = test.clone();
|
||||
let stderr = std::thread::spawn(move || {
|
||||
let reader = BufReader::new(stderr);
|
||||
for line in reader.lines() {
|
||||
if let Ok(line) = line {
|
||||
eprintln!("[{test_name} {:0>6.2}] {line}", now.elapsed().as_secs_f32());
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const PER_TEST_TIMEOUT: Duration = Duration::from_secs(2 * 60);
|
||||
|
||||
let now = Instant::now();
|
||||
let status = loop {
|
||||
if now.elapsed() > PER_TEST_TIMEOUT {
|
||||
// Last-ditch kill
|
||||
_ = deno.kill();
|
||||
panic!("Test {test} failed to complete in time");
|
||||
}
|
||||
if let Some(status) = deno
|
||||
.try_wait()
|
||||
.expect("failed to wait for the child process")
|
||||
{
|
||||
break status;
|
||||
}
|
||||
std::thread::sleep(Duration::from_millis(100));
|
||||
};
|
||||
|
||||
#[cfg(unix)]
|
||||
assert_eq!(
|
||||
std::os::unix::process::ExitStatusExt::signal(&status),
|
||||
|
@ -41,5 +174,9 @@ fn js_unit_tests() {
|
|||
"Deno should not have died with a signal"
|
||||
);
|
||||
assert_eq!(Some(0), status.code(), "Deno should have exited cleanly");
|
||||
|
||||
stdout.join().unwrap();
|
||||
stderr.join().unwrap();
|
||||
|
||||
assert!(status.success());
|
||||
}
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use test_util::assert_contains;
|
||||
use test_util::TestContextBuilder;
|
||||
|
||||
itest!(ignore_unexplicit_files {
|
||||
args: "lint --unstable --ignore=./",
|
||||
output_str: Some("error: No target files found.\n"),
|
||||
|
@ -114,3 +117,95 @@ itest!(lint_with_malformed_config2 {
|
|||
output: "lint/with_malformed_config2.out",
|
||||
exit_code: 1,
|
||||
});
|
||||
|
||||
#[test]
|
||||
fn lint_with_glob_config() {
|
||||
let context = TestContextBuilder::new().cwd("lint").build();
|
||||
|
||||
let cmd_output = context
|
||||
.new_command()
|
||||
.args("lint --config deno.glob.json")
|
||||
.run();
|
||||
|
||||
cmd_output.assert_exit_code(1);
|
||||
|
||||
let output = cmd_output.combined_output();
|
||||
if cfg!(windows) {
|
||||
assert_contains!(output, r#"glob\nested\fizz\fizz.ts:1:10"#);
|
||||
assert_contains!(output, r#"glob\pages\[id].ts:1:10"#);
|
||||
assert_contains!(output, r#"glob\nested\fizz\bar.ts:1:10"#);
|
||||
assert_contains!(output, r#"glob\nested\foo\foo.ts:1:10"#);
|
||||
assert_contains!(output, r#"glob\data\test1.js:1:10"#);
|
||||
assert_contains!(output, r#"glob\nested\foo\bar.ts:1:10"#);
|
||||
assert_contains!(output, r#"glob\nested\foo\fizz.ts:1:10"#);
|
||||
assert_contains!(output, r#"glob\nested\fizz\foo.ts:1:10"#);
|
||||
assert_contains!(output, r#"glob\data\test1.ts:1:10"#);
|
||||
} else {
|
||||
assert_contains!(output, "glob/nested/fizz/fizz.ts:1:10");
|
||||
assert_contains!(output, "glob/pages/[id].ts:1:10");
|
||||
assert_contains!(output, "glob/nested/fizz/bar.ts:1:10");
|
||||
assert_contains!(output, "glob/nested/foo/foo.ts:1:10");
|
||||
assert_contains!(output, "glob/data/test1.js:1:10");
|
||||
assert_contains!(output, "glob/nested/foo/bar.ts:1:10");
|
||||
assert_contains!(output, "glob/nested/foo/fizz.ts:1:10");
|
||||
assert_contains!(output, "glob/nested/fizz/foo.ts:1:10");
|
||||
assert_contains!(output, "glob/data/test1.ts:1:10");
|
||||
}
|
||||
assert_contains!(output, "Found 9 problems");
|
||||
assert_contains!(output, "Checked 9 files");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn lint_with_glob_config_and_flags() {
|
||||
let context = TestContextBuilder::new().cwd("lint").build();
|
||||
|
||||
let cmd_output = context
|
||||
.new_command()
|
||||
.args("lint --config deno.glob.json --ignore=glob/nested/**/bar.ts")
|
||||
.run();
|
||||
|
||||
cmd_output.assert_exit_code(1);
|
||||
|
||||
let output = cmd_output.combined_output();
|
||||
if cfg!(windows) {
|
||||
assert_contains!(output, r#"glob\nested\fizz\fizz.ts:1:10"#);
|
||||
assert_contains!(output, r#"glob\pages\[id].ts:1:10"#);
|
||||
assert_contains!(output, r#"glob\nested\fizz\bazz.ts:1:10"#);
|
||||
assert_contains!(output, r#"glob\nested\foo\foo.ts:1:10"#);
|
||||
assert_contains!(output, r#"glob\data\test1.js:1:10"#);
|
||||
assert_contains!(output, r#"glob\nested\foo\bazz.ts:1:10"#);
|
||||
assert_contains!(output, r#"glob\nested\foo\fizz.ts:1:10"#);
|
||||
assert_contains!(output, r#"glob\nested\fizz\foo.ts:1:10"#);
|
||||
assert_contains!(output, r#"glob\data\test1.ts:1:10"#);
|
||||
} else {
|
||||
assert_contains!(output, "glob/nested/fizz/fizz.ts:1:10");
|
||||
assert_contains!(output, "glob/pages/[id].ts:1:10");
|
||||
assert_contains!(output, "glob/nested/fizz/bazz.ts:1:10");
|
||||
assert_contains!(output, "glob/nested/foo/foo.ts:1:10");
|
||||
assert_contains!(output, "glob/data/test1.js:1:10");
|
||||
assert_contains!(output, "glob/nested/foo/bazz.ts:1:10");
|
||||
assert_contains!(output, "glob/nested/foo/fizz.ts:1:10");
|
||||
assert_contains!(output, "glob/nested/fizz/foo.ts:1:10");
|
||||
assert_contains!(output, "glob/data/test1.ts:1:10");
|
||||
}
|
||||
assert_contains!(output, "Found 9 problems");
|
||||
assert_contains!(output, "Checked 9 files");
|
||||
|
||||
let cmd_output = context
|
||||
.new_command()
|
||||
.args("lint --config deno.glob.json glob/data/test1.?s")
|
||||
.run();
|
||||
|
||||
cmd_output.assert_exit_code(1);
|
||||
|
||||
let output = cmd_output.combined_output();
|
||||
if cfg!(windows) {
|
||||
assert_contains!(output, r#"glob\data\test1.js:1:10"#);
|
||||
assert_contains!(output, r#"glob\data\test1.ts:1:10"#);
|
||||
} else {
|
||||
assert_contains!(output, "glob/data/test1.js:1:10");
|
||||
assert_contains!(output, "glob/data/test1.ts:1:10");
|
||||
}
|
||||
assert_contains!(output, "Found 2 problems");
|
||||
assert_contains!(output, "Checked 2 files");
|
||||
}
|
||||
|
|
|
@ -9,8 +9,10 @@ use deno_core::url::Url;
|
|||
use pretty_assertions::assert_eq;
|
||||
use std::fs;
|
||||
use std::process::Stdio;
|
||||
use test_util::assert_starts_with;
|
||||
use test_util::deno_cmd_with_deno_dir;
|
||||
use test_util::env_vars_for_npm_tests;
|
||||
use test_util::lsp::LspClient;
|
||||
use test_util::testdata_path;
|
||||
use test_util::TestContextBuilder;
|
||||
use tower_lsp::lsp_types as lsp;
|
||||
|
@ -51,7 +53,7 @@ fn lsp_init_tsconfig() {
|
|||
}
|
||||
}));
|
||||
|
||||
assert_eq!(diagnostics.viewed().len(), 0);
|
||||
assert_eq!(diagnostics.all().len(), 0);
|
||||
|
||||
client.shutdown();
|
||||
}
|
||||
|
@ -91,7 +93,7 @@ fn lsp_tsconfig_types() {
|
|||
}
|
||||
}));
|
||||
|
||||
assert_eq!(diagnostics.viewed().len(), 0);
|
||||
assert_eq!(diagnostics.all().len(), 0);
|
||||
|
||||
client.shutdown();
|
||||
}
|
||||
|
@ -119,7 +121,7 @@ fn lsp_tsconfig_bad_config_path() {
|
|||
"text": "console.log(Deno.args);\n"
|
||||
}
|
||||
}));
|
||||
assert_eq!(diagnostics.viewed().len(), 0);
|
||||
assert_eq!(diagnostics.all().len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -140,7 +142,7 @@ fn lsp_triple_slash_types() {
|
|||
}
|
||||
}));
|
||||
|
||||
assert_eq!(diagnostics.viewed().len(), 0);
|
||||
assert_eq!(diagnostics.all().len(), 0);
|
||||
|
||||
client.shutdown();
|
||||
}
|
||||
|
@ -174,7 +176,7 @@ fn lsp_import_map() {
|
|||
}
|
||||
}));
|
||||
|
||||
assert_eq!(diagnostics.viewed().len(), 0);
|
||||
assert_eq!(diagnostics.all().len(), 0);
|
||||
|
||||
let res = client.write_request(
|
||||
"textDocument/hover",
|
||||
|
@ -221,7 +223,7 @@ fn lsp_import_map_data_url() {
|
|||
}));
|
||||
|
||||
// This indicates that the import map is applied correctly.
|
||||
assert!(diagnostics.viewed().iter().any(|diagnostic| diagnostic.code
|
||||
assert!(diagnostics.all().iter().any(|diagnostic| diagnostic.code
|
||||
== Some(lsp::NumberOrString::String("no-cache".to_string()))
|
||||
&& diagnostic
|
||||
.message
|
||||
|
@ -266,7 +268,7 @@ fn lsp_import_map_config_file() {
|
|||
}
|
||||
}));
|
||||
|
||||
assert_eq!(diagnostics.viewed().len(), 0);
|
||||
assert_eq!(diagnostics.all().len(), 0);
|
||||
|
||||
let res = client.write_request(
|
||||
"textDocument/hover",
|
||||
|
@ -327,7 +329,7 @@ fn lsp_import_map_embedded_in_config_file() {
|
|||
}
|
||||
}));
|
||||
|
||||
assert_eq!(diagnostics.viewed().len(), 0);
|
||||
assert_eq!(diagnostics.all().len(), 0);
|
||||
|
||||
let res = client.write_request(
|
||||
"textDocument/hover",
|
||||
|
@ -429,7 +431,7 @@ fn lsp_import_assertions() {
|
|||
assert_eq!(
|
||||
json!(
|
||||
diagnostics
|
||||
.with_file_and_source("file:///a/a.ts", "deno")
|
||||
.messages_with_file_and_source("file:///a/a.ts", "deno")
|
||||
.diagnostics
|
||||
),
|
||||
json!([
|
||||
|
@ -3690,7 +3692,7 @@ fn lsp_code_actions_deno_cache() {
|
|||
}
|
||||
}));
|
||||
assert_eq!(
|
||||
diagnostics.with_source("deno"),
|
||||
diagnostics.messages_with_source("deno"),
|
||||
serde_json::from_value(json!({
|
||||
"uri": "file:///a/file.ts",
|
||||
"diagnostics": [{
|
||||
|
@ -3780,7 +3782,7 @@ fn lsp_code_actions_deno_cache_npm() {
|
|||
}
|
||||
}));
|
||||
assert_eq!(
|
||||
diagnostics.with_source("deno"),
|
||||
diagnostics.messages_with_source("deno"),
|
||||
serde_json::from_value(json!({
|
||||
"uri": "file:///a/file.ts",
|
||||
"diagnostics": [{
|
||||
|
@ -4713,7 +4715,7 @@ fn lsp_completions_auto_import() {
|
|||
"source": "./b.ts",
|
||||
"data": {
|
||||
"exportName": "foo",
|
||||
"exportMapKey": "foo|6845|file:///a/b",
|
||||
"exportMapKey": "foo|6812|file:///a/b",
|
||||
"moduleSpecifier": "./b.ts",
|
||||
"fileName": "file:///a/b.ts"
|
||||
},
|
||||
|
@ -5137,7 +5139,7 @@ fn lsp_completions_node_specifier() {
|
|||
}));
|
||||
|
||||
let non_existent_diagnostics = diagnostics
|
||||
.with_file_and_source("file:///a/file.ts", "deno")
|
||||
.messages_with_file_and_source("file:///a/file.ts", "deno")
|
||||
.diagnostics
|
||||
.into_iter()
|
||||
.filter(|d| {
|
||||
|
@ -5181,7 +5183,7 @@ fn lsp_completions_node_specifier() {
|
|||
);
|
||||
let diagnostics = client.read_diagnostics();
|
||||
let diagnostics = diagnostics
|
||||
.with_file_and_source("file:///a/file.ts", "deno")
|
||||
.messages_with_file_and_source("file:///a/file.ts", "deno")
|
||||
.diagnostics
|
||||
.into_iter()
|
||||
.filter(|d| {
|
||||
|
@ -5267,7 +5269,7 @@ fn lsp_completions_node_specifier() {
|
|||
|
||||
let diagnostics = client.read_diagnostics();
|
||||
let cache_diagnostics = diagnostics
|
||||
.with_file_and_source("file:///a/file.ts", "deno")
|
||||
.messages_with_file_and_source("file:///a/file.ts", "deno")
|
||||
.diagnostics
|
||||
.into_iter()
|
||||
.filter(|d| {
|
||||
|
@ -5537,7 +5539,7 @@ fn lsp_cache_location() {
|
|||
"text": "import * as a from \"http://127.0.0.1:4545/xTypeScriptTypes.js\";\n// @deno-types=\"http://127.0.0.1:4545/type_definitions/foo.d.ts\"\nimport * as b from \"http://127.0.0.1:4545/type_definitions/foo.js\";\nimport * as c from \"http://127.0.0.1:4545/subdir/type_reference.js\";\nimport * as d from \"http://127.0.0.1:4545/subdir/mod1.ts\";\nimport * as e from \"data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=\";\nimport * as f from \"./file_01.ts\";\nimport * as g from \"http://localhost:4545/x/a/mod.ts\";\n\nconsole.log(a, b, c, d, e, f, g);\n"
|
||||
}
|
||||
}));
|
||||
assert_eq!(diagnostics.viewed().len(), 7);
|
||||
assert_eq!(diagnostics.all().len(), 7);
|
||||
client.write_request(
|
||||
"deno/cache",
|
||||
json!({
|
||||
|
@ -5632,7 +5634,7 @@ fn lsp_tls_cert() {
|
|||
"text": "import * as a from \"https://localhost:5545/xTypeScriptTypes.js\";\n// @deno-types=\"https://localhost:5545/type_definitions/foo.d.ts\"\nimport * as b from \"https://localhost:5545/type_definitions/foo.js\";\nimport * as c from \"https://localhost:5545/subdir/type_reference.js\";\nimport * as d from \"https://localhost:5545/subdir/mod1.ts\";\nimport * as e from \"data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=\";\nimport * as f from \"./file_01.ts\";\nimport * as g from \"http://localhost:4545/x/a/mod.ts\";\n\nconsole.log(a, b, c, d, e, f, g);\n"
|
||||
}
|
||||
}));
|
||||
let diagnostics = diagnostics.viewed();
|
||||
let diagnostics = diagnostics.all();
|
||||
assert_eq!(diagnostics.len(), 7);
|
||||
client.write_request(
|
||||
"deno/cache",
|
||||
|
@ -5723,7 +5725,7 @@ fn lsp_diagnostics_warn_redirect() {
|
|||
);
|
||||
let diagnostics = client.read_diagnostics();
|
||||
assert_eq!(
|
||||
diagnostics.with_source("deno"),
|
||||
diagnostics.messages_with_source("deno"),
|
||||
lsp::PublishDiagnosticsParams {
|
||||
uri: Url::parse("file:///a/file.ts").unwrap(),
|
||||
diagnostics: vec![
|
||||
|
@ -5800,7 +5802,10 @@ fn lsp_redirect_quick_fix() {
|
|||
],
|
||||
}),
|
||||
);
|
||||
let diagnostics = client.read_diagnostics().with_source("deno").diagnostics;
|
||||
let diagnostics = client
|
||||
.read_diagnostics()
|
||||
.messages_with_source("deno")
|
||||
.diagnostics;
|
||||
let res = client.write_request(
|
||||
"textDocument/codeAction",
|
||||
json!(json!({
|
||||
|
@ -5870,35 +5875,25 @@ fn lsp_diagnostics_deprecated() {
|
|||
},
|
||||
}));
|
||||
assert_eq!(
|
||||
json!(diagnostics.0),
|
||||
json!([
|
||||
{
|
||||
"uri": "file:///a/file.ts",
|
||||
"diagnostics": [],
|
||||
"version": 1
|
||||
}, {
|
||||
"uri": "file:///a/file.ts",
|
||||
"diagnostics": [],
|
||||
"version": 1
|
||||
}, {
|
||||
"uri": "file:///a/file.ts",
|
||||
"diagnostics": [
|
||||
{
|
||||
"range": {
|
||||
"start": { "line": 3, "character": 0 },
|
||||
"end": { "line": 3, "character": 1 }
|
||||
},
|
||||
"severity": 4,
|
||||
"code": 6385,
|
||||
"source": "deno-ts",
|
||||
"message": "'a' is deprecated.",
|
||||
"relatedInformation": [],
|
||||
"tags": [2]
|
||||
}
|
||||
],
|
||||
"version": 1
|
||||
}
|
||||
])
|
||||
json!(diagnostics.all_messages()),
|
||||
json!([{
|
||||
"uri": "file:///a/file.ts",
|
||||
"diagnostics": [
|
||||
{
|
||||
"range": {
|
||||
"start": { "line": 3, "character": 0 },
|
||||
"end": { "line": 3, "character": 1 }
|
||||
},
|
||||
"severity": 4,
|
||||
"code": 6385,
|
||||
"source": "deno-ts",
|
||||
"message": "'a' is deprecated.",
|
||||
"relatedInformation": [],
|
||||
"tags": [2]
|
||||
}
|
||||
],
|
||||
"version": 1
|
||||
}])
|
||||
);
|
||||
client.shutdown();
|
||||
}
|
||||
|
@ -5927,7 +5922,7 @@ fn lsp_diagnostics_deno_types() {
|
|||
}
|
||||
}),
|
||||
);
|
||||
assert_eq!(diagnostics.viewed().len(), 5);
|
||||
assert_eq!(diagnostics.all().len(), 5);
|
||||
client.shutdown();
|
||||
}
|
||||
|
||||
|
@ -5961,7 +5956,8 @@ fn lsp_diagnostics_refresh_dependents() {
|
|||
}
|
||||
}));
|
||||
assert_eq!(
|
||||
json!(diagnostics.with_file_and_source("file:///a/file_02.ts", "deno-ts")),
|
||||
json!(diagnostics
|
||||
.messages_with_file_and_source("file:///a/file_02.ts", "deno-ts")),
|
||||
json!({
|
||||
"uri": "file:///a/file_02.ts",
|
||||
"diagnostics": [
|
||||
|
@ -6000,7 +5996,7 @@ fn lsp_diagnostics_refresh_dependents() {
|
|||
}),
|
||||
);
|
||||
let diagnostics = client.read_diagnostics();
|
||||
assert_eq!(diagnostics.viewed().len(), 0); // no diagnostics now
|
||||
assert_eq!(diagnostics.all().len(), 0); // no diagnostics now
|
||||
|
||||
client.shutdown();
|
||||
assert_eq!(client.queue_len(), 0);
|
||||
|
@ -7054,7 +7050,7 @@ fn lsp_lint_with_config() {
|
|||
"text": "// TODO: fixme\nexport async function non_camel_case() {\nconsole.log(\"finished!\")\n}"
|
||||
}
|
||||
}));
|
||||
let diagnostics = diagnostics.viewed();
|
||||
let diagnostics = diagnostics.all();
|
||||
assert_eq!(diagnostics.len(), 1);
|
||||
assert_eq!(
|
||||
diagnostics[0].code,
|
||||
|
@ -7099,7 +7095,7 @@ fn lsp_lint_exclude_with_config() {
|
|||
}
|
||||
}),
|
||||
);
|
||||
let diagnostics = diagnostics.viewed();
|
||||
let diagnostics = diagnostics.all();
|
||||
assert_eq!(diagnostics, Vec::new());
|
||||
client.shutdown();
|
||||
}
|
||||
|
@ -7418,6 +7414,49 @@ fn lsp_closed_file_find_references() {
|
|||
client.shutdown();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn lsp_closed_file_find_references_low_document_pre_load() {
|
||||
let context = TestContextBuilder::new().use_temp_cwd().build();
|
||||
let temp_dir = context.temp_dir();
|
||||
temp_dir.create_dir_all("sub_dir");
|
||||
temp_dir.write("./other_file.ts", "export const b = 5;");
|
||||
temp_dir.write("./sub_dir/mod.ts", "export const a = 5;");
|
||||
temp_dir.write(
|
||||
"./sub_dir/mod.test.ts",
|
||||
"import { a } from './mod.ts'; console.log(a);",
|
||||
);
|
||||
let temp_dir_url = temp_dir.uri();
|
||||
let mut client = context.new_lsp_command().build();
|
||||
client.initialize(|builder| {
|
||||
builder.set_preload_limit(1);
|
||||
});
|
||||
client.did_open(json!({
|
||||
"textDocument": {
|
||||
"uri": temp_dir_url.join("sub_dir/mod.ts").unwrap(),
|
||||
"languageId": "typescript",
|
||||
"version": 1,
|
||||
"text": r#"export const a = 5;"#
|
||||
}
|
||||
}));
|
||||
let res = client.write_request(
|
||||
"textDocument/references",
|
||||
json!({
|
||||
"textDocument": {
|
||||
"uri": temp_dir_url.join("sub_dir/mod.ts").unwrap(),
|
||||
},
|
||||
"position": { "line": 0, "character": 13 },
|
||||
"context": {
|
||||
"includeDeclaration": false
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
// won't have results because the document won't be pre-loaded
|
||||
assert_eq!(res, json!([]));
|
||||
|
||||
client.shutdown();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn lsp_data_urls_with_jsx_compiler_option() {
|
||||
let context = TestContextBuilder::new().use_temp_cwd().build();
|
||||
|
@ -7439,7 +7478,7 @@ fn lsp_data_urls_with_jsx_compiler_option() {
|
|||
"version": 1,
|
||||
"text": "import a from \"data:application/typescript,export default 5;\";\na;"
|
||||
}
|
||||
})).viewed();
|
||||
})).all();
|
||||
|
||||
// there will be a diagnostic about not having cached the data url
|
||||
assert_eq!(diagnostics.len(), 1);
|
||||
|
@ -7497,3 +7536,145 @@ fn lsp_data_urls_with_jsx_compiler_option() {
|
|||
|
||||
client.shutdown();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn lsp_node_modules_dir() {
|
||||
let context = TestContextBuilder::new()
|
||||
.use_http_server()
|
||||
.use_temp_cwd()
|
||||
.build();
|
||||
let temp_dir = context.temp_dir();
|
||||
|
||||
// having a package.json should have no effect on whether
|
||||
// a node_modules dir is created
|
||||
temp_dir.write("package.json", "{}");
|
||||
|
||||
let mut client = context.new_lsp_command().build();
|
||||
client.initialize_default();
|
||||
let file_uri = temp_dir.uri().join("file.ts").unwrap();
|
||||
client.did_open(json!({
|
||||
"textDocument": {
|
||||
"uri": file_uri,
|
||||
"languageId": "typescript",
|
||||
"version": 1,
|
||||
"text": "import chalk from 'npm:chalk';\nimport path from 'node:path';\n\nconsole.log(chalk.green(path.join('a', 'b')));",
|
||||
}
|
||||
}));
|
||||
let cache = |client: &mut LspClient| {
|
||||
client.write_request(
|
||||
"deno/cache",
|
||||
json!({
|
||||
"referrer": {
|
||||
"uri": file_uri,
|
||||
},
|
||||
"uris": [
|
||||
{
|
||||
"uri": "npm:chalk",
|
||||
},
|
||||
{
|
||||
"uri": "npm:@types/node",
|
||||
}
|
||||
]
|
||||
}),
|
||||
);
|
||||
};
|
||||
|
||||
cache(&mut client);
|
||||
|
||||
assert!(!temp_dir.path().join("node_modules").exists());
|
||||
|
||||
temp_dir.write(
|
||||
temp_dir.path().join("deno.json"),
|
||||
"{ \"nodeModulesDir\": true, \"lock\": false }\n",
|
||||
);
|
||||
let refresh_config = |client: &mut LspClient| {
|
||||
client.write_notification(
|
||||
"workspace/didChangeConfiguration",
|
||||
json!({
|
||||
"settings": {
|
||||
"enable": true,
|
||||
"config": "./deno.json",
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
let request = json!([{
|
||||
"enable": true,
|
||||
"config": "./deno.json",
|
||||
"codeLens": {
|
||||
"implementations": true,
|
||||
"references": true
|
||||
},
|
||||
"importMap": null,
|
||||
"lint": false,
|
||||
"suggest": {
|
||||
"autoImports": true,
|
||||
"completeFunctionCalls": false,
|
||||
"names": true,
|
||||
"paths": true,
|
||||
"imports": {}
|
||||
},
|
||||
"unstable": false
|
||||
}]);
|
||||
// one for the workspace
|
||||
client.handle_configuration_request(request.clone());
|
||||
// one for the specifier
|
||||
client.handle_configuration_request(request);
|
||||
};
|
||||
refresh_config(&mut client);
|
||||
|
||||
let diagnostics = client.read_diagnostics();
|
||||
assert_eq!(diagnostics.all().len(), 2, "{:#?}", diagnostics); // not cached
|
||||
|
||||
cache(&mut client);
|
||||
|
||||
assert!(temp_dir.path().join("node_modules/chalk").exists());
|
||||
assert!(temp_dir.path().join("node_modules/@types/node").exists());
|
||||
assert!(!temp_dir.path().join("deno.lock").exists());
|
||||
|
||||
// now add a lockfile and cache
|
||||
temp_dir.write(
|
||||
temp_dir.path().join("deno.json"),
|
||||
"{ \"nodeModulesDir\": true }\n",
|
||||
);
|
||||
refresh_config(&mut client);
|
||||
cache(&mut client);
|
||||
|
||||
let diagnostics = client.read_diagnostics();
|
||||
assert_eq!(diagnostics.all().len(), 0, "{:#?}", diagnostics);
|
||||
|
||||
assert!(temp_dir.path().join("deno.lock").exists());
|
||||
|
||||
// the declaration should be found in the node_modules directory
|
||||
let res = client.write_request(
|
||||
"textDocument/references",
|
||||
json!({
|
||||
"textDocument": {
|
||||
"uri": file_uri,
|
||||
},
|
||||
"position": { "line": 0, "character": 7 }, // chalk
|
||||
"context": {
|
||||
"includeDeclaration": false
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
// ensure that it's using the node_modules directory
|
||||
let references = res.as_array().unwrap();
|
||||
assert_eq!(references.len(), 2, "references: {:#?}", references);
|
||||
let uri = references[1]
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.get("uri")
|
||||
.unwrap()
|
||||
.as_str()
|
||||
.unwrap();
|
||||
// canonicalize for mac
|
||||
let path = temp_dir.path().join("node_modules").canonicalize().unwrap();
|
||||
assert_starts_with!(
|
||||
uri,
|
||||
ModuleSpecifier::from_file_path(&path).unwrap().as_str()
|
||||
);
|
||||
|
||||
client.shutdown();
|
||||
}
|
||||
|
|
|
@ -1,12 +1,84 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::io::BufRead;
|
||||
use std::io::BufReader;
|
||||
use std::process::Stdio;
|
||||
use std::time::Duration;
|
||||
use std::time::Instant;
|
||||
use test_util as util;
|
||||
use util::env_vars_for_npm_tests;
|
||||
|
||||
#[test]
|
||||
fn node_unit_tests() {
|
||||
util::unit_test_factory!(
|
||||
node_unit_test,
|
||||
"tests/unit_node",
|
||||
"**/*_test.ts",
|
||||
[
|
||||
_fs_access_test = _fs / _fs_access_test,
|
||||
_fs_appendFile_test = _fs / _fs_appendFile_test,
|
||||
_fs_chmod_test = _fs / _fs_chmod_test,
|
||||
_fs_chown_test = _fs / _fs_chown_test,
|
||||
_fs_close_test = _fs / _fs_close_test,
|
||||
_fs_copy_test = _fs / _fs_copy_test,
|
||||
_fs_dir_test = _fs / _fs_dir_test,
|
||||
_fs_exists_test = _fs / _fs_exists_test,
|
||||
_fs_fdatasync_test = _fs / _fs_fdatasync_test,
|
||||
_fs_fstat_test = _fs / _fs_fstat_test,
|
||||
_fs_fsync_test = _fs / _fs_fsync_test,
|
||||
_fs_ftruncate_test = _fs / _fs_ftruncate_test,
|
||||
_fs_futimes_test = _fs / _fs_futimes_test,
|
||||
_fs_handle_test = _fs / _fs_handle_test,
|
||||
_fs_link_test = _fs / _fs_link_test,
|
||||
_fs_lstat_test = _fs / _fs_lstat_test,
|
||||
_fs_mkdir_test = _fs / _fs_mkdir_test,
|
||||
_fs_mkdtemp_test = _fs / _fs_mkdtemp_test,
|
||||
_fs_opendir_test = _fs / _fs_opendir_test,
|
||||
_fs_readFile_test = _fs / _fs_readFile_test,
|
||||
_fs_readdir_test = _fs / _fs_readdir_test,
|
||||
_fs_readlink_test = _fs / _fs_readlink_test,
|
||||
_fs_realpath_test = _fs / _fs_realpath_test,
|
||||
_fs_rename_test = _fs / _fs_rename_test,
|
||||
_fs_rm_test = _fs / _fs_rm_test,
|
||||
_fs_rmdir_test = _fs / _fs_rmdir_test,
|
||||
_fs_stat_test = _fs / _fs_stat_test,
|
||||
_fs_symlink_test = _fs / _fs_symlink_test,
|
||||
_fs_truncate_test = _fs / _fs_truncate_test,
|
||||
_fs_unlink_test = _fs / _fs_unlink_test,
|
||||
_fs_utimes_test = _fs / _fs_utimes_test,
|
||||
_fs_watch_test = _fs / _fs_watch_test,
|
||||
_fs_write_test = _fs / _fs_write_test,
|
||||
async_hooks_test,
|
||||
child_process_test,
|
||||
crypto_cipher_test = crypto / crypto_cipher_test,
|
||||
crypto_hash_test = crypto / crypto_hash_test,
|
||||
crypto_key_test = crypto / crypto_key_test,
|
||||
crypto_sign_test = crypto / crypto_sign_test,
|
||||
fs_test,
|
||||
http_test,
|
||||
http2_test,
|
||||
_randomBytes_test = internal / _randomBytes_test,
|
||||
_randomFill_test = internal / _randomFill_test,
|
||||
_randomInt_test = internal / _randomInt_test,
|
||||
pbkdf2_test = internal / pbkdf2_test,
|
||||
scrypt_test = internal / scrypt_test,
|
||||
module_test,
|
||||
os_test,
|
||||
process_test,
|
||||
querystring_test,
|
||||
readline_test,
|
||||
string_decoder_test,
|
||||
timers_test,
|
||||
tls_test,
|
||||
tty_test,
|
||||
util_test,
|
||||
v8_test,
|
||||
worker_threads_test
|
||||
]
|
||||
);
|
||||
|
||||
fn node_unit_test(test: String) {
|
||||
let _g = util::http_server();
|
||||
|
||||
let mut deno = util::deno_cmd()
|
||||
let mut deno = util::deno_cmd();
|
||||
let mut deno = deno
|
||||
.current_dir(util::root_path())
|
||||
.arg("test")
|
||||
.arg("--unstable")
|
||||
|
@ -14,12 +86,95 @@ fn node_unit_tests() {
|
|||
// but this shouldn't be necessary. tls.connect currently doesn't
|
||||
// pass hostname option correctly and it causes cert errors.
|
||||
.arg("--unsafely-ignore-certificate-errors")
|
||||
.arg("-A")
|
||||
.arg(util::tests_path().join("unit_node"))
|
||||
.arg("-A");
|
||||
// Parallel tests for crypto
|
||||
if test.starts_with("crypto/") {
|
||||
deno = deno.arg("--parallel");
|
||||
}
|
||||
let mut deno = deno
|
||||
.arg(
|
||||
util::tests_path()
|
||||
.join("unit_node")
|
||||
.join(format!("{test}.ts")),
|
||||
)
|
||||
.stderr(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.spawn()
|
||||
.expect("failed to spawn script");
|
||||
|
||||
let status = deno.wait().expect("failed to wait for the child process");
|
||||
assert_eq!(Some(0), status.code());
|
||||
let now = Instant::now();
|
||||
let stdout = deno.stdout.take().unwrap();
|
||||
let test_name = test.clone();
|
||||
let stdout = std::thread::spawn(move || {
|
||||
let reader = BufReader::new(stdout);
|
||||
for line in reader.lines() {
|
||||
if let Ok(line) = line {
|
||||
println!("[{test_name} {:0>6.2}] {line}", now.elapsed().as_secs_f32());
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let now = Instant::now();
|
||||
let stderr = deno.stderr.take().unwrap();
|
||||
let test_name = test.clone();
|
||||
let stderr = std::thread::spawn(move || {
|
||||
let reader = BufReader::new(stderr);
|
||||
for line in reader.lines() {
|
||||
if let Ok(line) = line {
|
||||
eprintln!("[{test_name} {:0>6.2}] {line}", now.elapsed().as_secs_f32());
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const PER_TEST_TIMEOUT: Duration = Duration::from_secs(5 * 60);
|
||||
|
||||
let now = Instant::now();
|
||||
let status = loop {
|
||||
if now.elapsed() > PER_TEST_TIMEOUT {
|
||||
// Last-ditch kill
|
||||
_ = deno.kill();
|
||||
panic!("Test {test} failed to complete in time");
|
||||
}
|
||||
if let Some(status) = deno
|
||||
.try_wait()
|
||||
.expect("failed to wait for the child process")
|
||||
{
|
||||
break status;
|
||||
}
|
||||
std::thread::sleep(Duration::from_millis(100));
|
||||
};
|
||||
|
||||
#[cfg(unix)]
|
||||
assert_eq!(
|
||||
std::os::unix::process::ExitStatusExt::signal(&status),
|
||||
None,
|
||||
"Deno should not have died with a signal"
|
||||
);
|
||||
assert_eq!(Some(0), status.code(), "Deno should have exited cleanly");
|
||||
|
||||
stdout.join().unwrap();
|
||||
stderr.join().unwrap();
|
||||
|
||||
assert!(status.success());
|
||||
}
|
||||
|
||||
// Regression test for https://github.com/denoland/deno/issues/16928
|
||||
itest!(unhandled_rejection_web {
|
||||
args: "run -A node/unhandled_rejection_web.ts",
|
||||
output: "node/unhandled_rejection_web.ts.out",
|
||||
envs: env_vars_for_npm_tests(),
|
||||
http_server: true,
|
||||
});
|
||||
|
||||
// Ensure that Web `onunhandledrejection` is fired before
|
||||
// Node's `process.on('unhandledRejection')`.
|
||||
itest!(unhandled_rejection_web_process {
|
||||
args: "run -A node/unhandled_rejection_web_process.ts",
|
||||
output: "node/unhandled_rejection_web_process.ts.out",
|
||||
envs: env_vars_for_npm_tests(),
|
||||
http_server: true,
|
||||
});
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::json;
|
||||
use deno_core::serde_json::Value;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::process::Stdio;
|
||||
|
@ -156,6 +157,16 @@ itest!(mixed_case_package_name_local_dir {
|
|||
temp_cwd: true,
|
||||
});
|
||||
|
||||
itest!(local_dir_resolves_symlinks {
|
||||
args: "run -A index.js",
|
||||
output: "npm/local_dir_resolves_symlinks/index.out",
|
||||
exit_code: 0,
|
||||
envs: env_vars_for_npm_tests(),
|
||||
cwd: Some("npm/local_dir_resolves_symlinks/"),
|
||||
copy_temp_dir: Some("npm/local_dir_resolves_symlinks/"),
|
||||
http_server: true,
|
||||
});
|
||||
|
||||
// FIXME(bartlomieju): npm: specifiers are not handled in dynamic imports
|
||||
// at the moment
|
||||
// itest!(dynamic_import {
|
||||
|
@ -234,6 +245,25 @@ itest!(tarball_with_global_header {
|
|||
http_server: true,
|
||||
});
|
||||
|
||||
itest!(node_modules_deno_node_modules {
|
||||
args: "run --quiet npm/node_modules_deno_node_modules/main.ts",
|
||||
output: "npm/node_modules_deno_node_modules/main.out",
|
||||
copy_temp_dir: Some("npm/node_modules_deno_node_modules/"),
|
||||
exit_code: 0,
|
||||
envs: env_vars_for_npm_tests(),
|
||||
http_server: true,
|
||||
});
|
||||
|
||||
itest!(node_modules_deno_node_modules_local {
|
||||
args:
|
||||
"run --quiet --node-modules-dir npm/node_modules_deno_node_modules/main.ts",
|
||||
output: "npm/node_modules_deno_node_modules/main.out",
|
||||
copy_temp_dir: Some("npm/node_modules_deno_node_modules/"),
|
||||
exit_code: 0,
|
||||
envs: env_vars_for_npm_tests(),
|
||||
http_server: true,
|
||||
});
|
||||
|
||||
itest!(nonexistent_file {
|
||||
args: "run -A --quiet npm/nonexistent_file/main.js",
|
||||
output: "npm/nonexistent_file/main.out",
|
||||
|
@ -406,7 +436,7 @@ fn cached_only_after_first_run() {
|
|||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
assert_contains!(stderr, "Download");
|
||||
assert_contains!(stdout, "createChalk: chalk");
|
||||
assert_contains!(stdout, "[Function: chalk] createChalk");
|
||||
assert!(output.status.success());
|
||||
|
||||
let deno = util::deno_cmd_with_deno_dir(&deno_dir)
|
||||
|
@ -451,7 +481,7 @@ fn cached_only_after_first_run() {
|
|||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
assert!(output.status.success());
|
||||
assert!(stderr.is_empty());
|
||||
assert_contains!(stdout, "createChalk: chalk");
|
||||
assert_contains!(stdout, "[Function: chalk] createChalk");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -476,7 +506,7 @@ fn reload_flag() {
|
|||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
assert_contains!(stderr, "Download");
|
||||
assert_contains!(stdout, "createChalk: chalk");
|
||||
assert_contains!(stdout, "[Function: chalk] createChalk");
|
||||
assert!(output.status.success());
|
||||
|
||||
let deno = util::deno_cmd_with_deno_dir(&deno_dir)
|
||||
|
@ -496,7 +526,7 @@ fn reload_flag() {
|
|||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
assert_contains!(stderr, "Download");
|
||||
assert_contains!(stdout, "createChalk: chalk");
|
||||
assert_contains!(stdout, "[Function: chalk] createChalk");
|
||||
assert!(output.status.success());
|
||||
|
||||
let deno = util::deno_cmd_with_deno_dir(&deno_dir)
|
||||
|
@ -516,7 +546,7 @@ fn reload_flag() {
|
|||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
assert_contains!(stderr, "Download");
|
||||
assert_contains!(stdout, "createChalk: chalk");
|
||||
assert_contains!(stdout, "[Function: chalk] createChalk");
|
||||
assert!(output.status.success());
|
||||
|
||||
let deno = util::deno_cmd_with_deno_dir(&deno_dir)
|
||||
|
@ -536,7 +566,7 @@ fn reload_flag() {
|
|||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
assert_contains!(stderr, "Download");
|
||||
assert_contains!(stdout, "createChalk: chalk");
|
||||
assert_contains!(stdout, "[Function: chalk] createChalk");
|
||||
assert!(output.status.success());
|
||||
|
||||
let deno = util::deno_cmd_with_deno_dir(&deno_dir)
|
||||
|
@ -556,7 +586,7 @@ fn reload_flag() {
|
|||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
assert!(stderr.is_empty());
|
||||
assert_contains!(stdout, "createChalk: chalk");
|
||||
assert_contains!(stdout, "[Function: chalk] createChalk");
|
||||
assert!(output.status.success());
|
||||
}
|
||||
|
||||
|
@ -605,7 +635,7 @@ fn no_npm_after_first_run() {
|
|||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
assert_contains!(stderr, "Download");
|
||||
assert_contains!(stdout, "createChalk: chalk");
|
||||
assert_contains!(stdout, "[Function: chalk] createChalk");
|
||||
assert!(output.status.success());
|
||||
|
||||
let deno = util::deno_cmd_with_deno_dir(&deno_dir)
|
||||
|
@ -713,6 +743,19 @@ itest!(deno_run_bin_cjs {
|
|||
http_server: true,
|
||||
});
|
||||
|
||||
#[test]
|
||||
fn deno_run_bin_lockfile() {
|
||||
let context = TestContextBuilder::for_npm().use_temp_cwd().build();
|
||||
let temp_dir = context.temp_dir();
|
||||
temp_dir.write("deno.json", "{}");
|
||||
let output = context
|
||||
.new_command()
|
||||
.args("run -A --quiet npm:@denotest/bin/cli-esm this is a test")
|
||||
.run();
|
||||
output.assert_matches_file("npm/deno_run_esm.out");
|
||||
assert!(temp_dir.path().join("deno.lock").exists());
|
||||
}
|
||||
|
||||
itest!(deno_run_non_existent {
|
||||
args: "run npm:mkdirp@0.5.125",
|
||||
output: "npm/deno_run_non_existent.out",
|
||||
|
@ -845,17 +888,9 @@ fn ensure_registry_files_local() {
|
|||
}
|
||||
}
|
||||
|
||||
itest!(compile_errors {
|
||||
args: "compile -A --quiet npm/cached_only/main.ts",
|
||||
output_str: Some("error: npm specifiers have not yet been implemented for this sub command (https://github.com/denoland/deno/issues/15960). Found: npm:chalk@5.0.1\n"),
|
||||
exit_code: 1,
|
||||
envs: env_vars_for_npm_tests(),
|
||||
http_server: true,
|
||||
});
|
||||
|
||||
itest!(bundle_errors {
|
||||
args: "bundle --quiet npm/esm/main.js",
|
||||
output_str: Some("error: npm specifiers have not yet been implemented for this sub command (https://github.com/denoland/deno/issues/15960). Found: npm:chalk@5.0.1\n"),
|
||||
output_str: Some("error: npm specifiers have not yet been implemented for this subcommand (https://github.com/denoland/deno/issues/15960). Found: npm:chalk@5.0.1\n"),
|
||||
exit_code: 1,
|
||||
envs: env_vars_for_npm_tests(),
|
||||
http_server: true,
|
||||
|
@ -1797,3 +1832,185 @@ fn reload_info_not_found_cache_but_exists_remote() {
|
|||
output.assert_exit_code(0);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn binary_package_with_optional_dependencies() {
|
||||
let context = TestContextBuilder::for_npm()
|
||||
.use_sync_npm_download()
|
||||
.use_separate_deno_dir() // the "npm" folder means something in the deno dir, so use a separate folder
|
||||
.use_copy_temp_dir("npm/binary_package")
|
||||
.cwd("npm/binary_package")
|
||||
.build();
|
||||
|
||||
let temp_dir = context.temp_dir();
|
||||
let temp_dir_path = temp_dir.path();
|
||||
let project_path = temp_dir_path.join("npm/binary_package");
|
||||
|
||||
// write empty config file so a lockfile gets created
|
||||
temp_dir.write("npm/binary_package/deno.json", "{}");
|
||||
|
||||
// run it twice, with the first time creating the lockfile and the second using it
|
||||
for i in 0..2 {
|
||||
if i == 1 {
|
||||
assert!(project_path.join("deno.lock").exists());
|
||||
}
|
||||
|
||||
let output = context
|
||||
.new_command()
|
||||
.args("run -A --node-modules-dir main.js")
|
||||
.run();
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
output.assert_exit_code(0);
|
||||
output.assert_matches_text(
|
||||
"[WILDCARD]Hello from binary package on windows[WILDCARD]",
|
||||
);
|
||||
assert!(project_path
|
||||
.join("node_modules/.deno/@denotest+binary-package-windows@1.0.0")
|
||||
.exists());
|
||||
assert!(!project_path
|
||||
.join("node_modules/.deno/@denotest+binary-package-linux@1.0.0")
|
||||
.exists());
|
||||
assert!(!project_path
|
||||
.join("node_modules/.deno/@denotest+binary-package-mac@1.0.0")
|
||||
.exists());
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
output.assert_exit_code(0);
|
||||
output.assert_matches_text(
|
||||
"[WILDCARD]Hello from binary package on mac[WILDCARD]",
|
||||
);
|
||||
|
||||
assert!(!project_path
|
||||
.join("node_modules/.deno/@denotest+binary-package-windows@1.0.0")
|
||||
.exists());
|
||||
assert!(!project_path
|
||||
.join("node_modules/.deno/@denotest+binary-package-linux@1.0.0")
|
||||
.exists());
|
||||
assert!(project_path
|
||||
.join("node_modules/.deno/@denotest+binary-package-mac@1.0.0")
|
||||
.exists());
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
output.assert_exit_code(0);
|
||||
output.assert_matches_text(
|
||||
"[WILDCARD]Hello from binary package on linux[WILDCARD]",
|
||||
);
|
||||
assert!(!project_path
|
||||
.join("node_modules/.deno/@denotest+binary-package-windows@1.0.0")
|
||||
.exists());
|
||||
assert!(project_path
|
||||
.join("node_modules/.deno/@denotest+binary-package-linux@1.0.0")
|
||||
.exists());
|
||||
assert!(!project_path
|
||||
.join("node_modules/.deno/@denotest+binary-package-mac@1.0.0")
|
||||
.exists());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn node_modules_dir_config_file() {
|
||||
let test_context = TestContextBuilder::for_npm().use_temp_cwd().build();
|
||||
let temp_dir = test_context.temp_dir();
|
||||
let node_modules_dir = temp_dir.path().join("node_modules");
|
||||
let rm_node_modules = || std::fs::remove_dir_all(&node_modules_dir).unwrap();
|
||||
|
||||
temp_dir.write("deno.json", r#"{ "nodeModulesDir": true }"#);
|
||||
temp_dir.write("main.ts", "import 'npm:@denotest/esm-basic';");
|
||||
|
||||
let deno_cache_cmd = test_context.new_command().args("cache --quiet main.ts");
|
||||
deno_cache_cmd.run();
|
||||
|
||||
assert!(node_modules_dir.exists());
|
||||
rm_node_modules();
|
||||
temp_dir.write("deno.json", r#"{ "nodeModulesDir": false }"#);
|
||||
|
||||
deno_cache_cmd.run();
|
||||
assert!(!node_modules_dir.exists());
|
||||
|
||||
temp_dir.write("package.json", r#"{}"#);
|
||||
deno_cache_cmd.run();
|
||||
assert!(!node_modules_dir.exists());
|
||||
|
||||
test_context
|
||||
.new_command()
|
||||
.args("cache --quiet --node-modules-dir main.ts")
|
||||
.run();
|
||||
assert!(node_modules_dir.exists());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn top_level_install_package_json_explicit_opt_in() {
|
||||
let test_context = TestContextBuilder::for_npm().use_temp_cwd().build();
|
||||
let temp_dir = test_context.temp_dir();
|
||||
let node_modules_dir = temp_dir.path().join("node_modules");
|
||||
let rm_created_files = || {
|
||||
std::fs::remove_dir_all(&node_modules_dir).unwrap();
|
||||
std::fs::remove_file(temp_dir.path().join("deno.lock")).unwrap();
|
||||
};
|
||||
|
||||
// when the node_modules_dir is explicitly opted into, we should always
|
||||
// ensure a top level package.json install occurs
|
||||
temp_dir.write("deno.json", "{ \"nodeModulesDir\": true }");
|
||||
temp_dir.write(
|
||||
"package.json",
|
||||
"{ \"dependencies\": { \"@denotest/esm-basic\": \"1.0\" }}",
|
||||
);
|
||||
|
||||
temp_dir.write("main.ts", "console.log(5);");
|
||||
let output = test_context.new_command().args("cache main.ts").run();
|
||||
output.assert_matches_text(
|
||||
concat!(
|
||||
"Download http://localhost:4545/npm/registry/@denotest/esm-basic\n",
|
||||
"Download http://localhost:4545/npm/registry/@denotest/esm-basic/1.0.0.tgz\n",
|
||||
"Initialize @denotest/esm-basic@1.0.0\n",
|
||||
)
|
||||
);
|
||||
|
||||
rm_created_files();
|
||||
let output = test_context
|
||||
.new_command()
|
||||
.args_vec(["eval", "console.log(5)"])
|
||||
.run();
|
||||
output.assert_matches_text(concat!(
|
||||
"Initialize @denotest/esm-basic@1.0.0\n",
|
||||
"5\n"
|
||||
));
|
||||
|
||||
rm_created_files();
|
||||
let output = test_context
|
||||
.new_command()
|
||||
.args("run -")
|
||||
.stdin("console.log(5)")
|
||||
.run();
|
||||
output.assert_matches_text(concat!(
|
||||
"Initialize @denotest/esm-basic@1.0.0\n",
|
||||
"5\n"
|
||||
));
|
||||
|
||||
// now ensure this is cached in the lsp
|
||||
rm_created_files();
|
||||
let mut client = test_context.new_lsp_command().build();
|
||||
client.initialize_default();
|
||||
let file_uri = temp_dir.uri().join("file.ts").unwrap();
|
||||
client.did_open(json!({
|
||||
"textDocument": {
|
||||
"uri": file_uri,
|
||||
"languageId": "typescript",
|
||||
"version": 1,
|
||||
"text": "",
|
||||
}
|
||||
}));
|
||||
client.write_request(
|
||||
"deno/cache",
|
||||
json!({ "referrer": { "uri": file_uri }, "uris": [] }),
|
||||
);
|
||||
|
||||
assert!(node_modules_dir.join("@denotest").exists());
|
||||
}
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue