Mirror of https://github.com/denoland/deno.git (synced 2025-01-05 13:59:01 -05:00)

Merge branch 'main' into support_create_connection

Commit 0467865e17: 673 changed files with 8280 additions and 4369 deletions
@@ -71,7 +71,7 @@
   "https://plugins.dprint.dev/typescript-0.93.0.wasm",
   "https://plugins.dprint.dev/json-0.19.3.wasm",
   "https://plugins.dprint.dev/markdown-0.17.8.wasm",
-  "https://plugins.dprint.dev/toml-0.6.2.wasm",
+  "https://plugins.dprint.dev/toml-0.6.3.wasm",
   "https://plugins.dprint.dev/exec-0.5.0.json@8d9972eee71fa1590e04873540421f3eda7674d0f1aae3d7c788615e7b7413d0",
   "https://plugins.dprint.dev/g-plane/pretty_yaml-v0.5.0.wasm"
 ]
.github/workflows/ci.generate.ts (vendored): 26 lines changed
@@ -757,8 +757,10 @@ const ci = {
         ].join("\n"),
         run: [
           "cd target/release",
+          "shasum -a 256 deno > deno-${{ matrix.arch }}-unknown-linux-gnu.sha256sum",
           "zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno",
           "strip denort",
+          "shasum -a 256 denort > denort-${{ matrix.arch }}-unknown-linux-gnu.sha256sum",
           "zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort",
           "./deno types > lib.deno.d.ts",
         ].join("\n"),
@ -783,8 +785,10 @@ const ci = {
|
|||
"--p12-file=<(echo $APPLE_CODESIGN_KEY | base64 -d) " +
|
||||
"--entitlements-xml-file=cli/entitlements.plist",
|
||||
"cd target/release",
|
||||
"shasum -a 256 deno > deno-${{ matrix.arch }}-apple-darwin.sha256sum",
|
||||
"zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno",
|
||||
"strip denort",
|
||||
"shasum -a 256 denort > denort-${{ matrix.arch }}-apple-darwin.sha256sum",
|
||||
"zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort",
|
||||
]
|
||||
.join("\n"),
|
||||
|
@ -799,7 +803,9 @@ const ci = {
|
|||
].join("\n"),
|
||||
shell: "pwsh",
|
||||
run: [
|
||||
"Get-FileHash target/release/deno.exe -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.sha256sum",
|
||||
"Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip",
|
||||
"Get-FileHash target/release/denort.exe -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.sha256sum",
|
||||
"Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip",
|
||||
].join("\n"),
|
||||
},
|
||||
|
@ -813,6 +819,7 @@ const ci = {
|
|||
].join("\n"),
|
||||
run: [
|
||||
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/',
|
||||
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/canary/$(git rev-parse HEAD)/',
|
||||
"echo ${{ github.sha }} > canary-latest.txt",
|
||||
'gsutil -h "Cache-Control: no-cache" cp canary-latest.txt gs://dl.deno.land/canary-$(rustc -vV | sed -n "s|host: ||p")-latest.txt',
|
||||
].join("\n"),
|
||||
|
@ -994,8 +1001,10 @@ const ci = {
|
|||
"github.repository == 'denoland/deno' &&",
|
||||
"startsWith(github.ref, 'refs/tags/')",
|
||||
].join("\n"),
|
||||
run:
|
||||
run: [
|
||||
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
|
||||
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
|
||||
].join("\n"),
|
||||
},
|
||||
{
|
||||
name: "Upload release to dl.deno.land (windows)",
|
||||
|
@ -1009,8 +1018,10 @@ const ci = {
|
|||
env: {
|
||||
CLOUDSDK_PYTHON: "${{env.pythonLocation}}\\python.exe",
|
||||
},
|
||||
run:
|
||||
run: [
|
||||
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
|
||||
'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/',
|
||||
].join("\n"),
|
||||
},
|
||||
{
|
||||
name: "Create release notes",
|
||||
|
@ -1040,15 +1051,25 @@ const ci = {
|
|||
with: {
|
||||
files: [
|
||||
"target/release/deno-x86_64-pc-windows-msvc.zip",
|
||||
"target/release/deno-x86_64-pc-windows-msvc.sha256sum",
|
||||
"target/release/denort-x86_64-pc-windows-msvc.zip",
|
||||
"target/release/denort-x86_64-pc-windows-msvc.sha256sum",
|
||||
"target/release/deno-x86_64-unknown-linux-gnu.zip",
|
||||
"target/release/deno-x86_64-unknown-linux-gnu.sha256sum",
|
||||
"target/release/denort-x86_64-unknown-linux-gnu.zip",
|
||||
"target/release/denort-x86_64-unknown-linux-gnu.sha256sum",
|
||||
"target/release/deno-x86_64-apple-darwin.zip",
|
||||
"target/release/deno-x86_64-apple-darwin.sha256sum",
|
||||
"target/release/denort-x86_64-apple-darwin.zip",
|
||||
"target/release/denort-x86_64-apple-darwin.sha256sum",
|
||||
"target/release/deno-aarch64-unknown-linux-gnu.zip",
|
||||
"target/release/deno-aarch64-unknown-linux-gnu.sha256sum",
|
||||
"target/release/denort-aarch64-unknown-linux-gnu.zip",
|
||||
"target/release/denort-aarch64-unknown-linux-gnu.sha256sum",
|
||||
"target/release/deno-aarch64-apple-darwin.zip",
|
||||
"target/release/deno-aarch64-apple-darwin.sha256sum",
|
||||
"target/release/denort-aarch64-apple-darwin.zip",
|
||||
"target/release/denort-aarch64-apple-darwin.sha256sum",
|
||||
"target/release/deno_src.tar.gz",
|
||||
"target/release/lib.deno.d.ts",
|
||||
].join("\n"),
|
||||
|
@ -1067,6 +1088,7 @@ const ci = {
|
|||
"./target",
|
||||
"!./target/*/gn_out",
|
||||
"!./target/*/*.zip",
|
||||
"!./target/*/*.sha256sum",
|
||||
"!./target/*/*.tar.gz",
|
||||
].join("\n"),
|
||||
key: prCacheKeyPrefix + "${{ github.sha }}",
|
||||
|
|
.github/workflows/ci.yml (vendored): 26 lines changed
@ -448,8 +448,10 @@ jobs:
|
|||
github.repository == 'denoland/deno')
|
||||
run: |-
|
||||
cd target/release
|
||||
shasum -a 256 deno > deno-${{ matrix.arch }}-unknown-linux-gnu.sha256sum
|
||||
zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno
|
||||
strip denort
|
||||
shasum -a 256 denort > denort-${{ matrix.arch }}-unknown-linux-gnu.sha256sum
|
||||
zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort
|
||||
./deno types > lib.deno.d.ts
|
||||
- name: Pre-release (mac)
|
||||
|
@ -465,8 +467,10 @@ jobs:
|
|||
echo "Key is $(echo $APPLE_CODESIGN_KEY | base64 -d | wc -c) bytes"
|
||||
rcodesign sign target/release/deno --code-signature-flags=runtime --p12-password="$APPLE_CODESIGN_PASSWORD" --p12-file=<(echo $APPLE_CODESIGN_KEY | base64 -d) --entitlements-xml-file=cli/entitlements.plist
|
||||
cd target/release
|
||||
shasum -a 256 deno > deno-${{ matrix.arch }}-apple-darwin.sha256sum
|
||||
zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno
|
||||
strip denort
|
||||
shasum -a 256 denort > denort-${{ matrix.arch }}-apple-darwin.sha256sum
|
||||
zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort
|
||||
- name: Pre-release (windows)
|
||||
if: |-
|
||||
|
@ -476,7 +480,9 @@ jobs:
|
|||
github.repository == 'denoland/deno')
|
||||
shell: pwsh
|
||||
run: |-
|
||||
Get-FileHash target/release/deno.exe -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.sha256sum
|
||||
Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip
|
||||
Get-FileHash target/release/denort.exe -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.sha256sum
|
||||
Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip
|
||||
- name: Upload canary to dl.deno.land
|
||||
if: |-
|
||||
|
@ -486,6 +492,7 @@ jobs:
|
|||
github.ref == 'refs/heads/main')
|
||||
run: |-
|
||||
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/canary/$(git rev-parse HEAD)/
|
||||
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/canary/$(git rev-parse HEAD)/
|
||||
echo ${{ github.sha }} > canary-latest.txt
|
||||
gsutil -h "Cache-Control: no-cache" cp canary-latest.txt gs://dl.deno.land/canary-$(rustc -vV | sed -n "s|host: ||p")-latest.txt
|
||||
- name: Autobahn testsuite
|
||||
|
@ -615,7 +622,9 @@ jobs:
|
|||
matrix.profile == 'release' &&
|
||||
github.repository == 'denoland/deno' &&
|
||||
startsWith(github.ref, 'refs/tags/'))
|
||||
run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/'
|
||||
run: |-
|
||||
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
|
||||
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
|
||||
- name: Upload release to dl.deno.land (windows)
|
||||
if: |-
|
||||
!(matrix.skip) && (matrix.os == 'windows' &&
|
||||
|
@ -625,7 +634,9 @@ jobs:
|
|||
startsWith(github.ref, 'refs/tags/'))
|
||||
env:
|
||||
CLOUDSDK_PYTHON: '${{env.pythonLocation}}\python.exe'
|
||||
run: 'gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/'
|
||||
run: |-
|
||||
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.zip gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
|
||||
gsutil -h "Cache-Control: public, max-age=3600" cp ./target/release/*.sha256sum gs://dl.deno.land/release/${GITHUB_REF#refs/*/}/
|
||||
- name: Create release notes
|
||||
if: |-
|
||||
!(matrix.skip) && (matrix.job == 'test' &&
|
||||
|
@ -647,15 +658,25 @@ jobs:
|
|||
with:
|
||||
files: |-
|
||||
target/release/deno-x86_64-pc-windows-msvc.zip
|
||||
target/release/deno-x86_64-pc-windows-msvc.sha256sum
|
||||
target/release/denort-x86_64-pc-windows-msvc.zip
|
||||
target/release/denort-x86_64-pc-windows-msvc.sha256sum
|
||||
target/release/deno-x86_64-unknown-linux-gnu.zip
|
||||
target/release/deno-x86_64-unknown-linux-gnu.sha256sum
|
||||
target/release/denort-x86_64-unknown-linux-gnu.zip
|
||||
target/release/denort-x86_64-unknown-linux-gnu.sha256sum
|
||||
target/release/deno-x86_64-apple-darwin.zip
|
||||
target/release/deno-x86_64-apple-darwin.sha256sum
|
||||
target/release/denort-x86_64-apple-darwin.zip
|
||||
target/release/denort-x86_64-apple-darwin.sha256sum
|
||||
target/release/deno-aarch64-unknown-linux-gnu.zip
|
||||
target/release/deno-aarch64-unknown-linux-gnu.sha256sum
|
||||
target/release/denort-aarch64-unknown-linux-gnu.zip
|
||||
target/release/denort-aarch64-unknown-linux-gnu.sha256sum
|
||||
target/release/deno-aarch64-apple-darwin.zip
|
||||
target/release/deno-aarch64-apple-darwin.sha256sum
|
||||
target/release/denort-aarch64-apple-darwin.zip
|
||||
target/release/denort-aarch64-apple-darwin.sha256sum
|
||||
target/release/deno_src.tar.gz
|
||||
target/release/lib.deno.d.ts
|
||||
body_path: target/release/release-notes.md
|
||||
|
@ -668,6 +689,7 @@ jobs:
|
|||
./target
|
||||
!./target/*/gn_out
|
||||
!./target/*/*.zip
|
||||
!./target/*/*.sha256sum
|
||||
!./target/*/*.tar.gz
|
||||
key: '15-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
|
||||
publish-canary:
|
||||
|
|
Cargo.lock (generated): 144 lines changed
@ -1146,11 +1146,10 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno"
|
||||
version = "2.0.0-rc.4"
|
||||
version = "2.0.0-rc.8"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"async-trait",
|
||||
"base32",
|
||||
"base64 0.21.7",
|
||||
"bincode",
|
||||
"bytes",
|
||||
|
@ -1174,6 +1173,8 @@ dependencies = [
|
|||
"deno_lockfile",
|
||||
"deno_npm",
|
||||
"deno_package_json",
|
||||
"deno_path_util",
|
||||
"deno_resolver",
|
||||
"deno_runtime",
|
||||
"deno_semver",
|
||||
"deno_task_shell",
|
||||
|
@ -1272,9 +1273,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_ast"
|
||||
version = "0.42.0"
|
||||
version = "0.42.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3b08d11d9e4086b00d3428650e31153cf5896586411763cb88a6423ce5b18791"
|
||||
checksum = "89ea2fd038c9c7e3e87e624fd708303cd33f39c33707f6c48fa9a65d65fefc47"
|
||||
dependencies = [
|
||||
"base64 0.21.7",
|
||||
"deno_media_type",
|
||||
|
@ -1347,10 +1348,11 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_cache_dir"
|
||||
version = "0.11.1"
|
||||
version = "0.12.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6df43311cb7703fa3242c282823a850e4c8d0c06b9527d8209b55bd695452ea5"
|
||||
checksum = "87900cfcd07bdbf3597bc36b77da0c0e7b6c2e65213faa2ed43d9a1ec12bd31d"
|
||||
dependencies = [
|
||||
"base32",
|
||||
"deno_media_type",
|
||||
"indexmap",
|
||||
"log",
|
||||
|
@ -1489,9 +1491,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_doc"
|
||||
version = "0.148.0"
|
||||
version = "0.150.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "144fa07977ba9eeeb98bcd267b7f0a6f8033f0f1f20fd210e669b3c4f30cefa2"
|
||||
checksum = "c762829006b555837691b7016828eb1f93acf0a4ff344357b946898ea5b5610d"
|
||||
dependencies = [
|
||||
"ammonia",
|
||||
"anyhow",
|
||||
|
@ -1570,6 +1572,7 @@ dependencies = [
|
|||
"base32",
|
||||
"deno_core",
|
||||
"deno_io",
|
||||
"deno_path_util",
|
||||
"deno_permissions",
|
||||
"filetime",
|
||||
"junction",
|
||||
|
@ -1584,9 +1587,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_graph"
|
||||
version = "0.82.1"
|
||||
version = "0.82.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "78b63015c73aa203da206b5d35b4c1eaa23bc7fed37ab325da62d525a5524a04"
|
||||
checksum = "938ed2efa1dd9fdcceeebc169b2b7910506b8dacc992cfdcffd84aa6a3eb8db0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
|
@ -1682,6 +1685,7 @@ dependencies = [
|
|||
"chrono",
|
||||
"deno_core",
|
||||
"deno_fetch",
|
||||
"deno_path_util",
|
||||
"deno_permissions",
|
||||
"deno_tls",
|
||||
"denokv_proto",
|
||||
|
@ -1800,6 +1804,7 @@ dependencies = [
|
|||
"deno_media_type",
|
||||
"deno_net",
|
||||
"deno_package_json",
|
||||
"deno_path_util",
|
||||
"deno_permissions",
|
||||
"deno_whoami",
|
||||
"der",
|
||||
|
@ -1916,11 +1921,23 @@ dependencies = [
|
|||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_path_util"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4889646c1ce8437a6fde3acb057fd7e2d039e62c61f5063fc125ed1ede114dc6"
|
||||
dependencies = [
|
||||
"percent-encoding",
|
||||
"thiserror",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_permissions"
|
||||
version = "0.28.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_path_util",
|
||||
"deno_terminal 0.2.0",
|
||||
"fqdn",
|
||||
"libc",
|
||||
|
@ -1932,6 +1949,21 @@ dependencies = [
|
|||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_resolver"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"base32",
|
||||
"deno_media_type",
|
||||
"deno_package_json",
|
||||
"deno_path_util",
|
||||
"deno_semver",
|
||||
"node_resolver",
|
||||
"test_server",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_runtime"
|
||||
version = "0.177.0"
|
||||
|
@ -1953,6 +1985,7 @@ dependencies = [
|
|||
"deno_napi",
|
||||
"deno_net",
|
||||
"deno_node",
|
||||
"deno_path_util",
|
||||
"deno_permissions",
|
||||
"deno_terminal 0.2.0",
|
||||
"deno_tls",
|
||||
|
@ -1985,6 +2018,7 @@ dependencies = [
|
|||
"serde",
|
||||
"signal-hook",
|
||||
"signal-hook-registry",
|
||||
"tempfile",
|
||||
"test_server",
|
||||
"tokio",
|
||||
"tokio-metrics",
|
||||
|
@ -1997,9 +2031,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_semver"
|
||||
version = "0.5.13"
|
||||
version = "0.5.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6657fecb9ac6a7a71f552c95e8cc492466a75f5660224577e2226bcf30db9768"
|
||||
checksum = "670fec7ef309384e23c2a90ac5d2d9d91a776d225306c75f5cdd28cf6cc8a59f"
|
||||
dependencies = [
|
||||
"monch",
|
||||
"once_cell",
|
||||
|
@ -4071,70 +4105,6 @@ version = "1.3.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
|
||||
|
||||
[[package]]
|
||||
name = "lexical-core"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2cde5de06e8d4c2faabc400238f9ae1c74d5412d03a7bd067645ccbc47070e46"
|
||||
dependencies = [
|
||||
"lexical-parse-float",
|
||||
"lexical-parse-integer",
|
||||
"lexical-util",
|
||||
"lexical-write-float",
|
||||
"lexical-write-integer",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lexical-parse-float"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "683b3a5ebd0130b8fb52ba0bdc718cc56815b6a097e28ae5a6997d0ad17dc05f"
|
||||
dependencies = [
|
||||
"lexical-parse-integer",
|
||||
"lexical-util",
|
||||
"static_assertions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lexical-parse-integer"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6d0994485ed0c312f6d965766754ea177d07f9c00c9b82a5ee62ed5b47945ee9"
|
||||
dependencies = [
|
||||
"lexical-util",
|
||||
"static_assertions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lexical-util"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5255b9ff16ff898710eb9eb63cb39248ea8a5bb036bea8085b1a767ff6c4e3fc"
|
||||
dependencies = [
|
||||
"static_assertions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lexical-write-float"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "accabaa1c4581f05a3923d1b4cfd124c329352288b7b9da09e766b0668116862"
|
||||
dependencies = [
|
||||
"lexical-util",
|
||||
"lexical-write-integer",
|
||||
"static_assertions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lexical-write-integer"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e1b6f3d1f4422866b68192d62f77bc5c700bee84f3069f2469d7bc8c77852446"
|
||||
dependencies = [
|
||||
"lexical-util",
|
||||
"static_assertions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.153"
|
||||
|
@ -4209,9 +4179,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "libsui"
|
||||
version = "0.3.1"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "56e39af24eff8df7c8b9980ef56a1a1f4d2e77b34b2d5c0529f108c53ae96a7a"
|
||||
checksum = "205eca4e7beaad637dcd38fe41292065894ee7f498077cf3c135d5f7252b9f27"
|
||||
dependencies = [
|
||||
"editpe",
|
||||
"libc",
|
||||
|
@ -4342,9 +4312,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "markup_fmt"
|
||||
version = "0.13.0"
|
||||
version = "0.13.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74fc137a4a591720176339bf7e857586a48ff35c0caee7ad6cf709327901232c"
|
||||
checksum = "9dab5ae899659fbe5c8835b2c8ca8d3e357974a3e454138925b404004973361f"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"css_dataset",
|
||||
|
@ -4592,6 +4562,7 @@ dependencies = [
|
|||
"async-trait",
|
||||
"deno_media_type",
|
||||
"deno_package_json",
|
||||
"deno_path_util",
|
||||
"futures",
|
||||
"lazy-regex",
|
||||
"once_cell",
|
||||
|
@ -6386,13 +6357,12 @@ checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe"
|
|||
|
||||
[[package]]
|
||||
name = "simd-json"
|
||||
version = "0.13.9"
|
||||
version = "0.14.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b0b84c23a1066e1d650ebc99aa8fb9f8ed0ab96fd36e2e836173c92fc9fb29bc"
|
||||
checksum = "05f0b376aada35f30a0012f5790e50aed62f91804a0682669aefdbe81c7fcb91"
|
||||
dependencies = [
|
||||
"getrandom",
|
||||
"halfbrown",
|
||||
"lexical-core",
|
||||
"ref-cast",
|
||||
"serde",
|
||||
"serde_json",
|
||||
|
@ -7947,9 +7917,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "value-trait"
|
||||
version = "0.8.1"
|
||||
version = "0.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dad8db98c1e677797df21ba03fca7d3bf9bec3ca38db930954e4fe6e1ea27eb4"
|
||||
checksum = "bcaa56177466248ba59d693a048c0959ddb67f1151b963f904306312548cf392"
|
||||
dependencies = [
|
||||
"float-cmp",
|
||||
"halfbrown",
|
||||
|
@ -8119,9 +8089,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "webpki-root-certs"
|
||||
version = "0.26.5"
|
||||
version = "0.26.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e6d93b773107ba49bc84dd3b241e019c702d886fd5c457defe2ea8b1123a5dcd"
|
||||
checksum = "e8c6dfa3ac045bc517de14c7b1384298de1dbd229d38e08e169d9ae8c170937c"
|
||||
dependencies = [
|
||||
"rustls-pki-types",
|
||||
]
|
||||
|
|
Cargo.toml: 17 lines changed
@@ -21,13 +21,14 @@ members = [
   "ext/napi",
   "ext/net",
   "ext/node",
-  "ext/node_resolver",
   "ext/url",
   "ext/web",
   "ext/webgpu",
   "ext/webidl",
   "ext/websocket",
   "ext/webstorage",
+  "resolvers/deno",
+  "resolvers/node",
   "runtime",
   "runtime/permissions",
   "tests",

@@ -44,15 +45,17 @@ license = "MIT"
 repository = "https://github.com/denoland/deno"

 [workspace.dependencies]
-deno_ast = { version = "=0.42.0", features = ["transpiling"] }
+deno_ast = { version = "=0.42.1", features = ["transpiling"] }
 deno_core = { version = "0.311.0" }

 deno_bench_util = { version = "0.162.0", path = "./bench_util" }
 deno_lockfile = "=0.23.1"
 deno_media_type = { version = "0.1.4", features = ["module_specifier"] }
+deno_npm = "=0.25.2"
+deno_path_util = "=0.2.0"
 deno_permissions = { version = "0.28.0", path = "./runtime/permissions" }
 deno_runtime = { version = "0.177.0", path = "./runtime" }
-deno_semver = "=0.5.13"
+deno_semver = "=0.5.14"
 deno_terminal = "0.2.0"
 napi_sym = { version = "0.98.0", path = "./cli/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }

@@ -85,7 +88,10 @@ deno_webgpu = { version = "0.135.0", path = "./ext/webgpu" }
 deno_webidl = { version = "0.168.0", path = "./ext/webidl" }
 deno_websocket = { version = "0.173.0", path = "./ext/websocket" }
 deno_webstorage = { version = "0.163.0", path = "./ext/webstorage" }
-node_resolver = { version = "0.7.0", path = "./ext/node_resolver" }
+
+# resolvers
+deno_resolver = { version = "0.0.1", path = "./resolvers/deno" }
+node_resolver = { version = "0.7.0", path = "./resolvers/node" }

 aes = "=0.8.3"
 anyhow = "1.0.57"

@@ -101,9 +107,10 @@ cbc = { version = "=0.1.2", features = ["alloc"] }
 # Instead use util::time::utc_now()
 chrono = { version = "0.4", default-features = false, features = ["std", "serde"] }
 console_static_text = "=0.8.1"
+dashmap = "5.5.3"
 data-encoding = "2.3.3"
 data-url = "=0.3.0"
-deno_cache_dir = "=0.11.1"
+deno_cache_dir = "=0.12.0"
 deno_package_json = { version = "=0.1.1", default-features = false }
 dlopen2 = "0.6.1"
 ecb = "=0.1.2"
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno"
|
||||
version = "2.0.0-rc.4"
|
||||
version = "2.0.0-rc.8"
|
||||
authors.workspace = true
|
||||
default-run = "deno"
|
||||
edition.workspace = true
|
||||
|
@ -67,24 +67,25 @@ deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposa
|
|||
deno_cache_dir = { workspace = true }
|
||||
deno_config = { version = "=0.35.0", features = ["workspace", "sync"] }
|
||||
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
|
||||
deno_doc = { version = "0.148.0", features = ["html", "syntect"] }
|
||||
deno_graph = { version = "=0.82.1" }
|
||||
deno_doc = { version = "0.150.0", features = ["html", "syntect"] }
|
||||
deno_graph = { version = "=0.82.3" }
|
||||
deno_lint = { version = "=0.67.0", features = ["docs"] }
|
||||
deno_lockfile.workspace = true
|
||||
deno_npm = "=0.25.2"
|
||||
deno_npm.workspace = true
|
||||
deno_package_json.workspace = true
|
||||
deno_path_util.workspace = true
|
||||
deno_resolver.workspace = true
|
||||
deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
|
||||
deno_semver.workspace = true
|
||||
deno_task_shell = "=0.17.0"
|
||||
deno_terminal.workspace = true
|
||||
eszip = "=0.78.0"
|
||||
libsui = "0.3.1"
|
||||
libsui = "0.4.0"
|
||||
napi_sym.workspace = true
|
||||
node_resolver.workspace = true
|
||||
|
||||
anstream = "0.6.14"
|
||||
async-trait.workspace = true
|
||||
base32.workspace = true
|
||||
base64.workspace = true
|
||||
bincode = "=1.3.3"
|
||||
bytes.workspace = true
|
||||
|
@ -95,7 +96,7 @@ clap_complete = "=4.5.24"
|
|||
clap_complete_fig = "=4.5.2"
|
||||
color-print = "0.3.5"
|
||||
console_static_text.workspace = true
|
||||
dashmap = "5.5.3"
|
||||
dashmap.workspace = true
|
||||
data-encoding.workspace = true
|
||||
dissimilar = "=1.0.4"
|
||||
dotenvy = "0.15.7"
|
||||
|
@ -124,7 +125,7 @@ libz-sys.workspace = true
|
|||
log = { workspace = true, features = ["serde"] }
|
||||
lsp-types.workspace = true
|
||||
malva = "=0.10.1"
|
||||
markup_fmt = "=0.13.0"
|
||||
markup_fmt = "=0.13.1"
|
||||
memmem.workspace = true
|
||||
monch.workspace = true
|
||||
notify.workspace = true
|
||||
|
|
|
@@ -1,5 +1,6 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

+use std::borrow::Cow;
 use std::collections::HashSet;
 use std::env;
 use std::ffi::OsString;
@@ -28,13 +29,13 @@ use deno_config::glob::PathOrPatternSet;
 use deno_core::anyhow::bail;
 use deno_core::anyhow::Context;
 use deno_core::error::AnyError;
-use deno_core::normalize_path;
 use deno_core::resolve_url_or_path;
 use deno_core::url::Url;
 use deno_graph::GraphKind;
-use deno_runtime::deno_permissions::parse_sys_kind;
+use deno_path_util::normalize_path;
+use deno_path_util::url_to_file_path;
 use deno_runtime::deno_permissions::PermissionsOptions;
-use deno_runtime::fs_util::specifier_to_file_path;
+use deno_runtime::deno_permissions::SysDescriptor;
 use log::debug;
 use log::Level;
 use serde::Deserialize;
@ -44,6 +45,7 @@ use crate::args::resolve_no_prompt;
|
|||
use crate::util::fs::canonicalize_path;
|
||||
|
||||
use super::flags_net;
|
||||
use super::jsr_url;
|
||||
|
||||
#[derive(Clone, Debug, Default, Eq, PartialEq)]
|
||||
pub enum ConfigFlag {
|
||||
|
@ -544,7 +546,10 @@ pub enum CaData {
|
|||
#[derive(Clone, Debug, Eq, PartialEq, Default)]
|
||||
pub struct LifecycleScriptsConfig {
|
||||
pub allowed: PackagesAllowedScripts,
|
||||
pub initial_cwd: Option<PathBuf>,
|
||||
pub initial_cwd: PathBuf,
|
||||
pub root_dir: PathBuf,
|
||||
/// Part of an explicit `deno install`
|
||||
pub explicit_install: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Default)]
|
||||
|
@ -638,6 +643,7 @@ pub struct PermissionFlags {
|
|||
pub allow_write: Option<Vec<String>>,
|
||||
pub deny_write: Option<Vec<String>>,
|
||||
pub no_prompt: bool,
|
||||
pub allow_import: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
impl PermissionFlags {
|
||||
|
@@ -657,9 +663,10 @@ impl PermissionFlags {
       || self.deny_sys.is_some()
       || self.allow_write.is_some()
       || self.deny_write.is_some()
+      || self.allow_import.is_some()
   }

-  pub fn to_options(&self) -> PermissionsOptions {
+  pub fn to_options(&self, cli_arg_urls: &[Cow<Url>]) -> PermissionsOptions {
     fn handle_allow<T: Default>(
       allow_all: bool,
       value: Option<T>,
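// Illustrative note, not part of this diff: the call-site shape implied by the new
// to_options() signature. Callers hand over the URLs given on the command line so
// that handle_imports() in the next hunk can fold their hosts into the
// --allow-import allowlist; a caller with nothing to contribute passes an empty
// slice, as the updated test near the end of this file does.
//
//   let opts = flags.permissions.to_options(&[]);
//   let opts = flags.permissions.to_options(&[Cow::Borrowed(&entrypoint_url)]);
//
// Only the &[] call appears in this diff; the Cow::Borrowed form is an assumed example.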
@@ -672,6 +679,41 @@
       }
     }

+    fn handle_imports(
+      cli_arg_urls: &[Cow<Url>],
+      imports: Option<Vec<String>>,
+    ) -> Option<Vec<String>> {
+      if imports.is_some() {
+        return imports;
+      }
+
+      let builtin_allowed_import_hosts = [
+        "deno.land:443",
+        "esm.sh:443",
+        "jsr.io:443",
+        "raw.githubusercontent.com:443",
+        "gist.githubusercontent.com:443",
+      ];
+
+      let mut imports =
+        Vec::with_capacity(builtin_allowed_import_hosts.len() + 1);
+      imports
+        .extend(builtin_allowed_import_hosts.iter().map(|s| s.to_string()));
+
+      // also add the JSR_URL env var
+      if let Some(jsr_host) = allow_import_host_from_url(jsr_url()) {
+        imports.push(jsr_host);
+      }
+      // include the cli arg urls
+      for url in cli_arg_urls {
+        if let Some(host) = allow_import_host_from_url(url) {
+          imports.push(host);
+        }
+      }
+
+      Some(imports)
+    }
+
     PermissionsOptions {
       allow_all: self.allow_all,
       allow_env: handle_allow(self.allow_all, self.allow_env.clone()),
@@ -688,11 +730,33 @@
       deny_sys: self.deny_sys.clone(),
       allow_write: handle_allow(self.allow_all, self.allow_write.clone()),
       deny_write: self.deny_write.clone(),
+      allow_import: handle_imports(
+        cli_arg_urls,
+        handle_allow(self.allow_all, self.allow_import.clone()),
+      ),
       prompt: !resolve_no_prompt(self),
     }
   }
 }
+
+/// Gets the --allow-import host from the provided url
+fn allow_import_host_from_url(url: &Url) -> Option<String> {
+  let host = url.host()?;
+  if let Some(port) = url.port() {
+    Some(format!("{}:{}", host, port))
+  } else {
+    use deno_core::url::Host::*;
+    match host {
+      Domain(domain) if domain == "jsr.io" && url.scheme() == "https" => None,
+      _ => match url.scheme() {
+        "https" => Some(format!("{}:443", host)),
+        "http" => Some(format!("{}:80", host)),
+        _ => None,
+      },
+    }
+  }
+}

 fn join_paths(allowlist: &[String], d: &str) -> String {
   allowlist
     .iter()
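// Standalone sketch, not part of this diff: the host-normalization rules of
// allow_import_host_from_url() restated so they can be run in isolation. This
// assumes the plain `url` crate (deno_core re-exports the same types); the
// function name `import_host`, the `main` driver, and the example URLs are
// illustrative only.
use url::{Host, Url};

fn import_host(url: &Url) -> Option<String> {
  let host = url.host()?;
  if let Some(port) = url.port() {
    // An explicit port is kept as-is, e.g. http://127.0.0.1:4250 -> "127.0.0.1:4250".
    return Some(format!("{}:{}", host, port));
  }
  match (&host, url.scheme()) {
    // https://jsr.io produces no entry: it is already implied by the defaults.
    (Host::Domain(d), "https") if *d == "jsr.io" => None,
    // Otherwise the scheme's default port is made explicit.
    (_, "https") => Some(format!("{}:443", host)),
    (_, "http") => Some(format!("{}:80", host)),
    // file:, data:, etc. contribute no import host.
    _ => None,
  }
}

fn main() {
  for spec in ["https://example.com", "http://jsr.io", "http://127.0.0.1:4250", "file:///x"] {
    let url = Url::parse(spec).unwrap();
    println!("{spec} -> {:?}", import_host(&url));
  }
}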
|
@ -880,6 +944,17 @@ impl Flags {
|
|||
_ => {}
|
||||
}
|
||||
|
||||
match &self.permissions.allow_import {
|
||||
Some(allowlist) if allowlist.is_empty() => {
|
||||
args.push("--allow-import".to_string());
|
||||
}
|
||||
Some(allowlist) => {
|
||||
let s = format!("--allow-import={}", allowlist.join(","));
|
||||
args.push(s);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
args
|
||||
}
|
||||
|
||||
|
@ -927,7 +1002,7 @@ impl Flags {
|
|||
if module_specifier.scheme() == "file"
|
||||
|| module_specifier.scheme() == "npm"
|
||||
{
|
||||
if let Ok(p) = specifier_to_file_path(&module_specifier) {
|
||||
if let Ok(p) = url_to_file_path(&module_specifier) {
|
||||
Some(vec![p.parent().unwrap().to_path_buf()])
|
||||
} else {
|
||||
Some(vec![current_dir.to_path_buf()])
|
||||
|
@ -990,6 +1065,7 @@ impl Flags {
|
|||
self.permissions.allow_write = None;
|
||||
self.permissions.allow_sys = None;
|
||||
self.permissions.allow_ffi = None;
|
||||
self.permissions.allow_import = None;
|
||||
}
|
||||
|
||||
pub fn resolve_watch_exclude_set(
|
||||
|
@ -1182,28 +1258,7 @@ pub fn flags_from_vec(args: Vec<OsString>) -> clap::error::Result<Flags> {
|
|||
.get_arguments()
|
||||
.any(|arg| arg.get_id().as_str() == "unstable")
|
||||
{
|
||||
subcommand = subcommand
|
||||
.mut_arg("unstable", |arg| {
|
||||
let new_help = arg
|
||||
.get_help()
|
||||
.unwrap()
|
||||
.to_string()
|
||||
.split_once("\n")
|
||||
.unwrap()
|
||||
.0
|
||||
.to_string();
|
||||
arg.help_heading(UNSTABLE_HEADING).help(new_help)
|
||||
})
|
||||
.mut_args(|arg| {
|
||||
// long_help here is being used as a metadata, see unstable args definition
|
||||
if arg.get_help_heading() == Some(UNSTABLE_HEADING)
|
||||
&& arg.get_long_help().is_some()
|
||||
{
|
||||
arg.hide(false)
|
||||
} else {
|
||||
arg
|
||||
}
|
||||
});
|
||||
subcommand = enable_unstable(subcommand);
|
||||
}
|
||||
|
||||
help_parse(&mut flags, subcommand);
|
||||
|
@ -1338,6 +1393,31 @@ pub fn flags_from_vec(args: Vec<OsString>) -> clap::error::Result<Flags> {
|
|||
Ok(flags)
|
||||
}
|
||||
|
||||
fn enable_unstable(command: Command) -> Command {
|
||||
command
|
||||
.mut_arg("unstable", |arg| {
|
||||
let new_help = arg
|
||||
.get_help()
|
||||
.unwrap()
|
||||
.to_string()
|
||||
.split_once("\n")
|
||||
.unwrap()
|
||||
.0
|
||||
.to_string();
|
||||
arg.help_heading(UNSTABLE_HEADING).help(new_help)
|
||||
})
|
||||
.mut_args(|arg| {
|
||||
// long_help here is being used as a metadata, see unstable args definition
|
||||
if arg.get_help_heading() == Some(UNSTABLE_HEADING)
|
||||
&& arg.get_long_help().is_some()
|
||||
{
|
||||
arg.hide(false)
|
||||
} else {
|
||||
arg
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
macro_rules! heading {
|
||||
($($name:ident = $title:expr),+; $total:literal) => {
|
||||
$(const $name: &str = $title;)+
|
||||
|
@ -1706,6 +1786,7 @@ Future runs of this module will trigger no downloads or compilation unless --rel
|
|||
)
|
||||
.arg(frozen_lockfile_arg())
|
||||
.arg(allow_scripts_arg())
|
||||
.arg(allow_import_arg())
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -1765,6 +1846,7 @@ Unless --reload is specified, this command will not re-download already cached d
|
|||
.required_unless_present("help")
|
||||
.value_hint(ValueHint::FilePath),
|
||||
)
|
||||
.arg(allow_import_arg())
|
||||
}
|
||||
)
|
||||
}
|
||||
|
@ -1774,11 +1856,15 @@ fn compile_subcommand() -> Command {
|
|||
"compile",
|
||||
cstr!("Compiles the given script into a self contained executable.
|
||||
|
||||
<p(245)>deno compile -A jsr:@std/http/file-server</>
|
||||
<p(245)>deno compile --allow-read --allow-net jsr:@std/http/file-server</>
|
||||
<p(245)>deno compile --output file_server jsr:@std/http/file-server</>
|
||||
|
||||
Any flags specified which affect runtime behavior will be applied to the resulting binary.
|
||||
|
||||
This allows distribution of a Deno application to systems that do not have Deno installed.
|
||||
Under the hood, it bundles a slimmed down version of the Deno runtime along with your
|
||||
JavaScript or TypeScript code.
|
||||
|
||||
Cross-compiling to different target architectures is supported using the <c>--target</> flag.
|
||||
On the first invocation with deno will download the proper binary and cache it in <c>$DENO_DIR</>.
|
||||
|
||||
|
@ -1993,6 +2079,7 @@ Show documentation for runtime built-ins:
|
|||
.arg(no_lock_arg())
|
||||
.arg(no_npm_arg())
|
||||
.arg(no_remote_arg())
|
||||
.arg(allow_import_arg())
|
||||
.arg(
|
||||
Arg::new("json")
|
||||
.long("json")
|
||||
|
@ -2144,6 +2231,9 @@ Supported file types which are behind corresponding unstable flags (see formatti
|
|||
Format stdin and write to stdout:
|
||||
<p(245)>cat file.ts | deno fmt -</>
|
||||
|
||||
Check if the files are formatted:
|
||||
<p(245)>deno fmt --check</>
|
||||
|
||||
Ignore formatting code by preceding it with an ignore comment:
|
||||
<p(245)>// deno-fmt-ignore</>
|
||||
|
||||
|
@ -2294,7 +2384,7 @@ Ignore formatting a file by adding an ignore comment at the top of the file:
|
|||
}
|
||||
|
||||
fn init_subcommand() -> Command {
|
||||
command("init", "Initialize a new project", UnstableArgsConfig::None).defer(
|
||||
command("init", "scaffolds a basic Deno project with a script, test, and configuration file", UnstableArgsConfig::None).defer(
|
||||
|cmd| {
|
||||
cmd
|
||||
.arg(Arg::new("dir").value_hint(ValueHint::DirPath))
|
||||
|
@ -2339,7 +2429,7 @@ The following information is shown:
|
|||
.arg(
|
||||
location_arg()
|
||||
.conflicts_with("file")
|
||||
.help("Show files used for origin bound APIs like the Web Storage API when running a script with '--location=<HREF>'")
|
||||
.help(cstr!("Show files used for origin bound APIs like the Web Storage API when running a script with <c>--location=<<HREF>></>"))
|
||||
)
|
||||
.arg(no_check_arg().hide(true)) // TODO(lucacasonato): remove for 2.0
|
||||
.arg(no_config_arg())
|
||||
|
@ -2357,6 +2447,7 @@ The following information is shown:
|
|||
.help("UNSTABLE: Outputs the information in JSON format")
|
||||
.action(ArgAction::SetTrue),
|
||||
))
|
||||
.arg(allow_import_arg())
|
||||
}
|
||||
|
||||
fn install_subcommand() -> Command {
|
||||
|
@ -2380,7 +2471,7 @@ If the <bold>--global</> flag is set, installs a script as an executable in the
|
|||
<p(245)>deno install --global --allow-net --allow-read jsr:@std/http/file-server</>
|
||||
<p(245)>deno install -g https://examples.deno.land/color-logging.ts</>
|
||||
|
||||
To change the executable name, use -n/--name:
|
||||
To change the executable name, use <c>-n</>/<c>--name</>:
|
||||
<p(245)>deno install -g --allow-net --allow-read -n serve jsr:@std/http/file-server</>
|
||||
|
||||
The executable name is inferred by default:
|
||||
|
@ -2662,7 +2753,12 @@ To ignore linting on an entire file, you can add an ignore comment at the top of
|
|||
}
|
||||
|
||||
fn repl_subcommand() -> Command {
|
||||
command("repl", "Read Eval Print Loop", UnstableArgsConfig::ResolutionAndRuntime)
|
||||
command("repl", cstr!(
|
||||
"Starts a read-eval-print-loop, which lets you interactively build up program state in the global context.
|
||||
It is especially useful for quick prototyping and checking snippets of code.
|
||||
|
||||
TypeScript is supported, however it is not type-checked, only transpiled."
|
||||
), UnstableArgsConfig::ResolutionAndRuntime)
|
||||
.defer(|cmd| runtime_args(cmd, true, true)
|
||||
.arg(check_arg(false))
|
||||
.arg(
|
||||
|
@ -2745,8 +2841,6 @@ fn serve_subcommand() -> Command {
|
|||
|
||||
The serve command uses the default exports of the main module to determine which servers to start.
|
||||
|
||||
See https://docs.deno.com/runtime/manual/tools/serve for more detailed information.
|
||||
|
||||
Start a server defined in server.ts:
|
||||
<p(245)>deno serve server.ts</>
|
||||
|
||||
|
@ -2757,7 +2851,7 @@ Start a server defined in server.ts, watching for changes and running on port 50
|
|||
.arg(
|
||||
Arg::new("port")
|
||||
.long("port")
|
||||
.help("The TCP port to serve on, defaulting to 8000. Pass 0 to pick a random free port")
|
||||
.help(cstr!("The TCP port to serve on. Pass 0 to pick a random free port <p(245)>[default: 8000]</>"))
|
||||
.value_parser(value_parser!(u16)),
|
||||
)
|
||||
.arg(
|
||||
|
@ -2967,11 +3061,13 @@ fn parallel_arg(descr: &str) -> Arg {
|
|||
fn types_subcommand() -> Command {
|
||||
command(
|
||||
"types",
|
||||
"Print runtime TypeScript declarations.
|
||||
cstr!(
|
||||
"Print runtime TypeScript declarations.
|
||||
|
||||
<p(245)>deno types > lib.deno.d.ts</>
|
||||
|
||||
The declaration file could be saved and used for typing information.",
|
||||
The declaration file could be saved and used for typing information."
|
||||
),
|
||||
UnstableArgsConfig::None,
|
||||
)
|
||||
}
|
||||
|
@ -3081,7 +3177,7 @@ See the Deno 1.x to 2.x Migration Guide for migration instructions: https://docs
|
|||
}
|
||||
|
||||
fn publish_subcommand() -> Command {
|
||||
command("publish", "Publish the current working directory's package or workspace", UnstableArgsConfig::ResolutionOnly)
|
||||
command("publish", "Publish the current working directory's package or workspace to JSR", UnstableArgsConfig::ResolutionOnly)
|
||||
.defer(|cmd| {
|
||||
cmd
|
||||
.arg(
|
||||
|
@ -3150,47 +3246,44 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
|
|||
.after_help(cstr!(r#"<y>Permission options:</>
|
||||
<y>Docs</>: <c>https://docs.deno.com/go/permissions</>
|
||||
|
||||
<g>-A, --allow-all</> Allow all permissions.
|
||||
<g>--no-prompt</> Always throw if required permission wasn't passed.
|
||||
<p(245)>Can also be set via the DENO_NO_PROMPT environment variable.</>
|
||||
<g>-R, --allow-read[=<<PATH>...]</> Allow file system read access. Optionally specify allowed paths.
|
||||
<p(245)>--allow-read | --allow-read="/etc,/var/log.txt"</>
|
||||
<g>-W, --allow-write[=<<PATH>...]</> Allow file system write access. Optionally specify allowed paths.
|
||||
<p(245)>--allow-write | --allow-write="/etc,/var/log.txt"</>
|
||||
<g>-N, --allow-net[=<<IP_OR_HOSTNAME>...]</> Allow network access. Optionally specify allowed IP addresses and host names, with ports as necessary.
|
||||
<p(245)>--allow-net | --allow-net="localhost:8080,deno.land"</>
|
||||
<g>-E, --allow-env[=<<VARIABLE_NAME>...]</> Allow access to environment variables. Optionally specify accessible environment variables.
|
||||
<p(245)>--allow-env | --allow-env="PORT,HOME,PATH"</>
|
||||
<g>-S, --allow-sys[=<<API_NAME>...]</> Allow access to OS information. Optionally allow specific APIs by function name.
|
||||
<p(245)>--allow-sys | --allow-sys="systemMemoryInfo,osRelease"</>
|
||||
<g>--allow-run[=<<PROGRAM_NAME>...]</> Allow running subprocesses. Optionally specify allowed runnable program names.
|
||||
<p(245)>--allow-run | --allow-run="whoami,ps"</>
|
||||
<g>--allow-ffi[=<<PATH>...]</> (Unstable) Allow loading dynamic libraries. Optionally specify allowed directories or files.
|
||||
<p(245)>--allow-ffi | --allow-ffi="./libfoo.so"</>
|
||||
<g> --deny-read[=<<PATH>...]</> Deny file system read access. Optionally specify denied paths.
|
||||
<p(245)>--deny-read | --deny-read="/etc,/var/log.txt"</>
|
||||
<g> --deny-write[=<<PATH>...]</> Deny file system write access. Optionally specify denied paths.
|
||||
<p(245)>--deny-write | --deny-write="/etc,/var/log.txt"</>
|
||||
<g> --deny-net[=<<IP_OR_HOSTNAME>...]</> Deny network access. Optionally specify defined IP addresses and host names, with ports as necessary.
|
||||
<p(245)>--deny-net | --deny-net="localhost:8080,deno.land"</>
|
||||
<g> --deny-env[=<<VARIABLE_NAME>...]</> Deny access to environment variables. Optionally specify inacessible environment variables.
|
||||
<p(245)>--deny-env | --deny-env="PORT,HOME,PATH"</>
|
||||
<g>-S, --deny-sys[=<<API_NAME>...]</> Deny access to OS information. Optionally deny specific APIs by function name.
|
||||
<p(245)>--deny-sys | --deny-sys="systemMemoryInfo,osRelease"</>
|
||||
<g>--deny-run[=<<PROGRAM_NAME>...]</> Deny running subprocesses. Optionally specify denied runnable program names.
|
||||
<p(245)>--deny-run | --deny-run="whoami,ps"</>
|
||||
<g>--deny-ffi[=<<PATH>...]</> (Unstable) Deny loading dynamic libraries. Optionally specify denied directories or files.
|
||||
<p(245)>--deny-ffi | --deny-ffi="./libfoo.so"</>
|
||||
<g>-A, --allow-all</> Allow all permissions.
|
||||
<g>--no-prompt</> Always throw if required permission wasn't passed.
|
||||
<p(245)>Can also be set via the DENO_NO_PROMPT environment variable.</>
|
||||
<g>-R, --allow-read[=<<PATH>...]</> Allow file system read access. Optionally specify allowed paths.
|
||||
<p(245)>--allow-read | --allow-read="/etc,/var/log.txt"</>
|
||||
<g>-W, --allow-write[=<<PATH>...]</> Allow file system write access. Optionally specify allowed paths.
|
||||
<p(245)>--allow-write | --allow-write="/etc,/var/log.txt"</>
|
||||
<g>-I, --allow-import[=<<IP_OR_HOSTNAME>...]</> Allow importing from remote hosts. Optionally specify allowed IP addresses and host names, with ports as necessary.
|
||||
Default value: <p(245)>deno.land:443,jsr.io:443,esm.sh:443,raw.githubusercontent.com:443,user.githubusercontent.com:443</>
|
||||
<p(245)>--allow-import | --allow-import="example.com,github.com"</>
|
||||
<g>-N, --allow-net[=<<IP_OR_HOSTNAME>...]</> Allow network access. Optionally specify allowed IP addresses and host names, with ports as necessary.
|
||||
<p(245)>--allow-net | --allow-net="localhost:8080,deno.land"</>
|
||||
<g>-E, --allow-env[=<<VARIABLE_NAME>...]</> Allow access to environment variables. Optionally specify accessible environment variables.
|
||||
<p(245)>--allow-env | --allow-env="PORT,HOME,PATH"</>
|
||||
<g>-S, --allow-sys[=<<API_NAME>...]</> Allow access to OS information. Optionally allow specific APIs by function name.
|
||||
<p(245)>--allow-sys | --allow-sys="systemMemoryInfo,osRelease"</>
|
||||
<g>--allow-run[=<<PROGRAM_NAME>...]</> Allow running subprocesses. Optionally specify allowed runnable program names.
|
||||
<p(245)>--allow-run | --allow-run="whoami,ps"</>
|
||||
<g>--allow-ffi[=<<PATH>...]</> (Unstable) Allow loading dynamic libraries. Optionally specify allowed directories or files.
|
||||
<p(245)>--allow-ffi | --allow-ffi="./libfoo.so"</>
|
||||
<g> --deny-read[=<<PATH>...]</> Deny file system read access. Optionally specify denied paths.
|
||||
<p(245)>--deny-read | --deny-read="/etc,/var/log.txt"</>
|
||||
<g> --deny-write[=<<PATH>...]</> Deny file system write access. Optionally specify denied paths.
|
||||
<p(245)>--deny-write | --deny-write="/etc,/var/log.txt"</>
|
||||
<g> --deny-net[=<<IP_OR_HOSTNAME>...]</> Deny network access. Optionally specify defined IP addresses and host names, with ports as necessary.
|
||||
<p(245)>--deny-net | --deny-net="localhost:8080,deno.land"</>
|
||||
<g> --deny-env[=<<VARIABLE_NAME>...]</> Deny access to environment variables. Optionally specify inacessible environment variables.
|
||||
<p(245)>--deny-env | --deny-env="PORT,HOME,PATH"</>
|
||||
<g>-S, --deny-sys[=<<API_NAME>...]</> Deny access to OS information. Optionally deny specific APIs by function name.
|
||||
<p(245)>--deny-sys | --deny-sys="systemMemoryInfo,osRelease"</>
|
||||
<g>--deny-run[=<<PROGRAM_NAME>...]</> Deny running subprocesses. Optionally specify denied runnable program names.
|
||||
<p(245)>--deny-run | --deny-run="whoami,ps"</>
|
||||
<g>--deny-ffi[=<<PATH>...]</> (Unstable) Deny loading dynamic libraries. Optionally specify denied directories or files.
|
||||
<p(245)>--deny-ffi | --deny-ffi="./libfoo.so"</>
|
||||
"#))
|
||||
.arg(
|
||||
{
|
||||
let mut arg = Arg::new("allow-all")
|
||||
.short('A')
|
||||
.long("allow-all")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Allow all permissions")
|
||||
.hide(true)
|
||||
;
|
||||
let mut arg = allow_all_arg().hide(true);
|
||||
if let Some(requires) = requires {
|
||||
arg = arg.requires(requires)
|
||||
}
|
||||
|
@ -3199,7 +3292,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
|
|||
)
|
||||
.arg(
|
||||
{
|
||||
let mut arg = Arg::new("allow-read")
|
||||
let mut arg = Arg::new("allow-read")
|
||||
.long("allow-read")
|
||||
.short('R')
|
||||
.num_args(0..)
|
||||
|
@ -3217,7 +3310,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
|
|||
)
|
||||
.arg(
|
||||
{
|
||||
let mut arg = Arg::new("deny-read")
|
||||
let mut arg = Arg::new("deny-read")
|
||||
.long("deny-read")
|
||||
.num_args(0..)
|
||||
.action(ArgAction::Append)
|
||||
|
@ -3234,7 +3327,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
|
|||
)
|
||||
.arg(
|
||||
{
|
||||
let mut arg = Arg::new("allow-write")
|
||||
let mut arg = Arg::new("allow-write")
|
||||
.long("allow-write")
|
||||
.short('W')
|
||||
.num_args(0..)
|
||||
|
@ -3252,7 +3345,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
|
|||
)
|
||||
.arg(
|
||||
{
|
||||
let mut arg = Arg::new("deny-write")
|
||||
let mut arg = Arg::new("deny-write")
|
||||
.long("deny-write")
|
||||
.num_args(0..)
|
||||
.action(ArgAction::Append)
|
||||
|
@ -3269,7 +3362,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
|
|||
)
|
||||
.arg(
|
||||
{
|
||||
let mut arg = Arg::new("allow-net")
|
||||
let mut arg = Arg::new("allow-net")
|
||||
.long("allow-net")
|
||||
.short('N')
|
||||
.num_args(0..)
|
||||
|
@ -3288,7 +3381,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
|
|||
)
|
||||
.arg(
|
||||
{
|
||||
let mut arg = Arg::new("deny-net")
|
||||
let mut arg = Arg::new("deny-net")
|
||||
.long("deny-net")
|
||||
.num_args(0..)
|
||||
.use_value_delimiter(true)
|
||||
|
@ -3371,7 +3464,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
|
|||
.require_equals(true)
|
||||
.value_name("API_NAME")
|
||||
.help("Allow access to OS information. Optionally allow specific APIs by function name")
|
||||
.value_parser(|key: &str| parse_sys_kind(key).map(ToString::to_string))
|
||||
.value_parser(|key: &str| SysDescriptor::parse(key.to_string()).map(|s| s.into_string()))
|
||||
.hide(true)
|
||||
;
|
||||
if let Some(requires) = requires {
|
||||
|
@ -3382,14 +3475,14 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
|
|||
)
|
||||
.arg(
|
||||
{
|
||||
let mut arg = Arg::new("deny-sys")
|
||||
let mut arg = Arg::new("deny-sys")
|
||||
.long("deny-sys")
|
||||
.num_args(0..)
|
||||
.use_value_delimiter(true)
|
||||
.require_equals(true)
|
||||
.value_name("API_NAME")
|
||||
.help("Deny access to OS information. Optionally deny specific APIs by function name")
|
||||
.value_parser(|key: &str| parse_sys_kind(key).map(ToString::to_string))
|
||||
.value_parser(|key: &str| SysDescriptor::parse(key.to_string()).map(|s| s.into_string()))
|
||||
.hide(true)
|
||||
;
|
||||
if let Some(requires) = requires {
|
||||
|
@ -3417,7 +3510,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
|
|||
)
|
||||
.arg(
|
||||
{
|
||||
let mut arg = Arg::new("deny-run")
|
||||
let mut arg = Arg::new("deny-run")
|
||||
.long("deny-run")
|
||||
.num_args(0..)
|
||||
.use_value_delimiter(true)
|
||||
|
@ -3473,8 +3566,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
|
|||
.long("allow-hrtime")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("REMOVED in Deno 2.0")
|
||||
.hide(true)
|
||||
;
|
||||
.hide(true);
|
||||
if let Some(requires) = requires {
|
||||
arg = arg.requires(requires)
|
||||
}
|
||||
|
@ -3487,8 +3579,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
|
|||
.long("deny-hrtime")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("REMOVED in Deno 2.0")
|
||||
.hide(true)
|
||||
;
|
||||
.hide(true);
|
||||
if let Some(requires) = requires {
|
||||
arg = arg.requires(requires)
|
||||
}
|
||||
|
@ -3508,6 +3599,34 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
|
|||
arg
|
||||
}
|
||||
)
|
||||
.arg(
|
||||
{
|
||||
let mut arg = allow_import_arg().hide(true);
|
||||
if let Some(requires) = requires {
|
||||
// allow this for install --global
|
||||
if requires != "global" {
|
||||
arg = arg.requires(requires)
|
||||
}
|
||||
}
|
||||
arg
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
fn allow_all_arg() -> Arg {
|
||||
Arg::new("allow-all")
|
||||
.short('A')
|
||||
.long("allow-all")
|
||||
.conflicts_with("allow-read")
|
||||
.conflicts_with("allow-write")
|
||||
.conflicts_with("allow-net")
|
||||
.conflicts_with("allow-env")
|
||||
.conflicts_with("allow-run")
|
||||
.conflicts_with("allow-sys")
|
||||
.conflicts_with("allow-ffi")
|
||||
.conflicts_with("allow-import")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Allow all permissions")
|
||||
}
|
||||
|
||||
fn runtime_args(
|
||||
|
@@ -3536,6 +3655,20 @@ fn runtime_args(
     .arg(strace_ops_arg())
 }
+
+fn allow_import_arg() -> Arg {
+  Arg::new("allow-import")
+    .long("allow-import")
+    .short('I')
+    .num_args(0..)
+    .use_value_delimiter(true)
+    .require_equals(true)
+    .value_name("IP_OR_HOSTNAME")
+    .help(cstr!(
+      "Allow importing from remote hosts. Optionally specify allowed IP addresses and host names, with ports as necessary. Default value: <p(245)>deno.land:443,jsr.io:443,esm.sh:443,raw.githubusercontent.com:443,user.githubusercontent.com:443</>"
+    ))
+    .value_parser(flags_net::validator)
+}

 fn inspect_args(app: Command) -> Command {
   app
     .arg(
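// Standalone sketch, not part of this diff: how a clap 4 argument configured like
// allow_import_arg() above behaves on the command line. Bare --allow-import (or -I)
// selects only the built-in default hosts, i.e. an empty explicit allowlist, while
// values must be attached with `=` because of require_equals(true). The command
// name "demo" and the explicit ArgAction::Append are illustrative assumptions.
use clap::{Arg, ArgAction, Command};

fn main() {
  let cmd = Command::new("demo").arg(
    Arg::new("allow-import")
      .long("allow-import")
      .short('I')
      .num_args(0..)
      .use_value_delimiter(true)
      .require_equals(true)
      .value_name("IP_OR_HOSTNAME")
      .action(ArgAction::Append),
  );

  for argv in [
    vec!["demo", "--allow-import"],
    vec!["demo", "--allow-import=deno.land,example.com:8000"],
  ] {
    let matches = cmd.clone().get_matches_from(argv.clone());
    let hosts: Vec<String> = matches
      .get_many::<String>("allow-import")
      .map(|vals| vals.cloned().collect())
      .unwrap_or_default();
    println!("{argv:?} -> {hosts:?}");
  }
}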
@ -3682,7 +3815,9 @@ fn location_arg() -> Arg {
|
|||
url.set_password(None).unwrap();
|
||||
Ok(url)
|
||||
})
|
||||
.help("Value of 'globalThis.location' used by some web APIs")
|
||||
.help(cstr!(
|
||||
"Value of <p(245)>globalThis.location</> used by some web APIs"
|
||||
))
|
||||
.value_hint(ValueHint::Url)
|
||||
}
|
||||
|
||||
|
@ -4173,6 +4308,7 @@ fn cache_parse(
|
|||
unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionOnly);
|
||||
frozen_lockfile_arg_parse(flags, matches);
|
||||
allow_scripts_arg_parse(flags, matches)?;
|
||||
allow_import_parse(flags, matches);
|
||||
let files = matches.remove_many::<String>("file").unwrap().collect();
|
||||
flags.subcommand = DenoSubcommand::Cache(CacheFlags { files });
|
||||
Ok(())
|
||||
|
@ -4194,6 +4330,7 @@ fn check_parse(
|
|||
doc: matches.get_flag("doc"),
|
||||
doc_only: matches.get_flag("doc-only"),
|
||||
});
|
||||
allow_import_parse(flags, matches);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -4319,6 +4456,7 @@ fn doc_parse(
|
|||
no_lock_arg_parse(flags, matches);
|
||||
no_npm_arg_parse(flags, matches);
|
||||
no_remote_arg_parse(flags, matches);
|
||||
allow_import_parse(flags, matches);
|
||||
|
||||
let source_files_val = matches.remove_many::<String>("source_file");
|
||||
let source_files = if let Some(val) = source_files_val {
|
||||
|
@ -4459,6 +4597,7 @@ fn info_parse(
|
|||
lock_args_parse(flags, matches);
|
||||
no_remote_arg_parse(flags, matches);
|
||||
no_npm_arg_parse(flags, matches);
|
||||
allow_import_parse(flags, matches);
|
||||
let json = matches.get_flag("json");
|
||||
flags.subcommand = DenoSubcommand::Info(InfoFlags {
|
||||
file: matches.remove_one::<String>("file"),
|
||||
|
@ -4494,6 +4633,7 @@ fn install_parse(
|
|||
force,
|
||||
}),
|
||||
});
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
|
@ -4531,7 +4671,7 @@ fn json_reference_parse(
|
|||
app.build();
|
||||
|
||||
fn serialize_command(
|
||||
command: &mut Command,
|
||||
mut command: Command,
|
||||
top_level: bool,
|
||||
) -> deno_core::serde_json::Value {
|
||||
let args = command
|
||||
|
@ -4539,7 +4679,7 @@ fn json_reference_parse(
|
|||
.filter(|arg| {
|
||||
!arg.is_hide_set()
|
||||
&& if top_level {
|
||||
true
|
||||
arg.is_global_set()
|
||||
} else {
|
||||
!arg.is_global_set()
|
||||
}
|
||||
|
@ -4548,40 +4688,49 @@ fn json_reference_parse(
|
|||
let name = arg.get_id().as_str();
|
||||
let short = arg.get_short();
|
||||
let long = arg.get_long();
|
||||
let aliases = arg.get_visible_aliases();
|
||||
let required = arg.is_required_set();
|
||||
let help = arg.get_help().map(|help| help.to_string());
|
||||
let help = arg.get_help().map(|help| help.ansi().to_string());
|
||||
let help_heading = arg
|
||||
.get_help_heading()
|
||||
.map(|help_heading| help_heading.to_string());
|
||||
let usage = arg.to_string();
|
||||
|
||||
json!({
|
||||
"name": name,
|
||||
"short": short,
|
||||
"long": long,
|
||||
"aliases": aliases,
|
||||
"required": required,
|
||||
"help": help,
|
||||
"help_heading": help_heading,
|
||||
"usage": usage,
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let name = command.get_name().to_string();
|
||||
let about = command.get_about().map(|about| about.to_string());
|
||||
let visible_aliases = command
|
||||
.get_visible_aliases()
|
||||
.map(|s| s.to_string())
|
||||
.collect::<Vec<_>>();
|
||||
let usage = command.render_usage().to_string();
|
||||
let about = command.get_about().map(|about| about.ansi().to_string());
|
||||
let usage = command.render_usage().ansi().to_string();
|
||||
|
||||
let subcommands = command
|
||||
.get_subcommands_mut()
|
||||
.map(|command| serialize_command(command, false))
|
||||
.get_subcommands()
|
||||
.map(|command| {
|
||||
serialize_command(
|
||||
if command
|
||||
.get_arguments()
|
||||
.any(|arg| arg.get_id().as_str() == "unstable")
|
||||
{
|
||||
enable_unstable(command.clone())
|
||||
} else {
|
||||
command.clone()
|
||||
},
|
||||
false,
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
json!({
|
||||
"name": name,
|
||||
"about": about,
|
||||
"visible_aliases": visible_aliases,
|
||||
"args": args,
|
||||
"subcommands": subcommands,
|
||||
"usage": usage,
|
||||
|
@ -4589,7 +4738,7 @@ fn json_reference_parse(
|
|||
}
|
||||
|
||||
flags.subcommand = DenoSubcommand::JSONReference(JSONReferenceFlags {
|
||||
json: serialize_command(&mut app, true),
|
||||
json: serialize_command(app, true),
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -5174,13 +5323,22 @@ fn permission_args_parse(
}

if matches.get_flag("allow-hrtime") || matches.get_flag("deny-hrtime") {
log::warn!("⚠️ Warning: `allow-hrtime` and `deny-hrtime` have been removed in Deno 2, as high resolution time is now always allowed.");
// use eprintln instead of log::warn because logging hasn't been initialized yet
#[allow(clippy::print_stderr)]
{
eprintln!(
"{} `allow-hrtime` and `deny-hrtime` have been removed in Deno 2, as high resolution time is now always allowed",
deno_runtime::colors::yellow("Warning")
);
}
}

if matches.get_flag("allow-all") {
flags.allow_all();
}

allow_import_parse(flags, matches);

if matches.get_flag("no-prompt") {
flags.permissions.no_prompt = true;
}
@ -5188,6 +5346,13 @@ fn permission_args_parse(
Ok(())
}

fn allow_import_parse(flags: &mut Flags, matches: &mut ArgMatches) {
if let Some(imports_wl) = matches.remove_many::<String>("allow-import") {
let imports_allowlist = flags_net::parse(imports_wl.collect()).unwrap();
flags.permissions.allow_import = Some(imports_allowlist);
}
}

fn unsafely_ignore_certificate_errors_parse(
flags: &mut Flags,
matches: &mut ArgMatches,
@ -6214,7 +6379,7 @@ mod tests {

#[test]
fn short_permission_flags() {
let r = flags_from_vec(svec!["deno", "run", "-RNESW", "gist.ts"]);
let r = flags_from_vec(svec!["deno", "run", "-RNESWI", "gist.ts"]);
assert_eq!(
r.unwrap(),
Flags {
@ -6225,6 +6390,7 @@ mod tests {
allow_read: Some(vec![]),
allow_write: Some(vec![]),
allow_env: Some(vec![]),
allow_import: Some(vec![]),
allow_net: Some(vec![]),
allow_sys: Some(vec![]),
..Default::default()
@ -10776,7 +10942,7 @@ mod tests {
}
);
// just make sure this doesn't panic
let _ = flags.permissions.to_options();
let _ = flags.permissions.to_options(&[]);
}

#[test]
@ -10851,4 +11017,46 @@ mod tests {
Usage: deno repl [OPTIONS] [-- [ARGS]...]\n"
)
}

#[test]
fn test_allow_import_host_from_url() {
fn parse(text: &str) -> Option<String> {
allow_import_host_from_url(&Url::parse(text).unwrap())
}

assert_eq!(parse("https://jsr.io"), None);
assert_eq!(
parse("http://127.0.0.1:4250"),
Some("127.0.0.1:4250".to_string())
);
assert_eq!(parse("http://jsr.io"), Some("jsr.io:80".to_string()));
assert_eq!(
parse("https://example.com"),
Some("example.com:443".to_string())
);
assert_eq!(
parse("http://example.com"),
Some("example.com:80".to_string())
);
assert_eq!(parse("file:///example.com"), None);
}

#[test]
fn allow_all_conflicts_allow_perms() {
let flags = [
"--allow-read",
"--allow-write",
"--allow-net",
"--allow-env",
"--allow-run",
"--allow-sys",
"--allow-ffi",
"--allow-import",
];
for flag in flags {
let r =
flags_from_vec(svec!["deno", "run", "--allow-all", flag, "foo.ts"]);
assert!(r.is_err());
}
}
}

158
cli/args/mod.rs
@ -20,13 +20,13 @@ use deno_config::workspace::WorkspaceDiscoverOptions;
|
|||
use deno_config::workspace::WorkspaceDiscoverStart;
|
||||
use deno_config::workspace::WorkspaceLintConfig;
|
||||
use deno_config::workspace::WorkspaceResolver;
|
||||
use deno_core::normalize_path;
|
||||
use deno_core::resolve_url_or_path;
|
||||
use deno_graph::GraphKind;
|
||||
use deno_npm::npm_rc::NpmRc;
|
||||
use deno_npm::npm_rc::ResolvedNpmRc;
|
||||
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_path_util::normalize_path;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use import_map::resolve_import_map_value_from_specifier;
|
||||
|
||||
|
@ -69,6 +69,8 @@ use std::collections::HashMap;
|
|||
use std::env;
|
||||
use std::io::BufReader;
|
||||
use std::io::Cursor;
|
||||
use std::io::Read;
|
||||
use std::io::Seek;
|
||||
use std::net::SocketAddr;
|
||||
use std::num::NonZeroUsize;
|
||||
use std::path::Path;
|
||||
|
@ -742,15 +744,33 @@ pub enum NpmProcessStateKind {
Byonm,
}

pub(crate) const NPM_RESOLUTION_STATE_ENV_VAR_NAME: &str =
"DENO_DONT_USE_INTERNAL_NODE_COMPAT_STATE";

static NPM_PROCESS_STATE: Lazy<Option<NpmProcessState>> = Lazy::new(|| {
let state = std::env::var(NPM_RESOLUTION_STATE_ENV_VAR_NAME).ok()?;
let state: NpmProcessState = serde_json::from_str(&state).ok()?;
// remove the environment variable so that sub processes
// that are spawned do not also use this.
std::env::remove_var(NPM_RESOLUTION_STATE_ENV_VAR_NAME);
use deno_runtime::ops::process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME;
let fd = std::env::var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME).ok()?;
std::env::remove_var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME);
let fd = fd.parse::<usize>().ok()?;
let mut file = {
use deno_runtime::deno_io::FromRawIoHandle;
unsafe { std::fs::File::from_raw_io_handle(fd as _) }
};
let mut buf = Vec::new();
// seek to beginning. after the file is written the position will be inherited by this subprocess,
// and also this file might have been read before
file.seek(std::io::SeekFrom::Start(0)).unwrap();
file
.read_to_end(&mut buf)
.inspect_err(|e| {
log::error!("failed to read npm process state from fd {fd}: {e}");
})
.ok()?;
let state: NpmProcessState = serde_json::from_slice(&buf)
.inspect_err(|e| {
log::error!(
"failed to deserialize npm process state: {e} {}",
String::from_utf8_lossy(&buf)
)
})
.ok()?;
Some(state)
});
@ -769,6 +789,7 @@ pub struct CliOptions {
|
|||
// application need not concern itself with, so keep these private
|
||||
flags: Arc<Flags>,
|
||||
initial_cwd: PathBuf,
|
||||
main_module_cell: std::sync::OnceLock<Result<ModuleSpecifier, AnyError>>,
|
||||
maybe_node_modules_folder: Option<PathBuf>,
|
||||
npmrc: Arc<ResolvedNpmRc>,
|
||||
maybe_lockfile: Option<Arc<CliLockfile>>,
|
||||
|
@ -825,6 +846,7 @@ impl CliOptions {
|
|||
npmrc,
|
||||
maybe_node_modules_folder,
|
||||
overrides: Default::default(),
|
||||
main_module_cell: std::sync::OnceLock::new(),
|
||||
start_dir,
|
||||
deno_dir_provider,
|
||||
})
|
||||
|
@ -1105,40 +1127,39 @@ impl CliOptions {
|
|||
self.flags.env_file.as_ref()
|
||||
}
|
||||
|
||||
pub fn resolve_main_module(&self) -> Result<ModuleSpecifier, AnyError> {
|
||||
let main_module = match &self.flags.subcommand {
|
||||
DenoSubcommand::Compile(compile_flags) => {
|
||||
resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())?
|
||||
}
|
||||
DenoSubcommand::Eval(_) => {
|
||||
resolve_url_or_path("./$deno$eval.ts", self.initial_cwd())?
|
||||
}
|
||||
DenoSubcommand::Repl(_) => {
|
||||
resolve_url_or_path("./$deno$repl.ts", self.initial_cwd())?
|
||||
}
|
||||
DenoSubcommand::Run(run_flags) => {
|
||||
if run_flags.is_stdin() {
|
||||
std::env::current_dir()
|
||||
.context("Unable to get CWD")
|
||||
.and_then(|cwd| {
|
||||
resolve_url_or_path("./$deno$stdin.ts", &cwd)
|
||||
.map_err(AnyError::from)
|
||||
})?
|
||||
} else if NpmPackageReqReference::from_str(&run_flags.script).is_ok() {
|
||||
ModuleSpecifier::parse(&run_flags.script)?
|
||||
} else {
|
||||
resolve_url_or_path(&run_flags.script, self.initial_cwd())?
|
||||
}
|
||||
}
|
||||
DenoSubcommand::Serve(run_flags) => {
|
||||
resolve_url_or_path(&run_flags.script, self.initial_cwd())?
|
||||
}
|
||||
_ => {
|
||||
bail!("No main module.")
|
||||
}
|
||||
};
|
||||
pub fn resolve_main_module(&self) -> Result<&ModuleSpecifier, AnyError> {
|
||||
self
|
||||
.main_module_cell
|
||||
.get_or_init(|| {
|
||||
let main_module = match &self.flags.subcommand {
|
||||
DenoSubcommand::Compile(compile_flags) => {
|
||||
resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())?
|
||||
}
|
||||
DenoSubcommand::Eval(_) => {
|
||||
resolve_url_or_path("./$deno$eval.ts", self.initial_cwd())?
|
||||
}
|
||||
DenoSubcommand::Repl(_) => {
|
||||
resolve_url_or_path("./$deno$repl.ts", self.initial_cwd())?
|
||||
}
|
||||
DenoSubcommand::Run(run_flags) => {
|
||||
if run_flags.is_stdin() {
|
||||
resolve_url_or_path("./$deno$stdin.ts", self.initial_cwd())?
|
||||
} else {
|
||||
resolve_url_or_path(&run_flags.script, self.initial_cwd())?
|
||||
}
|
||||
}
|
||||
DenoSubcommand::Serve(run_flags) => {
|
||||
resolve_url_or_path(&run_flags.script, self.initial_cwd())?
|
||||
}
|
||||
_ => {
|
||||
bail!("No main module.")
|
||||
}
|
||||
};
|
||||
|
||||
Ok(main_module)
|
||||
Ok(main_module)
|
||||
})
|
||||
.as_ref()
|
||||
.map_err(|err| deno_core::anyhow::anyhow!("{}", err))
|
||||
}
|
||||
|
||||
pub fn resolve_file_header_overrides(
|
||||
|
@ -1159,7 +1180,7 @@ impl CliOptions {
|
|||
(maybe_main_specifier, maybe_content_type)
|
||||
{
|
||||
HashMap::from([(
|
||||
main_specifier,
|
||||
main_specifier.clone(),
|
||||
HashMap::from([("content-type".to_string(), content_type.to_string())]),
|
||||
)])
|
||||
} else {
|
||||
|
@ -1322,11 +1343,9 @@ impl CliOptions {
|
|||
)?;
|
||||
|
||||
Ok(deno_lint::linter::LintConfig {
|
||||
default_jsx_factory: transpile_options
|
||||
.jsx_automatic
|
||||
default_jsx_factory: (!transpile_options.jsx_automatic)
|
||||
.then(|| transpile_options.jsx_factory.clone()),
|
||||
default_jsx_fragment_factory: transpile_options
|
||||
.jsx_automatic
|
||||
default_jsx_fragment_factory: (!transpile_options.jsx_automatic)
|
||||
.then(|| transpile_options.jsx_fragment_factory.clone()),
|
||||
})
|
||||
}
|
||||
|
@ -1480,7 +1499,34 @@ impl CliOptions {
|
|||
}
|
||||
|
||||
pub fn permissions_options(&self) -> PermissionsOptions {
|
||||
self.flags.permissions.to_options()
|
||||
fn files_to_urls(files: &[String]) -> Vec<Cow<'_, Url>> {
|
||||
files
|
||||
.iter()
|
||||
.filter_map(|f| Url::parse(f).ok().map(Cow::Owned))
|
||||
.collect()
|
||||
}
|
||||
|
||||
// get a list of urls to imply for --allow-import
|
||||
let cli_arg_urls = self
|
||||
.resolve_main_module()
|
||||
.ok()
|
||||
.map(|url| vec![Cow::Borrowed(url)])
|
||||
.or_else(|| match &self.flags.subcommand {
|
||||
DenoSubcommand::Cache(cache_flags) => {
|
||||
Some(files_to_urls(&cache_flags.files))
|
||||
}
|
||||
DenoSubcommand::Check(check_flags) => {
|
||||
Some(files_to_urls(&check_flags.files))
|
||||
}
|
||||
DenoSubcommand::Install(InstallFlags {
|
||||
kind: InstallKind::Global(flags),
|
||||
}) => Url::parse(&flags.module_url)
|
||||
.ok()
|
||||
.map(|url| vec![Cow::Owned(url)]),
|
||||
_ => None,
|
||||
})
|
||||
.unwrap_or_default();
|
||||
self.flags.permissions.to_options(&cli_arg_urls)
|
||||
}
|
||||
|
||||
pub fn reload_flag(&self) -> bool {
|
||||
|
@ -1652,14 +1698,14 @@ impl CliOptions {
|
|||
pub fn lifecycle_scripts_config(&self) -> LifecycleScriptsConfig {
|
||||
LifecycleScriptsConfig {
|
||||
allowed: self.flags.allow_scripts.clone(),
|
||||
initial_cwd: if matches!(
|
||||
self.flags.allow_scripts,
|
||||
PackagesAllowedScripts::None
|
||||
) {
|
||||
None
|
||||
} else {
|
||||
Some(self.initial_cwd.clone())
|
||||
},
|
||||
initial_cwd: self.initial_cwd.clone(),
|
||||
root_dir: self.workspace().root_dir_path(),
|
||||
explicit_install: matches!(
|
||||
self.sub_command(),
|
||||
DenoSubcommand::Install(_)
|
||||
| DenoSubcommand::Cache(_)
|
||||
| DenoSubcommand::Add(_)
|
||||
),
|
||||
}
|
||||
}
|
||||
}

119
cli/cache/mod.rs
vendored
@ -1,14 +1,17 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::args::jsr_url;
|
||||
use crate::args::CacheSetting;
|
||||
use crate::errors::get_error_class_name;
|
||||
use crate::file_fetcher::FetchNoFollowOptions;
|
||||
use crate::file_fetcher::FetchOptions;
|
||||
use crate::file_fetcher::FetchPermissionsOption;
|
||||
use crate::file_fetcher::FetchPermissionsOptionRef;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::file_fetcher::FileOrRedirect;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::util::fs::atomic_write_file_with_retries;
|
||||
use crate::util::fs::atomic_write_file_with_retries_and_fs;
|
||||
use crate::util::fs::AtomicWriteFileFsAdapter;
|
||||
use crate::util::path::specifier_has_extension;
|
||||
|
||||
use deno_ast::MediaType;
|
||||
|
@ -19,6 +22,7 @@ use deno_graph::source::CacheInfo;
|
|||
use deno_graph::source::LoadFuture;
|
||||
use deno_graph::source::LoadResponse;
|
||||
use deno_graph::source::Loader;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
@ -75,6 +79,14 @@ impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
|
|||
atomic_write_file_with_retries(path, bytes, CACHE_PERM)
|
||||
}
|
||||
|
||||
fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
|
||||
crate::util::fs::canonicalize_path(path)
|
||||
}
|
||||
|
||||
fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
|
||||
std::fs::create_dir_all(path)
|
||||
}
|
||||
|
||||
fn remove_file(&self, path: &Path) -> std::io::Result<()> {
|
||||
std::fs::remove_file(path)
|
||||
}
|
||||
|
@ -98,12 +110,86 @@ impl deno_cache_dir::DenoCacheEnv for RealDenoCacheEnv {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DenoCacheEnvFsAdapter<'a>(
|
||||
pub &'a dyn deno_runtime::deno_fs::FileSystem,
|
||||
);
|
||||
|
||||
impl<'a> deno_cache_dir::DenoCacheEnv for DenoCacheEnvFsAdapter<'a> {
|
||||
fn read_file_bytes(&self, path: &Path) -> std::io::Result<Vec<u8>> {
|
||||
self
|
||||
.0
|
||||
.read_file_sync(path, None)
|
||||
.map_err(|err| err.into_io_error())
|
||||
}
|
||||
|
||||
fn atomic_write_file(
|
||||
&self,
|
||||
path: &Path,
|
||||
bytes: &[u8],
|
||||
) -> std::io::Result<()> {
|
||||
atomic_write_file_with_retries_and_fs(
|
||||
&AtomicWriteFileFsAdapter {
|
||||
fs: self.0,
|
||||
write_mode: CACHE_PERM,
|
||||
},
|
||||
path,
|
||||
bytes,
|
||||
)
|
||||
}
|
||||
|
||||
fn canonicalize_path(&self, path: &Path) -> std::io::Result<PathBuf> {
|
||||
self.0.realpath_sync(path).map_err(|e| e.into_io_error())
|
||||
}
|
||||
|
||||
fn create_dir_all(&self, path: &Path) -> std::io::Result<()> {
|
||||
self
|
||||
.0
|
||||
.mkdir_sync(path, true, None)
|
||||
.map_err(|e| e.into_io_error())
|
||||
}
|
||||
|
||||
fn remove_file(&self, path: &Path) -> std::io::Result<()> {
|
||||
self
|
||||
.0
|
||||
.remove_sync(path, false)
|
||||
.map_err(|e| e.into_io_error())
|
||||
}
|
||||
|
||||
fn modified(&self, path: &Path) -> std::io::Result<Option<SystemTime>> {
|
||||
self
|
||||
.0
|
||||
.stat_sync(path)
|
||||
.map(|stat| {
|
||||
stat
|
||||
.mtime
|
||||
.map(|ts| SystemTime::UNIX_EPOCH + std::time::Duration::from_secs(ts))
|
||||
})
|
||||
.map_err(|e| e.into_io_error())
|
||||
}
|
||||
|
||||
fn is_file(&self, path: &Path) -> bool {
|
||||
self.0.is_file_sync(path)
|
||||
}
|
||||
|
||||
fn time_now(&self) -> SystemTime {
|
||||
SystemTime::now()
|
||||
}
|
||||
}
|
||||
|
||||
pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<RealDenoCacheEnv>;
|
||||
pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<RealDenoCacheEnv>;
|
||||
pub type LocalLspHttpCache =
|
||||
deno_cache_dir::LocalLspHttpCache<RealDenoCacheEnv>;
|
||||
pub use deno_cache_dir::HttpCache;
|
||||
|
||||
pub struct FetchCacherOptions {
|
||||
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
|
||||
pub permissions: PermissionsContainer,
|
||||
/// If we're publishing for `deno publish`.
|
||||
pub is_deno_publish: bool,
|
||||
}
|
||||
|
||||
/// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides
|
||||
/// a concise interface to the DENO_DIR when building module graphs.
|
||||
pub struct FetchCacher {
|
||||
|
@ -112,26 +198,27 @@ pub struct FetchCacher {
|
|||
global_http_cache: Arc<GlobalHttpCache>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
module_info_cache: Arc<ModuleInfoCache>,
|
||||
permissions: FetchPermissionsOption,
|
||||
permissions: PermissionsContainer,
|
||||
cache_info_enabled: bool,
|
||||
is_deno_publish: bool,
|
||||
}
|
||||
|
||||
impl FetchCacher {
|
||||
pub fn new(
|
||||
file_fetcher: Arc<FileFetcher>,
|
||||
file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
|
||||
global_http_cache: Arc<GlobalHttpCache>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
module_info_cache: Arc<ModuleInfoCache>,
|
||||
permissions: FetchPermissionsOption,
|
||||
options: FetchCacherOptions,
|
||||
) -> Self {
|
||||
Self {
|
||||
file_fetcher,
|
||||
file_header_overrides,
|
||||
global_http_cache,
|
||||
npm_resolver,
|
||||
module_info_cache,
|
||||
permissions,
|
||||
file_header_overrides: options.file_header_overrides,
|
||||
permissions: options.permissions,
|
||||
is_deno_publish: options.is_deno_publish,
|
||||
cache_info_enabled: false,
|
||||
}
|
||||
}
|
||||
|
@ -208,10 +295,24 @@ impl Loader for FetchCacher {
|
|||
}
|
||||
}
|
||||
|
||||
if self.is_deno_publish
|
||||
&& matches!(specifier.scheme(), "http" | "https")
|
||||
&& !specifier.as_str().starts_with(jsr_url().as_str())
|
||||
{
|
||||
// mark non-JSR remote modules as external so we don't need --allow-import
|
||||
// permissions as these will error out later when publishing
|
||||
return Box::pin(futures::future::ready(Ok(Some(
|
||||
LoadResponse::External {
|
||||
specifier: specifier.clone(),
|
||||
},
|
||||
))));
|
||||
}
|
||||
|
||||
let file_fetcher = self.file_fetcher.clone();
|
||||
let file_header_overrides = self.file_header_overrides.clone();
|
||||
let permissions = self.permissions.clone();
|
||||
let specifier = specifier.clone();
|
||||
let is_statically_analyzable = !options.was_dynamic_root;
|
||||
|
||||
async move {
|
||||
let maybe_cache_setting = match options.cache_setting {
|
||||
|
@ -230,7 +331,11 @@ impl Loader for FetchCacher {
|
|||
.fetch_no_follow_with_options(FetchNoFollowOptions {
|
||||
fetch_options: FetchOptions {
|
||||
specifier: &specifier,
|
||||
permissions: permissions.as_ref(),
|
||||
permissions: if is_statically_analyzable {
|
||||
FetchPermissionsOptionRef::StaticContainer(&permissions)
|
||||
} else {
|
||||
FetchPermissionsOptionRef::DynamicContainer(&permissions)
|
||||
},
|
||||
maybe_accept: None,
|
||||
maybe_cache_setting: maybe_cache_setting.as_ref(),
|
||||
},
|
||||
@ -32,17 +32,19 @@ use crate::module_loader::ModuleLoadPreparer;
|
|||
use crate::node::CliCjsCodeAnalyzer;
|
||||
use crate::node::CliNodeCodeTranslator;
|
||||
use crate::npm::create_cli_npm_resolver;
|
||||
use crate::npm::CliByonmNpmResolverCreateOptions;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::npm::CliNpmResolverByonmCreateOptions;
|
||||
use crate::npm::CliNpmResolverCreateOptions;
|
||||
use crate::npm::CliNpmResolverManagedCreateOptions;
|
||||
use crate::npm::CliNpmResolverManagedSnapshotOption;
|
||||
use crate::resolver::CjsResolutionStore;
|
||||
use crate::resolver::CliDenoResolverFs;
|
||||
use crate::resolver::CliGraphResolver;
|
||||
use crate::resolver::CliGraphResolverOptions;
|
||||
use crate::resolver::CliNodeResolver;
|
||||
use crate::resolver::CliSloppyImportsResolver;
|
||||
use crate::resolver::NpmModuleLoader;
|
||||
use crate::resolver::SloppyImportsResolver;
|
||||
use crate::resolver::SloppyImportsCachedFs;
|
||||
use crate::standalone::DenoCompileBinaryWriter;
|
||||
use crate::tools::check::TypeChecker;
|
||||
use crate::tools::coverage::CoverageCollector;
|
||||
|
@ -185,7 +187,8 @@ struct CliFactoryServices {
|
|||
node_resolver: Deferred<Arc<NodeResolver>>,
|
||||
npm_resolver: Deferred<Arc<dyn CliNpmResolver>>,
|
||||
permission_desc_parser: Deferred<Arc<RuntimePermissionDescriptorParser>>,
|
||||
sloppy_imports_resolver: Deferred<Option<Arc<SloppyImportsResolver>>>,
|
||||
root_permissions_container: Deferred<PermissionsContainer>,
|
||||
sloppy_imports_resolver: Deferred<Option<Arc<CliSloppyImportsResolver>>>,
|
||||
text_only_progress_bar: Deferred<ProgressBar>,
|
||||
type_checker: Deferred<Arc<TypeChecker>>,
|
||||
cjs_resolutions: Deferred<Arc<CjsResolutionStore>>,
|
||||
|
@ -359,8 +362,8 @@ impl CliFactory {
|
|||
let cli_options = self.cli_options()?;
|
||||
// For `deno install` we want to force the managed resolver so it can set up `node_modules/` directory.
|
||||
create_cli_npm_resolver(if cli_options.use_byonm() && !matches!(cli_options.sub_command(), DenoSubcommand::Install(_) | DenoSubcommand::Add(_) | DenoSubcommand::Remove(_)) {
|
||||
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions {
|
||||
fs: fs.clone(),
|
||||
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
|
||||
fs: CliDenoResolverFs(fs.clone()),
|
||||
root_node_modules_dir: Some(match cli_options.node_modules_dir_path() {
|
||||
Some(node_modules_path) => node_modules_path.to_path_buf(),
|
||||
// path needs to be canonicalized for node resolution
|
||||
|
@ -403,17 +406,16 @@ impl CliFactory {
|
|||
|
||||
pub fn sloppy_imports_resolver(
|
||||
&self,
|
||||
) -> Result<Option<&Arc<SloppyImportsResolver>>, AnyError> {
|
||||
) -> Result<Option<&Arc<CliSloppyImportsResolver>>, AnyError> {
|
||||
self
|
||||
.services
|
||||
.sloppy_imports_resolver
|
||||
.get_or_try_init(|| {
|
||||
Ok(
|
||||
self
|
||||
.cli_options()?
|
||||
.unstable_sloppy_imports()
|
||||
.then(|| Arc::new(SloppyImportsResolver::new(self.fs().clone()))),
|
||||
)
|
||||
Ok(self.cli_options()?.unstable_sloppy_imports().then(|| {
|
||||
Arc::new(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
|
||||
self.fs().clone(),
|
||||
)))
|
||||
}))
|
||||
})
|
||||
.map(|maybe| maybe.as_ref())
|
||||
}
|
||||
|
@ -626,6 +628,7 @@ impl CliFactory {
|
|||
self.maybe_file_watcher_reporter().clone(),
|
||||
self.file_fetcher()?.clone(),
|
||||
self.global_http_cache()?.clone(),
|
||||
self.root_permissions_container()?.clone(),
|
||||
)))
|
||||
})
|
||||
.await
|
||||
|
@ -659,6 +662,7 @@ impl CliFactory {
|
|||
Ok(Arc::new(MainModuleGraphContainer::new(
|
||||
self.cli_options()?.clone(),
|
||||
self.module_load_preparer().await?.clone(),
|
||||
self.root_permissions_container()?.clone(),
|
||||
)))
|
||||
})
|
||||
.await
|
||||
|
@ -755,15 +759,20 @@ impl CliFactory {
|
|||
))
|
||||
}
|
||||
|
||||
pub fn create_permissions_container(
|
||||
pub fn root_permissions_container(
|
||||
&self,
|
||||
) -> Result<PermissionsContainer, AnyError> {
|
||||
let desc_parser = self.permission_desc_parser()?.clone();
|
||||
let permissions = Permissions::from_options(
|
||||
desc_parser.as_ref(),
|
||||
&self.cli_options()?.permissions_options(),
|
||||
)?;
|
||||
Ok(PermissionsContainer::new(desc_parser, permissions))
|
||||
) -> Result<&PermissionsContainer, AnyError> {
|
||||
self
|
||||
.services
|
||||
.root_permissions_container
|
||||
.get_or_try_init(|| {
|
||||
let desc_parser = self.permission_desc_parser()?.clone();
|
||||
let permissions = Permissions::from_options(
|
||||
desc_parser.as_ref(),
|
||||
&self.cli_options()?.permissions_options(),
|
||||
)?;
|
||||
Ok(PermissionsContainer::new(desc_parser, permissions))
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn create_cli_main_worker_factory(
|
||||
|
@ -774,6 +783,7 @@ impl CliFactory {
|
|||
let npm_resolver = self.npm_resolver().await?;
|
||||
let fs = self.fs();
|
||||
let cli_node_resolver = self.cli_node_resolver().await?;
|
||||
let cli_npm_resolver = self.npm_resolver().await?.clone();
|
||||
let maybe_file_watcher_communicator = if cli_options.has_hmr() {
|
||||
Some(self.watcher_communicator.clone().unwrap())
|
||||
} else {
|
||||
|
@ -803,6 +813,7 @@ impl CliFactory {
|
|||
self.main_module_graph_container().await?.clone(),
|
||||
self.module_load_preparer().await?.clone(),
|
||||
cli_node_resolver.clone(),
|
||||
cli_npm_resolver.clone(),
|
||||
NpmModuleLoader::new(
|
||||
self.cjs_resolutions().clone(),
|
||||
self.node_code_translator().await?.clone(),
|
||||
|
@ -814,8 +825,8 @@ impl CliFactory {
|
|||
)),
|
||||
node_resolver.clone(),
|
||||
npm_resolver.clone(),
|
||||
self.permission_desc_parser()?.clone(),
|
||||
self.root_cert_store_provider().clone(),
|
||||
self.root_permissions_container()?.clone(),
|
||||
StorageKeyResolver::from_options(cli_options),
|
||||
cli_options.sub_command().clone(),
|
||||
self.create_cli_main_worker_options()?,
|
||||
|
|
|
@ -21,6 +21,7 @@ use deno_core::url::Url;
|
|||
use deno_core::ModuleSpecifier;
|
||||
use deno_graph::source::LoaderChecksum;
|
||||
|
||||
use deno_path_util::url_to_file_path;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
use deno_runtime::deno_web::BlobStore;
|
||||
use log::debug;
|
||||
|
@ -135,7 +136,7 @@ impl MemoryFiles {
|
|||
|
||||
/// Fetch a source file from the local file system.
|
||||
fn fetch_local(specifier: &ModuleSpecifier) -> Result<File, AnyError> {
|
||||
let local = specifier.to_file_path().map_err(|_| {
|
||||
let local = url_to_file_path(specifier).map_err(|_| {
|
||||
uri_error(format!("Invalid file path.\n Specifier: {specifier}"))
|
||||
})?;
|
||||
// If it doesnt have a extension, we want to treat it as typescript by default
|
||||
|
@ -173,30 +174,8 @@ fn get_validated_scheme(
|
|||
#[derive(Debug, Copy, Clone)]
|
||||
pub enum FetchPermissionsOptionRef<'a> {
|
||||
AllowAll,
|
||||
Container(&'a PermissionsContainer),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum FetchPermissionsOption {
|
||||
AllowAll,
|
||||
Container(PermissionsContainer),
|
||||
}
|
||||
|
||||
impl FetchPermissionsOption {
|
||||
pub fn as_ref(&self) -> FetchPermissionsOptionRef {
|
||||
match self {
|
||||
FetchPermissionsOption::AllowAll => FetchPermissionsOptionRef::AllowAll,
|
||||
FetchPermissionsOption::Container(container) => {
|
||||
FetchPermissionsOptionRef::Container(container)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PermissionsContainer> for FetchPermissionsOption {
|
||||
fn from(value: PermissionsContainer) -> Self {
|
||||
Self::Container(value)
|
||||
}
|
||||
DynamicContainer(&'a PermissionsContainer),
|
||||
StaticContainer(&'a PermissionsContainer),
|
||||
}
|
||||
|
||||
pub struct FetchOptions<'a> {
|
||||
|
@ -564,7 +543,6 @@ impl FileFetcher {
|
|||
}
|
||||
|
||||
/// Fetch a source file and asynchronously return it.
|
||||
#[allow(dead_code)] // todo(25469): undo when merging
|
||||
#[inline(always)]
|
||||
pub async fn fetch(
|
||||
&self,
|
||||
|
@ -572,7 +550,10 @@ impl FileFetcher {
|
|||
permissions: &PermissionsContainer,
|
||||
) -> Result<File, AnyError> {
|
||||
self
|
||||
.fetch_inner(specifier, FetchPermissionsOptionRef::Container(permissions))
|
||||
.fetch_inner(
|
||||
specifier,
|
||||
FetchPermissionsOptionRef::StaticContainer(permissions),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
|
@ -647,8 +628,17 @@ impl FileFetcher {
|
|||
FetchPermissionsOptionRef::AllowAll => {
|
||||
// allow
|
||||
}
|
||||
FetchPermissionsOptionRef::Container(permissions) => {
|
||||
permissions.check_specifier(specifier)?;
|
||||
FetchPermissionsOptionRef::StaticContainer(permissions) => {
|
||||
permissions.check_specifier(
|
||||
specifier,
|
||||
deno_runtime::deno_permissions::CheckSpecifierKind::Static,
|
||||
)?;
|
||||
}
|
||||
FetchPermissionsOptionRef::DynamicContainer(permissions) => {
|
||||
permissions.check_specifier(
|
||||
specifier,
|
||||
deno_runtime::deno_permissions::CheckSpecifierKind::Dynamic,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
if let Some(file) = self.memory_files.get(specifier) {
|
||||
|
|
|
@ -9,9 +9,9 @@ use deno_core::error::AnyError;
|
|||
use deno_core::parking_lot::RwLock;
|
||||
use deno_graph::ModuleGraph;
|
||||
use deno_runtime::colors;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
|
||||
use crate::args::CliOptions;
|
||||
use crate::file_fetcher::FetchPermissionsOption;
|
||||
use crate::module_loader::ModuleLoadPreparer;
|
||||
use crate::util::fs::collect_specifiers;
|
||||
use crate::util::path::is_script_ext;
|
||||
|
@ -45,12 +45,14 @@ pub struct MainModuleGraphContainer {
|
|||
inner: Arc<RwLock<Arc<ModuleGraph>>>,
|
||||
cli_options: Arc<CliOptions>,
|
||||
module_load_preparer: Arc<ModuleLoadPreparer>,
|
||||
root_permissions: PermissionsContainer,
|
||||
}
|
||||
|
||||
impl MainModuleGraphContainer {
|
||||
pub fn new(
|
||||
cli_options: Arc<CliOptions>,
|
||||
module_load_preparer: Arc<ModuleLoadPreparer>,
|
||||
root_permissions: PermissionsContainer,
|
||||
) -> Self {
|
||||
Self {
|
||||
update_queue: Default::default(),
|
||||
|
@ -59,6 +61,7 @@ impl MainModuleGraphContainer {
|
|||
)))),
|
||||
cli_options,
|
||||
module_load_preparer,
|
||||
root_permissions,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -76,7 +79,7 @@ impl MainModuleGraphContainer {
|
|||
specifiers,
|
||||
false,
|
||||
self.cli_options.ts_type_lib_window(),
|
||||
FetchPermissionsOption::AllowAll,
|
||||
self.root_permissions.clone(),
|
||||
ext_overwrite,
|
||||
)
|
||||
.await?;
|
||||
|
|
|
@ -11,17 +11,19 @@ use crate::cache::ModuleInfoCache;
|
|||
use crate::cache::ParsedSourceCache;
|
||||
use crate::colors;
|
||||
use crate::errors::get_error_class_name;
|
||||
use crate::file_fetcher::FetchPermissionsOption;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::resolver::CliGraphResolver;
|
||||
use crate::resolver::SloppyImportsResolver;
|
||||
use crate::resolver::CliSloppyImportsResolver;
|
||||
use crate::resolver::SloppyImportsCachedFs;
|
||||
use crate::tools::check;
|
||||
use crate::tools::check::TypeChecker;
|
||||
use crate::util::file_watcher::WatcherCommunicator;
|
||||
use crate::util::fs::canonicalize_path;
|
||||
use deno_config::workspace::JsrPackageConfig;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_graph::source::LoaderChecksum;
|
||||
use deno_graph::FillFromLockfileOptions;
|
||||
use deno_graph::JsrLoadError;
|
||||
use deno_graph::ModuleLoadError;
|
||||
use deno_graph::WorkspaceFastCheckOption;
|
||||
|
@ -31,7 +33,6 @@ use deno_core::error::AnyError;
|
|||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_graph::source::Loader;
|
||||
use deno_graph::source::ResolutionMode;
|
||||
use deno_graph::source::ResolveError;
|
||||
use deno_graph::GraphKind;
|
||||
use deno_graph::ModuleError;
|
||||
|
@ -39,12 +40,13 @@ use deno_graph::ModuleGraph;
|
|||
use deno_graph::ModuleGraphError;
|
||||
use deno_graph::ResolutionError;
|
||||
use deno_graph::SpecifierError;
|
||||
use deno_path_util::url_to_file_path;
|
||||
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node;
|
||||
use deno_runtime::fs_util::specifier_to_file_path;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
use deno_semver::jsr::JsrDepPackageReq;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_semver::Version;
|
||||
use import_map::ImportMapError;
|
||||
use std::collections::HashSet;
|
||||
use std::error::Error;
|
||||
|
@ -52,14 +54,14 @@ use std::ops::Deref;
|
|||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
#[derive(Clone)]
|
||||
pub struct GraphValidOptions {
|
||||
pub check_js: bool,
|
||||
pub follow_type_only: bool,
|
||||
pub is_vendoring: bool,
|
||||
/// Whether to exit the process for lockfile errors.
|
||||
/// Otherwise, surfaces lockfile errors as errors.
|
||||
pub exit_lockfile_errors: bool,
|
||||
pub kind: GraphKind,
|
||||
/// Whether to exit the process for integrity check errors such as
|
||||
/// lockfile checksum mismatches and JSR integrity failures.
|
||||
/// Otherwise, surfaces integrity errors as errors.
|
||||
pub exit_integrity_errors: bool,
|
||||
}
|
||||
|
||||
/// Check if `roots` and their deps are available. Returns `Ok(())` if
|
||||
|
@ -75,17 +77,54 @@ pub fn graph_valid(
|
|||
roots: &[ModuleSpecifier],
|
||||
options: GraphValidOptions,
|
||||
) -> Result<(), AnyError> {
|
||||
if options.exit_lockfile_errors {
|
||||
graph_exit_lock_errors(graph);
|
||||
if options.exit_integrity_errors {
|
||||
graph_exit_integrity_errors(graph);
|
||||
}
|
||||
|
||||
let mut errors = graph
|
||||
let mut errors = graph_walk_errors(
|
||||
graph,
|
||||
fs,
|
||||
roots,
|
||||
GraphWalkErrorsOptions {
|
||||
check_js: options.check_js,
|
||||
kind: options.kind,
|
||||
},
|
||||
);
|
||||
if let Some(error) = errors.next() {
|
||||
Err(error)
|
||||
} else {
|
||||
// finally surface the npm resolution result
|
||||
if let Err(err) = &graph.npm_dep_graph_result {
|
||||
return Err(custom_error(
|
||||
get_error_class_name(err),
|
||||
format_deno_graph_error(err.as_ref().deref()),
|
||||
));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct GraphWalkErrorsOptions {
|
||||
pub check_js: bool,
|
||||
pub kind: GraphKind,
|
||||
}
|
||||
|
||||
/// Walks the errors found in the module graph that should be surfaced to users
|
||||
/// and enhances them with CLI information.
|
||||
pub fn graph_walk_errors<'a>(
|
||||
graph: &'a ModuleGraph,
|
||||
fs: &'a Arc<dyn FileSystem>,
|
||||
roots: &'a [ModuleSpecifier],
|
||||
options: GraphWalkErrorsOptions,
|
||||
) -> impl Iterator<Item = AnyError> + 'a {
|
||||
graph
|
||||
.walk(
|
||||
roots.iter(),
|
||||
deno_graph::WalkOptions {
|
||||
check_js: options.check_js,
|
||||
follow_type_only: options.follow_type_only,
|
||||
follow_dynamic: options.is_vendoring,
|
||||
kind: options.kind,
|
||||
follow_dynamic: false,
|
||||
prefer_fast_check_graph: false,
|
||||
},
|
||||
)
|
||||
|
@ -109,7 +148,7 @@ pub fn graph_valid(
|
|||
)
|
||||
}
|
||||
ModuleGraphError::ModuleError(error) => {
|
||||
enhanced_lockfile_error_message(error)
|
||||
enhanced_integrity_error_message(error)
|
||||
.or_else(|| enhanced_sloppy_imports_error_message(fs, error))
|
||||
.unwrap_or_else(|| format_deno_graph_error(error))
|
||||
}
|
||||
|
@ -132,56 +171,18 @@ pub fn graph_valid(
|
|||
return None;
|
||||
}
|
||||
|
||||
if options.is_vendoring {
|
||||
// warn about failing dynamic imports when vendoring, but don't fail completely
|
||||
if matches!(
|
||||
error,
|
||||
ModuleGraphError::ModuleError(ModuleError::MissingDynamic(_, _))
|
||||
) {
|
||||
log::warn!("Ignoring: {}", message);
|
||||
return None;
|
||||
}
|
||||
|
||||
// ignore invalid downgrades and invalid local imports when vendoring
|
||||
match &error {
|
||||
ModuleGraphError::ResolutionError(err)
|
||||
| ModuleGraphError::TypesResolutionError(err) => {
|
||||
if matches!(
|
||||
err,
|
||||
ResolutionError::InvalidDowngrade { .. }
|
||||
| ResolutionError::InvalidLocalImport { .. }
|
||||
) {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
ModuleGraphError::ModuleError(_) => {}
|
||||
}
|
||||
}
|
||||
|
||||
Some(custom_error(get_error_class_name(&error.into()), message))
|
||||
});
|
||||
if let Some(error) = errors.next() {
|
||||
Err(error)
|
||||
} else {
|
||||
// finally surface the npm resolution result
|
||||
if let Err(err) = &graph.npm_dep_graph_result {
|
||||
return Err(custom_error(
|
||||
get_error_class_name(err),
|
||||
format_deno_graph_error(err.as_ref().deref()),
|
||||
));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn graph_exit_lock_errors(graph: &ModuleGraph) {
|
||||
pub fn graph_exit_integrity_errors(graph: &ModuleGraph) {
|
||||
for error in graph.module_errors() {
|
||||
exit_for_lockfile_error(error);
|
||||
exit_for_integrity_error(error);
|
||||
}
|
||||
}
|
||||
|
||||
fn exit_for_lockfile_error(err: &ModuleError) {
|
||||
if let Some(err_message) = enhanced_lockfile_error_message(err) {
|
||||
fn exit_for_integrity_error(err: &ModuleError) {
|
||||
if let Some(err_message) = enhanced_integrity_error_message(err) {
|
||||
log::error!("{} {}", colors::red("error:"), err_message);
|
||||
std::process::exit(10);
|
||||
}
|
||||
|
@ -249,6 +250,19 @@ impl ModuleGraphCreator {
package_configs: &[JsrPackageConfig],
build_fast_check_graph: bool,
) -> Result<ModuleGraph, AnyError> {
fn graph_has_external_remote(graph: &ModuleGraph) -> bool {
// Earlier on, we marked external non-JSR modules as external.
// If the graph contains any of those, it would cause type checking
// to crash, so since publishing is going to fail anyway, skip type
// checking.
graph.modules().any(|module| match module {
deno_graph::Module::External(external_module) => {
matches!(external_module.specifier.scheme(), "http" | "https")
}
_ => false,
})
}

let mut roots = Vec::new();
for package_config in package_configs {
roots.extend(package_config.config_file.resolve_export_value_urls()?);
@ -262,9 +276,12 @@ impl ModuleGraphCreator {
|
|||
})
|
||||
.await?;
|
||||
self.graph_valid(&graph)?;
|
||||
if self.options.type_check_mode().is_true() {
|
||||
if self.options.type_check_mode().is_true()
|
||||
&& !graph_has_external_remote(&graph)
|
||||
{
|
||||
self.type_check_graph(graph.clone()).await?;
|
||||
}
|
||||
|
||||
if build_fast_check_graph {
|
||||
let fast_check_workspace_members = package_configs
|
||||
.iter()
|
||||
|
@ -279,6 +296,7 @@ impl ModuleGraphCreator {
|
|||
},
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(graph)
|
||||
}
|
||||
|
||||
|
@ -370,6 +388,7 @@ pub struct ModuleGraphBuilder {
|
|||
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
|
||||
file_fetcher: Arc<FileFetcher>,
|
||||
global_http_cache: Arc<GlobalHttpCache>,
|
||||
root_permissions_container: PermissionsContainer,
|
||||
}
|
||||
|
||||
impl ModuleGraphBuilder {
|
||||
|
@ -386,6 +405,7 @@ impl ModuleGraphBuilder {
|
|||
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
|
||||
file_fetcher: Arc<FileFetcher>,
|
||||
global_http_cache: Arc<GlobalHttpCache>,
|
||||
root_permissions_container: PermissionsContainer,
|
||||
) -> Self {
|
||||
Self {
|
||||
options,
|
||||
|
@ -399,6 +419,7 @@ impl ModuleGraphBuilder {
|
|||
maybe_file_watcher_reporter,
|
||||
file_fetcher,
|
||||
global_http_cache,
|
||||
root_permissions_container,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -553,33 +574,19 @@ impl ModuleGraphBuilder {
|
|||
// populate the information from the lockfile
|
||||
if let Some(lockfile) = &self.lockfile {
|
||||
let lockfile = lockfile.lock();
|
||||
for (from, to) in &lockfile.content.redirects {
|
||||
if let Ok(from) = ModuleSpecifier::parse(from) {
|
||||
if let Ok(to) = ModuleSpecifier::parse(to) {
|
||||
if !matches!(from.scheme(), "file" | "npm" | "jsr") {
|
||||
graph.redirects.insert(from, to);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (req_dep, value) in &lockfile.content.packages.specifiers {
|
||||
match req_dep.kind {
|
||||
deno_semver::package::PackageKind::Jsr => {
|
||||
if let Ok(version) = Version::parse_standard(value) {
|
||||
graph.packages.add_nv(
|
||||
req_dep.req.clone(),
|
||||
PackageNv {
|
||||
name: req_dep.req.name.clone(),
|
||||
version,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
deno_semver::package::PackageKind::Npm => {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
graph.fill_from_lockfile(FillFromLockfileOptions {
|
||||
redirects: lockfile
|
||||
.content
|
||||
.redirects
|
||||
.iter()
|
||||
.map(|(from, to)| (from.as_str(), to.as_str())),
|
||||
package_specifiers: lockfile
|
||||
.content
|
||||
.packages
|
||||
.specifiers
|
||||
.iter()
|
||||
.map(|(dep, id)| (dep, id.as_str())),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -587,6 +594,12 @@ impl ModuleGraphBuilder {
let initial_package_deps_len = graph.packages.package_deps_sum();
let initial_package_mappings_len = graph.packages.mappings().len();

if roots.iter().any(|r| r.scheme() == "npm")
&& self.npm_resolver.as_byonm().is_some()
{
bail!("Resolving npm specifier entrypoints this way is currently not supported with \"nodeModules\": \"manual\". In the meantime, try with --node-modules-dir=auto instead");
}

graph.build(roots, loader, options).await;

let has_redirects_changed = graph.redirects.len() != initial_redirects_len;
@ -670,20 +683,26 @@ impl ModuleGraphBuilder {
|
|||
|
||||
/// Creates the default loader used for creating a graph.
|
||||
pub fn create_graph_loader(&self) -> cache::FetchCacher {
|
||||
self.create_fetch_cacher(FetchPermissionsOption::AllowAll)
|
||||
self.create_fetch_cacher(self.root_permissions_container.clone())
|
||||
}
|
||||
|
||||
pub fn create_fetch_cacher(
|
||||
&self,
|
||||
permissions: FetchPermissionsOption,
|
||||
permissions: PermissionsContainer,
|
||||
) -> cache::FetchCacher {
|
||||
cache::FetchCacher::new(
|
||||
self.file_fetcher.clone(),
|
||||
self.options.resolve_file_header_overrides(),
|
||||
self.global_http_cache.clone(),
|
||||
self.npm_resolver.clone(),
|
||||
self.module_info_cache.clone(),
|
||||
permissions,
|
||||
cache::FetchCacherOptions {
|
||||
file_header_overrides: self.options.resolve_file_header_overrides(),
|
||||
permissions,
|
||||
is_deno_publish: matches!(
|
||||
self.options.sub_command(),
|
||||
crate::args::DenoSubcommand::Publish { .. }
|
||||
),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -707,10 +726,13 @@ impl ModuleGraphBuilder {
|
|||
&self.fs,
|
||||
roots,
|
||||
GraphValidOptions {
|
||||
is_vendoring: false,
|
||||
follow_type_only: self.options.type_check_mode().is_true(),
|
||||
kind: if self.options.type_check_mode().is_true() {
|
||||
GraphKind::All
|
||||
} else {
|
||||
GraphKind::CodeOnly
|
||||
},
|
||||
check_js: self.options.check_js(),
|
||||
exit_lockfile_errors: true,
|
||||
exit_integrity_errors: true,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
@ -751,8 +773,8 @@ fn enhanced_sloppy_imports_error_message(
|
|||
match error {
|
||||
ModuleError::LoadingErr(specifier, _, ModuleLoadError::Loader(_)) // ex. "Is a directory" error
|
||||
| ModuleError::Missing(specifier, _) => {
|
||||
let additional_message = SloppyImportsResolver::new(fs.clone())
|
||||
.resolve(specifier, ResolutionMode::Execution)?
|
||||
let additional_message = CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(fs.clone()))
|
||||
.resolve(specifier, SloppyImportsResolutionMode::Execution)?
|
||||
.as_suggestion_message();
|
||||
Some(format!(
|
||||
"{} {} or run with --unstable-sloppy-imports",
|
||||
|
@ -764,7 +786,7 @@ fn enhanced_sloppy_imports_error_message(
|
|||
}
|
||||
}
|
||||
|
||||
fn enhanced_lockfile_error_message(err: &ModuleError) -> Option<String> {
|
||||
fn enhanced_integrity_error_message(err: &ModuleError) -> Option<String> {
|
||||
match err {
|
||||
ModuleError::LoadingErr(
|
||||
specifier,
|
||||
|
@ -928,13 +950,13 @@ pub fn has_graph_root_local_dependent_changed(
|
|||
std::iter::once(root),
|
||||
deno_graph::WalkOptions {
|
||||
follow_dynamic: true,
|
||||
follow_type_only: true,
|
||||
kind: GraphKind::All,
|
||||
prefer_fast_check_graph: true,
|
||||
check_js: true,
|
||||
},
|
||||
);
|
||||
while let Some((s, _)) = dependent_specifiers.next() {
|
||||
if let Ok(path) = specifier_to_file_path(s) {
|
||||
if let Ok(path) = url_to_file_path(s) {
|
||||
if let Ok(path) = canonicalize_path(&path) {
|
||||
if canonicalized_changed_paths.contains(&path) {
|
||||
return true;
|
||||
|
|
|
@ -23,8 +23,8 @@ use deno_core::serde::Serialize;
|
|||
use deno_core::serde_json;
|
||||
use deno_core::serde_json::json;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_path_util::url_to_file_path;
|
||||
use deno_runtime::deno_node::PathClean;
|
||||
use deno_runtime::fs_util::specifier_to_file_path;
|
||||
use deno_semver::jsr::JsrPackageNvReference;
|
||||
use deno_semver::jsr::JsrPackageReqReference;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
|
@ -401,7 +401,7 @@ impl<'a> TsResponseImportMapper<'a> {
|
|||
.flatten()?;
|
||||
let root_folder = package_json.path.parent()?;
|
||||
|
||||
let specifier_path = specifier_to_file_path(specifier).ok()?;
|
||||
let specifier_path = url_to_file_path(specifier).ok()?;
|
||||
let mut search_paths = vec![specifier_path.clone()];
|
||||
// TypeScript will provide a .js extension for quick fixes, so do
|
||||
// a search for the .d.ts file instead
|
||||
|
|
|
@ -10,7 +10,7 @@ use crate::lsp::logging::lsp_warn;
|
|||
|
||||
use deno_core::url::Url;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_runtime::fs_util::specifier_to_file_path;
|
||||
use deno_path_util::url_to_file_path;
|
||||
use std::collections::BTreeMap;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
@ -24,7 +24,7 @@ pub fn calculate_fs_version(
|
|||
) -> Option<String> {
|
||||
match specifier.scheme() {
|
||||
"npm" | "node" | "data" | "blob" => None,
|
||||
"file" => specifier_to_file_path(specifier)
|
||||
"file" => url_to_file_path(specifier)
|
||||
.ok()
|
||||
.and_then(|path| calculate_fs_version_at_path(&path)),
|
||||
_ => calculate_fs_version_in_cache(cache, specifier, file_referrer),
|
||||
|
@ -82,7 +82,7 @@ impl Default for LspCache {
|
|||
impl LspCache {
|
||||
pub fn new(global_cache_url: Option<Url>) -> Self {
|
||||
let global_cache_path = global_cache_url.and_then(|s| {
|
||||
specifier_to_file_path(&s)
|
||||
url_to_file_path(&s)
|
||||
.inspect(|p| {
|
||||
lsp_log!("Resolved global cache path: \"{}\"", p.to_string_lossy());
|
||||
})
|
||||
|
@ -165,7 +165,7 @@ impl LspCache {
|
|||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Option<ModuleSpecifier> {
|
||||
let path = specifier_to_file_path(specifier).ok()?;
|
||||
let path = url_to_file_path(specifier).ok()?;
|
||||
let vendor = self
|
||||
.vendors_by_scope
|
||||
.iter()
|
||||
|
@ -176,7 +176,7 @@ impl LspCache {
|
|||
}
|
||||
|
||||
pub fn is_valid_file_referrer(&self, specifier: &ModuleSpecifier) -> bool {
|
||||
if let Ok(path) = specifier_to_file_path(specifier) {
|
||||
if let Ok(path) = url_to_file_path(specifier) {
|
||||
if !path.starts_with(&self.deno_dir().root) {
|
||||
return true;
|
||||
}
|
||||
|
|
|
@ -29,7 +29,7 @@ use deno_core::serde::Serialize;
|
|||
use deno_core::serde_json::json;
|
||||
use deno_core::url::Position;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_runtime::fs_util::specifier_to_file_path;
|
||||
use deno_path_util::url_to_file_path;
|
||||
use deno_semver::jsr::JsrPackageReqReference;
|
||||
use deno_semver::package::PackageNv;
|
||||
use import_map::ImportMap;
|
||||
|
@ -380,7 +380,7 @@ fn get_local_completions(
|
|||
ResolutionMode::Execution,
|
||||
)
|
||||
.ok()?;
|
||||
let resolved_parent_path = specifier_to_file_path(&resolved_parent).ok()?;
|
||||
let resolved_parent_path = url_to_file_path(&resolved_parent).ok()?;
|
||||
let raw_parent =
|
||||
&text[..text.char_indices().rfind(|(_, c)| *c == '/')?.0 + 1];
|
||||
if resolved_parent_path.is_dir() {
|
||||
|
|
|
@ -36,8 +36,8 @@ use deno_core::ModuleSpecifier;
|
|||
use deno_lint::linter::LintConfig as DenoLintConfig;
|
||||
use deno_npm::npm_rc::ResolvedNpmRc;
|
||||
use deno_package_json::PackageJsonCache;
|
||||
use deno_path_util::url_to_file_path;
|
||||
use deno_runtime::deno_node::PackageJson;
|
||||
use deno_runtime::fs_util::specifier_to_file_path;
|
||||
use indexmap::IndexSet;
|
||||
use lsp_types::ClientCapabilities;
|
||||
use std::collections::BTreeMap;
|
||||
|
@ -59,7 +59,8 @@ use crate::args::LintOptions;
|
|||
use crate::cache::FastInsecureHasher;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
use crate::lsp::logging::lsp_warn;
|
||||
use crate::resolver::SloppyImportsResolver;
|
||||
use crate::resolver::CliSloppyImportsResolver;
|
||||
use crate::resolver::SloppyImportsCachedFs;
|
||||
use crate::tools::lint::CliLinter;
|
||||
use crate::tools::lint::CliLinterOptions;
|
||||
use crate::tools::lint::LintRuleProvider;
|
||||
|
@ -801,7 +802,7 @@ impl Settings {
|
|||
/// Returns `None` if the value should be deferred to the presence of a
|
||||
/// `deno.json` file.
|
||||
pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> Option<bool> {
|
||||
let Ok(path) = specifier_to_file_path(specifier) else {
|
||||
let Ok(path) = url_to_file_path(specifier) else {
|
||||
// Non-file URLs are not disabled by these settings.
|
||||
return Some(true);
|
||||
};
|
||||
|
@ -810,7 +811,7 @@ impl Settings {
|
|||
let mut disable_paths = vec![];
|
||||
let mut enable_paths = None;
|
||||
if let Some(folder_uri) = folder_uri {
|
||||
if let Ok(folder_path) = specifier_to_file_path(folder_uri) {
|
||||
if let Ok(folder_path) = url_to_file_path(folder_uri) {
|
||||
disable_paths = settings
|
||||
.disable_paths
|
||||
.iter()
|
||||
|
@ -847,12 +848,12 @@ impl Settings {
|
|||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> (&WorkspaceSettings, Option<&ModuleSpecifier>) {
|
||||
let Ok(path) = specifier_to_file_path(specifier) else {
|
||||
let Ok(path) = url_to_file_path(specifier) else {
|
||||
return (&self.unscoped, self.first_folder.as_ref());
|
||||
};
|
||||
for (folder_uri, settings) in self.by_workspace_folder.iter().rev() {
|
||||
if let Some(settings) = settings {
|
||||
let Ok(folder_path) = specifier_to_file_path(folder_uri) else {
|
||||
let Ok(folder_path) = url_to_file_path(folder_uri) else {
|
||||
continue;
|
||||
};
|
||||
if path.starts_with(folder_path) {
|
||||
|
@ -1181,7 +1182,7 @@ pub struct ConfigData {
|
|||
pub lockfile: Option<Arc<CliLockfile>>,
|
||||
pub npmrc: Option<Arc<ResolvedNpmRc>>,
|
||||
pub resolver: Arc<WorkspaceResolver>,
|
||||
pub sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
|
||||
pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
|
||||
pub import_map_from_settings: Option<ModuleSpecifier>,
|
||||
watched_files: HashMap<ModuleSpecifier, ConfigWatchedFileType>,
|
||||
}
|
||||
|
@ -1584,9 +1585,11 @@ impl ConfigData {
|
|||
.is_ok()
|
||||
|| member_dir.workspace.has_unstable("sloppy-imports");
|
||||
let sloppy_imports_resolver = unstable_sloppy_imports.then(|| {
|
||||
Arc::new(SloppyImportsResolver::new_without_stat_cache(Arc::new(
|
||||
deno_runtime::deno_fs::RealFs,
|
||||
)))
|
||||
Arc::new(CliSloppyImportsResolver::new(
|
||||
SloppyImportsCachedFs::new_without_stat_cache(Arc::new(
|
||||
deno_runtime::deno_fs::RealFs,
|
||||
)),
|
||||
))
|
||||
});
|
||||
let resolver = Arc::new(resolver);
|
||||
let lint_rule_provider = LintRuleProvider::new(
|
||||
|
@ -1767,7 +1770,7 @@ impl ConfigTree {
|
|||
let config_file_path = (|| {
|
||||
let config_setting = ws_settings.config.as_ref()?;
|
||||
let config_uri = folder_uri.join(config_setting).ok()?;
|
||||
specifier_to_file_path(&config_uri).ok()
|
||||
url_to_file_path(&config_uri).ok()
|
||||
})();
|
||||
if config_file_path.is_some() || ws_settings.import_map.is_some() {
|
||||
scopes.insert(
|
||||
|
@ -1844,7 +1847,7 @@ impl ConfigTree {
|
|||
let scope = config_file.specifier.join(".").unwrap();
|
||||
let json_text = serde_json::to_string(&config_file.json).unwrap();
|
||||
let test_fs = deno_runtime::deno_fs::InMemoryFs::default();
|
||||
let config_path = specifier_to_file_path(&config_file.specifier).unwrap();
|
||||
let config_path = url_to_file_path(&config_file.specifier).unwrap();
|
||||
test_fs.setup_text_files(vec![(
|
||||
config_path.to_string_lossy().to_string(),
|
||||
json_text,
|
||||
|
|
|
@ -19,8 +19,8 @@ use super::urls::LspUrlMap;
|
|||
use crate::graph_util;
|
||||
use crate::graph_util::enhanced_resolution_error_message;
|
||||
use crate::lsp::lsp_custom::DiagnosticBatchNotificationParams;
|
||||
use crate::resolver::SloppyImportsResolution;
|
||||
use crate::resolver::SloppyImportsResolver;
|
||||
use crate::resolver::CliSloppyImportsResolver;
|
||||
use crate::resolver::SloppyImportsCachedFs;
|
||||
use crate::tools::lint::CliLinter;
|
||||
use crate::tools::lint::CliLinterOptions;
|
||||
use crate::tools::lint::LintRuleProvider;
|
||||
|
@ -40,11 +40,12 @@ use deno_core::unsync::spawn_blocking;
|
|||
use deno_core::unsync::JoinHandle;
|
||||
use deno_core::url::Url;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_graph::source::ResolutionMode;
|
||||
use deno_graph::source::ResolveError;
|
||||
use deno_graph::Resolution;
|
||||
use deno_graph::ResolutionError;
|
||||
use deno_graph::SpecifierError;
|
||||
use deno_resolver::sloppy_imports::SloppyImportsResolution;
|
||||
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
|
||||
use deno_runtime::deno_fs;
|
||||
use deno_runtime::deno_node;
|
||||
use deno_runtime::tokio_util::create_basic_runtime;
|
||||
|
@ -1263,7 +1264,9 @@ impl DenoDiagnostic {
|
|||
Self::NotInstalledJsr(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("JSR package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
|
||||
Self::NotInstalledNpm(pkg_req, specifier) => (lsp::DiagnosticSeverity::ERROR, format!("NPM package \"{pkg_req}\" is not installed or doesn't exist."), Some(json!({ "specifier": specifier }))),
|
||||
Self::NoLocal(specifier) => {
|
||||
let maybe_sloppy_resolution = SloppyImportsResolver::new(Arc::new(deno_fs::RealFs)).resolve(specifier, ResolutionMode::Execution);
|
||||
let maybe_sloppy_resolution = CliSloppyImportsResolver::new(
|
||||
SloppyImportsCachedFs::new(Arc::new(deno_fs::RealFs))
|
||||
).resolve(specifier, SloppyImportsResolutionMode::Execution);
|
||||
let data = maybe_sloppy_resolution.as_ref().map(|res| {
|
||||
json!({
|
||||
"specifier": specifier,
|
||||
|
|
|
@ -26,8 +26,8 @@ use deno_core::parking_lot::Mutex;
|
|||
use deno_core::ModuleSpecifier;
|
||||
use deno_graph::source::ResolutionMode;
|
||||
use deno_graph::Resolution;
|
||||
use deno_path_util::url_to_file_path;
|
||||
use deno_runtime::deno_node;
|
||||
use deno_runtime::fs_util::specifier_to_file_path;
|
||||
use deno_semver::jsr::JsrPackageReqReference;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use deno_semver::package::PackageReq;
|
||||
|
@ -849,7 +849,7 @@ impl FileSystemDocuments {
|
|||
file_referrer: Option<&ModuleSpecifier>,
|
||||
) -> Option<Arc<Document>> {
|
||||
let doc = if specifier.scheme() == "file" {
|
||||
let path = specifier_to_file_path(specifier).ok()?;
|
||||
let path = url_to_file_path(specifier).ok()?;
|
||||
let bytes = fs::read(path).ok()?;
|
||||
let content =
|
||||
deno_graph::source::decode_owned_source(specifier, bytes, None).ok()?;
|
||||
|
@ -1136,7 +1136,7 @@ impl Documents {
|
|||
return true;
|
||||
}
|
||||
if specifier.scheme() == "file" {
|
||||
return specifier_to_file_path(&specifier)
|
||||
return url_to_file_path(&specifier)
|
||||
.map(|p| p.is_file())
|
||||
.unwrap_or(false);
|
||||
}
|
||||
|
@ -1325,7 +1325,7 @@ impl Documents {
|
|||
let fs_docs = &self.file_system_docs;
|
||||
// Clean up non-existent documents.
|
||||
fs_docs.docs.retain(|specifier, _| {
|
||||
let Ok(path) = specifier_to_file_path(specifier) else {
|
||||
let Ok(path) = url_to_file_path(specifier) else {
|
||||
// Remove non-file schemed docs (deps). They may not be dependencies
|
||||
// anymore after updating resolvers.
|
||||
return false;
|
||||
|
|
|
@ -15,9 +15,9 @@ use deno_core::url::Url;
|
|||
use deno_core::ModuleSpecifier;
|
||||
use deno_graph::GraphKind;
|
||||
use deno_graph::Resolution;
|
||||
use deno_path_util::url_to_file_path;
|
||||
use deno_runtime::deno_tls::rustls::RootCertStore;
|
||||
use deno_runtime::deno_tls::RootCertStoreProvider;
|
||||
use deno_runtime::fs_util::specifier_to_file_path;
|
||||
use deno_semver::jsr::JsrPackageReqReference;
|
||||
use indexmap::Equivalent;
|
||||
use indexmap::IndexSet;
|
||||
|
@ -274,10 +274,9 @@ impl LanguageServer {
|
|||
factory.fs(),
|
||||
&roots,
|
||||
graph_util::GraphValidOptions {
|
||||
is_vendoring: false,
|
||||
follow_type_only: true,
|
||||
kind: GraphKind::All,
|
||||
check_js: false,
|
||||
exit_lockfile_errors: false,
|
||||
exit_integrity_errors: false,
|
||||
},
|
||||
)?;
|
||||
|
||||
|
@ -627,7 +626,7 @@ impl Inner {
let maybe_root_path = self
.config
.root_uri()
.and_then(|uri| specifier_to_file_path(uri).ok());
.and_then(|uri| url_to_file_path(uri).ok());
let root_cert_store = get_root_cert_store(
maybe_root_path,
workspace_settings.certificate_stores.clone(),
@ -803,7 +802,7 @@ impl Inner {
let mut roots = config
.workspace_folders
.iter()
.filter_map(|p| specifier_to_file_path(&p.0).ok())
.filter_map(|p| url_to_file_path(&p.0).ok())
.collect::<Vec<_>>();
roots.sort();
let roots = roots
@ -1125,7 +1124,7 @@ impl Inner {
{
return;
}
match specifier_to_file_path(&specifier) {
match url_to_file_path(&specifier) {
Ok(path) if is_importable_ext(&path) => {}
_ => return,
}
@ -1363,7 +1362,7 @@ impl Inner {
{
specifier = uri_to_url(&params.text_document.uri);
}
let file_path = specifier_to_file_path(&specifier).map_err(|err| {
let file_path = url_to_file_path(&specifier).map_err(|err| {
error!("{:#}", err);
LspError::invalid_request()
})?;
@ -2509,7 +2508,7 @@ impl Inner {
let maybe_root_path_owned = self
.config
.root_uri()
.and_then(|uri| specifier_to_file_path(uri).ok());
.and_then(|uri| url_to_file_path(uri).ok());
let mut resolved_items = Vec::<CallHierarchyIncomingCall>::new();
for item in incoming_calls.iter() {
if let Some(resolved) = item.try_resolve_call_hierarchy_incoming_call(
@ -2555,7 +2554,7 @@ impl Inner {
let maybe_root_path_owned = self
.config
.root_uri()
.and_then(|uri| specifier_to_file_path(uri).ok());
.and_then(|uri| url_to_file_path(uri).ok());
let mut resolved_items = Vec::<CallHierarchyOutgoingCall>::new();
for item in outgoing_calls.iter() {
if let Some(resolved) = item.try_resolve_call_hierarchy_outgoing_call(
@ -2604,7 +2603,7 @@ impl Inner {
let maybe_root_path_owned = self
.config
.root_uri()
.and_then(|uri| specifier_to_file_path(uri).ok());
.and_then(|uri| url_to_file_path(uri).ok());
let mut resolved_items = Vec::<CallHierarchyItem>::new();
match one_or_many {
tsc::OneOrMany::One(item) => {
@ -3614,6 +3613,11 @@ impl Inner {
}),
// bit of a hack to force the lsp to cache the @types/node package
type_check_mode: crate::args::TypeCheckMode::Local,
permissions: crate::args::PermissionFlags {
// allow remote import permissions in the lsp for now
allow_import: Some(vec![]),
..Default::default()
},
..Default::default()
}),
initial_cwd,
@ -10,10 +10,10 @@ use deno_graph::source::Resolver;
use deno_graph::GraphImport;
use deno_graph::ModuleSpecifier;
use deno_npm::NpmSystemInfo;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::fs_util::specifier_to_file_path;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv;
@ -42,13 +42,14 @@ use crate::lsp::config::Config;
use crate::lsp::config::ConfigData;
use crate::lsp::logging::lsp_warn;
use crate::npm::create_cli_npm_resolver_for_lsp;
use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverByonmCreateOptions;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::ManagedCliNpmResolver;
use crate::resolver::CjsResolutionStore;
use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliGraphResolverOptions;
use crate::resolver::CliNodeResolver;
@ -439,11 +440,11 @@ async fn create_npm_resolver(
) -> Option<Arc<dyn CliNpmResolver>> {
let enable_byonm = config_data.map(|d| d.byonm).unwrap_or(false);
let options = if enable_byonm {
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions {
fs: Arc::new(deno_fs::RealFs),
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)),
root_node_modules_dir: config_data.and_then(|config_data| {
config_data.node_modules_dir.clone().or_else(|| {
specifier_to_file_path(&config_data.scope)
url_to_file_path(&config_data.scope)
.ok()
.map(|p| p.join("node_modules/"))
})
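// A minimal sketch (not part of this diff) of the byonm fallback above: when no
// node_modules dir is configured, the root node_modules path is derived from the
// workspace scope URL. `scope` and the helper name are hypothetical, and the real
// code goes through deno_path_util::url_to_file_path rather than Url::to_file_path.
use deno_core::url::Url;

fn default_root_node_modules(scope: &Url) -> Option<std::path::PathBuf> {
  // e.g. file:///my-project/ -> /my-project/node_modules/
  scope.to_file_path().ok().map(|p| p.join("node_modules/"))
}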
@ -62,7 +62,7 @@ use deno_core::ModuleSpecifier;
|
|||
use deno_core::OpState;
|
||||
use deno_core::PollEventLoopOptions;
|
||||
use deno_core::RuntimeOptions;
|
||||
use deno_runtime::fs_util::specifier_to_file_path;
|
||||
use deno_path_util::url_to_file_path;
|
||||
use deno_runtime::inspector_server::InspectorServer;
|
||||
use deno_runtime::tokio_util::create_basic_runtime;
|
||||
use indexmap::IndexMap;
|
||||
|
@ -3191,7 +3191,7 @@ impl CallHierarchyItem {
|
|||
let use_file_name = self.is_source_file_item();
|
||||
let maybe_file_path = if uri.scheme().is_some_and(|s| s.as_str() == "file")
|
||||
{
|
||||
specifier_to_file_path(&uri_to_url(&uri)).ok()
|
||||
url_to_file_path(&uri_to_url(&uri)).ok()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
|
|
@ -385,6 +385,13 @@ fn get_suggestions_for_terminal_errors(e: &JsError) -> Vec<FixSuggestion> {
|
|||
FixSuggestion::info("window global is not available in Deno 2."),
|
||||
FixSuggestion::hint("Replace `window` with `globalThis`."),
|
||||
];
|
||||
} else if msg.contains("UnsafeWindowSurface is not a constructor") {
|
||||
return vec![
|
||||
FixSuggestion::info("Deno.UnsafeWindowSurface is an unstable API."),
|
||||
FixSuggestion::hint(
|
||||
"Run again with `--unstable-webgpu` flag to enable this API.",
|
||||
),
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -23,6 +23,7 @@ use crate::graph_container::ModuleGraphUpdatePermit;
|
|||
use crate::graph_util::CreateGraphOptions;
|
||||
use crate::graph_util::ModuleGraphBuilder;
|
||||
use crate::node;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::resolver::CliGraphResolver;
|
||||
use crate::resolver::CliNodeResolver;
|
||||
use crate::resolver::ModuleCodeStringSource;
|
||||
|
@ -104,7 +105,7 @@ impl ModuleLoadPreparer {
|
|||
roots: &[ModuleSpecifier],
|
||||
is_dynamic: bool,
|
||||
lib: TsTypeLib,
|
||||
permissions: crate::file_fetcher::FetchPermissionsOption,
|
||||
permissions: PermissionsContainer,
|
||||
ext_overwrite: Option<&String>,
|
||||
) -> Result<(), AnyError> {
|
||||
log::debug!("Preparing module load.");
|
||||
|
@ -203,6 +204,7 @@ struct SharedCliModuleLoaderState {
|
|||
main_module_graph_container: Arc<MainModuleGraphContainer>,
|
||||
module_load_preparer: Arc<ModuleLoadPreparer>,
|
||||
node_resolver: Arc<CliNodeResolver>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
npm_module_loader: NpmModuleLoader,
|
||||
parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
resolver: Arc<CliGraphResolver>,
|
||||
|
@ -221,6 +223,7 @@ impl CliModuleLoaderFactory {
|
|||
main_module_graph_container: Arc<MainModuleGraphContainer>,
|
||||
module_load_preparer: Arc<ModuleLoadPreparer>,
|
||||
node_resolver: Arc<CliNodeResolver>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
npm_module_loader: NpmModuleLoader,
|
||||
parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
resolver: Arc<CliGraphResolver>,
|
||||
|
@ -241,6 +244,7 @@ impl CliModuleLoaderFactory {
|
|||
main_module_graph_container,
|
||||
module_load_preparer,
|
||||
node_resolver,
|
||||
npm_resolver,
|
||||
npm_module_loader,
|
||||
parsed_source_cache,
|
||||
resolver,
|
||||
|
@ -252,13 +256,15 @@ impl CliModuleLoaderFactory {
|
|||
&self,
|
||||
graph_container: TGraphContainer,
|
||||
lib: TsTypeLib,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
is_worker: bool,
|
||||
parent_permissions: PermissionsContainer,
|
||||
permissions: PermissionsContainer,
|
||||
) -> ModuleLoaderAndSourceMapGetter {
|
||||
let loader = Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner {
|
||||
lib,
|
||||
root_permissions,
|
||||
dynamic_permissions,
|
||||
is_worker,
|
||||
parent_permissions,
|
||||
permissions,
|
||||
graph_container,
|
||||
emitter: self.shared.emitter.clone(),
|
||||
parsed_source_cache: self.shared.parsed_source_cache.clone(),
|
||||
|
@ -274,20 +280,20 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
|
|||
fn create_for_main(
|
||||
&self,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
) -> ModuleLoaderAndSourceMapGetter {
|
||||
self.create_with_lib(
|
||||
(*self.shared.main_module_graph_container).clone(),
|
||||
self.shared.lib_window,
|
||||
/* is worker */ false,
|
||||
root_permissions.clone(),
|
||||
root_permissions,
|
||||
dynamic_permissions,
|
||||
)
|
||||
}
|
||||
|
||||
fn create_for_worker(
|
||||
&self,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
parent_permissions: PermissionsContainer,
|
||||
permissions: PermissionsContainer,
|
||||
) -> ModuleLoaderAndSourceMapGetter {
|
||||
self.create_with_lib(
|
||||
// create a fresh module graph for the worker
|
||||
|
@ -295,21 +301,21 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
|
|||
self.shared.graph_kind,
|
||||
))),
|
||||
self.shared.lib_worker,
|
||||
root_permissions,
|
||||
dynamic_permissions,
|
||||
/* is worker */ true,
|
||||
parent_permissions,
|
||||
permissions,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
struct CliModuleLoaderInner<TGraphContainer: ModuleGraphContainer> {
|
||||
lib: TsTypeLib,
|
||||
is_worker: bool,
|
||||
/// The initial set of permissions used to resolve the static imports in the
|
||||
/// worker. These are "allow all" for main worker, and parent thread
|
||||
/// permissions for Web Worker.
|
||||
root_permissions: PermissionsContainer,
|
||||
/// Permissions used to resolve dynamic imports, these get passed as
|
||||
/// "root permissions" for Web Worker.
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
parent_permissions: PermissionsContainer,
|
||||
permissions: PermissionsContainer,
|
||||
shared: Arc<SharedCliModuleLoaderState>,
|
||||
emitter: Arc<Emitter>,
|
||||
parsed_source_cache: Arc<ParsedSourceCache>,
|
||||
|
@ -476,7 +482,6 @@ impl<TGraphContainer: ModuleGraphContainer>
|
|||
Some(Module::Npm(module)) => {
|
||||
let package_folder = self
|
||||
.shared
|
||||
.node_resolver
|
||||
.npm_resolver
|
||||
.as_managed()
|
||||
.unwrap() // byonm won't create a Module::Npm
|
||||
|
@ -769,11 +774,12 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
|
|||
}
|
||||
}
|
||||
|
||||
let root_permissions = if is_dynamic {
|
||||
inner.dynamic_permissions.clone()
|
||||
let permissions = if is_dynamic {
|
||||
inner.permissions.clone()
|
||||
} else {
|
||||
inner.root_permissions.clone()
|
||||
inner.parent_permissions.clone()
|
||||
};
|
||||
let is_dynamic = is_dynamic || inner.is_worker; // consider workers as dynamic for permissions
|
||||
let lib = inner.lib;
|
||||
let mut update_permit = graph_container.acquire_update_permit().await;
|
||||
let graph = update_permit.graph_mut();
|
||||
|
@ -783,7 +789,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
|
|||
&[specifier],
|
||||
is_dynamic,
|
||||
lib,
|
||||
root_permissions.into(),
|
||||
permissions,
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
|
353 cli/npm/byonm.rs
@ -1,276 +1,36 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json;
|
||||
use deno_package_json::PackageJsonDepValue;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node::DenoPkgJsonFsAdapter;
|
||||
use deno_core::url::Url;
|
||||
use deno_resolver::npm::ByonmNpmResolver;
|
||||
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
|
||||
use deno_runtime::deno_node::NodePermissions;
|
||||
use deno_runtime::deno_node::NodeRequireResolver;
|
||||
use deno_runtime::deno_node::NpmProcessStateProvider;
|
||||
use deno_runtime::deno_node::PackageJson;
|
||||
use deno_runtime::fs_util::specifier_to_file_path;
|
||||
use deno_runtime::ops::process::NpmProcessStateProvider;
|
||||
use deno_semver::package::PackageReq;
|
||||
use deno_semver::Version;
|
||||
use node_resolver::errors::PackageFolderResolveError;
|
||||
use node_resolver::errors::PackageFolderResolveIoError;
|
||||
use node_resolver::errors::PackageJsonLoadError;
|
||||
use node_resolver::errors::PackageNotFoundError;
|
||||
use node_resolver::load_pkg_json;
|
||||
use node_resolver::NpmResolver;
|
||||
|
||||
use crate::args::NpmProcessState;
|
||||
use crate::args::NpmProcessStateKind;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
|
||||
use crate::resolver::CliDenoResolverFs;
|
||||
|
||||
use super::managed::normalize_pkg_name_for_node_modules_deno_folder;
|
||||
use super::CliNpmResolver;
|
||||
use super::InnerCliNpmResolverRef;
|
||||
|
||||
pub struct CliNpmResolverByonmCreateOptions {
|
||||
pub fs: Arc<dyn FileSystem>,
|
||||
// todo(dsherret): investigate removing this
|
||||
pub root_node_modules_dir: Option<PathBuf>,
|
||||
}
|
||||
|
||||
pub fn create_byonm_npm_resolver(
|
||||
options: CliNpmResolverByonmCreateOptions,
|
||||
) -> Arc<dyn CliNpmResolver> {
|
||||
Arc::new(ByonmCliNpmResolver {
|
||||
fs: options.fs,
|
||||
root_node_modules_dir: options.root_node_modules_dir,
|
||||
})
|
||||
}
|
||||
pub type CliByonmNpmResolverCreateOptions =
|
||||
ByonmNpmResolverCreateOptions<CliDenoResolverFs>;
|
||||
pub type CliByonmNpmResolver = ByonmNpmResolver<CliDenoResolverFs>;
|
||||
|
||||
// todo(dsherret): the services hanging off `CliNpmResolver` don't seem ideal. We should probably decouple.
|
||||
#[derive(Debug)]
|
||||
pub struct ByonmCliNpmResolver {
|
||||
fs: Arc<dyn FileSystem>,
|
||||
root_node_modules_dir: Option<PathBuf>,
|
||||
}
|
||||
struct CliByonmWrapper(Arc<CliByonmNpmResolver>);
|
||||
|
||||
impl ByonmCliNpmResolver {
|
||||
fn load_pkg_json(
|
||||
&self,
|
||||
path: &Path,
|
||||
) -> Result<Option<Arc<PackageJson>>, PackageJsonLoadError> {
|
||||
load_pkg_json(&DenoPkgJsonFsAdapter(self.fs.as_ref()), path)
|
||||
}
|
||||
|
||||
/// Finds the ancestor package.json that contains the specified dependency.
|
||||
pub fn find_ancestor_package_json_with_dep(
|
||||
&self,
|
||||
dep_name: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
) -> Option<Arc<PackageJson>> {
|
||||
let referrer_path = referrer.to_file_path().ok()?;
|
||||
let mut current_folder = referrer_path.parent()?;
|
||||
loop {
|
||||
let pkg_json_path = current_folder.join("package.json");
|
||||
if let Ok(Some(pkg_json)) = self.load_pkg_json(&pkg_json_path) {
|
||||
if let Some(deps) = &pkg_json.dependencies {
|
||||
if deps.contains_key(dep_name) {
|
||||
return Some(pkg_json);
|
||||
}
|
||||
}
|
||||
if let Some(deps) = &pkg_json.dev_dependencies {
|
||||
if deps.contains_key(dep_name) {
|
||||
return Some(pkg_json);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(parent) = current_folder.parent() {
|
||||
current_folder = parent;
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_pkg_json_and_alias_for_req(
|
||||
&self,
|
||||
req: &PackageReq,
|
||||
referrer: &ModuleSpecifier,
|
||||
) -> Result<Option<(Arc<PackageJson>, String)>, AnyError> {
|
||||
fn resolve_alias_from_pkg_json(
|
||||
req: &PackageReq,
|
||||
pkg_json: &PackageJson,
|
||||
) -> Option<String> {
|
||||
let deps = pkg_json.resolve_local_package_json_deps();
|
||||
for (key, value) in deps {
|
||||
if let Ok(value) = value {
|
||||
match value {
|
||||
PackageJsonDepValue::Req(dep_req) => {
|
||||
if dep_req.name == req.name
|
||||
&& dep_req.version_req.intersects(&req.version_req)
|
||||
{
|
||||
return Some(key);
|
||||
}
|
||||
}
|
||||
PackageJsonDepValue::Workspace(_workspace) => {
|
||||
if key == req.name && req.version_req.tag() == Some("workspace") {
|
||||
return Some(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
// attempt to resolve the npm specifier from the referrer's package.json,
|
||||
if let Ok(file_path) = specifier_to_file_path(referrer) {
|
||||
let mut current_path = file_path.as_path();
|
||||
while let Some(dir_path) = current_path.parent() {
|
||||
let package_json_path = dir_path.join("package.json");
|
||||
if let Some(pkg_json) = self.load_pkg_json(&package_json_path)? {
|
||||
if let Some(alias) =
|
||||
resolve_alias_from_pkg_json(req, pkg_json.as_ref())
|
||||
{
|
||||
return Ok(Some((pkg_json, alias)));
|
||||
}
|
||||
}
|
||||
current_path = dir_path;
|
||||
}
|
||||
}
|
||||
|
||||
// otherwise, fall back to the project's package.json
|
||||
if let Some(root_node_modules_dir) = &self.root_node_modules_dir {
|
||||
let root_pkg_json_path =
|
||||
root_node_modules_dir.parent().unwrap().join("package.json");
|
||||
if let Some(pkg_json) = self.load_pkg_json(&root_pkg_json_path)? {
|
||||
if let Some(alias) = resolve_alias_from_pkg_json(req, pkg_json.as_ref())
|
||||
{
|
||||
return Ok(Some((pkg_json, alias)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
fn resolve_folder_in_root_node_modules(
|
||||
&self,
|
||||
req: &PackageReq,
|
||||
) -> Option<PathBuf> {
|
||||
// now check if node_modules/.deno/ matches this constraint
|
||||
let root_node_modules_dir = self.root_node_modules_dir.as_ref()?;
|
||||
let node_modules_deno_dir = root_node_modules_dir.join(".deno");
|
||||
let Ok(entries) = self.fs.read_dir_sync(&node_modules_deno_dir) else {
|
||||
return None;
|
||||
};
|
||||
let search_prefix = format!(
|
||||
"{}@",
|
||||
normalize_pkg_name_for_node_modules_deno_folder(&req.name)
|
||||
);
|
||||
let mut best_version = None;
|
||||
|
||||
// example entries:
|
||||
// - @denotest+add@1.0.0
|
||||
// - @denotest+add@1.0.0_1
|
||||
for entry in entries {
|
||||
if !entry.is_directory {
|
||||
continue;
|
||||
}
|
||||
let Some(version_and_copy_idx) = entry.name.strip_prefix(&search_prefix)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
let version = version_and_copy_idx
|
||||
.rsplit_once('_')
|
||||
.map(|(v, _)| v)
|
||||
.unwrap_or(version_and_copy_idx);
|
||||
let Ok(version) = Version::parse_from_npm(version) else {
|
||||
continue;
|
||||
};
|
||||
if req.version_req.matches(&version) {
|
||||
if let Some((best_version_version, _)) = &best_version {
|
||||
if version > *best_version_version {
|
||||
best_version = Some((version, entry.name));
|
||||
}
|
||||
} else {
|
||||
best_version = Some((version, entry.name));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
best_version.map(|(_version, entry_name)| {
|
||||
join_package_name(
|
||||
&node_modules_deno_dir.join(entry_name).join("node_modules"),
|
||||
&req.name,
|
||||
)
|
||||
})
|
||||
}
|
||||
}
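// A minimal sketch of the entry-name parsing used above, with a hypothetical helper
// that is not part of this impl: after stripping the "<name>@" search prefix, an
// entry like "@denotest+add@1.0.0_1" leaves "1.0.0_1", which splits into a version
// and an optional copy index for duplicated peer-dependency folders.
fn split_version_and_copy_index(version_and_copy_idx: &str) -> (&str, u64) {
  match version_and_copy_idx.rsplit_once('_') {
    Some((version, idx)) => (version, idx.parse().unwrap_or(0)),
    None => (version_and_copy_idx, 0),
  }
}
// split_version_and_copy_index("1.0.0_1") == ("1.0.0", 1)
// split_version_and_copy_index("1.0.0") == ("1.0.0", 0)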
|
||||
|
||||
impl NpmResolver for ByonmCliNpmResolver {
|
||||
fn resolve_package_folder_from_package(
|
||||
&self,
|
||||
name: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
) -> Result<PathBuf, PackageFolderResolveError> {
|
||||
fn inner(
|
||||
fs: &dyn FileSystem,
|
||||
name: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
) -> Result<PathBuf, PackageFolderResolveError> {
|
||||
let maybe_referrer_file = specifier_to_file_path(referrer).ok();
|
||||
let maybe_start_folder =
|
||||
maybe_referrer_file.as_ref().and_then(|f| f.parent());
|
||||
if let Some(start_folder) = maybe_start_folder {
|
||||
for current_folder in start_folder.ancestors() {
|
||||
let node_modules_folder = if current_folder.ends_with("node_modules")
|
||||
{
|
||||
Cow::Borrowed(current_folder)
|
||||
} else {
|
||||
Cow::Owned(current_folder.join("node_modules"))
|
||||
};
|
||||
|
||||
let sub_dir = join_package_name(&node_modules_folder, name);
|
||||
if fs.is_dir_sync(&sub_dir) {
|
||||
return Ok(sub_dir);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err(
|
||||
PackageNotFoundError {
|
||||
package_name: name.to_string(),
|
||||
referrer: referrer.clone(),
|
||||
referrer_extra: None,
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
}
|
||||
|
||||
let path = inner(&*self.fs, name, referrer)?;
|
||||
self.fs.realpath_sync(&path).map_err(|err| {
|
||||
PackageFolderResolveIoError {
|
||||
package_name: name.to_string(),
|
||||
referrer: referrer.clone(),
|
||||
source: err.into_io_error(),
|
||||
}
|
||||
.into()
|
||||
})
|
||||
}
|
||||
|
||||
fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
|
||||
specifier.scheme() == "file"
|
||||
&& specifier
|
||||
.path()
|
||||
.to_ascii_lowercase()
|
||||
.contains("/node_modules/")
|
||||
}
|
||||
}
|
||||
|
||||
impl NodeRequireResolver for ByonmCliNpmResolver {
|
||||
impl NodeRequireResolver for CliByonmWrapper {
|
||||
fn ensure_read_permission(
|
||||
&self,
|
||||
permissions: &mut dyn NodePermissions,
|
||||
|
@ -286,110 +46,54 @@ impl NodeRequireResolver for ByonmCliNpmResolver {
|
|||
}
|
||||
}
|
||||
|
||||
impl NpmProcessStateProvider for ByonmCliNpmResolver {
|
||||
impl NpmProcessStateProvider for CliByonmWrapper {
|
||||
fn get_npm_process_state(&self) -> String {
|
||||
serde_json::to_string(&NpmProcessState {
|
||||
kind: NpmProcessStateKind::Byonm,
|
||||
local_node_modules_path: self
|
||||
.root_node_modules_dir
|
||||
.as_ref()
|
||||
.0
|
||||
.root_node_modules_dir()
|
||||
.map(|p| p.to_string_lossy().to_string()),
|
||||
})
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl CliNpmResolver for ByonmCliNpmResolver {
|
||||
impl CliNpmResolver for CliByonmNpmResolver {
|
||||
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver> {
|
||||
self
|
||||
}
|
||||
|
||||
fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver> {
|
||||
self
|
||||
Arc::new(CliByonmWrapper(self))
|
||||
}
|
||||
|
||||
fn into_process_state_provider(
|
||||
self: Arc<Self>,
|
||||
) -> Arc<dyn NpmProcessStateProvider> {
|
||||
self
|
||||
Arc::new(CliByonmWrapper(self))
|
||||
}
|
||||
|
||||
fn clone_snapshotted(&self) -> Arc<dyn CliNpmResolver> {
|
||||
Arc::new(Self {
|
||||
fs: self.fs.clone(),
|
||||
root_node_modules_dir: self.root_node_modules_dir.clone(),
|
||||
})
|
||||
Arc::new(self.clone())
|
||||
}
|
||||
|
||||
fn as_inner(&self) -> InnerCliNpmResolverRef {
|
||||
InnerCliNpmResolverRef::Byonm(self)
|
||||
}
|
||||
|
||||
fn root_node_modules_path(&self) -> Option<&PathBuf> {
|
||||
self.root_node_modules_dir.as_ref()
|
||||
fn root_node_modules_path(&self) -> Option<&Path> {
|
||||
self.root_node_modules_dir()
|
||||
}
|
||||
|
||||
fn resolve_pkg_folder_from_deno_module_req(
|
||||
&self,
|
||||
req: &PackageReq,
|
||||
referrer: &ModuleSpecifier,
|
||||
referrer: &Url,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
fn node_resolve_dir(
|
||||
fs: &dyn FileSystem,
|
||||
alias: &str,
|
||||
start_dir: &Path,
|
||||
) -> Result<Option<PathBuf>, AnyError> {
|
||||
for ancestor in start_dir.ancestors() {
|
||||
let node_modules_folder = ancestor.join("node_modules");
|
||||
let sub_dir = join_package_name(&node_modules_folder, alias);
|
||||
if fs.is_dir_sync(&sub_dir) {
|
||||
return Ok(Some(canonicalize_path_maybe_not_exists_with_fs(
|
||||
&sub_dir, fs,
|
||||
)?));
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
// now attempt to resolve if it's found in any package.json
|
||||
let maybe_pkg_json_and_alias =
|
||||
self.resolve_pkg_json_and_alias_for_req(req, referrer)?;
|
||||
match maybe_pkg_json_and_alias {
|
||||
Some((pkg_json, alias)) => {
|
||||
// now try node resolution
|
||||
if let Some(resolved) =
|
||||
node_resolve_dir(self.fs.as_ref(), &alias, pkg_json.dir_path())?
|
||||
{
|
||||
return Ok(resolved);
|
||||
}
|
||||
|
||||
bail!(
|
||||
concat!(
|
||||
"Could not find \"{}\" in a node_modules folder. ",
|
||||
"Deno expects the node_modules/ directory to be up to date. ",
|
||||
"Did you forget to run `deno install`?"
|
||||
),
|
||||
alias,
|
||||
);
|
||||
}
|
||||
None => {
|
||||
// now check if node_modules/.deno/ matches this constraint
|
||||
if let Some(folder) = self.resolve_folder_in_root_node_modules(req) {
|
||||
return Ok(folder);
|
||||
}
|
||||
|
||||
bail!(
|
||||
concat!(
|
||||
"Could not find a matching package for 'npm:{}' in the node_modules ",
|
||||
"directory. Ensure you have all your JSR and npm dependencies listed ",
|
||||
"in your deno.json or package.json, then run `deno install`. Alternatively, ",
|
||||
r#"turn on auto-install by specifying `"nodeModulesDir": "auto"` in your "#,
|
||||
"deno.json file."
|
||||
),
|
||||
req,
|
||||
);
|
||||
}
|
||||
}
|
||||
ByonmNpmResolver::resolve_pkg_folder_from_deno_module_req(
|
||||
self, req, referrer,
|
||||
)
|
||||
}
|
||||
|
||||
fn check_state_hash(&self) -> Option<u64> {
|
||||
|
@ -398,12 +102,3 @@ impl CliNpmResolver for ByonmCliNpmResolver {
|
|||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn join_package_name(path: &Path, package_name: &str) -> PathBuf {
|
||||
let mut path = path.to_path_buf();
|
||||
// ensure backslashes are used on windows
|
||||
for part in package_name.split('/') {
|
||||
path = path.join(part);
|
||||
}
|
||||
path
|
||||
}
|
||||
|
|
|
@ -1,295 +0,0 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::url::Url;
|
||||
use deno_npm::NpmPackageCacheFolderId;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_semver::Version;
|
||||
|
||||
use crate::util::fs::canonicalize_path;
|
||||
use crate::util::path::root_url_to_safe_local_dirname;
|
||||
|
||||
/// The global cache directory of npm packages.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct NpmCacheDir {
|
||||
root_dir: PathBuf,
|
||||
// cached url representation of the root directory
|
||||
root_dir_url: Url,
|
||||
// A list of all registries that were discovered via `.npmrc` files,
// turned into safe directory names.
|
||||
known_registries_dirnames: Vec<String>,
|
||||
}
|
||||
|
||||
impl NpmCacheDir {
|
||||
pub fn new(root_dir: PathBuf, known_registries_urls: Vec<Url>) -> Self {
|
||||
fn try_get_canonicalized_root_dir(
|
||||
root_dir: &Path,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
if !root_dir.exists() {
|
||||
std::fs::create_dir_all(root_dir)
|
||||
.with_context(|| format!("Error creating {}", root_dir.display()))?;
|
||||
}
|
||||
Ok(canonicalize_path(root_dir)?)
|
||||
}
|
||||
|
||||
// this may fail on readonly file systems, so just ignore if so
|
||||
let root_dir =
|
||||
try_get_canonicalized_root_dir(&root_dir).unwrap_or(root_dir);
|
||||
let root_dir_url = Url::from_directory_path(&root_dir).unwrap();
|
||||
|
||||
let known_registries_dirnames: Vec<_> = known_registries_urls
|
||||
.into_iter()
|
||||
.map(|url| {
|
||||
root_url_to_safe_local_dirname(&url)
|
||||
.to_string_lossy()
|
||||
.replace('\\', "/")
|
||||
})
|
||||
.collect();
|
||||
|
||||
Self {
|
||||
root_dir,
|
||||
root_dir_url,
|
||||
known_registries_dirnames,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn root_dir(&self) -> &Path {
|
||||
&self.root_dir
|
||||
}
|
||||
|
||||
pub fn root_dir_url(&self) -> &Url {
|
||||
&self.root_dir_url
|
||||
}
|
||||
|
||||
pub fn package_folder_for_id(
|
||||
&self,
|
||||
folder_id: &NpmPackageCacheFolderId,
|
||||
registry_url: &Url,
|
||||
) -> PathBuf {
|
||||
if folder_id.copy_index == 0 {
|
||||
self.package_folder_for_nv(&folder_id.nv, registry_url)
|
||||
} else {
|
||||
self
|
||||
.package_name_folder(&folder_id.nv.name, registry_url)
|
||||
.join(format!("{}_{}", folder_id.nv.version, folder_id.copy_index))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn package_folder_for_nv(
|
||||
&self,
|
||||
package: &PackageNv,
|
||||
registry_url: &Url,
|
||||
) -> PathBuf {
|
||||
self
|
||||
.package_name_folder(&package.name, registry_url)
|
||||
.join(package.version.to_string())
|
||||
}
|
||||
|
||||
pub fn package_name_folder(&self, name: &str, registry_url: &Url) -> PathBuf {
|
||||
let mut dir = self.registry_folder(registry_url);
|
||||
if name.to_lowercase() != name {
|
||||
let encoded_name = mixed_case_package_name_encode(name);
|
||||
// Using the encoded directory may have a collision with an actual package name
|
||||
// so prefix it with an underscore since npm packages can't start with that
|
||||
dir.join(format!("_{encoded_name}"))
|
||||
} else {
|
||||
// ensure backslashes are used on windows
|
||||
for part in name.split('/') {
|
||||
dir = dir.join(part);
|
||||
}
|
||||
dir
|
||||
}
|
||||
}
|
||||
|
||||
fn registry_folder(&self, registry_url: &Url) -> PathBuf {
|
||||
self
|
||||
.root_dir
|
||||
.join(root_url_to_safe_local_dirname(registry_url))
|
||||
}
|
||||
|
||||
pub fn resolve_package_folder_id_from_specifier(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Option<NpmPackageCacheFolderId> {
|
||||
let mut maybe_relative_url = None;
|
||||
|
||||
// Iterate through known registries and try to get a match.
|
||||
for registry_dirname in &self.known_registries_dirnames {
|
||||
let registry_root_dir = self
|
||||
.root_dir_url
|
||||
.join(&format!("{}/", registry_dirname))
|
||||
// this not succeeding indicates a fatal issue, so unwrap
|
||||
.unwrap();
|
||||
|
||||
let Some(relative_url) = registry_root_dir.make_relative(specifier)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
||||
if relative_url.starts_with("../") {
|
||||
continue;
|
||||
}
|
||||
|
||||
maybe_relative_url = Some(relative_url);
|
||||
break;
|
||||
}
|
||||
|
||||
let mut relative_url = maybe_relative_url?;
|
||||
|
||||
// base32 decode the url if it starts with an underscore
|
||||
// * Ex. _{base32(package_name)}/
|
||||
if let Some(end_url) = relative_url.strip_prefix('_') {
|
||||
let mut parts = end_url
|
||||
.split('/')
|
||||
.map(ToOwned::to_owned)
|
||||
.collect::<Vec<_>>();
|
||||
match mixed_case_package_name_decode(&parts[0]) {
|
||||
Some(part) => {
|
||||
parts[0] = part;
|
||||
}
|
||||
None => return None,
|
||||
}
|
||||
relative_url = parts.join("/");
|
||||
}
|
||||
|
||||
// examples:
|
||||
// * chalk/5.0.1/
|
||||
// * @types/chalk/5.0.1/
|
||||
// * some-package/5.0.1_1/ -- where the `_1` (/_\d+/) is a copy of the folder for peer deps
|
||||
let is_scoped_package = relative_url.starts_with('@');
|
||||
let mut parts = relative_url
|
||||
.split('/')
|
||||
.enumerate()
|
||||
.take(if is_scoped_package { 3 } else { 2 })
|
||||
.map(|(_, part)| part)
|
||||
.collect::<Vec<_>>();
|
||||
if parts.len() < 2 {
|
||||
return None;
|
||||
}
|
||||
let version_part = parts.pop().unwrap();
|
||||
let name = parts.join("/");
|
||||
let (version, copy_index) =
|
||||
if let Some((version, copy_count)) = version_part.split_once('_') {
|
||||
(version, copy_count.parse::<u8>().ok()?)
|
||||
} else {
|
||||
(version_part, 0)
|
||||
};
|
||||
Some(NpmPackageCacheFolderId {
|
||||
nv: PackageNv {
|
||||
name,
|
||||
version: Version::parse_from_npm(version).ok()?,
|
||||
},
|
||||
copy_index,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_cache_location(&self) -> PathBuf {
|
||||
self.root_dir.clone()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn mixed_case_package_name_encode(name: &str) -> String {
|
||||
// use base32 encoding because it's reversible and the character set
|
||||
// only includes the characters within 0-9 and A-Z so it can be lower cased
|
||||
base32::encode(
|
||||
base32::Alphabet::Rfc4648Lower { padding: false },
|
||||
name.as_bytes(),
|
||||
)
|
||||
.to_lowercase()
|
||||
}
|
||||
|
||||
pub fn mixed_case_package_name_decode(name: &str) -> Option<String> {
|
||||
base32::decode(base32::Alphabet::Rfc4648Lower { padding: false }, name)
|
||||
.and_then(|b| String::from_utf8(b).ok())
|
||||
}
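// A small usage sketch of the two helpers above; `dir_name_for_package` is a
// hypothetical wrapper (the real logic lives in package_name_folder), and the
// expected values mirror the "_jjju6tq" folder asserted in the test below.
fn dir_name_for_package(name: &str) -> String {
  if name.to_lowercase() != name {
    // prefix with an underscore since npm package names can't start with one
    format!("_{}", mixed_case_package_name_encode(name))
  } else {
    name.to_string()
  }
}
// dir_name_for_package("JSON") == "_jjju6tq"
// dir_name_for_package("chalk") == "chalk"
// mixed_case_package_name_decode("jjju6tq") == Some("JSON".to_string())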
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use deno_core::url::Url;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_semver::Version;
|
||||
|
||||
use super::NpmCacheDir;
|
||||
use crate::npm::cache_dir::NpmPackageCacheFolderId;
|
||||
|
||||
#[test]
|
||||
fn should_get_package_folder() {
|
||||
let deno_dir = crate::cache::DenoDir::new(None).unwrap();
|
||||
let root_dir = deno_dir.npm_folder_path();
|
||||
let registry_url = Url::parse("https://registry.npmjs.org/").unwrap();
|
||||
let cache = NpmCacheDir::new(root_dir.clone(), vec![registry_url.clone()]);
|
||||
|
||||
assert_eq!(
|
||||
cache.package_folder_for_id(
|
||||
&NpmPackageCacheFolderId {
|
||||
nv: PackageNv {
|
||||
name: "json".to_string(),
|
||||
version: Version::parse_from_npm("1.2.5").unwrap(),
|
||||
},
|
||||
copy_index: 0,
|
||||
},
|
||||
&registry_url,
|
||||
),
|
||||
root_dir
|
||||
.join("registry.npmjs.org")
|
||||
.join("json")
|
||||
.join("1.2.5"),
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
cache.package_folder_for_id(
|
||||
&NpmPackageCacheFolderId {
|
||||
nv: PackageNv {
|
||||
name: "json".to_string(),
|
||||
version: Version::parse_from_npm("1.2.5").unwrap(),
|
||||
},
|
||||
copy_index: 1,
|
||||
},
|
||||
&registry_url,
|
||||
),
|
||||
root_dir
|
||||
.join("registry.npmjs.org")
|
||||
.join("json")
|
||||
.join("1.2.5_1"),
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
cache.package_folder_for_id(
|
||||
&NpmPackageCacheFolderId {
|
||||
nv: PackageNv {
|
||||
name: "JSON".to_string(),
|
||||
version: Version::parse_from_npm("2.1.5").unwrap(),
|
||||
},
|
||||
copy_index: 0,
|
||||
},
|
||||
&registry_url,
|
||||
),
|
||||
root_dir
|
||||
.join("registry.npmjs.org")
|
||||
.join("_jjju6tq")
|
||||
.join("2.1.5"),
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
cache.package_folder_for_id(
|
||||
&NpmPackageCacheFolderId {
|
||||
nv: PackageNv {
|
||||
name: "@types/JSON".to_string(),
|
||||
version: Version::parse_from_npm("2.1.5").unwrap(),
|
||||
},
|
||||
copy_index: 0,
|
||||
},
|
||||
&registry_url,
|
||||
),
|
||||
root_dir
|
||||
.join("registry.npmjs.org")
|
||||
.join("_ib2hs4dfomxuuu2pjy")
|
||||
.join("2.1.5"),
|
||||
);
|
||||
}
|
||||
}
|
44 cli/npm/managed/cache/mod.rs vendored
@ -8,6 +8,7 @@ use std::path::PathBuf;
|
|||
use std::sync::Arc;
|
||||
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_cache_dir::npm::NpmCacheDir;
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
|
@ -18,10 +19,10 @@ use deno_npm::npm_rc::ResolvedNpmRc;
|
|||
use deno_npm::registry::NpmPackageInfo;
|
||||
use deno_npm::NpmPackageCacheFolderId;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_semver::Version;
|
||||
|
||||
use crate::args::CacheSetting;
|
||||
use crate::cache::CACHE_PERM;
|
||||
use crate::npm::NpmCacheDir;
|
||||
use crate::util::fs::atomic_write_file_with_retries;
|
||||
use crate::util::fs::hard_link_dir_recursive;
|
||||
|
||||
|
@ -87,9 +88,12 @@ impl NpmCache {
|
|||
) -> Result<(), AnyError> {
|
||||
let registry_url = self.npmrc.get_registry_url(&folder_id.nv.name);
|
||||
assert_ne!(folder_id.copy_index, 0);
|
||||
let package_folder = self
|
||||
.cache_dir
|
||||
.package_folder_for_id(folder_id, registry_url);
|
||||
let package_folder = self.cache_dir.package_folder_for_id(
|
||||
&folder_id.nv.name,
|
||||
&folder_id.nv.version.to_string(),
|
||||
folder_id.copy_index,
|
||||
registry_url,
|
||||
);
|
||||
|
||||
if package_folder.exists()
|
||||
// if this file exists, then the package didn't successfully initialize
|
||||
|
@ -100,9 +104,12 @@ impl NpmCache {
|
|||
return Ok(());
|
||||
}
|
||||
|
||||
let original_package_folder = self
|
||||
.cache_dir
|
||||
.package_folder_for_nv(&folder_id.nv, registry_url);
|
||||
let original_package_folder = self.cache_dir.package_folder_for_id(
|
||||
&folder_id.nv.name,
|
||||
&folder_id.nv.version.to_string(),
|
||||
0, // original copy index
|
||||
registry_url,
|
||||
);
|
||||
|
||||
// it seems Windows raises an "AccessDenied" error when moving a
// directory with hard links, which is why this approach is used
|
||||
|
@ -114,7 +121,12 @@ impl NpmCache {
|
|||
|
||||
pub fn package_folder_for_id(&self, id: &NpmPackageCacheFolderId) -> PathBuf {
|
||||
let registry_url = self.npmrc.get_registry_url(&id.nv.name);
|
||||
self.cache_dir.package_folder_for_id(id, registry_url)
|
||||
self.cache_dir.package_folder_for_id(
|
||||
&id.nv.name,
|
||||
&id.nv.version.to_string(),
|
||||
id.copy_index,
|
||||
registry_url,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn package_folder_for_nv(&self, package: &PackageNv) -> PathBuf {
|
||||
|
@ -127,7 +139,12 @@ impl NpmCache {
|
|||
package: &PackageNv,
|
||||
registry_url: &Url,
|
||||
) -> PathBuf {
|
||||
self.cache_dir.package_folder_for_nv(package, registry_url)
|
||||
self.cache_dir.package_folder_for_id(
|
||||
&package.name,
|
||||
&package.version.to_string(),
|
||||
0, // original copy_index
|
||||
registry_url,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn package_name_folder(&self, name: &str) -> PathBuf {
|
||||
|
@ -146,6 +163,15 @@ impl NpmCache {
|
|||
self
|
||||
.cache_dir
|
||||
.resolve_package_folder_id_from_specifier(specifier)
|
||||
.and_then(|cache_id| {
|
||||
Some(NpmPackageCacheFolderId {
|
||||
nv: PackageNv {
|
||||
name: cache_id.name,
|
||||
version: Version::parse_from_npm(&cache_id.version).ok()?,
|
||||
},
|
||||
copy_index: cache_id.copy_index,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn load_package_info(
|
||||
|
|
|
@ -7,6 +7,7 @@ use std::sync::Arc;
|
|||
use cache::RegistryInfoDownloader;
|
||||
use cache::TarballCache;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_cache_dir::npm::NpmCacheDir;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json;
|
||||
|
@ -22,7 +23,7 @@ use deno_npm::NpmSystemInfo;
|
|||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node::NodePermissions;
|
||||
use deno_runtime::deno_node::NodeRequireResolver;
|
||||
use deno_runtime::deno_node::NpmProcessStateProvider;
|
||||
use deno_runtime::ops::process::NpmProcessStateProvider;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_semver::package::PackageReq;
|
||||
use node_resolver::errors::PackageFolderResolveError;
|
||||
|
@ -35,6 +36,7 @@ use crate::args::LifecycleScriptsConfig;
|
|||
use crate::args::NpmInstallDepsProvider;
|
||||
use crate::args::NpmProcessState;
|
||||
use crate::args::NpmProcessStateKind;
|
||||
use crate::cache::DenoCacheEnvFsAdapter;
|
||||
use crate::cache::FastInsecureHasher;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
|
||||
|
@ -45,12 +47,10 @@ use self::cache::NpmCache;
|
|||
use self::registry::CliNpmRegistryApi;
|
||||
use self::resolution::NpmResolution;
|
||||
use self::resolvers::create_npm_fs_resolver;
|
||||
pub use self::resolvers::normalize_pkg_name_for_node_modules_deno_folder;
|
||||
use self::resolvers::NpmPackageFsResolver;
|
||||
|
||||
use super::CliNpmResolver;
|
||||
use super::InnerCliNpmResolverRef;
|
||||
use super::NpmCacheDir;
|
||||
|
||||
mod cache;
|
||||
mod registry;
|
||||
|
@ -188,6 +188,7 @@ fn create_inner(
|
|||
fn create_cache(options: &CliNpmResolverManagedCreateOptions) -> Arc<NpmCache> {
|
||||
Arc::new(NpmCache::new(
|
||||
NpmCacheDir::new(
|
||||
&DenoCacheEnvFsAdapter(options.fs.as_ref()),
|
||||
options.npm_global_cache_dir.clone(),
|
||||
options.npmrc.get_all_known_registries_urls(),
|
||||
),
|
||||
|
@ -573,7 +574,7 @@ impl NpmProcessStateProvider for ManagedCliNpmResolver {
|
|||
fn get_npm_process_state(&self) -> String {
|
||||
npm_process_state(
|
||||
self.resolution.serialized_valid_snapshot(),
|
||||
self.fs_resolver.node_modules_path().map(|p| p.as_path()),
|
||||
self.fs_resolver.node_modules_path(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -630,7 +631,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
|
|||
InnerCliNpmResolverRef::Managed(self)
|
||||
}
|
||||
|
||||
fn root_node_modules_path(&self) -> Option<&PathBuf> {
|
||||
fn root_node_modules_path(&self) -> Option<&Path> {
|
||||
self.fs_resolver.node_modules_path()
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
pub mod bin_entries;
|
||||
pub mod lifecycle_scripts;
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::io::ErrorKind;
|
||||
use std::path::Path;
|
||||
|
@ -30,7 +33,7 @@ pub trait NpmPackageFsResolver: Send + Sync {
|
|||
fn root_dir_url(&self) -> &Url;
|
||||
|
||||
/// The local node_modules folder if it is applicable to the implementation.
|
||||
fn node_modules_path(&self) -> Option<&PathBuf>;
|
||||
fn node_modules_path(&self) -> Option<&Path>;
|
||||
|
||||
fn maybe_package_folder(&self, package_id: &NpmPackageId) -> Option<PathBuf>;
|
||||
|
||||
|
@ -134,7 +137,7 @@ impl RegistryReadPermissionChecker {
|
|||
|
||||
/// Caches all the packages in parallel.
|
||||
pub async fn cache_packages(
|
||||
packages: Vec<NpmResolutionPackage>,
|
||||
packages: &[NpmResolutionPackage],
|
||||
tarball_cache: &Arc<TarballCache>,
|
||||
) -> Result<(), AnyError> {
|
||||
let mut futures_unordered = futures::stream::FuturesUnordered::new();
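// A simplified sketch of the parallel caching pattern in this function, with plain
// strings standing in for NpmResolutionPackage and TarballCache (both hypothetical
// here): each download future goes onto a FuturesUnordered and the stream is drained
// so the first error is surfaced.
async fn cache_all(package_names: &[String]) -> Result<(), std::io::Error> {
  use futures::StreamExt;
  let mut futures_unordered = futures::stream::FuturesUnordered::new();
  for name in package_names {
    futures_unordered.push(async move {
      // hypothetical: download and extract the tarball for `name` here
      println!("caching {name}");
      Ok::<(), std::io::Error>(())
    });
  }
  while let Some(result) = futures_unordered.next().await {
    result?;
  }
  Ok(())
}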
|
||||
|
|
|
@ -12,12 +12,12 @@ use std::path::Path;
|
|||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Default)]
|
||||
pub(super) struct BinEntries {
|
||||
pub struct BinEntries<'a> {
|
||||
/// Packages that have colliding bin names
|
||||
collisions: HashSet<NpmPackageId>,
|
||||
seen_names: HashMap<String, NpmPackageId>,
|
||||
collisions: HashSet<&'a NpmPackageId>,
|
||||
seen_names: HashMap<&'a str, &'a NpmPackageId>,
|
||||
/// The bin entries
|
||||
entries: Vec<(NpmResolutionPackage, PathBuf)>,
|
||||
entries: Vec<(&'a NpmResolutionPackage, PathBuf)>,
|
||||
}
|
||||
|
||||
/// Returns the name of the default binary for the given package.
|
||||
|
@ -31,37 +31,32 @@ fn default_bin_name(package: &NpmResolutionPackage) -> &str {
|
|||
.map_or(package.id.nv.name.as_str(), |(_, name)| name)
|
||||
}
|
||||
|
||||
impl BinEntries {
|
||||
pub(super) fn new() -> Self {
|
||||
impl<'a> BinEntries<'a> {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
/// Add a new bin entry (package with a bin field)
|
||||
pub(super) fn add(
|
||||
pub fn add(
|
||||
&mut self,
|
||||
package: NpmResolutionPackage,
|
||||
package: &'a NpmResolutionPackage,
|
||||
package_path: PathBuf,
|
||||
) {
|
||||
// check for a new collision, if we haven't already
|
||||
// found one
|
||||
match package.bin.as_ref().unwrap() {
|
||||
deno_npm::registry::NpmPackageVersionBinEntry::String(_) => {
|
||||
let bin_name = default_bin_name(&package);
|
||||
let bin_name = default_bin_name(package);
|
||||
|
||||
if let Some(other) = self
|
||||
.seen_names
|
||||
.insert(bin_name.to_string(), package.id.clone())
|
||||
{
|
||||
self.collisions.insert(package.id.clone());
|
||||
if let Some(other) = self.seen_names.insert(bin_name, &package.id) {
|
||||
self.collisions.insert(&package.id);
|
||||
self.collisions.insert(other);
|
||||
}
|
||||
}
|
||||
deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => {
|
||||
for name in entries.keys() {
|
||||
if let Some(other) =
|
||||
self.seen_names.insert(name.to_string(), package.id.clone())
|
||||
{
|
||||
self.collisions.insert(package.id.clone());
|
||||
if let Some(other) = self.seen_names.insert(name, &package.id) {
|
||||
self.collisions.insert(&package.id);
|
||||
self.collisions.insert(other);
|
||||
}
|
||||
}
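// A minimal sketch of the collision-tracking pattern above, with plain string ids
// standing in for NpmPackageId: HashMap::insert returns the previously stored value,
// so a repeated bin name exposes both packages involved in the collision.
fn record_bin<'a>(
  seen_names: &mut std::collections::HashMap<&'a str, &'a str>,
  collisions: &mut std::collections::HashSet<&'a str>,
  bin_name: &'a str,
  package_id: &'a str,
) {
  if let Some(other) = seen_names.insert(bin_name, package_id) {
    collisions.insert(package_id);
    collisions.insert(other);
  }
}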
|
||||
|
@ -74,7 +69,11 @@ impl BinEntries {
|
|||
fn for_each_entry(
|
||||
&mut self,
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
mut f: impl FnMut(
|
||||
mut already_seen: impl FnMut(
|
||||
&Path,
|
||||
&str, // bin script
|
||||
) -> Result<(), AnyError>,
|
||||
mut new: impl FnMut(
|
||||
&NpmResolutionPackage,
|
||||
&Path,
|
||||
&str, // bin name
|
||||
|
@ -95,18 +94,20 @@ impl BinEntries {
|
|||
deno_npm::registry::NpmPackageVersionBinEntry::String(script) => {
|
||||
let name = default_bin_name(package);
|
||||
if !seen.insert(name) {
|
||||
already_seen(package_path, script)?;
|
||||
// we already set up a bin entry with this name
|
||||
continue;
|
||||
}
|
||||
f(package, package_path, name, script)?;
|
||||
new(package, package_path, name, script)?;
|
||||
}
|
||||
deno_npm::registry::NpmPackageVersionBinEntry::Map(entries) => {
|
||||
for (name, script) in entries {
|
||||
if !seen.insert(name) {
|
||||
already_seen(package_path, script)?;
|
||||
// we already set up a bin entry with this name
|
||||
continue;
|
||||
}
|
||||
f(package, package_path, name, script)?;
|
||||
new(package, package_path, name, script)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -117,23 +118,27 @@ impl BinEntries {
|
|||
}
|
||||
|
||||
/// Collect the bin entries into a vec of (name, script path)
|
||||
pub(super) fn into_bin_files(
|
||||
pub fn into_bin_files(
|
||||
mut self,
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
) -> Vec<(String, PathBuf)> {
|
||||
let mut bins = Vec::new();
|
||||
self
|
||||
.for_each_entry(snapshot, |_, package_path, name, script| {
|
||||
bins.push((name.to_string(), package_path.join(script)));
|
||||
Ok(())
|
||||
})
|
||||
.for_each_entry(
|
||||
snapshot,
|
||||
|_, _| Ok(()),
|
||||
|_, package_path, name, script| {
|
||||
bins.push((name.to_string(), package_path.join(script)));
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
bins
|
||||
}
|
||||
|
||||
/// Finish setting up the bin entries, writing the necessary files
|
||||
/// to disk.
|
||||
pub(super) fn finish(
|
||||
pub fn finish(
|
||||
mut self,
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
bin_node_modules_dir_path: &Path,
|
||||
|
@ -144,15 +149,26 @@ impl BinEntries {
|
|||
)?;
|
||||
}
|
||||
|
||||
self.for_each_entry(snapshot, |package, package_path, name, script| {
|
||||
set_up_bin_entry(
|
||||
package,
|
||||
name,
|
||||
script,
|
||||
package_path,
|
||||
bin_node_modules_dir_path,
|
||||
)
|
||||
})?;
|
||||
self.for_each_entry(
|
||||
snapshot,
|
||||
|_package_path, _script| {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
let path = _package_path.join(_script);
|
||||
make_executable_if_exists(&path)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
|package, package_path, name, script| {
|
||||
set_up_bin_entry(
|
||||
package,
|
||||
name,
|
||||
script,
|
||||
package_path,
|
||||
bin_node_modules_dir_path,
|
||||
)
|
||||
},
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -162,8 +178,8 @@ impl BinEntries {
|
|||
// that has a bin entry, then sort them by depth
|
||||
fn sort_by_depth(
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
bin_entries: &mut [(NpmResolutionPackage, PathBuf)],
|
||||
collisions: &mut HashSet<NpmPackageId>,
|
||||
bin_entries: &mut [(&NpmResolutionPackage, PathBuf)],
|
||||
collisions: &mut HashSet<&NpmPackageId>,
|
||||
) {
|
||||
enum Entry<'a> {
|
||||
Pkg(&'a NpmPackageId),
|
||||
|
@ -217,7 +233,7 @@ fn sort_by_depth(
|
|||
});
|
||||
}
|
||||
|
||||
pub(super) fn set_up_bin_entry(
|
||||
pub fn set_up_bin_entry(
|
||||
package: &NpmResolutionPackage,
|
||||
bin_name: &str,
|
||||
#[allow(unused_variables)] bin_script: &str,
|
||||
|
@ -259,6 +275,32 @@ fn set_up_bin_shim(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
/// Make the file at `path` executable if it exists.
|
||||
/// Returns `true` if the file exists, `false` otherwise.
|
||||
fn make_executable_if_exists(path: &Path) -> Result<bool, AnyError> {
|
||||
use std::io;
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
let mut perms = match std::fs::metadata(path) {
|
||||
Ok(metadata) => metadata.permissions(),
|
||||
Err(err) => {
|
||||
if err.kind() == io::ErrorKind::NotFound {
|
||||
return Ok(false);
|
||||
}
|
||||
return Err(err.into());
|
||||
}
|
||||
};
|
||||
if perms.mode() & 0o111 == 0 {
|
||||
// if the original file is not executable, make it executable
|
||||
perms.set_mode(perms.mode() | 0o111);
|
||||
std::fs::set_permissions(path, perms).with_context(|| {
|
||||
format!("Setting permissions on '{}'", path.display())
|
||||
})?;
|
||||
}
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn symlink_bin_entry(
|
||||
_package: &NpmResolutionPackage,
|
||||
|
@ -272,32 +314,20 @@ fn symlink_bin_entry(
|
|||
let link = bin_node_modules_dir_path.join(bin_name);
|
||||
let original = package_path.join(bin_script);
|
||||
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
let mut perms = match std::fs::metadata(&original) {
|
||||
Ok(metadata) => metadata.permissions(),
|
||||
Err(err) => {
|
||||
if err.kind() == io::ErrorKind::NotFound {
|
||||
log::warn!(
|
||||
"{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.",
|
||||
deno_terminal::colors::yellow("Warning"),
|
||||
bin_name,
|
||||
package_path.display(),
|
||||
original.display()
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
return Err(err).with_context(|| {
|
||||
format!("Can't set up '{}' bin at {}", bin_name, original.display())
|
||||
});
|
||||
}
|
||||
};
|
||||
if perms.mode() & 0o111 == 0 {
|
||||
// if the original file is not executable, make it executable
|
||||
perms.set_mode(perms.mode() | 0o111);
|
||||
std::fs::set_permissions(&original, perms).with_context(|| {
|
||||
format!("Setting permissions on '{}'", original.display())
|
||||
})?;
|
||||
let found = make_executable_if_exists(&original).with_context(|| {
|
||||
format!("Can't set up '{}' bin at {}", bin_name, original.display())
|
||||
})?;
|
||||
if !found {
|
||||
log::warn!(
|
||||
"{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.",
|
||||
deno_terminal::colors::yellow("Warning"),
|
||||
bin_name,
|
||||
package_path.display(),
|
||||
original.display()
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let original_relative =
|
||||
crate::util::path::relative_path(bin_node_modules_dir_path, &original)
|
||||
.unwrap_or(original);
|
335 cli/npm/managed/resolvers/common/lifecycle_scripts.rs Normal file
|
@ -0,0 +1,335 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use super::bin_entries::BinEntries;
|
||||
use crate::args::LifecycleScriptsConfig;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_npm::resolution::NpmResolutionSnapshot;
|
||||
use deno_runtime::deno_io::FromRawIoHandle;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_semver::Version;
|
||||
use std::borrow::Cow;
|
||||
use std::rc::Rc;
|
||||
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_npm::NpmResolutionPackage;
|
||||
|
||||
pub trait LifecycleScriptsStrategy {
|
||||
fn can_run_scripts(&self) -> bool {
|
||||
true
|
||||
}
|
||||
fn package_path(&self, package: &NpmResolutionPackage) -> PathBuf;
|
||||
|
||||
fn warn_on_scripts_not_run(
|
||||
&self,
|
||||
packages: &[(&NpmResolutionPackage, PathBuf)],
|
||||
) -> Result<(), AnyError>;
|
||||
|
||||
fn has_warned(&self, package: &NpmResolutionPackage) -> bool;
|
||||
|
||||
fn has_run(&self, package: &NpmResolutionPackage) -> bool;
|
||||
|
||||
fn did_run_scripts(
|
||||
&self,
|
||||
package: &NpmResolutionPackage,
|
||||
) -> Result<(), AnyError>;
|
||||
}
|
||||
|
||||
pub struct LifecycleScripts<'a> {
|
||||
packages_with_scripts: Vec<(&'a NpmResolutionPackage, PathBuf)>,
|
||||
packages_with_scripts_not_run: Vec<(&'a NpmResolutionPackage, PathBuf)>,
|
||||
|
||||
config: &'a LifecycleScriptsConfig,
|
||||
strategy: Box<dyn LifecycleScriptsStrategy + 'a>,
|
||||
}
|
||||
|
||||
impl<'a> LifecycleScripts<'a> {
|
||||
pub fn new<T: LifecycleScriptsStrategy + 'a>(
|
||||
config: &'a LifecycleScriptsConfig,
|
||||
strategy: T,
|
||||
) -> Self {
|
||||
Self {
|
||||
config,
|
||||
packages_with_scripts: Vec::new(),
|
||||
packages_with_scripts_not_run: Vec::new(),
|
||||
strategy: Box::new(strategy),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn has_lifecycle_scripts(
|
||||
package: &NpmResolutionPackage,
|
||||
package_path: &Path,
|
||||
) -> bool {
|
||||
if let Some(install) = package.scripts.get("install") {
|
||||
// default script
|
||||
if !is_broken_default_install_script(install, package_path) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
package.scripts.contains_key("preinstall")
|
||||
|| package.scripts.contains_key("postinstall")
|
||||
}
|
||||
|
||||
// npm defaults to running `node-gyp rebuild` if there is a `binding.gyp` file
|
||||
// but it always fails if the package excludes the `binding.gyp` file when they publish.
|
||||
// (for example, `fsevents` hits this)
|
||||
fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool {
|
||||
script == "node-gyp rebuild" && !package_path.join("binding.gyp").exists()
|
||||
}
|
||||
|
||||
impl<'a> LifecycleScripts<'a> {
|
||||
fn can_run_scripts(&self, package_nv: &PackageNv) -> bool {
|
||||
if !self.strategy.can_run_scripts() {
|
||||
return false;
|
||||
}
|
||||
use crate::args::PackagesAllowedScripts;
|
||||
match &self.config.allowed {
|
||||
PackagesAllowedScripts::All => true,
|
||||
// TODO: make this more correct
|
||||
PackagesAllowedScripts::Some(allow_list) => allow_list.iter().any(|s| {
|
||||
let s = s.strip_prefix("npm:").unwrap_or(s);
|
||||
s == package_nv.name || s == package_nv.to_string()
|
||||
}),
|
||||
PackagesAllowedScripts::None => false,
|
||||
}
|
||||
}
|
||||
/// Register a package for running lifecycle scripts, if applicable.
|
||||
///
|
||||
/// `package_path` is the path containing the package's code (its root dir).
|
||||
/// `package_meta_path` is the path to serve as the base directory for lifecycle
|
||||
/// script-related metadata (e.g. to store whether the scripts have been run already)
|
||||
pub fn add(
|
||||
&mut self,
|
||||
package: &'a NpmResolutionPackage,
|
||||
package_path: Cow<Path>,
|
||||
) {
|
||||
if has_lifecycle_scripts(package, &package_path) {
|
||||
if self.can_run_scripts(&package.id.nv) {
|
||||
if !self.strategy.has_run(package) {
|
||||
self
|
||||
.packages_with_scripts
|
||||
.push((package, package_path.into_owned()));
|
||||
}
|
||||
} else if !self.strategy.has_run(package)
|
||||
&& (self.config.explicit_install || !self.strategy.has_warned(package))
|
||||
{
|
||||
// Skip adding `esbuild` as it is known that it can work properly without its lifecycle
// scripts being run, and it's also very popular - any project using Vite would raise warnings.
|
||||
{
|
||||
let nv = &package.id.nv;
|
||||
if nv.name == "esbuild"
|
||||
&& nv.version >= Version::parse_standard("0.18.0").unwrap()
|
||||
{
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
self
|
||||
.packages_with_scripts_not_run
|
||||
.push((package, package_path.into_owned()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn warn_not_run_scripts(&self) -> Result<(), AnyError> {
|
||||
if !self.packages_with_scripts_not_run.is_empty() {
|
||||
self
|
||||
.strategy
|
||||
.warn_on_scripts_not_run(&self.packages_with_scripts_not_run)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn finish(
|
||||
self,
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
packages: &[NpmResolutionPackage],
|
||||
root_node_modules_dir_path: Option<&Path>,
|
||||
) -> Result<(), AnyError> {
|
||||
self.warn_not_run_scripts()?;
|
||||
let get_package_path =
|
||||
|p: &NpmResolutionPackage| self.strategy.package_path(p);
|
||||
let mut failed_packages = Vec::new();
|
||||
if !self.packages_with_scripts.is_empty() {
|
||||
// get custom commands for each bin available in the node_modules dir (essentially
|
||||
// the scripts that are in `node_modules/.bin`)
|
||||
let base =
|
||||
resolve_baseline_custom_commands(snapshot, packages, get_package_path)?;
|
||||
let init_cwd = &self.config.initial_cwd;
|
||||
let process_state = crate::npm::managed::npm_process_state(
|
||||
snapshot.as_valid_serialized(),
|
||||
root_node_modules_dir_path,
|
||||
);
|
||||
|
||||
let mut env_vars = crate::task_runner::real_env_vars();
|
||||
// we want to pass the current state of npm resolution down to the deno subprocess
|
||||
// (that may be running as part of the script). we do this with an inherited temp file
|
||||
//
|
||||
// SAFETY: we are sharing a single temp file across all of the scripts. the file position
|
||||
// will be shared among these, which is okay since we run only one script at a time.
|
||||
// However, if we concurrently run scripts in the future we will
|
||||
// have to have multiple temp files.
|
||||
let temp_file_fd =
|
||||
deno_runtime::ops::process::npm_process_state_tempfile(
|
||||
process_state.as_bytes(),
|
||||
).context("failed to create npm process state tempfile for running lifecycle scripts")?;
|
||||
// SAFETY: fd/handle is valid
|
||||
let _temp_file =
|
||||
unsafe { std::fs::File::from_raw_io_handle(temp_file_fd) }; // make sure the file gets closed
|
||||
env_vars.insert(
|
||||
deno_runtime::ops::process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME
|
||||
.to_string(),
|
||||
(temp_file_fd as usize).to_string(),
|
||||
);
|
||||
for (package, package_path) in self.packages_with_scripts {
|
||||
// add custom commands for binaries from the package's dependencies. this will take precedence over the
|
||||
// baseline commands, so if the package relies on a bin that conflicts with one higher in the dependency tree, the
|
||||
// correct bin will be used.
|
||||
let custom_commands = resolve_custom_commands_from_deps(
|
||||
base.clone(),
|
||||
package,
|
||||
snapshot,
|
||||
get_package_path,
|
||||
)?;
|
||||
for script_name in ["preinstall", "install", "postinstall"] {
|
||||
if let Some(script) = package.scripts.get(script_name) {
|
||||
if script_name == "install"
|
||||
&& is_broken_default_install_script(script, &package_path)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
let exit_code = crate::task_runner::run_task(
|
||||
crate::task_runner::RunTaskOptions {
|
||||
task_name: script_name,
|
||||
script,
|
||||
cwd: &package_path,
|
||||
env_vars: env_vars.clone(),
|
||||
custom_commands: custom_commands.clone(),
|
||||
init_cwd,
|
||||
argv: &[],
|
||||
root_node_modules_dir: root_node_modules_dir_path,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
if exit_code != 0 {
|
||||
log::warn!(
|
||||
"error: script '{}' in '{}' failed with exit code {}",
|
||||
script_name,
|
||||
package.id.nv,
|
||||
exit_code,
|
||||
);
|
||||
failed_packages.push(&package.id.nv);
|
||||
// assume if earlier script fails, later ones will fail too
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
self.strategy.did_run_scripts(package)?;
|
||||
}
|
||||
}
|
||||
if failed_packages.is_empty() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(AnyError::msg(format!(
|
||||
"failed to run scripts for packages: {}",
|
||||
failed_packages
|
||||
.iter()
|
||||
.map(|p| p.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// take in all (non copy) packages from snapshot,
|
||||
// and resolve the set of available binaries to create
|
||||
// custom commands available to the task runner
|
||||
fn resolve_baseline_custom_commands(
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
packages: &[NpmResolutionPackage],
|
||||
get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
|
||||
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
|
||||
let mut custom_commands = crate::task_runner::TaskCustomCommands::new();
|
||||
custom_commands
|
||||
.insert("npx".to_string(), Rc::new(crate::task_runner::NpxCommand));
|
||||
|
||||
custom_commands
|
||||
.insert("npm".to_string(), Rc::new(crate::task_runner::NpmCommand));
|
||||
|
||||
custom_commands
|
||||
.insert("node".to_string(), Rc::new(crate::task_runner::NodeCommand));
|
||||
|
||||
custom_commands.insert(
|
||||
"node-gyp".to_string(),
|
||||
Rc::new(crate::task_runner::NodeGypCommand),
|
||||
);
|
||||
|
||||
// TODO: this recreates the bin entries which could be redoing some work, but the ones
|
||||
// we compute earlier in `sync_resolution_with_fs` may not be exhaustive (because we skip
|
||||
// doing it for packages that are set up already.
|
||||
// realistically, scripts won't be run very often so it probably isn't too big of an issue.
|
||||
resolve_custom_commands_from_packages(
|
||||
custom_commands,
|
||||
snapshot,
|
||||
packages,
|
||||
get_package_path,
|
||||
)
|
||||
}
|
||||
|
||||
// resolves the custom commands from an iterator of packages
|
||||
// and adds them to the existing custom commands.
|
||||
// note that this will overwrite any existing custom commands
|
||||
fn resolve_custom_commands_from_packages<
|
||||
'a,
|
||||
P: IntoIterator<Item = &'a NpmResolutionPackage>,
|
||||
>(
|
||||
mut commands: crate::task_runner::TaskCustomCommands,
|
||||
snapshot: &'a NpmResolutionSnapshot,
|
||||
packages: P,
|
||||
get_package_path: impl Fn(&'a NpmResolutionPackage) -> PathBuf,
|
||||
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
|
||||
let mut bin_entries = BinEntries::new();
|
||||
for package in packages {
|
||||
let package_path = get_package_path(package);
|
||||
|
||||
if package.bin.is_some() {
|
||||
bin_entries.add(package, package_path);
|
||||
}
|
||||
}
|
||||
let bins = bin_entries.into_bin_files(snapshot);
|
||||
for (bin_name, script_path) in bins {
|
||||
commands.insert(
|
||||
bin_name.clone(),
|
||||
Rc::new(crate::task_runner::NodeModulesFileRunCommand {
|
||||
command_name: bin_name,
|
||||
path: script_path,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(commands)
|
||||
}
|
||||
|
||||
// resolves the custom commands from the dependencies of a package
|
||||
// and adds them to the existing custom commands.
|
||||
// note that this will overwrite any existing custom commands.
|
||||
fn resolve_custom_commands_from_deps(
|
||||
baseline: crate::task_runner::TaskCustomCommands,
|
||||
package: &NpmResolutionPackage,
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
|
||||
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
|
||||
resolve_custom_commands_from_packages(
|
||||
baseline,
|
||||
snapshot,
|
||||
package
|
||||
.dependencies
|
||||
.values()
|
||||
.map(|id| snapshot.package_from_id(id).unwrap()),
|
||||
get_package_path,
|
||||
)
|
||||
}
|
|
@ -2,16 +2,19 @@
|
|||
|
||||
//! Code for global npm cache resolution.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::colors;
|
||||
use async_trait::async_trait;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::url::Url;
|
||||
use deno_npm::NpmPackageCacheFolderId;
|
||||
use deno_npm::NpmPackageId;
|
||||
use deno_npm::NpmResolutionPackage;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node::NodePermissions;
|
||||
|
@ -19,10 +22,14 @@ use node_resolver::errors::PackageFolderResolveError;
|
|||
use node_resolver::errors::PackageNotFoundError;
|
||||
use node_resolver::errors::ReferrerNotFoundError;
|
||||
|
||||
use crate::args::LifecycleScriptsConfig;
|
||||
use crate::cache::FastInsecureHasher;
|
||||
|
||||
use super::super::cache::NpmCache;
|
||||
use super::super::cache::TarballCache;
|
||||
use super::super::resolution::NpmResolution;
|
||||
use super::common::cache_packages;
|
||||
use super::common::lifecycle_scripts::LifecycleScriptsStrategy;
|
||||
use super::common::NpmPackageFsResolver;
|
||||
use super::common::RegistryReadPermissionChecker;
|
||||
|
||||
|
@ -34,6 +41,7 @@ pub struct GlobalNpmPackageResolver {
|
|||
resolution: Arc<NpmResolution>,
|
||||
system_info: NpmSystemInfo,
|
||||
registry_read_permission_checker: RegistryReadPermissionChecker,
|
||||
lifecycle_scripts: LifecycleScriptsConfig,
|
||||
}
|
||||
|
||||
impl GlobalNpmPackageResolver {
|
||||
|
@ -43,6 +51,7 @@ impl GlobalNpmPackageResolver {
|
|||
tarball_cache: Arc<TarballCache>,
|
||||
resolution: Arc<NpmResolution>,
|
||||
system_info: NpmSystemInfo,
|
||||
lifecycle_scripts: LifecycleScriptsConfig,
|
||||
) -> Self {
|
||||
Self {
|
||||
registry_read_permission_checker: RegistryReadPermissionChecker::new(
|
||||
|
@ -53,6 +62,7 @@ impl GlobalNpmPackageResolver {
|
|||
tarball_cache,
|
||||
resolution,
|
||||
system_info,
|
||||
lifecycle_scripts,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -63,7 +73,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
|
|||
self.cache.root_dir_url()
|
||||
}
|
||||
|
||||
fn node_modules_path(&self) -> Option<&PathBuf> {
|
||||
fn node_modules_path(&self) -> Option<&Path> {
|
||||
None
|
||||
}
|
||||
|
||||
|
@ -149,8 +159,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
|
|||
let package_partitions = self
|
||||
.resolution
|
||||
.all_system_packages_partitioned(&self.system_info);
|
||||
|
||||
cache_packages(package_partitions.packages, &self.tarball_cache).await?;
|
||||
cache_packages(&package_partitions.packages, &self.tarball_cache).await?;
|
||||
|
||||
// create the copy package folders
|
||||
for copy in package_partitions.copy_packages {
|
||||
|
@ -159,6 +168,18 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
|
|||
.ensure_copy_package(©.get_package_cache_folder_id())?;
|
||||
}
|
||||
|
||||
let mut lifecycle_scripts =
|
||||
super::common::lifecycle_scripts::LifecycleScripts::new(
|
||||
&self.lifecycle_scripts,
|
||||
GlobalLifecycleScripts::new(self, &self.lifecycle_scripts.root_dir),
|
||||
);
|
||||
for package in &package_partitions.packages {
|
||||
let package_folder = self.cache.package_folder_for_nv(&package.id.nv);
|
||||
lifecycle_scripts.add(package, Cow::Borrowed(&package_folder));
|
||||
}
|
||||
|
||||
lifecycle_scripts.warn_not_run_scripts()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -172,3 +193,78 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver {
|
|||
.ensure_registry_read_permission(permissions, path)
|
||||
}
|
||||
}
|
||||
|
||||
struct GlobalLifecycleScripts<'a> {
|
||||
resolver: &'a GlobalNpmPackageResolver,
|
||||
path_hash: u64,
|
||||
}
|
||||
|
||||
impl<'a> GlobalLifecycleScripts<'a> {
|
||||
fn new(resolver: &'a GlobalNpmPackageResolver, root_dir: &Path) -> Self {
|
||||
let mut hasher = FastInsecureHasher::new_without_deno_version();
|
||||
hasher.write(root_dir.to_string_lossy().as_bytes());
|
||||
let path_hash = hasher.finish();
|
||||
Self {
|
||||
resolver,
|
||||
path_hash,
|
||||
}
|
||||
}
|
||||
|
||||
fn warned_scripts_file(&self, package: &NpmResolutionPackage) -> PathBuf {
|
||||
self
|
||||
.package_path(package)
|
||||
.join(format!(".scripts-warned-{}", self.path_hash))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy
|
||||
for GlobalLifecycleScripts<'a>
|
||||
{
|
||||
fn can_run_scripts(&self) -> bool {
|
||||
false
|
||||
}
|
||||
fn package_path(&self, package: &NpmResolutionPackage) -> PathBuf {
|
||||
self.resolver.cache.package_folder_for_nv(&package.id.nv)
|
||||
}
|
||||
|
||||
fn warn_on_scripts_not_run(
|
||||
&self,
|
||||
packages: &[(&NpmResolutionPackage, PathBuf)],
|
||||
) -> std::result::Result<(), deno_core::anyhow::Error> {
|
||||
log::warn!("{} The following packages contained npm lifecycle scripts ({}) that were not executed:", colors::yellow("Warning"), colors::gray("preinstall/install/postinstall"));
|
||||
for (package, _) in packages {
|
||||
log::warn!("┠─ {}", colors::gray(format!("npm:{}", package.id.nv)));
|
||||
}
|
||||
log::warn!("┃");
|
||||
log::warn!(
|
||||
"┠─ {}",
|
||||
colors::italic("This may cause the packages to not work correctly.")
|
||||
);
|
||||
log::warn!("┠─ {}", colors::italic("Lifecycle scripts are only supported when using a `node_modules` directory."));
|
||||
log::warn!(
|
||||
"┠─ {}",
|
||||
colors::italic("Enable it in your deno config file:")
|
||||
);
|
||||
log::warn!("┖─ {}", colors::bold("\"nodeModulesDir\": \"auto\""));
|
||||
|
||||
for (package, _) in packages {
|
||||
std::fs::write(self.warned_scripts_file(package), "")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn did_run_scripts(
|
||||
&self,
|
||||
_package: &NpmResolutionPackage,
|
||||
) -> std::result::Result<(), deno_core::anyhow::Error> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn has_warned(&self, package: &NpmResolutionPackage) -> bool {
|
||||
self.warned_scripts_file(package).exists()
|
||||
}
|
||||
|
||||
fn has_run(&self, _package: &NpmResolutionPackage) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,8 +2,6 @@
|
|||
|
||||
//! Code for local node_modules resolution.
|
||||
|
||||
mod bin_entries;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::cell::RefCell;
|
||||
use std::cmp::Ordering;
|
||||
|
@ -18,11 +16,10 @@ use std::rc::Rc;
|
|||
use std::sync::Arc;
|
||||
|
||||
use crate::args::LifecycleScriptsConfig;
|
||||
use crate::args::PackagesAllowedScripts;
|
||||
use crate::colors;
|
||||
use async_trait::async_trait;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
use deno_core::anyhow;
|
||||
use deno_cache_dir::npm::mixed_case_package_name_decode;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::stream::FuturesUnordered;
|
||||
|
@ -34,6 +31,7 @@ use deno_npm::NpmPackageCacheFolderId;
|
|||
use deno_npm::NpmPackageId;
|
||||
use deno_npm::NpmResolutionPackage;
|
||||
use deno_npm::NpmSystemInfo;
|
||||
use deno_resolver::npm::normalize_pkg_name_for_node_modules_deno_folder;
|
||||
use deno_runtime::deno_fs;
|
||||
use deno_runtime::deno_node::NodePermissions;
|
||||
use deno_semver::package::PackageNv;
|
||||
|
@ -46,8 +44,6 @@ use serde::Serialize;
|
|||
|
||||
use crate::args::NpmInstallDepsProvider;
|
||||
use crate::cache::CACHE_PERM;
|
||||
use crate::npm::cache_dir::mixed_case_package_name_decode;
|
||||
use crate::npm::cache_dir::mixed_case_package_name_encode;
|
||||
use crate::util::fs::atomic_write_file_with_retries;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
|
||||
use crate::util::fs::clone_dir_recursive;
|
||||
|
@ -163,8 +159,8 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
&self.root_node_modules_url
|
||||
}
|
||||
|
||||
fn node_modules_path(&self) -> Option<&PathBuf> {
|
||||
Some(&self.root_node_modules_path)
|
||||
fn node_modules_path(&self) -> Option<&Path> {
|
||||
Some(self.root_node_modules_path.as_ref())
|
||||
}
|
||||
|
||||
fn maybe_package_folder(&self, id: &NpmPackageId) -> Option<PathBuf> {
|
||||
|
@ -272,77 +268,10 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
}
|
||||
}
|
||||
|
||||
// take in all (non copy) packages from snapshot,
|
||||
// and resolve the set of available binaries to create
|
||||
// custom commands available to the task runner
|
||||
fn resolve_baseline_custom_commands(
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
packages: &[NpmResolutionPackage],
|
||||
local_registry_dir: &Path,
|
||||
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
|
||||
let mut custom_commands = crate::task_runner::TaskCustomCommands::new();
|
||||
custom_commands
|
||||
.insert("npx".to_string(), Rc::new(crate::task_runner::NpxCommand));
|
||||
|
||||
custom_commands
|
||||
.insert("npm".to_string(), Rc::new(crate::task_runner::NpmCommand));
|
||||
|
||||
custom_commands
|
||||
.insert("node".to_string(), Rc::new(crate::task_runner::NodeCommand));
|
||||
|
||||
custom_commands.insert(
|
||||
"node-gyp".to_string(),
|
||||
Rc::new(crate::task_runner::NodeGypCommand),
|
||||
);
|
||||
|
||||
// TODO: this recreates the bin entries which could be redoing some work, but the ones
|
||||
// we compute earlier in `sync_resolution_with_fs` may not be exhaustive (because we skip
|
||||
// doing it for packages that are set up already.
|
||||
// realistically, scripts won't be run very often so it probably isn't too big of an issue.
|
||||
resolve_custom_commands_from_packages(
|
||||
custom_commands,
|
||||
snapshot,
|
||||
packages,
|
||||
local_registry_dir,
|
||||
)
|
||||
}
|
||||
|
||||
// resolves the custom commands from an iterator of packages
|
||||
// and adds them to the existing custom commands.
|
||||
// note that this will overwrite any existing custom commands
|
||||
fn resolve_custom_commands_from_packages<
|
||||
'a,
|
||||
P: IntoIterator<Item = &'a NpmResolutionPackage>,
|
||||
>(
|
||||
mut commands: crate::task_runner::TaskCustomCommands,
|
||||
snapshot: &'a NpmResolutionSnapshot,
|
||||
packages: P,
|
||||
local_registry_dir: &Path,
|
||||
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
|
||||
let mut bin_entries = bin_entries::BinEntries::new();
|
||||
for package in packages {
|
||||
let package_path =
|
||||
local_node_modules_package_path(local_registry_dir, package);
|
||||
|
||||
if package.bin.is_some() {
|
||||
bin_entries.add(package.clone(), package_path);
|
||||
}
|
||||
}
|
||||
let bins = bin_entries.into_bin_files(snapshot);
|
||||
for (bin_name, script_path) in bins {
|
||||
commands.insert(
|
||||
bin_name.clone(),
|
||||
Rc::new(crate::task_runner::NodeModulesFileRunCommand {
|
||||
command_name: bin_name,
|
||||
path: script_path,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(commands)
|
||||
}
|
||||
|
||||
fn local_node_modules_package_path(
|
||||
/// `node_modules/.deno/<package>/node_modules/<package_name>`
|
||||
///
|
||||
/// Where the actual package is stored.
|
||||
fn local_node_modules_package_contents_path(
|
||||
local_registry_dir: &Path,
|
||||
package: &NpmResolutionPackage,
|
||||
) -> PathBuf {
|
||||
|
@ -354,62 +283,6 @@ fn local_node_modules_package_path(
|
|||
.join(&package.id.nv.name)
|
||||
}
|
||||
|
||||
// resolves the custom commands from the dependencies of a package
|
||||
// and adds them to the existing custom commands.
|
||||
// note that this will overwrite any existing custom commands.
|
||||
fn resolve_custom_commands_from_deps(
|
||||
baseline: crate::task_runner::TaskCustomCommands,
|
||||
package: &NpmResolutionPackage,
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
local_registry_dir: &Path,
|
||||
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
|
||||
resolve_custom_commands_from_packages(
|
||||
baseline,
|
||||
snapshot,
|
||||
package
|
||||
.dependencies
|
||||
.values()
|
||||
.map(|id| snapshot.package_from_id(id).unwrap()),
|
||||
local_registry_dir,
|
||||
)
|
||||
}
|
||||
|
||||
fn can_run_scripts(
|
||||
allow_scripts: &PackagesAllowedScripts,
|
||||
package_nv: &PackageNv,
|
||||
) -> bool {
|
||||
match allow_scripts {
|
||||
PackagesAllowedScripts::All => true,
|
||||
// TODO: make this more correct
|
||||
PackagesAllowedScripts::Some(allow_list) => allow_list.iter().any(|s| {
|
||||
let s = s.strip_prefix("npm:").unwrap_or(s);
|
||||
s == package_nv.name || s == package_nv.to_string()
|
||||
}),
|
||||
PackagesAllowedScripts::None => false,
|
||||
}
|
||||
}
|
||||
|
||||
// npm defaults to running `node-gyp rebuild` if there is a `binding.gyp` file
|
||||
// but it always fails if the package excludes the `binding.gyp` file when they publish.
|
||||
// (for example, `fsevents` hits this)
|
||||
fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool {
|
||||
script == "node-gyp rebuild" && !package_path.join("binding.gyp").exists()
|
||||
}
|
||||
|
||||
fn has_lifecycle_scripts(
|
||||
package: &NpmResolutionPackage,
|
||||
package_path: &Path,
|
||||
) -> bool {
|
||||
if let Some(install) = package.scripts.get("install") {
|
||||
// default script
|
||||
if !is_broken_default_install_script(install, package_path) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
package.scripts.contains_key("preinstall")
|
||||
|| package.scripts.contains_key("postinstall")
|
||||
}
|
||||
|
||||
/// Creates a pnpm style folder structure.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn sync_resolution_with_fs(
|
||||
|
@ -460,9 +333,15 @@ async fn sync_resolution_with_fs(
|
|||
let mut cache_futures = FuturesUnordered::new();
|
||||
let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> =
|
||||
HashMap::with_capacity(package_partitions.packages.len());
|
||||
let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new()));
|
||||
let mut packages_with_scripts = Vec::with_capacity(2);
|
||||
let mut packages_with_scripts_not_run = Vec::new();
|
||||
let bin_entries =
|
||||
Rc::new(RefCell::new(super::common::bin_entries::BinEntries::new()));
|
||||
let mut lifecycle_scripts =
|
||||
super::common::lifecycle_scripts::LifecycleScripts::new(
|
||||
lifecycle_scripts,
|
||||
LocalLifecycleScripts {
|
||||
deno_local_registry_dir: &deno_local_registry_dir,
|
||||
},
|
||||
);
|
||||
let packages_with_deprecation_warnings = Arc::new(Mutex::new(Vec::new()));
|
||||
for package in &package_partitions.packages {
|
||||
if let Some(current_pkg) =
|
||||
|
@ -518,9 +397,7 @@ async fn sync_resolution_with_fs(
|
|||
.await??;
|
||||
|
||||
if package.bin.is_some() {
|
||||
bin_entries_to_setup
|
||||
.borrow_mut()
|
||||
.add(package.clone(), package_path);
|
||||
bin_entries_to_setup.borrow_mut().add(package, package_path);
|
||||
}
|
||||
|
||||
if let Some(deprecated) = &package.deprecated {
|
||||
|
@ -538,21 +415,7 @@ async fn sync_resolution_with_fs(
|
|||
let sub_node_modules = folder_path.join("node_modules");
|
||||
let package_path =
|
||||
join_package_name(&sub_node_modules, &package.id.nv.name);
|
||||
if has_lifecycle_scripts(package, &package_path) {
|
||||
let scripts_run = folder_path.join(".scripts-run");
|
||||
let has_warned = folder_path.join(".scripts-warned");
|
||||
if can_run_scripts(&lifecycle_scripts.allowed, &package.id.nv) {
|
||||
if !scripts_run.exists() {
|
||||
packages_with_scripts.push((
|
||||
package.clone(),
|
||||
package_path,
|
||||
scripts_run,
|
||||
));
|
||||
}
|
||||
} else if !scripts_run.exists() && !has_warned.exists() {
|
||||
packages_with_scripts_not_run.push((has_warned, package.id.nv.clone()));
|
||||
}
|
||||
}
|
||||
lifecycle_scripts.add(package, package_path.into());
|
||||
}
|
||||
|
||||
while let Some(result) = cache_futures.next().await {
|
||||
|
@ -789,74 +652,12 @@ async fn sync_resolution_with_fs(
|
|||
}
|
||||
}
|
||||
|
||||
if !packages_with_scripts.is_empty() {
|
||||
// get custom commands for each bin available in the node_modules dir (essentially
|
||||
// the scripts that are in `node_modules/.bin`)
|
||||
let base = resolve_baseline_custom_commands(
|
||||
snapshot,
|
||||
&package_partitions.packages,
|
||||
&deno_local_registry_dir,
|
||||
)?;
|
||||
let init_cwd = lifecycle_scripts.initial_cwd.as_deref().unwrap();
|
||||
let process_state = crate::npm::managed::npm_process_state(
|
||||
snapshot.as_valid_serialized(),
|
||||
Some(root_node_modules_dir_path),
|
||||
);
|
||||
|
||||
let mut env_vars = crate::task_runner::real_env_vars();
|
||||
env_vars.insert(
|
||||
crate::args::NPM_RESOLUTION_STATE_ENV_VAR_NAME.to_string(),
|
||||
process_state,
|
||||
);
|
||||
for (package, package_path, scripts_run_path) in packages_with_scripts {
|
||||
// add custom commands for binaries from the package's dependencies. this will take precedence over the
|
||||
// baseline commands, so if the package relies on a bin that conflicts with one higher in the dependency tree, the
|
||||
// correct bin will be used.
|
||||
let custom_commands = resolve_custom_commands_from_deps(
|
||||
base.clone(),
|
||||
&package,
|
||||
snapshot,
|
||||
&deno_local_registry_dir,
|
||||
)?;
|
||||
for script_name in ["preinstall", "install", "postinstall"] {
|
||||
if let Some(script) = package.scripts.get(script_name) {
|
||||
if script_name == "install"
|
||||
&& is_broken_default_install_script(script, &package_path)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
let exit_code =
|
||||
crate::task_runner::run_task(crate::task_runner::RunTaskOptions {
|
||||
task_name: script_name,
|
||||
script,
|
||||
cwd: &package_path,
|
||||
env_vars: env_vars.clone(),
|
||||
custom_commands: custom_commands.clone(),
|
||||
init_cwd,
|
||||
argv: &[],
|
||||
root_node_modules_dir: Some(root_node_modules_dir_path),
|
||||
})
|
||||
.await?;
|
||||
if exit_code != 0 {
|
||||
anyhow::bail!(
|
||||
"script '{}' in '{}' failed with exit code {}",
|
||||
script_name,
|
||||
package.id.nv,
|
||||
exit_code,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
fs::write(scripts_run_path, "")?;
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
let packages_with_deprecation_warnings =
|
||||
packages_with_deprecation_warnings.lock();
|
||||
if !packages_with_deprecation_warnings.is_empty() {
|
||||
log::warn!(
|
||||
"{} Following packages are deprecated:",
|
||||
"{} The following packages are deprecated:",
|
||||
colors::yellow("Warning")
|
||||
);
|
||||
let len = packages_with_deprecation_warnings.len();
|
||||
|
@ -870,7 +671,7 @@ async fn sync_resolution_with_fs(
|
|||
);
|
||||
} else {
|
||||
log::warn!(
|
||||
"┗─ {}",
|
||||
"┖─ {}",
|
||||
colors::gray(format!("npm:{:?} ({})", package_id, msg))
|
||||
);
|
||||
}
|
||||
|
@ -878,36 +679,13 @@ async fn sync_resolution_with_fs(
|
|||
}
|
||||
}
|
||||
|
||||
if !packages_with_scripts_not_run.is_empty() {
|
||||
log::warn!("{} Following packages contained npm lifecycle scripts ({}) that were not executed:", colors::yellow("Warning"), colors::gray("preinstall/install/postinstall"));
|
||||
|
||||
for (_, package_nv) in packages_with_scripts_not_run.iter() {
|
||||
log::warn!("┠─ {}", colors::gray(format!("npm:{package_nv}")));
|
||||
}
|
||||
|
||||
log::warn!("┃");
|
||||
log::warn!(
|
||||
"┠─ {}",
|
||||
colors::italic("This may cause the packages to not work correctly.")
|
||||
);
|
||||
log::warn!("┗─ {}", colors::italic("To run lifecycle scripts, use the `--allow-scripts` flag with `deno install`:"));
|
||||
let packages_comma_separated = packages_with_scripts_not_run
|
||||
.iter()
|
||||
.map(|(_, p)| format!("npm:{p}"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(",");
|
||||
log::warn!(
|
||||
" {}",
|
||||
colors::bold(format!(
|
||||
"deno install --allow-scripts={}",
|
||||
packages_comma_separated
|
||||
))
|
||||
);
|
||||
|
||||
for (scripts_warned_path, _) in packages_with_scripts_not_run {
|
||||
let _ignore_err = fs::write(scripts_warned_path, "");
|
||||
}
|
||||
}
|
||||
lifecycle_scripts
|
||||
.finish(
|
||||
snapshot,
|
||||
&package_partitions.packages,
|
||||
Some(root_node_modules_dir_path),
|
||||
)
|
||||
.await?;
|
||||
|
||||
setup_cache.save();
|
||||
drop(single_process_lock);
|
||||
|
@ -916,6 +694,98 @@ async fn sync_resolution_with_fs(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// `node_modules/.deno/<package>/`
|
||||
fn local_node_modules_package_folder(
|
||||
local_registry_dir: &Path,
|
||||
package: &NpmResolutionPackage,
|
||||
) -> PathBuf {
|
||||
local_registry_dir.join(get_package_folder_id_folder_name(
|
||||
&package.get_package_cache_folder_id(),
|
||||
))
|
||||
}
|
||||
|
||||
struct LocalLifecycleScripts<'a> {
|
||||
deno_local_registry_dir: &'a Path,
|
||||
}
|
||||
|
||||
impl<'a> LocalLifecycleScripts<'a> {
|
||||
/// `node_modules/.deno/<package>/.scripts-run`
|
||||
fn ran_scripts_file(&self, package: &NpmResolutionPackage) -> PathBuf {
|
||||
local_node_modules_package_folder(self.deno_local_registry_dir, package)
|
||||
.join(".scripts-run")
|
||||
}
|
||||
|
||||
/// `node_modules/.deno/<package>/.scripts-warned`
|
||||
fn warned_scripts_file(&self, package: &NpmResolutionPackage) -> PathBuf {
|
||||
local_node_modules_package_folder(self.deno_local_registry_dir, package)
|
||||
.join(".scripts-warned")
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy
|
||||
for LocalLifecycleScripts<'a>
|
||||
{
|
||||
fn package_path(&self, package: &NpmResolutionPackage) -> PathBuf {
|
||||
local_node_modules_package_contents_path(
|
||||
self.deno_local_registry_dir,
|
||||
package,
|
||||
)
|
||||
}
|
||||
|
||||
fn did_run_scripts(
|
||||
&self,
|
||||
package: &NpmResolutionPackage,
|
||||
) -> std::result::Result<(), deno_core::anyhow::Error> {
|
||||
std::fs::write(self.ran_scripts_file(package), "")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn warn_on_scripts_not_run(
|
||||
&self,
|
||||
packages: &[(&NpmResolutionPackage, std::path::PathBuf)],
|
||||
) -> Result<(), AnyError> {
|
||||
if !packages.is_empty() {
|
||||
log::warn!("{} The following packages contained npm lifecycle scripts ({}) that were not executed:", colors::yellow("Warning"), colors::gray("preinstall/install/postinstall"));
|
||||
|
||||
for (package, _) in packages {
|
||||
log::warn!("┠─ {}", colors::gray(format!("npm:{}", package.id.nv)));
|
||||
}
|
||||
|
||||
log::warn!("┃");
|
||||
log::warn!(
|
||||
"┠─ {}",
|
||||
colors::italic("This may cause the packages to not work correctly.")
|
||||
);
|
||||
log::warn!("┖─ {}", colors::italic("To run lifecycle scripts, use the `--allow-scripts` flag with `deno install`:"));
|
||||
let packages_comma_separated = packages
|
||||
.iter()
|
||||
.map(|(p, _)| format!("npm:{}", p.id.nv))
|
||||
.collect::<Vec<_>>()
|
||||
.join(",");
|
||||
log::warn!(
|
||||
" {}",
|
||||
colors::bold(format!(
|
||||
"deno install --allow-scripts={}",
|
||||
packages_comma_separated
|
||||
))
|
||||
);
|
||||
|
||||
for (package, _) in packages {
|
||||
let _ignore_err = fs::write(self.warned_scripts_file(package), "");
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn has_warned(&self, package: &NpmResolutionPackage) -> bool {
|
||||
self.warned_scripts_file(package).exists()
|
||||
}
|
||||
|
||||
fn has_run(&self, package: &NpmResolutionPackage) -> bool {
|
||||
self.ran_scripts_file(package).exists()
|
||||
}
|
||||
}
|
||||
|
||||
// Uses BTreeMap to preserve the ordering of the elements in memory, to ensure
|
||||
// the file generated from this datastructure is deterministic.
|
||||
// See: https://github.com/denoland/deno/issues/24479
|
||||
|
@ -1050,20 +920,6 @@ impl SetupCache {
|
|||
}
|
||||
}
|
||||
|
||||
/// Normalizes a package name for use at `node_modules/.deno/<pkg-name>@<version>[_<copy_index>]`
|
||||
pub fn normalize_pkg_name_for_node_modules_deno_folder(name: &str) -> Cow<str> {
|
||||
let name = if name.to_lowercase() == name {
|
||||
Cow::Borrowed(name)
|
||||
} else {
|
||||
Cow::Owned(format!("_{}", mixed_case_package_name_encode(name)))
|
||||
};
|
||||
if name.starts_with('@') {
|
||||
name.replace('/', "+").into()
|
||||
} else {
|
||||
name
|
||||
}
|
||||
}
|
||||
|
||||
fn get_package_folder_id_folder_name(
|
||||
folder_id: &NpmPackageCacheFolderId,
|
||||
) -> String {
|
||||
|
|
|
@ -15,7 +15,6 @@ use crate::args::NpmInstallDepsProvider;
|
|||
use crate::util::progress_bar::ProgressBar;
|
||||
|
||||
pub use self::common::NpmPackageFsResolver;
|
||||
pub use self::local::normalize_pkg_name_for_node_modules_deno_folder;
|
||||
|
||||
use self::global::GlobalNpmPackageResolver;
|
||||
use self::local::LocalNpmPackageResolver;
|
||||
|
@ -54,6 +53,7 @@ pub fn create_npm_fs_resolver(
|
|||
tarball_cache,
|
||||
resolution,
|
||||
system_info,
|
||||
lifecycle_scripts,
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
mod byonm;
|
||||
mod cache_dir;
|
||||
mod common;
|
||||
mod managed;
|
||||
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
|
@ -13,8 +13,9 @@ use deno_ast::ModuleSpecifier;
|
|||
use deno_core::error::AnyError;
|
||||
use deno_core::serde_json;
|
||||
use deno_npm::registry::NpmPackageInfo;
|
||||
use deno_resolver::npm::ByonmNpmResolver;
|
||||
use deno_runtime::deno_node::NodeRequireResolver;
|
||||
use deno_runtime::deno_node::NpmProcessStateProvider;
|
||||
use deno_runtime::ops::process::NpmProcessStateProvider;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_semver::package::PackageReq;
|
||||
use node_resolver::NpmResolver;
|
||||
|
@ -22,16 +23,15 @@ use node_resolver::NpmResolver;
|
|||
use crate::args::npm_registry_url;
|
||||
use crate::file_fetcher::FileFetcher;
|
||||
|
||||
pub use self::byonm::ByonmCliNpmResolver;
|
||||
pub use self::byonm::CliNpmResolverByonmCreateOptions;
|
||||
pub use self::cache_dir::NpmCacheDir;
|
||||
pub use self::byonm::CliByonmNpmResolver;
|
||||
pub use self::byonm::CliByonmNpmResolverCreateOptions;
|
||||
pub use self::managed::CliNpmResolverManagedCreateOptions;
|
||||
pub use self::managed::CliNpmResolverManagedSnapshotOption;
|
||||
pub use self::managed::ManagedCliNpmResolver;
|
||||
|
||||
pub enum CliNpmResolverCreateOptions {
|
||||
Managed(CliNpmResolverManagedCreateOptions),
|
||||
Byonm(CliNpmResolverByonmCreateOptions),
|
||||
Byonm(CliByonmNpmResolverCreateOptions),
|
||||
}
|
||||
|
||||
pub async fn create_cli_npm_resolver_for_lsp(
|
||||
|
@ -42,7 +42,7 @@ pub async fn create_cli_npm_resolver_for_lsp(
|
|||
Managed(options) => {
|
||||
managed::create_managed_npm_resolver_for_lsp(options).await
|
||||
}
|
||||
Byonm(options) => byonm::create_byonm_npm_resolver(options),
|
||||
Byonm(options) => Arc::new(ByonmNpmResolver::new(options)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -52,14 +52,14 @@ pub async fn create_cli_npm_resolver(
|
|||
use CliNpmResolverCreateOptions::*;
|
||||
match options {
|
||||
Managed(options) => managed::create_managed_npm_resolver(options).await,
|
||||
Byonm(options) => Ok(byonm::create_byonm_npm_resolver(options)),
|
||||
Byonm(options) => Ok(Arc::new(ByonmNpmResolver::new(options))),
|
||||
}
|
||||
}
|
||||
|
||||
pub enum InnerCliNpmResolverRef<'a> {
|
||||
Managed(&'a ManagedCliNpmResolver),
|
||||
#[allow(dead_code)]
|
||||
Byonm(&'a ByonmCliNpmResolver),
|
||||
Byonm(&'a CliByonmNpmResolver),
|
||||
}
|
||||
|
||||
pub trait CliNpmResolver: NpmResolver {
|
||||
|
@ -80,14 +80,14 @@ pub trait CliNpmResolver: NpmResolver {
|
|||
}
|
||||
}
|
||||
|
||||
fn as_byonm(&self) -> Option<&ByonmCliNpmResolver> {
|
||||
fn as_byonm(&self) -> Option<&CliByonmNpmResolver> {
|
||||
match self.as_inner() {
|
||||
InnerCliNpmResolverRef::Managed(_) => None,
|
||||
InnerCliNpmResolverRef::Byonm(inner) => Some(inner),
|
||||
}
|
||||
}
|
||||
|
||||
fn root_node_modules_path(&self) -> Option<&PathBuf>;
|
||||
fn root_node_modules_path(&self) -> Option<&Path>;
|
||||
|
||||
fn resolve_pkg_folder_from_deno_module_req(
|
||||
&self,
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
|
||||
use std::sync::atomic::AtomicUsize;
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::sync::Arc;
|
||||
use std::time;
|
||||
|
||||
use deno_core::error::generic_error;
|
||||
|
@ -12,9 +11,7 @@ use deno_core::op2;
|
|||
use deno_core::v8;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_core::OpState;
|
||||
use deno_runtime::deno_permissions::create_child_permissions;
|
||||
use deno_runtime::deno_permissions::ChildPermissionsArg;
|
||||
use deno_runtime::deno_permissions::PermissionDescriptorParser;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
use tokio::sync::mpsc::UnboundedSender;
|
||||
use uuid::Uuid;
|
||||
|
@ -61,19 +58,8 @@ pub fn op_pledge_test_permissions(
|
|||
#[serde] args: ChildPermissionsArg,
|
||||
) -> Result<Uuid, AnyError> {
|
||||
let token = Uuid::new_v4();
|
||||
let permission_desc_parser = state
|
||||
.borrow::<Arc<dyn PermissionDescriptorParser>>()
|
||||
.clone();
|
||||
let parent_permissions = state.borrow_mut::<PermissionsContainer>();
|
||||
let worker_permissions = {
|
||||
let mut parent_permissions = parent_permissions.inner.lock();
|
||||
let perms = create_child_permissions(
|
||||
permission_desc_parser.as_ref(),
|
||||
&mut parent_permissions,
|
||||
args,
|
||||
)?;
|
||||
PermissionsContainer::new(permission_desc_parser, perms)
|
||||
};
|
||||
let worker_permissions = parent_permissions.create_child_permissions(args)?;
|
||||
let parent_permissions = parent_permissions.clone();
|
||||
|
||||
if state.try_take::<PermissionsHolder>().is_some() {
|
||||
|
@ -83,7 +69,6 @@ pub fn op_pledge_test_permissions(
|
|||
state.put::<PermissionsHolder>(PermissionsHolder(token, parent_permissions));
|
||||
|
||||
// NOTE: This call overrides current permission set for the worker
|
||||
state.put(worker_permissions.inner.clone());
|
||||
state.put::<PermissionsContainer>(worker_permissions);
|
||||
|
||||
Ok(token)
|
||||
|
@ -100,7 +85,6 @@ pub fn op_restore_test_permissions(
|
|||
}
|
||||
|
||||
let permissions = permissions_holder.1;
|
||||
state.put(permissions.inner.clone());
|
||||
state.put::<PermissionsContainer>(permissions);
|
||||
Ok(())
|
||||
} else {
|
||||
|
|
|
@ -16,13 +16,10 @@ use deno_core::op2;
|
|||
use deno_core::v8;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_core::OpState;
|
||||
use deno_runtime::deno_permissions::create_child_permissions;
|
||||
use deno_runtime::deno_permissions::ChildPermissionsArg;
|
||||
use deno_runtime::deno_permissions::PermissionDescriptorParser;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
use std::sync::atomic::AtomicUsize;
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::sync::Arc;
|
||||
use uuid::Uuid;
|
||||
|
||||
deno_core::extension!(deno_test,
|
||||
|
@ -56,19 +53,8 @@ pub fn op_pledge_test_permissions(
|
|||
#[serde] args: ChildPermissionsArg,
|
||||
) -> Result<Uuid, AnyError> {
|
||||
let token = Uuid::new_v4();
|
||||
let permission_desc_parser = state
|
||||
.borrow::<Arc<dyn PermissionDescriptorParser>>()
|
||||
.clone();
|
||||
let parent_permissions = state.borrow_mut::<PermissionsContainer>();
|
||||
let worker_permissions = {
|
||||
let mut parent_permissions = parent_permissions.inner.lock();
|
||||
let perms = create_child_permissions(
|
||||
permission_desc_parser.as_ref(),
|
||||
&mut parent_permissions,
|
||||
args,
|
||||
)?;
|
||||
PermissionsContainer::new(permission_desc_parser, perms)
|
||||
};
|
||||
let worker_permissions = parent_permissions.create_child_permissions(args)?;
|
||||
let parent_permissions = parent_permissions.clone();
|
||||
|
||||
if state.try_take::<PermissionsHolder>().is_some() {
|
||||
|
@ -77,7 +63,6 @@ pub fn op_pledge_test_permissions(
|
|||
state.put::<PermissionsHolder>(PermissionsHolder(token, parent_permissions));
|
||||
|
||||
// NOTE: This call overrides current permission set for the worker
|
||||
state.put(worker_permissions.inner.clone());
|
||||
state.put::<PermissionsContainer>(worker_permissions);
|
||||
|
||||
Ok(token)
|
||||
|
@ -94,7 +79,6 @@ pub fn op_restore_test_permissions(
|
|||
}
|
||||
|
||||
let permissions = permissions_holder.1;
|
||||
state.put(permissions.inner.clone());
|
||||
state.put::<PermissionsContainer>(permissions);
|
||||
Ok(())
|
||||
} else {
|
||||
|
|
568
cli/resolver.rs
568
cli/resolver.rs
|
@ -22,12 +22,13 @@ use deno_graph::NpmLoadError;
|
|||
use deno_graph::NpmResolvePkgReqsResult;
|
||||
use deno_npm::resolution::NpmResolutionError;
|
||||
use deno_package_json::PackageJsonDepValue;
|
||||
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
|
||||
use deno_resolver::sloppy_imports::SloppyImportsResolver;
|
||||
use deno_runtime::colors;
|
||||
use deno_runtime::deno_fs;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node::is_builtin_node_module;
|
||||
use deno_runtime::deno_node::NodeResolver;
|
||||
use deno_runtime::fs_util::specifier_to_file_path;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use deno_semver::package::PackageReq;
|
||||
use node_resolver::errors::ClosestPkgJsonError;
|
||||
|
@ -60,13 +61,52 @@ pub struct ModuleCodeStringSource {
|
|||
pub media_type: MediaType,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CliDenoResolverFs(pub Arc<dyn FileSystem>);
|
||||
|
||||
impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs {
|
||||
fn read_to_string_lossy(&self, path: &Path) -> std::io::Result<String> {
|
||||
self
|
||||
.0
|
||||
.read_text_file_lossy_sync(path, None)
|
||||
.map_err(|e| e.into_io_error())
|
||||
}
|
||||
|
||||
fn realpath_sync(&self, path: &Path) -> std::io::Result<PathBuf> {
|
||||
self.0.realpath_sync(path).map_err(|e| e.into_io_error())
|
||||
}
|
||||
|
||||
fn is_dir_sync(&self, path: &Path) -> bool {
|
||||
self.0.is_dir_sync(path)
|
||||
}
|
||||
|
||||
fn read_dir_sync(
|
||||
&self,
|
||||
dir_path: &Path,
|
||||
) -> std::io::Result<Vec<deno_resolver::fs::DirEntry>> {
|
||||
self
|
||||
.0
|
||||
.read_dir_sync(dir_path)
|
||||
.map(|entries| {
|
||||
entries
|
||||
.into_iter()
|
||||
.map(|e| deno_resolver::fs::DirEntry {
|
||||
name: e.name,
|
||||
is_file: e.is_file,
|
||||
is_directory: e.is_directory,
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.map_err(|err| err.into_io_error())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct CliNodeResolver {
|
||||
cjs_resolutions: Arc<CjsResolutionStore>,
|
||||
fs: Arc<dyn deno_fs::FileSystem>,
|
||||
node_resolver: Arc<NodeResolver>,
|
||||
// todo(dsherret): remove this pub(crate)
|
||||
pub(crate) npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
}
|
||||
|
||||
impl CliNodeResolver {
|
||||
|
@ -421,13 +461,16 @@ impl CjsResolutionStore {
|
|||
}
|
||||
}
|
||||
|
||||
pub type CliSloppyImportsResolver =
|
||||
SloppyImportsResolver<SloppyImportsCachedFs>;
|
||||
|
||||
/// A resolver that takes care of resolution, taking into account loaded
|
||||
/// import map, JSX settings.
|
||||
#[derive(Debug)]
|
||||
pub struct CliGraphResolver {
|
||||
node_resolver: Option<Arc<CliNodeResolver>>,
|
||||
npm_resolver: Option<Arc<dyn CliNpmResolver>>,
|
||||
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
|
||||
sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
|
||||
workspace_resolver: Arc<WorkspaceResolver>,
|
||||
maybe_default_jsx_import_source: Option<String>,
|
||||
maybe_default_jsx_import_source_types: Option<String>,
|
||||
|
@ -441,7 +484,7 @@ pub struct CliGraphResolver {
|
|||
pub struct CliGraphResolverOptions<'a> {
|
||||
pub node_resolver: Option<Arc<CliNodeResolver>>,
|
||||
pub npm_resolver: Option<Arc<dyn CliNpmResolver>>,
|
||||
pub sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
|
||||
pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
|
||||
pub workspace_resolver: Arc<WorkspaceResolver>,
|
||||
pub bare_node_builtins_enabled: bool,
|
||||
pub maybe_jsx_import_source_config: Option<JsxImportSourceConfig>,
|
||||
|
@ -565,7 +608,15 @@ impl Resolver for CliGraphResolver {
|
|||
if let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver {
|
||||
Ok(
|
||||
sloppy_imports_resolver
|
||||
.resolve(&specifier, mode)
|
||||
.resolve(
|
||||
&specifier,
|
||||
match mode {
|
||||
ResolutionMode::Execution => {
|
||||
SloppyImportsResolutionMode::Execution
|
||||
}
|
||||
ResolutionMode::Types => SloppyImportsResolutionMode::Types,
|
||||
},
|
||||
)
|
||||
.map(|s| s.into_specifier())
|
||||
.unwrap_or(specifier),
|
||||
)
|
||||
|
@ -847,96 +898,18 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum SloppyImportsFsEntry {
|
||||
File,
|
||||
Dir,
|
||||
}
|
||||
|
||||
impl SloppyImportsFsEntry {
|
||||
pub fn from_fs_stat(
|
||||
stat: &deno_runtime::deno_io::fs::FsStat,
|
||||
) -> Option<SloppyImportsFsEntry> {
|
||||
if stat.is_file {
|
||||
Some(SloppyImportsFsEntry::File)
|
||||
} else if stat.is_directory {
|
||||
Some(SloppyImportsFsEntry::Dir)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum SloppyImportsResolution {
|
||||
/// Ex. `./file.js` to `./file.ts`
|
||||
JsToTs(ModuleSpecifier),
|
||||
/// Ex. `./file` to `./file.ts`
|
||||
NoExtension(ModuleSpecifier),
|
||||
/// Ex. `./dir` to `./dir/index.ts`
|
||||
Directory(ModuleSpecifier),
|
||||
}
|
||||
|
||||
impl SloppyImportsResolution {
|
||||
pub fn as_specifier(&self) -> &ModuleSpecifier {
|
||||
match self {
|
||||
Self::JsToTs(specifier) => specifier,
|
||||
Self::NoExtension(specifier) => specifier,
|
||||
Self::Directory(specifier) => specifier,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_specifier(self) -> ModuleSpecifier {
|
||||
match self {
|
||||
Self::JsToTs(specifier) => specifier,
|
||||
Self::NoExtension(specifier) => specifier,
|
||||
Self::Directory(specifier) => specifier,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_suggestion_message(&self) -> String {
|
||||
format!("Maybe {}", self.as_base_message())
|
||||
}
|
||||
|
||||
pub fn as_quick_fix_message(&self) -> String {
|
||||
let message = self.as_base_message();
|
||||
let mut chars = message.chars();
|
||||
format!(
|
||||
"{}{}.",
|
||||
chars.next().unwrap().to_uppercase(),
|
||||
chars.as_str()
|
||||
)
|
||||
}
|
||||
|
||||
fn as_base_message(&self) -> String {
|
||||
match self {
|
||||
SloppyImportsResolution::JsToTs(specifier) => {
|
||||
let media_type = MediaType::from_specifier(specifier);
|
||||
format!("change the extension to '{}'", media_type.as_ts_extension())
|
||||
}
|
||||
SloppyImportsResolution::NoExtension(specifier) => {
|
||||
let media_type = MediaType::from_specifier(specifier);
|
||||
format!("add a '{}' extension", media_type.as_ts_extension())
|
||||
}
|
||||
SloppyImportsResolution::Directory(specifier) => {
|
||||
let file_name = specifier
|
||||
.path()
|
||||
.rsplit_once('/')
|
||||
.map(|(_, file_name)| file_name)
|
||||
.unwrap_or(specifier.path());
|
||||
format!("specify path to '{}' file in directory instead", file_name)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct SloppyImportsResolver {
|
||||
fs: Arc<dyn FileSystem>,
|
||||
cache: Option<DashMap<PathBuf, Option<SloppyImportsFsEntry>>>,
|
||||
pub struct SloppyImportsCachedFs {
|
||||
fs: Arc<dyn deno_fs::FileSystem>,
|
||||
cache: Option<
|
||||
DashMap<
|
||||
PathBuf,
|
||||
Option<deno_resolver::sloppy_imports::SloppyImportsFsEntry>,
|
||||
>,
|
||||
>,
|
||||
}
|
||||
|
||||
impl SloppyImportsResolver {
|
||||
impl SloppyImportsCachedFs {
|
||||
pub fn new(fs: Arc<dyn FileSystem>) -> Self {
|
||||
Self {
|
||||
fs,
|
||||
|
@ -947,409 +920,34 @@ impl SloppyImportsResolver {
|
|||
pub fn new_without_stat_cache(fs: Arc<dyn FileSystem>) -> Self {
|
||||
Self { fs, cache: None }
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve(
|
||||
impl deno_resolver::sloppy_imports::SloppyImportResolverFs
|
||||
for SloppyImportsCachedFs
|
||||
{
|
||||
fn stat_sync(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
mode: ResolutionMode,
|
||||
) -> Option<SloppyImportsResolution> {
|
||||
fn path_without_ext(
|
||||
path: &Path,
|
||||
media_type: MediaType,
|
||||
) -> Option<Cow<str>> {
|
||||
let old_path_str = path.to_string_lossy();
|
||||
match media_type {
|
||||
MediaType::Unknown => Some(old_path_str),
|
||||
_ => old_path_str
|
||||
.strip_suffix(media_type.as_ts_extension())
|
||||
.map(|s| Cow::Owned(s.to_string())),
|
||||
}
|
||||
}
|
||||
|
||||
fn media_types_to_paths(
|
||||
path_no_ext: &str,
|
||||
original_media_type: MediaType,
|
||||
probe_media_type_types: Vec<MediaType>,
|
||||
reason: SloppyImportsResolutionReason,
|
||||
) -> Vec<(PathBuf, SloppyImportsResolutionReason)> {
|
||||
probe_media_type_types
|
||||
.into_iter()
|
||||
.filter(|media_type| *media_type != original_media_type)
|
||||
.map(|media_type| {
|
||||
(
|
||||
PathBuf::from(format!(
|
||||
"{}{}",
|
||||
path_no_ext,
|
||||
media_type.as_ts_extension()
|
||||
)),
|
||||
reason,
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
}
|
||||
|
||||
if specifier.scheme() != "file" {
|
||||
return None;
|
||||
}
|
||||
|
||||
let path = specifier_to_file_path(specifier).ok()?;
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
enum SloppyImportsResolutionReason {
|
||||
JsToTs,
|
||||
NoExtension,
|
||||
Directory,
|
||||
}
|
||||
|
||||
let probe_paths: Vec<(PathBuf, SloppyImportsResolutionReason)> =
|
||||
match self.stat_sync(&path) {
|
||||
Some(SloppyImportsFsEntry::File) => {
|
||||
if mode.is_types() {
|
||||
let media_type = MediaType::from_specifier(specifier);
|
||||
// attempt to resolve the .d.ts file before the .js file
|
||||
let probe_media_type_types = match media_type {
|
||||
MediaType::JavaScript => {
|
||||
vec![(MediaType::Dts), MediaType::JavaScript]
|
||||
}
|
||||
MediaType::Mjs => {
|
||||
vec![MediaType::Dmts, MediaType::Dts, MediaType::Mjs]
|
||||
}
|
||||
MediaType::Cjs => {
|
||||
vec![MediaType::Dcts, MediaType::Dts, MediaType::Cjs]
|
||||
}
|
||||
_ => return None,
|
||||
};
|
||||
let path_no_ext = path_without_ext(&path, media_type)?;
|
||||
media_types_to_paths(
|
||||
&path_no_ext,
|
||||
media_type,
|
||||
probe_media_type_types,
|
||||
SloppyImportsResolutionReason::JsToTs,
|
||||
)
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
entry @ None | entry @ Some(SloppyImportsFsEntry::Dir) => {
|
||||
let media_type = MediaType::from_specifier(specifier);
|
||||
let probe_media_type_types = match media_type {
|
||||
MediaType::JavaScript => (
|
||||
if mode.is_types() {
|
||||
vec![MediaType::TypeScript, MediaType::Tsx, MediaType::Dts]
|
||||
} else {
|
||||
vec![MediaType::TypeScript, MediaType::Tsx]
|
||||
},
|
||||
SloppyImportsResolutionReason::JsToTs,
|
||||
),
|
||||
MediaType::Jsx => {
|
||||
(vec![MediaType::Tsx], SloppyImportsResolutionReason::JsToTs)
|
||||
}
|
||||
MediaType::Mjs => (
|
||||
if mode.is_types() {
|
||||
vec![MediaType::Mts, MediaType::Dmts, MediaType::Dts]
|
||||
} else {
|
||||
vec![MediaType::Mts]
|
||||
},
|
||||
SloppyImportsResolutionReason::JsToTs,
|
||||
),
|
||||
MediaType::Cjs => (
|
||||
if mode.is_types() {
|
||||
vec![MediaType::Cts, MediaType::Dcts, MediaType::Dts]
|
||||
} else {
|
||||
vec![MediaType::Cts]
|
||||
},
|
||||
SloppyImportsResolutionReason::JsToTs,
|
||||
),
|
||||
MediaType::TypeScript
|
||||
| MediaType::Mts
|
||||
| MediaType::Cts
|
||||
| MediaType::Dts
|
||||
| MediaType::Dmts
|
||||
| MediaType::Dcts
|
||||
| MediaType::Tsx
|
||||
| MediaType::Json
|
||||
| MediaType::Wasm
|
||||
| MediaType::TsBuildInfo
|
||||
| MediaType::SourceMap => {
|
||||
return None;
|
||||
}
|
||||
MediaType::Unknown => (
|
||||
if mode.is_types() {
|
||||
vec![
|
||||
MediaType::TypeScript,
|
||||
MediaType::Tsx,
|
||||
MediaType::Mts,
|
||||
MediaType::Dts,
|
||||
MediaType::Dmts,
|
||||
MediaType::Dcts,
|
||||
MediaType::JavaScript,
|
||||
MediaType::Jsx,
|
||||
MediaType::Mjs,
|
||||
]
|
||||
} else {
|
||||
vec![
|
||||
MediaType::TypeScript,
|
||||
MediaType::JavaScript,
|
||||
MediaType::Tsx,
|
||||
MediaType::Jsx,
|
||||
MediaType::Mts,
|
||||
MediaType::Mjs,
|
||||
]
|
||||
},
|
||||
SloppyImportsResolutionReason::NoExtension,
|
||||
),
|
||||
};
|
||||
let mut probe_paths = match path_without_ext(&path, media_type) {
|
||||
Some(path_no_ext) => media_types_to_paths(
|
||||
&path_no_ext,
|
||||
media_type,
|
||||
probe_media_type_types.0,
|
||||
probe_media_type_types.1,
|
||||
),
|
||||
None => vec![],
|
||||
};
|
||||
|
||||
if matches!(entry, Some(SloppyImportsFsEntry::Dir)) {
|
||||
// try to resolve at the index file
|
||||
if mode.is_types() {
|
||||
probe_paths.push((
|
||||
path.join("index.ts"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
|
||||
probe_paths.push((
|
||||
path.join("index.mts"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.d.ts"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.d.mts"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.js"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.mjs"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.tsx"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.jsx"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
} else {
|
||||
probe_paths.push((
|
||||
path.join("index.ts"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.mts"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.tsx"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.js"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.mjs"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.jsx"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
}
|
||||
}
|
||||
if probe_paths.is_empty() {
|
||||
return None;
|
||||
}
|
||||
probe_paths
|
||||
}
|
||||
};
|
||||
|
||||
for (probe_path, reason) in probe_paths {
|
||||
if self.stat_sync(&probe_path) == Some(SloppyImportsFsEntry::File) {
|
||||
if let Ok(specifier) = ModuleSpecifier::from_file_path(probe_path) {
|
||||
match reason {
|
||||
SloppyImportsResolutionReason::JsToTs => {
|
||||
return Some(SloppyImportsResolution::JsToTs(specifier));
|
||||
}
|
||||
SloppyImportsResolutionReason::NoExtension => {
|
||||
return Some(SloppyImportsResolution::NoExtension(specifier));
|
||||
}
|
||||
SloppyImportsResolutionReason::Directory => {
|
||||
return Some(SloppyImportsResolution::Directory(specifier));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
fn stat_sync(&self, path: &Path) -> Option<SloppyImportsFsEntry> {
|
||||
path: &Path,
|
||||
) -> Option<deno_resolver::sloppy_imports::SloppyImportsFsEntry> {
|
||||
if let Some(cache) = &self.cache {
|
||||
if let Some(entry) = cache.get(path) {
|
||||
return *entry;
|
||||
}
|
||||
}
|
||||
|
||||
let entry = self
|
||||
.fs
|
||||
.stat_sync(path)
|
||||
.ok()
|
||||
.and_then(|stat| SloppyImportsFsEntry::from_fs_stat(&stat));
|
||||
let entry = self.fs.stat_sync(path).ok().and_then(|stat| {
|
||||
if stat.is_file {
|
||||
Some(deno_resolver::sloppy_imports::SloppyImportsFsEntry::File)
|
||||
} else if stat.is_directory {
|
||||
Some(deno_resolver::sloppy_imports::SloppyImportsFsEntry::Dir)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
|
||||
if let Some(cache) = &self.cache {
|
||||
cache.insert(path.to_owned(), entry);
|
||||
}
|
||||
entry
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use test_util::TestContext;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_unstable_sloppy_imports() {
|
||||
fn resolve(specifier: &ModuleSpecifier) -> Option<SloppyImportsResolution> {
|
||||
resolve_with_mode(specifier, ResolutionMode::Execution)
|
||||
}
|
||||
|
||||
fn resolve_types(
|
||||
specifier: &ModuleSpecifier,
|
||||
) -> Option<SloppyImportsResolution> {
|
||||
resolve_with_mode(specifier, ResolutionMode::Types)
|
||||
}
|
||||
|
||||
fn resolve_with_mode(
|
||||
specifier: &ModuleSpecifier,
|
||||
mode: ResolutionMode,
|
||||
) -> Option<SloppyImportsResolution> {
|
||||
SloppyImportsResolver::new(Arc::new(deno_fs::RealFs))
|
||||
.resolve(specifier, mode)
|
||||
}
|
||||
|
||||
let context = TestContext::default();
|
||||
let temp_dir = context.temp_dir().path();
|
||||
|
||||
// scenarios like resolving ./example.js to ./example.ts
|
||||
for (ext_from, ext_to) in [("js", "ts"), ("js", "tsx"), ("mjs", "mts")] {
|
||||
let ts_file = temp_dir.join(format!("file.{}", ext_to));
|
||||
ts_file.write("");
|
||||
assert_eq!(resolve(&ts_file.url_file()), None);
|
||||
assert_eq!(
|
||||
resolve(
|
||||
&temp_dir
|
||||
.url_dir()
|
||||
.join(&format!("file.{}", ext_from))
|
||||
.unwrap()
|
||||
),
|
||||
Some(SloppyImportsResolution::JsToTs(ts_file.url_file())),
|
||||
);
|
||||
ts_file.remove_file();
|
||||
}
|
||||
|
||||
// no extension scenarios
|
||||
for ext in ["js", "ts", "js", "tsx", "jsx", "mjs", "mts"] {
|
||||
let file = temp_dir.join(format!("file.{}", ext));
|
||||
file.write("");
|
||||
assert_eq!(
|
||||
resolve(
|
||||
&temp_dir
|
||||
.url_dir()
|
||||
.join("file") // no ext
|
||||
.unwrap()
|
||||
),
|
||||
Some(SloppyImportsResolution::NoExtension(file.url_file()))
|
||||
);
|
||||
file.remove_file();
|
||||
}
|
||||
|
||||
// .ts and .js exists, .js specified (goes to specified)
|
||||
{
|
||||
let ts_file = temp_dir.join("file.ts");
|
||||
ts_file.write("");
|
||||
let js_file = temp_dir.join("file.js");
|
||||
js_file.write("");
|
||||
assert_eq!(resolve(&js_file.url_file()), None);
|
||||
}
|
||||
|
||||
// only js exists, .js specified
|
||||
{
|
||||
let js_only_file = temp_dir.join("js_only.js");
|
||||
js_only_file.write("");
|
||||
assert_eq!(resolve(&js_only_file.url_file()), None);
|
||||
assert_eq!(resolve_types(&js_only_file.url_file()), None);
|
||||
}
|
||||
|
||||
// resolving a directory to an index file
|
||||
{
|
||||
let routes_dir = temp_dir.join("routes");
|
||||
routes_dir.create_dir_all();
|
||||
let index_file = routes_dir.join("index.ts");
|
||||
index_file.write("");
|
||||
assert_eq!(
|
||||
resolve(&routes_dir.url_file()),
|
||||
Some(SloppyImportsResolution::Directory(index_file.url_file())),
|
||||
);
|
||||
}
|
||||
|
||||
// both a directory and a file with specifier is present
|
||||
{
|
||||
let api_dir = temp_dir.join("api");
|
||||
api_dir.create_dir_all();
|
||||
let bar_file = api_dir.join("bar.ts");
|
||||
bar_file.write("");
|
||||
let api_file = temp_dir.join("api.ts");
|
||||
api_file.write("");
|
||||
assert_eq!(
|
||||
resolve(&api_dir.url_file()),
|
||||
Some(SloppyImportsResolution::NoExtension(api_file.url_file())),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sloppy_import_resolution_suggestion_message() {
|
||||
// directory
|
||||
assert_eq!(
|
||||
SloppyImportsResolution::Directory(
|
||||
ModuleSpecifier::parse("file:///dir/index.js").unwrap()
|
||||
)
|
||||
.as_suggestion_message(),
|
||||
"Maybe specify path to 'index.js' file in directory instead"
|
||||
);
|
||||
// no ext
|
||||
assert_eq!(
|
||||
SloppyImportsResolution::NoExtension(
|
||||
ModuleSpecifier::parse("file:///dir/index.mjs").unwrap()
|
||||
)
|
||||
.as_suggestion_message(),
|
||||
"Maybe add a '.mjs' extension"
|
||||
);
|
||||
// js to ts
|
||||
assert_eq!(
|
||||
SloppyImportsResolution::JsToTs(
|
||||
ModuleSpecifier::parse("file:///dir/index.mts").unwrap()
|
||||
)
|
||||
.as_suggestion_message(),
|
||||
"Maybe change the extension to '.mts'"
|
||||
);
|
||||
}
|
||||
}
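
The tests above cover three probes: swap a JS-style extension for a TypeScript one, append a missing extension, and fall back to a directory's index file. A rough sketch of that probe order over plain paths; the real resolver works on URLs, distinguishes execution vs. types resolution, and probes more extensions, so treat this as an approximation:

use std::path::{Path, PathBuf};

fn sloppy_resolve(specifier: &Path) -> Option<PathBuf> {
    if specifier.is_file() {
        return None; // an exact file match needs no sloppy resolution
    }
    // 1. ./file.js -> ./file.ts / ./file.tsx (and mjs -> mts)
    if matches!(specifier.extension().and_then(|e| e.to_str()), Some("js" | "mjs")) {
        for ext in ["ts", "tsx", "mts"] {
            let candidate = specifier.with_extension(ext);
            if candidate.is_file() {
                return Some(candidate);
            }
        }
    }
    // 2. ./file -> ./file.ts, ./file.tsx, ./file.js, ...
    if specifier.extension().is_none() {
        for ext in ["ts", "tsx", "js", "jsx", "mts", "mjs"] {
            let candidate = specifier.with_extension(ext);
            if candidate.is_file() {
                return Some(candidate);
            }
        }
    }
    // 3. ./routes -> ./routes/index.ts
    if specifier.is_dir() {
        let index = specifier.join("index.ts");
        if index.is_file() {
            return Some(index);
        }
    }
    None
}

fn main() {
    println!("{:?}", sloppy_resolve(Path::new("src/mod.js")));
}
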
@ -427,13 +427,9 @@ impl<'a> DenoCompileBinaryWriter<'a> {
        binary_name
      )
    }
    ReleaseChannel::Stable => {
    _ => {
      format!("release/v{}/{}", env!("CARGO_PKG_VERSION"), binary_name)
    }
    _ => bail!(
      "`deno compile` current doesn't support {} release channel",
      crate::version::DENO_VERSION_INFO.release_channel.name()
    ),
  };

  let download_directory = self.deno_dir.dl_folder_path();
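
A hedged sketch of the download-path selection in this hunk: canary archives are addressed by commit hash, stable releases by tag, and other channels are rejected. The enum and strings below are stand-ins rather than the actual crate::version types:

#[derive(Debug, Clone, Copy)]
enum ReleaseChannel { Stable, Canary, Rc }

fn denort_download_path(
    channel: ReleaseChannel,
    commit_hash: &str,
    version: &str,
    binary_name: &str,
) -> Result<String, String> {
    match channel {
        // Canary builds are addressed by commit hash.
        ReleaseChannel::Canary => Ok(format!("canary/{commit_hash}/{binary_name}")),
        // Stable builds are addressed by tagged version.
        ReleaseChannel::Stable => Ok(format!("release/v{version}/{binary_name}")),
        // Anything else (e.g. release candidates) is rejected, mirroring the bail! above.
        other => Err(format!(
            "`deno compile` currently doesn't support the {other:?} release channel"
        )),
    }
}

fn main() {
    println!("{:?}", denort_download_path(ReleaseChannel::Stable, "abc123", "2.0.0", "denort.zip"));
}
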
@ -102,7 +102,7 @@ impl FileSystem for DenoCompileFileSystem {
|
|||
&self,
|
||||
path: &Path,
|
||||
recursive: bool,
|
||||
mode: u32,
|
||||
mode: Option<u32>,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(path)?;
|
||||
RealFs.mkdir_sync(path, recursive, mode)
|
||||
|
@ -111,7 +111,7 @@ impl FileSystem for DenoCompileFileSystem {
|
|||
&self,
|
||||
path: PathBuf,
|
||||
recursive: bool,
|
||||
mode: u32,
|
||||
mode: Option<u32>,
|
||||
) -> FsResult<()> {
|
||||
self.error_if_in_vfs(&path)?;
|
||||
RealFs.mkdir_async(path, recursive, mode).await
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
#![allow(unused_imports)]
|
||||
|
||||
use deno_ast::MediaType;
|
||||
use deno_cache_dir::npm::NpmCacheDir;
|
||||
use deno_config::workspace::MappedResolution;
|
||||
use deno_config::workspace::MappedResolutionError;
|
||||
use deno_config::workspace::ResolverWorkspaceJsrPackage;
|
||||
|
@ -55,15 +56,16 @@ use crate::args::StorageKeyResolver;
|
|||
use crate::cache::Caches;
|
||||
use crate::cache::DenoDirProvider;
|
||||
use crate::cache::NodeAnalysisCache;
|
||||
use crate::cache::RealDenoCacheEnv;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::node::CliCjsCodeAnalyzer;
|
||||
use crate::npm::create_cli_npm_resolver;
|
||||
use crate::npm::CliNpmResolverByonmCreateOptions;
|
||||
use crate::npm::CliByonmNpmResolverCreateOptions;
|
||||
use crate::npm::CliNpmResolverCreateOptions;
|
||||
use crate::npm::CliNpmResolverManagedCreateOptions;
|
||||
use crate::npm::CliNpmResolverManagedSnapshotOption;
|
||||
use crate::npm::NpmCacheDir;
|
||||
use crate::resolver::CjsResolutionStore;
|
||||
use crate::resolver::CliDenoResolverFs;
|
||||
use crate::resolver::CliNodeResolver;
|
||||
use crate::resolver::NpmModuleLoader;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
|
@ -130,8 +132,6 @@ struct SharedModuleLoaderState {
|
|||
#[derive(Clone)]
|
||||
struct EmbeddedModuleLoader {
|
||||
shared: Arc<SharedModuleLoaderState>,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
}
|
||||
|
||||
pub const MODULE_NOT_FOUND: &str = "Module not found";
|
||||
|
@ -402,28 +402,23 @@ struct StandaloneModuleLoaderFactory {
|
|||
impl ModuleLoaderFactory for StandaloneModuleLoaderFactory {
|
||||
fn create_for_main(
|
||||
&self,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
_root_permissions: PermissionsContainer,
|
||||
) -> ModuleLoaderAndSourceMapGetter {
|
||||
ModuleLoaderAndSourceMapGetter {
|
||||
module_loader: Rc::new(EmbeddedModuleLoader {
|
||||
shared: self.shared.clone(),
|
||||
root_permissions,
|
||||
dynamic_permissions,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn create_for_worker(
|
||||
&self,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
_parent_permissions: PermissionsContainer,
|
||||
_permissions: PermissionsContainer,
|
||||
) -> ModuleLoaderAndSourceMapGetter {
|
||||
ModuleLoaderAndSourceMapGetter {
|
||||
module_loader: Rc::new(EmbeddedModuleLoader {
|
||||
shared: self.shared.clone(),
|
||||
root_permissions,
|
||||
dynamic_permissions,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
@ -471,6 +466,7 @@ pub async fn run(
|
|||
let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap();
|
||||
let root_node_modules_path = root_path.join("node_modules");
|
||||
let npm_cache_dir = NpmCacheDir::new(
|
||||
&RealDenoCacheEnv,
|
||||
root_node_modules_path.clone(),
|
||||
vec![npm_registry_url.clone()],
|
||||
);
|
||||
|
@ -535,8 +531,8 @@ pub async fn run(
|
|||
let fs = Arc::new(DenoCompileFileSystem::new(vfs))
|
||||
as Arc<dyn deno_fs::FileSystem>;
|
||||
let npm_resolver = create_cli_npm_resolver(
|
||||
CliNpmResolverCreateOptions::Byonm(CliNpmResolverByonmCreateOptions {
|
||||
fs: fs.clone(),
|
||||
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
|
||||
fs: CliDenoResolverFs(fs.clone()),
|
||||
root_node_modules_dir,
|
||||
}),
|
||||
)
|
||||
|
@ -664,7 +660,8 @@ pub async fn run(
|
|||
};
|
||||
|
||||
let permissions = {
|
||||
let mut permissions = metadata.permissions.to_options();
|
||||
let mut permissions =
|
||||
metadata.permissions.to_options(/* cli_arg_urls */ &[]);
|
||||
// if running with an npm vfs, grant read access to it
|
||||
if let Some(vfs_root) = maybe_vfs_root {
|
||||
match &mut permissions.allow_read {
|
||||
|
@ -697,8 +694,6 @@ pub async fn run(
|
|||
}
|
||||
checker
|
||||
});
|
||||
let permission_desc_parser =
|
||||
Arc::new(RuntimePermissionDescriptorParser::new(fs.clone()));
|
||||
let worker_factory = CliMainWorkerFactory::new(
|
||||
Arc::new(BlobStore::default()),
|
||||
// Code cache is not supported for standalone binary yet.
|
||||
|
@ -711,8 +706,8 @@ pub async fn run(
|
|||
Box::new(module_loader_factory),
|
||||
node_resolver,
|
||||
npm_resolver,
|
||||
permission_desc_parser,
|
||||
root_cert_store_provider,
|
||||
permissions,
|
||||
StorageKeyResolver::empty(),
|
||||
crate::args::DenoSubcommand::Run(Default::default()),
|
||||
CliMainWorkerOptions {
|
||||
|
@ -752,7 +747,7 @@ pub async fn run(
|
|||
deno_core::JsRuntime::init_platform(None, true);
|
||||
|
||||
let mut worker = worker_factory
|
||||
.create_main_worker(WorkerExecutionMode::Run, main_module, permissions)
|
||||
.create_main_worker(WorkerExecutionMode::Run, main_module)
|
||||
.await?;
|
||||
|
||||
let exit_code = worker.run().await?;
|
||||
|
|
|
@ -51,6 +51,7 @@ pub async fn check(
|
|||
|
||||
let specifiers_for_typecheck = if check_flags.doc || check_flags.doc_only {
|
||||
let file_fetcher = factory.file_fetcher()?;
|
||||
let root_permissions = factory.root_permissions_container()?;
|
||||
|
||||
let mut specifiers_for_typecheck = if check_flags.doc {
|
||||
specifiers.clone()
|
||||
|
@ -59,7 +60,7 @@ pub async fn check(
|
|||
};
|
||||
|
||||
for s in specifiers {
|
||||
let file = file_fetcher.fetch_bypass_permissions(&s).await?;
|
||||
let file = file_fetcher.fetch(&s, root_permissions).await?;
|
||||
let snippet_files = extract::extract_snippet_files(file)?;
|
||||
for snippet_file in snippet_files {
|
||||
specifiers_for_typecheck.push(snippet_file.specifier.clone());
|
||||
|
|
|
@ -135,7 +135,7 @@ pub async fn compile(
|
|||
file,
|
||||
eszip,
|
||||
root_dir_url,
|
||||
&module_specifier,
|
||||
module_specifier,
|
||||
&compile_flags,
|
||||
cli_options,
|
||||
)
|
||||
|
|
|
@ -7,7 +7,9 @@ use crate::args::Flags;
|
|||
use crate::colors;
|
||||
use crate::display;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::graph_util::graph_exit_lock_errors;
|
||||
use crate::graph_util::graph_exit_integrity_errors;
|
||||
use crate::graph_util::graph_walk_errors;
|
||||
use crate::graph_util::GraphWalkErrorsOptions;
|
||||
use crate::tsc::get_types_declaration_file_text;
|
||||
use crate::util::fs::collect_specifiers;
|
||||
use deno_ast::diagnostics::Diagnostic;
|
||||
|
@ -107,7 +109,7 @@ pub async fn doc(
|
|||
}
|
||||
DocSourceFileFlag::Paths(ref source_files) => {
|
||||
let module_graph_creator = factory.module_graph_creator().await?;
|
||||
let maybe_lockfile = cli_options.maybe_lockfile();
|
||||
let fs = factory.fs();
|
||||
|
||||
let module_specifiers = collect_specifiers(
|
||||
FilePatterns {
|
||||
|
@ -127,8 +129,18 @@ pub async fn doc(
|
|||
.create_graph(GraphKind::TypesOnly, module_specifiers.clone())
|
||||
.await?;
|
||||
|
||||
if maybe_lockfile.is_some() {
|
||||
graph_exit_lock_errors(&graph);
|
||||
graph_exit_integrity_errors(&graph);
|
||||
let errors = graph_walk_errors(
|
||||
&graph,
|
||||
fs,
|
||||
&module_specifiers,
|
||||
GraphWalkErrorsOptions {
|
||||
check_js: false,
|
||||
kind: GraphKind::TypesOnly,
|
||||
},
|
||||
);
|
||||
for error in errors {
|
||||
log::warn!("{} {}", colors::yellow("Warning"), error);
|
||||
}
|
||||
|
||||
let doc_parser = doc::DocParser::new(
|
||||
|
|
|
@ -33,6 +33,7 @@ use deno_core::error::AnyError;
|
|||
use deno_core::futures;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::unsync::spawn_blocking;
|
||||
use deno_core::url::Url;
|
||||
use log::debug;
|
||||
use log::info;
|
||||
use log::warn;
|
||||
|
@ -297,12 +298,7 @@ fn format_markdown(
|
|||
Ok(None)
|
||||
}
|
||||
}
|
||||
"yml" | "yaml" => pretty_yaml::format_text(
|
||||
text,
|
||||
&get_resolved_yaml_config(fmt_options),
|
||||
)
|
||||
.map(Some)
|
||||
.map_err(AnyError::from),
|
||||
"yml" | "yaml" => format_yaml(text, fmt_options),
|
||||
_ => {
|
||||
let mut codeblock_config =
|
||||
get_resolved_typescript_config(fmt_options);
|
||||
|
@ -339,13 +335,33 @@ pub fn format_css(
|
|||
file_text: &str,
|
||||
fmt_options: &FmtOptionsConfig,
|
||||
) -> Result<Option<String>, AnyError> {
|
||||
malva::format_text(
|
||||
let formatted_str = malva::format_text(
|
||||
file_text,
|
||||
malva::detect_syntax(file_path).unwrap_or(malva::Syntax::Css),
|
||||
&get_resolved_malva_config(fmt_options),
|
||||
)
|
||||
.map(Some)
|
||||
.map_err(AnyError::from)
|
||||
.map_err(AnyError::from)?;
|
||||
|
||||
Ok(if formatted_str == file_text {
|
||||
None
|
||||
} else {
|
||||
Some(formatted_str)
|
||||
})
|
||||
}
|
||||
|
||||
fn format_yaml(
|
||||
file_text: &str,
|
||||
fmt_options: &FmtOptionsConfig,
|
||||
) -> Result<Option<String>, AnyError> {
|
||||
let formatted_str =
|
||||
pretty_yaml::format_text(file_text, &get_resolved_yaml_config(fmt_options))
|
||||
.map_err(AnyError::from)?;
|
||||
|
||||
Ok(if formatted_str == file_text {
|
||||
None
|
||||
} else {
|
||||
Some(formatted_str)
|
||||
})
|
||||
}
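
format_css and format_yaml above now share a convention: return Ok(None) when the formatter output equals the input, so callers can distinguish "no change" from "formatted". A small generic sketch of that pattern (the helper and the trivial formatter are invented for illustration):

fn format_if_changed<E>(
    file_text: &str,
    format_with: impl Fn(&str) -> Result<String, E>,
) -> Result<Option<String>, E> {
    let formatted = format_with(file_text)?;
    Ok(if formatted == file_text {
        None // unchanged files are skipped by the caller
    } else {
        Some(formatted)
    })
}

fn main() {
    // Trivial "formatter" that trims trailing whitespace per line.
    let fmt = |s: &str| -> Result<String, ()> {
        Ok(s.lines().map(|l| l.trim_end()).collect::<Vec<_>>().join("\n"))
    };
    assert_eq!(format_if_changed("a\nb", fmt).unwrap(), None);
    assert_eq!(format_if_changed("a \nb", fmt).unwrap(), Some("a\nb".to_string()));
}
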
pub fn format_html(
|
||||
|
@ -353,7 +369,7 @@ pub fn format_html(
|
|||
file_text: &str,
|
||||
fmt_options: &FmtOptionsConfig,
|
||||
) -> Result<Option<String>, AnyError> {
|
||||
markup_fmt::format_text(
|
||||
let format_result = markup_fmt::format_text(
|
||||
file_text,
|
||||
markup_fmt::detect_language(file_path)
|
||||
.unwrap_or(markup_fmt::Language::Html),
|
||||
|
@ -419,9 +435,30 @@ pub fn format_html(
|
|||
}
|
||||
},
|
||||
)
|
||||
.map(Some)
|
||||
.map_err(|error| match error {
|
||||
markup_fmt::FormatError::Syntax(error) => AnyError::from(error),
|
||||
markup_fmt::FormatError::Syntax(error) => {
|
||||
fn inner(
|
||||
error: &markup_fmt::SyntaxError,
|
||||
file_path: &Path,
|
||||
) -> Option<String> {
|
||||
let url = Url::from_file_path(file_path).ok()?;
|
||||
|
||||
let error_msg = format!(
|
||||
"Syntax error ({}) at {}:{}:{}\n",
|
||||
error.kind,
|
||||
url.as_str(),
|
||||
error.line,
|
||||
error.column
|
||||
);
|
||||
Some(error_msg)
|
||||
}
|
||||
|
||||
if let Some(error_msg) = inner(&error, file_path) {
|
||||
AnyError::from(generic_error(error_msg))
|
||||
} else {
|
||||
AnyError::from(error)
|
||||
}
|
||||
}
|
||||
markup_fmt::FormatError::External(errors) => {
|
||||
let last = errors.len() - 1;
|
||||
AnyError::msg(
|
||||
|
@ -438,6 +475,14 @@ pub fn format_html(
|
|||
.collect::<String>(),
|
||||
)
|
||||
}
|
||||
});
|
||||
|
||||
let formatted_str = format_result?;
|
||||
|
||||
Ok(if formatted_str == file_text {
|
||||
None
|
||||
} else {
|
||||
Some(formatted_str)
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -469,12 +514,7 @@ pub fn format_file(
|
|||
Ok(None)
|
||||
}
|
||||
}
|
||||
"yml" | "yaml" => pretty_yaml::format_text(
|
||||
file_text,
|
||||
&get_resolved_yaml_config(fmt_options),
|
||||
)
|
||||
.map(Some)
|
||||
.map_err(AnyError::from),
|
||||
"yml" | "yaml" => format_yaml(file_text, fmt_options),
|
||||
"ipynb" => dprint_plugin_jupyter::format_text(
|
||||
file_text,
|
||||
|file_path: &Path, file_text: String| {
|
||||
|
|
|
@ -29,7 +29,7 @@ use crate::args::Flags;
|
|||
use crate::args::InfoFlags;
|
||||
use crate::display;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::graph_util::graph_exit_lock_errors;
|
||||
use crate::graph_util::graph_exit_integrity_errors;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::npm::ManagedCliNpmResolver;
|
||||
use crate::util::checksum;
|
||||
|
@ -75,14 +75,18 @@ pub async fn info(
|
|||
|
||||
// write out the lockfile if there is one
|
||||
if let Some(lockfile) = &maybe_lockfile {
|
||||
graph_exit_lock_errors(&graph);
|
||||
graph_exit_integrity_errors(&graph);
|
||||
lockfile.write_if_changed()?;
|
||||
}
|
||||
|
||||
if info_flags.json {
|
||||
let mut json_graph = serde_json::json!(graph);
|
||||
if let Some(output) = json_graph.as_object_mut() {
|
||||
output.insert("version".to_string(), JSON_SCHEMA_VERSION.into());
|
||||
output.shift_insert(
|
||||
0,
|
||||
"version".to_string(),
|
||||
JSON_SCHEMA_VERSION.into(),
|
||||
);
|
||||
}
|
||||
add_npm_packages_to_json(&mut json_graph, npm_resolver.as_ref());
|
||||
display::write_json_to_stdout(&json_graph)?;
|
||||
|
@ -644,8 +648,21 @@ impl<'a> GraphDisplayContext<'a> {
|
|||
ModuleError::InvalidTypeAssertion { .. } => {
|
||||
self.build_error_msg(specifier, "(invalid import attribute)")
|
||||
}
|
||||
ModuleError::LoadingErr(_, _, _) => {
|
||||
self.build_error_msg(specifier, "(loading error)")
|
||||
ModuleError::LoadingErr(_, _, err) => {
|
||||
use deno_graph::ModuleLoadError::*;
|
||||
let message = match err {
|
||||
HttpsChecksumIntegrity(_) => "(checksum integrity error)",
|
||||
Decode(_) => "(loading decode error)",
|
||||
Loader(err) => match deno_core::error::get_custom_error_class(err) {
|
||||
Some("NotCapable") => "(not capable, requires --allow-import)",
|
||||
_ => "(loading error)",
|
||||
},
|
||||
Jsr(_) => "(loading error)",
|
||||
NodeUnknownBuiltinModule(_) => "(unknown node built-in error)",
|
||||
Npm(_) => "(npm loading error)",
|
||||
TooManyRedirects => "(too many redirects error)",
|
||||
};
|
||||
self.build_error_msg(specifier, message.as_ref())
|
||||
}
|
||||
ModuleError::ParseErr(_, _) => {
|
||||
self.build_error_msg(specifier, "(parsing error)")
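
The info output above now labels each load-error kind instead of printing a blanket "(loading error)". A simplified sketch of the same mapping with a stand-in error enum (the real code matches on deno_graph::ModuleLoadError and inspects the custom error class):

// Stand-in for deno_graph::ModuleLoadError; only the labels matter here.
enum LoadError {
    ChecksumIntegrity,
    Decode,
    NotCapable,
    TooManyRedirects,
    Other,
}

fn load_error_label(err: &LoadError) -> &'static str {
    match err {
        LoadError::ChecksumIntegrity => "(checksum integrity error)",
        LoadError::Decode => "(loading decode error)",
        LoadError::NotCapable => "(not capable, requires --allow-import)",
        LoadError::TooManyRedirects => "(too many redirects error)",
        LoadError::Other => "(loading error)",
    }
}

fn main() {
    println!("{}", load_error_label(&LoadError::NotCapable));
}
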
@ -14,7 +14,7 @@ use deno_graph::ModuleGraph;
|
|||
use deno_lint::diagnostic::LintDiagnostic;
|
||||
use deno_lint::rules::LintRule;
|
||||
|
||||
use crate::resolver::SloppyImportsResolver;
|
||||
use crate::resolver::CliSloppyImportsResolver;
|
||||
|
||||
mod no_sloppy_imports;
|
||||
mod no_slow_types;
|
||||
|
@ -144,13 +144,13 @@ impl ConfiguredRules {
|
|||
}
|
||||
|
||||
pub struct LintRuleProvider {
|
||||
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
|
||||
sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
|
||||
workspace_resolver: Option<Arc<WorkspaceResolver>>,
|
||||
}
|
||||
|
||||
impl LintRuleProvider {
|
||||
pub fn new(
|
||||
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
|
||||
sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
|
||||
workspace_resolver: Option<Arc<WorkspaceResolver>>,
|
||||
) -> Self {
|
||||
Self {
|
||||
|
|
|
@ -16,24 +16,25 @@ use deno_lint::diagnostic::LintDiagnosticRange;
|
|||
use deno_lint::diagnostic::LintFix;
|
||||
use deno_lint::diagnostic::LintFixChange;
|
||||
use deno_lint::rules::LintRule;
|
||||
use deno_resolver::sloppy_imports::SloppyImportsResolution;
|
||||
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
|
||||
use text_lines::LineAndColumnIndex;
|
||||
|
||||
use crate::graph_util::CliJsrUrlProvider;
|
||||
use crate::resolver::SloppyImportsResolution;
|
||||
use crate::resolver::SloppyImportsResolver;
|
||||
use crate::resolver::CliSloppyImportsResolver;
|
||||
|
||||
use super::ExtendedLintRule;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct NoSloppyImportsRule {
|
||||
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
|
||||
sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
|
||||
// None for making printing out the lint rules easy
|
||||
workspace_resolver: Option<Arc<WorkspaceResolver>>,
|
||||
}
|
||||
|
||||
impl NoSloppyImportsRule {
|
||||
pub fn new(
|
||||
sloppy_imports_resolver: Option<Arc<SloppyImportsResolver>>,
|
||||
sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
|
||||
workspace_resolver: Option<Arc<WorkspaceResolver>>,
|
||||
) -> Self {
|
||||
NoSloppyImportsRule {
|
||||
|
@ -172,7 +173,7 @@ impl LintRule for NoSloppyImportsRule {
|
|||
#[derive(Debug)]
|
||||
struct SloppyImportCaptureResolver<'a> {
|
||||
workspace_resolver: &'a WorkspaceResolver,
|
||||
sloppy_imports_resolver: &'a SloppyImportsResolver,
|
||||
sloppy_imports_resolver: &'a CliSloppyImportsResolver,
|
||||
captures: RefCell<HashMap<Range, SloppyImportsResolution>>,
|
||||
}
|
||||
|
||||
|
@ -194,7 +195,13 @@ impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> {
|
|||
}
|
||||
| deno_config::workspace::MappedResolution::ImportMap {
|
||||
specifier, ..
|
||||
} => match self.sloppy_imports_resolver.resolve(&specifier, mode) {
|
||||
} => match self.sloppy_imports_resolver.resolve(
|
||||
&specifier,
|
||||
match mode {
|
||||
ResolutionMode::Execution => SloppyImportsResolutionMode::Execution,
|
||||
ResolutionMode::Types => SloppyImportsResolutionMode::Types,
|
||||
},
|
||||
) {
|
||||
Some(res) => {
|
||||
self
|
||||
.captures
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
use std::borrow::Cow;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
|
||||
use deno_ast::diagnostics::Diagnostic;
|
||||
use deno_ast::diagnostics::DiagnosticLevel;
|
||||
|
@ -21,6 +20,7 @@ use deno_ast::SourceRanged;
|
|||
use deno_ast::SourceTextInfo;
|
||||
use deno_core::anyhow::anyhow;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_core::url::Url;
|
||||
use deno_graph::FastCheckDiagnostic;
|
||||
use deno_semver::Version;
|
||||
|
@ -36,7 +36,7 @@ impl PublishDiagnosticsCollector {
|
|||
pub fn print_and_error(&self) -> Result<(), AnyError> {
|
||||
let mut errors = 0;
|
||||
let mut has_slow_types_errors = false;
|
||||
let mut diagnostics = self.diagnostics.lock().unwrap().take();
|
||||
let mut diagnostics = self.diagnostics.lock().take();
|
||||
|
||||
diagnostics.sort_by_cached_key(|d| d.sorting_key());
|
||||
|
||||
|
@ -75,8 +75,16 @@ impl PublishDiagnosticsCollector {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn has_error(&self) -> bool {
|
||||
self
|
||||
.diagnostics
|
||||
.lock()
|
||||
.iter()
|
||||
.any(|d| matches!(d.level(), DiagnosticLevel::Error))
|
||||
}
|
||||
|
||||
pub fn push(&self, diagnostic: PublishDiagnostic) {
|
||||
self.diagnostics.lock().unwrap().push(diagnostic);
|
||||
self.diagnostics.lock().push(diagnostic);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -128,7 +128,7 @@ impl GraphDiagnosticsCollector {
|
|||
follow_dynamic: true,
|
||||
// search the entire graph and not just the fast check subset
|
||||
prefer_fast_check_graph: false,
|
||||
follow_type_only: true,
|
||||
kind: deno_graph::GraphKind::All,
|
||||
};
|
||||
let mut iter = graph.walk(graph.roots.iter(), options);
|
||||
while let Some((specifier, entry)) = iter.next() {
|
||||
|
|
|
@ -43,7 +43,8 @@ use crate::cache::ParsedSourceCache;
|
|||
use crate::factory::CliFactory;
|
||||
use crate::graph_util::ModuleGraphCreator;
|
||||
use crate::http_util::HttpClient;
|
||||
use crate::resolver::SloppyImportsResolver;
|
||||
use crate::resolver::CliSloppyImportsResolver;
|
||||
use crate::resolver::SloppyImportsCachedFs;
|
||||
use crate::tools::check::CheckOptions;
|
||||
use crate::tools::lint::collect_no_slow_type_diagnostics;
|
||||
use crate::tools::registry::diagnostics::PublishDiagnostic;
|
||||
|
@ -108,7 +109,9 @@ pub async fn publish(
|
|||
}
|
||||
let specifier_unfurler = Arc::new(SpecifierUnfurler::new(
|
||||
if cli_options.unstable_sloppy_imports() {
|
||||
Some(SloppyImportsResolver::new(cli_factory.fs().clone()))
|
||||
Some(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
|
||||
cli_factory.fs().clone(),
|
||||
)))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
|
@ -341,13 +344,11 @@ impl PublishPreparer {
|
|||
bail!("Exiting due to DENO_INTERNAL_FAST_CHECK_OVERWRITE")
|
||||
} else {
|
||||
log::info!("Checking for slow types in the public API...");
|
||||
let mut any_pkg_had_diagnostics = false;
|
||||
for package in package_configs {
|
||||
let export_urls = package.config_file.resolve_export_value_urls()?;
|
||||
let diagnostics =
|
||||
collect_no_slow_type_diagnostics(&graph, &export_urls);
|
||||
if !diagnostics.is_empty() {
|
||||
any_pkg_had_diagnostics = true;
|
||||
for diagnostic in diagnostics {
|
||||
diagnostics_collector
|
||||
.push(PublishDiagnostic::FastCheck(diagnostic));
|
||||
|
@ -355,7 +356,9 @@ impl PublishPreparer {
|
|||
}
|
||||
}
|
||||
|
||||
if any_pkg_had_diagnostics {
|
||||
// skip type checking the slow type graph if there are any errors because
|
||||
// errors like remote modules existing will cause type checking to crash
|
||||
if diagnostics_collector.has_error() {
|
||||
Ok(Arc::new(graph))
|
||||
} else {
|
||||
// fast check passed, type check the output as a temporary measure
|
||||
|
|
|
@ -5,6 +5,7 @@ mod cache_deps;
|
|||
pub use cache_deps::cache_top_level_deps;
|
||||
use deno_semver::jsr::JsrPackageReqReference;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use deno_semver::VersionReq;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::path::PathBuf;
|
||||
|
@ -717,7 +718,18 @@ impl AddPackageReq {
      Prefix::Npm => {
        let req_ref =
          NpmPackageReqReference::from_str(&format!("npm:{}", entry_text))?;
        let package_req = req_ref.into_inner().req;
        let mut package_req = req_ref.into_inner().req;
        // deno_semver defaults to a version req of `*` if none is specified
        // we want to default to `latest` instead
        if package_req.version_req == *deno_semver::WILDCARD_VERSION_REQ
          && package_req.version_req.version_text() == "*"
          && !entry_text.contains("@*")
        {
          package_req.version_req = VersionReq::from_raw_text_and_inner(
            "latest".into(),
            deno_semver::RangeSetOrTag::Tag("latest".into()),
          );
        }
        Ok(Ok(AddPackageReq {
          alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()),
          value: AddPackageReqValue::Npm(package_req),
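
The intent of the hunk above: when `deno add npm:foo` carries no version, deno_semver parses it as the wildcard `*`, so the requirement is rewritten to the `latest` tag unless the user explicitly wrote `@*`. A plain-string sketch of that decision (not the real deno_semver types):

// Returns the version requirement text to use for an `npm:` add request.
fn effective_version_req(entry_text: &str, parsed_req: &str) -> String {
    // deno_semver defaults a missing requirement to "*"; only keep that if the
    // user explicitly asked for it with "@*".
    if parsed_req == "*" && !entry_text.contains("@*") {
        "latest".to_string()
    } else {
        parsed_req.to_string()
    }
}

fn main() {
    assert_eq!(effective_version_req("npm:foo", "*"), "latest");
    assert_eq!(effective_version_req("npm:foo@*", "*"), "*");
    assert_eq!(effective_version_req("npm:foo@^1.2", "^1.2"), "^1.2");
}
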
@ -888,7 +900,9 @@ mod test {
|
|||
AddPackageReq::parse("@alias/pkg@npm:foo").unwrap().unwrap(),
|
||||
AddPackageReq {
|
||||
alias: "@alias/pkg".to_string(),
|
||||
value: AddPackageReqValue::Npm(PackageReq::from_str("foo").unwrap())
|
||||
value: AddPackageReqValue::Npm(
|
||||
PackageReq::from_str("foo@latest").unwrap()
|
||||
)
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
|
|
|
@ -16,6 +16,7 @@ pub async fn cache_top_level_deps(
|
|||
) -> Result<(), AnyError> {
|
||||
let npm_resolver = factory.npm_resolver().await?;
|
||||
let cli_options = factory.cli_options()?;
|
||||
let root_permissions = factory.root_permissions_container()?;
|
||||
if let Some(npm_resolver) = npm_resolver.as_managed() {
|
||||
if !npm_resolver.ensure_top_level_package_json_install().await? {
|
||||
if let Some(lockfile) = cli_options.maybe_lockfile() {
|
||||
|
@ -106,7 +107,7 @@ pub async fn cache_top_level_deps(
|
|||
&roots,
|
||||
false,
|
||||
deno_config::deno_json::TsTypeLib::DenoWorker,
|
||||
crate::file_fetcher::FetchPermissionsOption::AllowAll,
|
||||
root_permissions.clone(),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
|
|
@ -12,9 +12,10 @@ use deno_graph::DynamicTemplatePart;
|
|||
use deno_graph::ParserModuleAnalyzer;
|
||||
use deno_graph::TypeScriptReference;
|
||||
use deno_package_json::PackageJsonDepValue;
|
||||
use deno_resolver::sloppy_imports::SloppyImportsResolutionMode;
|
||||
use deno_runtime::deno_node::is_builtin_node_module;
|
||||
|
||||
use crate::resolver::SloppyImportsResolver;
|
||||
use crate::resolver::CliSloppyImportsResolver;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum SpecifierUnfurlerDiagnostic {
|
||||
|
@ -42,14 +43,14 @@ impl SpecifierUnfurlerDiagnostic {
|
|||
}
|
||||
|
||||
pub struct SpecifierUnfurler {
|
||||
sloppy_imports_resolver: Option<SloppyImportsResolver>,
|
||||
sloppy_imports_resolver: Option<CliSloppyImportsResolver>,
|
||||
workspace_resolver: WorkspaceResolver,
|
||||
bare_node_builtins: bool,
|
||||
}
|
||||
|
||||
impl SpecifierUnfurler {
|
||||
pub fn new(
|
||||
sloppy_imports_resolver: Option<SloppyImportsResolver>,
|
||||
sloppy_imports_resolver: Option<CliSloppyImportsResolver>,
|
||||
workspace_resolver: WorkspaceResolver,
|
||||
bare_node_builtins: bool,
|
||||
) -> Self {
|
||||
|
@ -179,7 +180,7 @@ impl SpecifierUnfurler {
|
|||
let resolved =
|
||||
if let Some(sloppy_imports_resolver) = &self.sloppy_imports_resolver {
|
||||
sloppy_imports_resolver
|
||||
.resolve(&resolved, deno_graph::source::ResolutionMode::Execution)
|
||||
.resolve(&resolved, SloppyImportsResolutionMode::Execution)
|
||||
.map(|res| res.into_specifier())
|
||||
.unwrap_or(resolved)
|
||||
} else {
|
||||
|
@ -388,6 +389,8 @@ fn to_range(
|
|||
mod tests {
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::resolver::SloppyImportsCachedFs;
|
||||
|
||||
use super::*;
|
||||
use deno_ast::MediaType;
|
||||
use deno_ast::ModuleSpecifier;
|
||||
|
@ -455,7 +458,9 @@ mod tests {
|
|||
);
|
||||
let fs = Arc::new(RealFs);
|
||||
let unfurler = SpecifierUnfurler::new(
|
||||
Some(SloppyImportsResolver::new(fs)),
|
||||
Some(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new(
|
||||
fs,
|
||||
))),
|
||||
workspace_resolver,
|
||||
true,
|
||||
);
|
||||
|
|
|
@ -162,7 +162,7 @@ pub async fn run(
|
|||
let factory = CliFactory::from_flags(flags);
|
||||
let cli_options = factory.cli_options()?;
|
||||
let main_module = cli_options.resolve_main_module()?;
|
||||
let permissions = factory.create_permissions_container()?;
|
||||
let permissions = factory.root_permissions_container()?;
|
||||
let npm_resolver = factory.npm_resolver().await?.clone();
|
||||
let resolver = factory.resolver().await?.clone();
|
||||
let file_fetcher = factory.file_fetcher()?;
|
||||
|
@ -177,7 +177,7 @@ pub async fn run(
|
|||
.create_custom_worker(
|
||||
WorkerExecutionMode::Repl,
|
||||
main_module.clone(),
|
||||
permissions,
|
||||
permissions.clone(),
|
||||
vec![crate::ops::testing::deno_test::init_ops(test_event_sender)],
|
||||
Default::default(),
|
||||
)
|
||||
|
@ -189,7 +189,7 @@ pub async fn run(
|
|||
npm_resolver,
|
||||
resolver,
|
||||
worker,
|
||||
main_module,
|
||||
main_module.clone(),
|
||||
test_event_receiver,
|
||||
)
|
||||
.await?;
|
||||
|
|
|
@ -60,10 +60,9 @@ pub async fn run_script(
|
|||
|
||||
maybe_npm_install(&factory).await?;
|
||||
|
||||
let permissions = factory.create_permissions_container()?;
|
||||
let worker_factory = factory.create_cli_main_worker_factory().await?;
|
||||
let mut worker = worker_factory
|
||||
.create_main_worker(mode, main_module, permissions)
|
||||
.create_main_worker(mode, main_module.clone())
|
||||
.await?;
|
||||
|
||||
let exit_code = worker.run().await?;
|
||||
|
@ -79,7 +78,6 @@ pub async fn run_from_stdin(flags: Arc<Flags>) -> Result<i32, AnyError> {
|
|||
|
||||
let file_fetcher = factory.file_fetcher()?;
|
||||
let worker_factory = factory.create_cli_main_worker_factory().await?;
|
||||
let permissions = factory.create_permissions_container()?;
|
||||
let mut source = Vec::new();
|
||||
std::io::stdin().read_to_end(&mut source)?;
|
||||
// Save a fake file into file fetcher cache
|
||||
|
@ -91,7 +89,7 @@ pub async fn run_from_stdin(flags: Arc<Flags>) -> Result<i32, AnyError> {
|
|||
});
|
||||
|
||||
let mut worker = worker_factory
|
||||
.create_main_worker(WorkerExecutionMode::Run, main_module, permissions)
|
||||
.create_main_worker(WorkerExecutionMode::Run, main_module.clone())
|
||||
.await?;
|
||||
let exit_code = worker.run().await?;
|
||||
Ok(exit_code)
|
||||
|
@ -125,11 +123,10 @@ async fn run_with_watch(
|
|||
|
||||
let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
|
||||
|
||||
let permissions = factory.create_permissions_container()?;
|
||||
let mut worker = factory
|
||||
.create_cli_main_worker_factory()
|
||||
.await?
|
||||
.create_main_worker(mode, main_module, permissions)
|
||||
.create_main_worker(mode, main_module.clone())
|
||||
.await?;
|
||||
|
||||
if watch_flags.hmr {
|
||||
|
@ -173,10 +170,9 @@ pub async fn eval_command(
|
|||
source: source_code.into_bytes().into(),
|
||||
});
|
||||
|
||||
let permissions = factory.create_permissions_container()?;
|
||||
let worker_factory = factory.create_cli_main_worker_factory().await?;
|
||||
let mut worker = worker_factory
|
||||
.create_main_worker(WorkerExecutionMode::Eval, main_module, permissions)
|
||||
.create_main_worker(WorkerExecutionMode::Eval, main_module.clone())
|
||||
.await?;
|
||||
let exit_code = worker.run().await?;
|
||||
Ok(exit_code)
|
||||
|
|
|
@ -5,7 +5,6 @@ use std::sync::Arc;
|
|||
use deno_core::error::AnyError;
|
||||
use deno_core::futures::TryFutureExt;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_runtime::deno_permissions::PermissionsContainer;
|
||||
|
||||
use super::run::check_permission_before_script;
|
||||
use super::run::maybe_npm_install;
|
||||
|
@ -44,13 +43,11 @@ pub async fn serve(
|
|||
|
||||
maybe_npm_install(&factory).await?;
|
||||
|
||||
let permissions = factory.create_permissions_container()?;
|
||||
let worker_factory = factory.create_cli_main_worker_factory().await?;
|
||||
|
||||
do_serve(
|
||||
worker_factory,
|
||||
main_module,
|
||||
permissions,
|
||||
main_module.clone(),
|
||||
serve_flags.worker_count,
|
||||
false,
|
||||
)
|
||||
|
@ -60,7 +57,6 @@ pub async fn serve(
|
|||
async fn do_serve(
|
||||
worker_factory: CliMainWorkerFactory,
|
||||
main_module: ModuleSpecifier,
|
||||
permissions: PermissionsContainer,
|
||||
worker_count: Option<usize>,
|
||||
hmr: bool,
|
||||
) -> Result<i32, AnyError> {
|
||||
|
@ -71,7 +67,6 @@ async fn do_serve(
|
|||
worker_count,
|
||||
},
|
||||
main_module.clone(),
|
||||
permissions.clone(),
|
||||
)
|
||||
.await?;
|
||||
let worker_count = match worker_count {
|
||||
|
@ -87,15 +82,13 @@ async fn do_serve(
|
|||
for i in 0..extra_workers {
|
||||
let worker_factory = worker_factory.clone();
|
||||
let main_module = main_module.clone();
|
||||
let permissions = permissions.clone();
|
||||
let (tx, rx) = tokio::sync::oneshot::channel();
|
||||
channels.push(rx);
|
||||
std::thread::Builder::new()
|
||||
.name(format!("serve-worker-{i}"))
|
||||
.spawn(move || {
|
||||
deno_runtime::tokio_util::create_and_run_current_thread(async move {
|
||||
let result =
|
||||
run_worker(i, worker_factory, main_module, permissions, hmr).await;
|
||||
let result = run_worker(i, worker_factory, main_module, hmr).await;
|
||||
let _ = tx.send(result);
|
||||
});
|
||||
})?;
|
||||
|
@ -124,7 +117,6 @@ async fn run_worker(
|
|||
worker_count: usize,
|
||||
worker_factory: CliMainWorkerFactory,
|
||||
main_module: ModuleSpecifier,
|
||||
permissions: PermissionsContainer,
|
||||
hmr: bool,
|
||||
) -> Result<i32, AnyError> {
|
||||
let mut worker = worker_factory
|
||||
|
@ -134,7 +126,6 @@ async fn run_worker(
|
|||
worker_count: Some(worker_count),
|
||||
},
|
||||
main_module,
|
||||
permissions,
|
||||
)
|
||||
.await?;
|
||||
if hmr {
|
||||
|
@ -171,11 +162,9 @@ async fn serve_with_watch(
|
|||
maybe_npm_install(&factory).await?;
|
||||
|
||||
let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
|
||||
|
||||
let permissions = factory.create_permissions_container()?;
|
||||
let worker_factory = factory.create_cli_main_worker_factory().await?;
|
||||
|
||||
do_serve(worker_factory, main_module, permissions, worker_count, hmr)
|
||||
do_serve(worker_factory, main_module.clone(), worker_count, hmr)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
|
|
|
@ -16,7 +16,7 @@ use deno_core::anyhow::anyhow;
|
|||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::normalize_path;
|
||||
use deno_path_util::normalize_path;
|
||||
use deno_task_shell::ShellCommand;
|
||||
|
||||
use crate::args::CliOptions;
|
||||
|
@ -190,9 +190,7 @@ async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> {
|
|||
custom_commands,
|
||||
init_cwd: opts.cli_options.initial_cwd(),
|
||||
argv: cli_options.argv(),
|
||||
root_node_modules_dir: npm_resolver
|
||||
.root_node_modules_path()
|
||||
.map(|p| p.as_path()),
|
||||
root_node_modules_dir: npm_resolver.root_node_modules_path(),
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
|
|
@ -140,7 +140,9 @@ impl Diagnostic {
  pub fn include_when_remote(&self) -> bool {
    /// TS6133: value is declared but its value is never read (noUnusedParameters and noUnusedLocals)
    const TS6133: u64 = 6133;
    self.code != TS6133
    /// TS4114: This member must have an 'override' modifier because it overrides a member in the base class 'X'.
    const TS4114: u64 = 4114;
    !matches!(self.code, TS6133 | TS4114)
  }
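
include_when_remote now also suppresses TS4114 for remote modules. The same check in isolation, as a free function:

/// Diagnostic codes that should not be reported for remote (non-user) modules:
/// TS6133 (declared but never read) and TS4114 (missing `override` modifier).
fn include_when_remote(code: u64) -> bool {
    const TS6133: u64 = 6133;
    const TS4114: u64 = 4114;
    !matches!(code, TS6133 | TS4114)
}

fn main() {
    assert!(!include_when_remote(6133));
    assert!(!include_when_remote(4114));
    assert!(include_when_remote(2304)); // e.g. "cannot find name" still surfaces
}
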
fn fmt_category_and_code(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
|
|
4
cli/tsc/dts/lib.deno.ns.d.ts
vendored
|
@ -6041,9 +6041,11 @@ declare namespace Deno {
|
|||
*
|
||||
* @category Fetch
|
||||
*/
|
||||
export interface HttpClient extends Disposable {
|
||||
export class HttpClient implements Disposable {
|
||||
/** Close the HTTP client. */
|
||||
close(): void;
|
||||
|
||||
[Symbol.dispose](): void;
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
194
cli/util/fs.rs
|
@ -20,7 +20,6 @@ use deno_core::error::AnyError;
|
|||
use deno_core::unsync::spawn_blocking;
|
||||
use deno_core::ModuleSpecifier;
|
||||
use deno_runtime::deno_fs::FileSystem;
|
||||
use deno_runtime::deno_node::PathClean;
|
||||
|
||||
use crate::util::path::get_atomic_file_path;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
|
@ -37,10 +36,98 @@ pub fn atomic_write_file_with_retries<T: AsRef<[u8]>>(
|
|||
file_path: &Path,
|
||||
data: T,
|
||||
mode: u32,
|
||||
) -> std::io::Result<()> {
|
||||
struct RealAtomicWriteFileFs {
|
||||
mode: u32,
|
||||
}
|
||||
|
||||
impl AtomicWriteFileFs for RealAtomicWriteFileFs {
|
||||
fn write_file(&self, path: &Path, bytes: &[u8]) -> std::io::Result<()> {
|
||||
write_file(path, bytes, self.mode)
|
||||
}
|
||||
fn rename_file(&self, from: &Path, to: &Path) -> std::io::Result<()> {
|
||||
std::fs::rename(from, to)
|
||||
}
|
||||
fn remove_file(&self, path: &Path) -> std::io::Result<()> {
|
||||
std::fs::remove_file(path)
|
||||
}
|
||||
fn create_dir_all(&self, dir_path: &Path) -> std::io::Result<()> {
|
||||
std::fs::create_dir_all(dir_path)
|
||||
}
|
||||
fn path_exists(&self, path: &Path) -> bool {
|
||||
path.exists()
|
||||
}
|
||||
}
|
||||
|
||||
atomic_write_file_with_retries_and_fs(
|
||||
&RealAtomicWriteFileFs { mode },
|
||||
file_path,
|
||||
data.as_ref(),
|
||||
)
|
||||
}
|
||||
|
||||
pub trait AtomicWriteFileFs {
|
||||
fn write_file(&self, path: &Path, bytes: &[u8]) -> std::io::Result<()>;
|
||||
fn rename_file(&self, from: &Path, to: &Path) -> std::io::Result<()>;
|
||||
fn remove_file(&self, path: &Path) -> std::io::Result<()>;
|
||||
fn create_dir_all(&self, dir_path: &Path) -> std::io::Result<()>;
|
||||
fn path_exists(&self, path: &Path) -> bool;
|
||||
}
|
||||
|
||||
pub struct AtomicWriteFileFsAdapter<'a> {
|
||||
pub fs: &'a dyn FileSystem,
|
||||
pub write_mode: u32,
|
||||
}
|
||||
|
||||
impl<'a> AtomicWriteFileFs for AtomicWriteFileFsAdapter<'a> {
|
||||
fn write_file(&self, path: &Path, bytes: &[u8]) -> std::io::Result<()> {
|
||||
self
|
||||
.fs
|
||||
.write_file_sync(
|
||||
path,
|
||||
deno_runtime::deno_fs::OpenOptions::write(
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
Some(self.write_mode),
|
||||
),
|
||||
None,
|
||||
bytes,
|
||||
)
|
||||
.map_err(|e| e.into_io_error())
|
||||
}
|
||||
|
||||
fn rename_file(&self, from: &Path, to: &Path) -> std::io::Result<()> {
|
||||
self.fs.rename_sync(from, to).map_err(|e| e.into_io_error())
|
||||
}
|
||||
|
||||
fn remove_file(&self, path: &Path) -> std::io::Result<()> {
|
||||
self
|
||||
.fs
|
||||
.remove_sync(path, false)
|
||||
.map_err(|e| e.into_io_error())
|
||||
}
|
||||
|
||||
fn create_dir_all(&self, dir_path: &Path) -> std::io::Result<()> {
|
||||
self
|
||||
.fs
|
||||
.mkdir_sync(dir_path, /* recursive */ true, None)
|
||||
.map_err(|e| e.into_io_error())
|
||||
}
|
||||
|
||||
fn path_exists(&self, path: &Path) -> bool {
|
||||
self.fs.exists_sync(path)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn atomic_write_file_with_retries_and_fs<T: AsRef<[u8]>>(
|
||||
fs: &impl AtomicWriteFileFs,
|
||||
file_path: &Path,
|
||||
data: T,
|
||||
) -> std::io::Result<()> {
|
||||
let mut count = 0;
|
||||
loop {
|
||||
match atomic_write_file(file_path, data.as_ref(), mode) {
|
||||
match atomic_write_file(fs, file_path, data.as_ref()) {
|
||||
Ok(()) => return Ok(()),
|
||||
Err(err) => {
|
||||
if count >= 5 {
|
||||
|
@ -61,63 +148,54 @@ pub fn atomic_write_file_with_retries<T: AsRef<[u8]>>(
|
|||
///
|
||||
/// This also handles creating the directory if a NotFound error
|
||||
/// occurs.
|
||||
fn atomic_write_file<T: AsRef<[u8]>>(
|
||||
fn atomic_write_file(
|
||||
fs: &impl AtomicWriteFileFs,
|
||||
file_path: &Path,
|
||||
data: T,
|
||||
mode: u32,
|
||||
data: &[u8],
|
||||
) -> std::io::Result<()> {
|
||||
fn atomic_write_file_raw(
|
||||
fs: &impl AtomicWriteFileFs,
|
||||
temp_file_path: &Path,
|
||||
file_path: &Path,
|
||||
data: &[u8],
|
||||
mode: u32,
|
||||
) -> std::io::Result<()> {
|
||||
write_file(temp_file_path, data, mode)?;
|
||||
std::fs::rename(temp_file_path, file_path).map_err(|err| {
|
||||
fs.write_file(temp_file_path, data)?;
|
||||
fs.rename_file(temp_file_path, file_path).map_err(|err| {
|
||||
// clean up the created temp file on error
|
||||
let _ = std::fs::remove_file(temp_file_path);
|
||||
let _ = fs.remove_file(temp_file_path);
|
||||
err
|
||||
})
|
||||
}
|
||||
|
||||
fn inner(file_path: &Path, data: &[u8], mode: u32) -> std::io::Result<()> {
|
||||
let temp_file_path = get_atomic_file_path(file_path);
|
||||
let temp_file_path = get_atomic_file_path(file_path);
|
||||
|
||||
if let Err(write_err) =
|
||||
atomic_write_file_raw(&temp_file_path, file_path, data, mode)
|
||||
{
|
||||
if write_err.kind() == ErrorKind::NotFound {
|
||||
let parent_dir_path = file_path.parent().unwrap();
|
||||
match std::fs::create_dir_all(parent_dir_path) {
|
||||
Ok(()) => {
|
||||
return atomic_write_file_raw(
|
||||
&temp_file_path,
|
||||
file_path,
|
||||
data,
|
||||
mode,
|
||||
)
|
||||
if let Err(write_err) =
|
||||
atomic_write_file_raw(fs, &temp_file_path, file_path, data)
|
||||
{
|
||||
if write_err.kind() == ErrorKind::NotFound {
|
||||
let parent_dir_path = file_path.parent().unwrap();
|
||||
match fs.create_dir_all(parent_dir_path) {
|
||||
Ok(()) => {
|
||||
return atomic_write_file_raw(fs, &temp_file_path, file_path, data)
|
||||
.map_err(|err| add_file_context_to_err(file_path, err));
|
||||
}
|
||||
Err(create_err) => {
|
||||
if !parent_dir_path.exists() {
|
||||
return Err(Error::new(
|
||||
create_err.kind(),
|
||||
format!(
|
||||
"{:#} (for '{}')\nCheck the permission of the directory.",
|
||||
create_err,
|
||||
parent_dir_path.display()
|
||||
),
|
||||
));
|
||||
}
|
||||
}
|
||||
Err(create_err) => {
|
||||
if !fs.path_exists(parent_dir_path) {
|
||||
return Err(Error::new(
|
||||
create_err.kind(),
|
||||
format!(
|
||||
"{:#} (for '{}')\nCheck the permission of the directory.",
|
||||
create_err,
|
||||
parent_dir_path.display()
|
||||
),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
return Err(add_file_context_to_err(file_path, write_err));
|
||||
}
|
||||
Ok(())
|
||||
return Err(add_file_context_to_err(file_path, write_err));
|
||||
}
|
||||
|
||||
inner(file_path, data.as_ref(), mode)
|
||||
Ok(())
|
||||
}
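
The refactor above threads every file-system call through the AtomicWriteFileFs trait so the same temp-file-then-rename logic serves both the real FS and the compiled-binary VFS. A self-contained sketch of the core write path against std::fs (retry loop and mode handling omitted; temp_path_for is a stand-in for get_atomic_file_path):

use std::io;
use std::path::{Path, PathBuf};

fn temp_path_for(path: &Path) -> PathBuf {
    // Stand-in for get_atomic_file_path(); a real implementation should pick a
    // unique sibling name to avoid clobbering concurrent writers.
    path.with_extension("tmp")
}

fn atomic_write_file(file_path: &Path, data: &[u8]) -> io::Result<()> {
    let temp = temp_path_for(file_path);
    let write_and_rename = || -> io::Result<()> {
        std::fs::write(&temp, data)?;
        std::fs::rename(&temp, file_path).map_err(|err| {
            // Clean up the temp file if the rename failed.
            let _ = std::fs::remove_file(&temp);
            err
        })
    };
    match write_and_rename() {
        Ok(()) => Ok(()),
        // If the parent directory was missing, create it and try once more.
        Err(err) if err.kind() == io::ErrorKind::NotFound => {
            if let Some(parent) = file_path.parent() {
                std::fs::create_dir_all(parent)?;
            }
            write_and_rename()
        }
        Err(err) => Err(err),
    }
}

fn main() -> io::Result<()> {
    atomic_write_file(Path::new("target/example/out.txt"), b"hello")
}
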
/// Creates a std::fs::File handling if the parent does not exist.
|
||||
|
@ -211,48 +289,18 @@ pub fn canonicalize_path(path: &Path) -> Result<PathBuf, Error> {
|
|||
pub fn canonicalize_path_maybe_not_exists(
|
||||
path: &Path,
|
||||
) -> Result<PathBuf, Error> {
|
||||
canonicalize_path_maybe_not_exists_with_custom_fn(path, canonicalize_path)
|
||||
deno_path_util::canonicalize_path_maybe_not_exists(path, &canonicalize_path)
|
||||
}
|
||||
|
||||
pub fn canonicalize_path_maybe_not_exists_with_fs(
|
||||
path: &Path,
|
||||
fs: &dyn FileSystem,
|
||||
) -> Result<PathBuf, Error> {
|
||||
canonicalize_path_maybe_not_exists_with_custom_fn(path, |path| {
|
||||
deno_path_util::canonicalize_path_maybe_not_exists(path, &|path| {
|
||||
fs.realpath_sync(path).map_err(|err| err.into_io_error())
|
||||
})
|
||||
}
|
||||
|
||||
fn canonicalize_path_maybe_not_exists_with_custom_fn(
|
||||
path: &Path,
|
||||
canonicalize: impl Fn(&Path) -> Result<PathBuf, Error>,
|
||||
) -> Result<PathBuf, Error> {
|
||||
let path = path.to_path_buf().clean();
|
||||
let mut path = path.as_path();
|
||||
let mut names_stack = Vec::new();
|
||||
loop {
|
||||
match canonicalize(path) {
|
||||
Ok(mut canonicalized_path) => {
|
||||
for name in names_stack.into_iter().rev() {
|
||||
canonicalized_path = canonicalized_path.join(name);
|
||||
}
|
||||
return Ok(canonicalized_path);
|
||||
}
|
||||
Err(err) if err.kind() == ErrorKind::NotFound => {
|
||||
names_stack.push(match path.file_name() {
|
||||
Some(name) => name.to_owned(),
|
||||
None => return Err(err),
|
||||
});
|
||||
path = match path.parent() {
|
||||
Some(parent) => parent,
|
||||
None => return Err(err),
|
||||
};
|
||||
}
|
||||
Err(err) => return Err(err),
|
||||
}
|
||||
}
|
||||
}
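
canonicalize_path_maybe_not_exists now delegates to deno_path_util, but the algorithm is the one shown above: climb to the nearest existing ancestor, canonicalize it, then re-append the components that do not exist yet. A standalone sketch against std::fs:

use std::io;
use std::path::{Path, PathBuf};

fn canonicalize_maybe_not_exists(path: &Path) -> io::Result<PathBuf> {
    let mut current = path;
    let mut missing_names = Vec::new();
    loop {
        match std::fs::canonicalize(current) {
            Ok(mut canonical) => {
                // Re-attach the components that did not exist yet, innermost last.
                for name in missing_names.iter().rev() {
                    canonical = canonical.join(name);
                }
                return Ok(canonical);
            }
            Err(err) if err.kind() == io::ErrorKind::NotFound => {
                let Some(name) = current.file_name() else { return Err(err) };
                missing_names.push(name.to_owned());
                let Some(parent) = current.parent() else { return Err(err) };
                current = parent;
            }
            Err(err) => return Err(err),
        }
    }
}

fn main() -> io::Result<()> {
    println!("{}", canonicalize_maybe_not_exists(Path::new("./does/not/exist.txt"))?.display());
    Ok(())
}
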
/// Collects module specifiers that satisfy the given predicate as a file path, by recursively walking `include`.
|
||||
/// Specifiers that start with http and https are left intact.
|
||||
/// Note: This ignores all .git and node_modules folders.
|
||||
|
@ -708,8 +756,8 @@ pub fn specifier_from_file_path(
|
|||
mod tests {
|
||||
use super::*;
|
||||
use deno_core::futures;
|
||||
use deno_core::normalize_path;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_path_util::normalize_path;
|
||||
use pretty_assertions::assert_eq;
|
||||
use test_util::PathRef;
|
||||
use test_util::TempDir;
|
||||
|
|
|
@ -165,48 +165,6 @@ pub fn relative_path(from: &Path, to: &Path) -> Option<PathBuf> {
|
|||
pathdiff::diff_paths(to, from)
|
||||
}
|
||||
|
||||
/// Gets if the provided character is not supported on all
|
||||
/// kinds of file systems.
|
||||
pub fn is_banned_path_char(c: char) -> bool {
|
||||
matches!(c, '<' | '>' | ':' | '"' | '|' | '?' | '*')
|
||||
}
|
||||
|
||||
/// Gets a safe local directory name for the provided url.
|
||||
///
|
||||
/// For example:
|
||||
/// https://deno.land:8080/path -> deno.land_8080/path
|
||||
pub fn root_url_to_safe_local_dirname(root: &ModuleSpecifier) -> PathBuf {
|
||||
fn sanitize_segment(text: &str) -> String {
|
||||
text
|
||||
.chars()
|
||||
.map(|c| if is_banned_segment_char(c) { '_' } else { c })
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn is_banned_segment_char(c: char) -> bool {
|
||||
matches!(c, '/' | '\\') || is_banned_path_char(c)
|
||||
}
|
||||
|
||||
let mut result = String::new();
|
||||
if let Some(domain) = root.domain() {
|
||||
result.push_str(&sanitize_segment(domain));
|
||||
}
|
||||
if let Some(port) = root.port() {
|
||||
if !result.is_empty() {
|
||||
result.push('_');
|
||||
}
|
||||
result.push_str(&port.to_string());
|
||||
}
|
||||
let mut result = PathBuf::from(result);
|
||||
if let Some(segments) = root.path_segments() {
|
||||
for segment in segments.filter(|s| !s.is_empty()) {
|
||||
result = result.join(sanitize_segment(segment));
|
||||
}
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// Slightly different behaviour than the default matching
|
||||
/// where an exact path needs to be matched to be opted-in
|
||||
/// rather than just a partial directory match.
|
||||
|
|
141
cli/worker.rs
|
@ -29,12 +29,14 @@ use deno_runtime::deno_tls::RootCertStoreProvider;
|
|||
use deno_runtime::deno_web::BlobStore;
|
||||
use deno_runtime::fmt_errors::format_js_error;
|
||||
use deno_runtime::inspector_server::InspectorServer;
|
||||
use deno_runtime::ops::process::NpmProcessStateProviderRc;
|
||||
use deno_runtime::ops::worker_host::CreateWebWorkerCb;
|
||||
use deno_runtime::permissions::RuntimePermissionDescriptorParser;
|
||||
use deno_runtime::web_worker::WebWorker;
|
||||
use deno_runtime::web_worker::WebWorkerOptions;
|
||||
use deno_runtime::web_worker::WebWorkerServiceOptions;
|
||||
use deno_runtime::worker::MainWorker;
|
||||
use deno_runtime::worker::WorkerOptions;
|
||||
use deno_runtime::worker::WorkerServiceOptions;
|
||||
use deno_runtime::BootstrapOptions;
|
||||
use deno_runtime::WorkerExecutionMode;
|
||||
use deno_runtime::WorkerLogLevel;
|
||||
|
@ -62,13 +64,12 @@ pub trait ModuleLoaderFactory: Send + Sync {
|
|||
fn create_for_main(
|
||||
&self,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
) -> ModuleLoaderAndSourceMapGetter;
|
||||
|
||||
fn create_for_worker(
|
||||
&self,
|
||||
root_permissions: PermissionsContainer,
|
||||
dynamic_permissions: PermissionsContainer,
|
||||
parent_permissions: PermissionsContainer,
|
||||
permissions: PermissionsContainer,
|
||||
) -> ModuleLoaderAndSourceMapGetter;
|
||||
}
|
||||
|
||||
|
@ -134,8 +135,8 @@ struct SharedWorkerState {
|
|||
module_loader_factory: Box<dyn ModuleLoaderFactory>,
|
||||
node_resolver: Arc<NodeResolver>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
permission_desc_parser: Arc<RuntimePermissionDescriptorParser>,
|
||||
root_cert_store_provider: Arc<dyn RootCertStoreProvider>,
|
||||
root_permissions: PermissionsContainer,
|
||||
shared_array_buffer_store: SharedArrayBufferStore,
|
||||
storage_key_resolver: StorageKeyResolver,
|
||||
options: CliMainWorkerOptions,
|
||||
|
@ -147,13 +148,13 @@ impl SharedWorkerState {
|
|||
NodeExtInitServices {
|
||||
node_require_resolver: self.npm_resolver.clone().into_require_resolver(),
|
||||
node_resolver: self.node_resolver.clone(),
|
||||
npm_process_state_provider: self
|
||||
.npm_resolver
|
||||
.clone()
|
||||
.into_process_state_provider(),
|
||||
npm_resolver: self.npm_resolver.clone().into_npm_resolver(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn npm_process_state_provider(&self) -> NpmProcessStateProviderRc {
|
||||
self.npm_resolver.clone().into_process_state_provider()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CliMainWorker {
|
||||
|
@ -430,8 +431,8 @@ impl CliMainWorkerFactory {
|
|||
module_loader_factory: Box<dyn ModuleLoaderFactory>,
|
||||
node_resolver: Arc<NodeResolver>,
|
||||
npm_resolver: Arc<dyn CliNpmResolver>,
|
||||
permission_parser: Arc<RuntimePermissionDescriptorParser>,
|
||||
root_cert_store_provider: Arc<dyn RootCertStoreProvider>,
|
||||
root_permissions: PermissionsContainer,
|
||||
storage_key_resolver: StorageKeyResolver,
|
||||
subcommand: DenoSubcommand,
|
||||
options: CliMainWorkerOptions,
|
||||
|
@ -450,8 +451,8 @@ impl CliMainWorkerFactory {
|
|||
module_loader_factory,
|
||||
node_resolver,
|
||||
npm_resolver,
|
||||
permission_desc_parser: permission_parser,
|
||||
root_cert_store_provider,
|
||||
root_permissions,
|
||||
shared_array_buffer_store: Default::default(),
|
||||
storage_key_resolver,
|
||||
options,
|
||||
|
@ -464,13 +465,12 @@ impl CliMainWorkerFactory {
|
|||
&self,
|
||||
mode: WorkerExecutionMode,
|
||||
main_module: ModuleSpecifier,
|
||||
permissions: PermissionsContainer,
|
||||
) -> Result<CliMainWorker, AnyError> {
|
||||
self
|
||||
.create_custom_worker(
|
||||
mode,
|
||||
main_module,
|
||||
permissions,
|
||||
self.shared.root_permissions.clone(),
|
||||
vec![],
|
||||
Default::default(),
|
||||
)
|
||||
|
@ -530,13 +530,9 @@ impl CliMainWorkerFactory {
|
|||
(main_module, is_cjs)
|
||||
};
|
||||
|
||||
let ModuleLoaderAndSourceMapGetter { module_loader } =
|
||||
shared.module_loader_factory.create_for_main(
|
||||
PermissionsContainer::allow_all(
|
||||
self.shared.permission_desc_parser.clone(),
|
||||
),
|
||||
permissions.clone(),
|
||||
);
|
||||
let ModuleLoaderAndSourceMapGetter { module_loader } = shared
|
||||
.module_loader_factory
|
||||
.create_for_main(permissions.clone());
|
||||
let maybe_inspector_server = shared.maybe_inspector_server.clone();
|
||||
|
||||
let create_web_worker_cb =
|
||||
|
@ -572,6 +568,22 @@ impl CliMainWorkerFactory {
|
|||
}
|
||||
}
|
||||
|
||||
let services = WorkerServiceOptions {
|
||||
root_cert_store_provider: Some(shared.root_cert_store_provider.clone()),
|
||||
module_loader,
|
||||
fs: shared.fs.clone(),
|
||||
node_services: Some(shared.create_node_init_services()),
|
||||
npm_process_state_provider: Some(shared.npm_process_state_provider()),
|
||||
blob_store: shared.blob_store.clone(),
|
||||
broadcast_channel: shared.broadcast_channel.clone(),
|
||||
shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()),
|
||||
compiled_wasm_module_store: Some(
|
||||
shared.compiled_wasm_module_store.clone(),
|
||||
),
|
||||
feature_checker,
|
||||
permissions,
|
||||
v8_code_cache: shared.code_cache.clone(),
|
||||
};
|
||||
let options = WorkerOptions {
|
||||
bootstrap: BootstrapOptions {
|
||||
deno_version: crate::version::DENO_VERSION_INFO.deno.to_string(),
|
||||
|
@ -606,7 +618,6 @@ impl CliMainWorkerFactory {
|
|||
.options
|
||||
.unsafely_ignore_certificate_errors
|
||||
.clone(),
|
||||
root_cert_store_provider: Some(shared.root_cert_store_provider.clone()),
|
||||
seed: shared.options.seed,
|
||||
format_js_error_fn: Some(Arc::new(format_js_error)),
|
||||
create_web_worker_cb,
|
||||
|
@ -614,28 +625,16 @@ impl CliMainWorkerFactory {
|
|||
should_break_on_first_statement: shared.options.inspect_brk,
|
||||
should_wait_for_inspector_session: shared.options.inspect_wait,
|
||||
strace_ops: shared.options.strace_ops.clone(),
|
||||
module_loader,
|
||||
fs: shared.fs.clone(),
|
||||
node_services: Some(shared.create_node_init_services()),
|
||||
get_error_class_fn: Some(&errors::get_error_class_name),
|
||||
cache_storage_dir,
|
||||
origin_storage_dir,
|
||||
blob_store: shared.blob_store.clone(),
|
||||
broadcast_channel: shared.broadcast_channel.clone(),
|
||||
shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()),
|
||||
compiled_wasm_module_store: Some(
|
||||
shared.compiled_wasm_module_store.clone(),
|
||||
),
|
||||
stdio,
|
||||
feature_checker,
|
||||
permission_desc_parser: shared.permission_desc_parser.clone(),
|
||||
skip_op_registration: shared.options.skip_op_registration,
|
||||
v8_code_cache: shared.code_cache.clone(),
|
||||
};
|
||||
|
||||
let mut worker = MainWorker::bootstrap_from_options(
|
||||
main_module.clone(),
|
||||
permissions,
|
||||
services,
|
||||
options,
|
||||
);
|
||||
|
||||
|
@ -767,7 +766,26 @@ fn create_web_worker_callback(
|
|||
}
|
||||
}
|
||||
|
||||
let services = WebWorkerServiceOptions {
|
||||
root_cert_store_provider: Some(shared.root_cert_store_provider.clone()),
|
||||
module_loader,
|
||||
fs: shared.fs.clone(),
|
||||
node_services: Some(shared.create_node_init_services()),
|
||||
blob_store: shared.blob_store.clone(),
|
||||
broadcast_channel: shared.broadcast_channel.clone(),
|
||||
shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()),
|
||||
compiled_wasm_module_store: Some(
|
||||
shared.compiled_wasm_module_store.clone(),
|
||||
),
|
||||
maybe_inspector_server,
|
||||
feature_checker,
|
||||
npm_process_state_provider: Some(shared.npm_process_state_provider()),
|
||||
permissions: args.permissions,
|
||||
};
|
||||
let options = WebWorkerOptions {
|
||||
name: args.name,
|
||||
main_module: args.main_module.clone(),
|
||||
worker_id: args.worker_id,
|
||||
bootstrap: BootstrapOptions {
|
||||
deno_version: crate::version::DENO_VERSION_INFO.deno.to_string(),
|
||||
args: shared.options.argv.clone(),
|
||||
|
@ -778,7 +796,7 @@ fn create_web_worker_callback(
|
|||
enable_op_summary_metrics: shared.options.enable_op_summary_metrics,
|
||||
enable_testing_features: shared.options.enable_testing_features,
|
||||
locale: deno_core::v8::icu::get_language_tag(),
|
||||
location: Some(args.main_module.clone()),
|
||||
location: Some(args.main_module),
|
||||
no_color: !colors::use_color(),
|
||||
color_level: colors::get_color_level(),
|
||||
is_stdout_tty: deno_terminal::is_stdout_tty(),
|
||||
|
@ -800,38 +818,19 @@ fn create_web_worker_callback(
|
|||
.options
|
||||
.unsafely_ignore_certificate_errors
|
||||
.clone(),
|
||||
root_cert_store_provider: Some(shared.root_cert_store_provider.clone()),
|
||||
seed: shared.options.seed,
|
||||
create_web_worker_cb,
|
||||
format_js_error_fn: Some(Arc::new(format_js_error)),
|
||||
module_loader,
|
||||
fs: shared.fs.clone(),
|
||||
node_services: Some(shared.create_node_init_services()),
|
||||
worker_type: args.worker_type,
|
||||
maybe_inspector_server,
|
||||
get_error_class_fn: Some(&errors::get_error_class_name),
|
||||
blob_store: shared.blob_store.clone(),
|
||||
broadcast_channel: shared.broadcast_channel.clone(),
|
||||
shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()),
|
||||
compiled_wasm_module_store: Some(
|
||||
shared.compiled_wasm_module_store.clone(),
|
||||
),
|
||||
stdio: stdio.clone(),
|
||||
cache_storage_dir,
|
||||
feature_checker,
|
||||
permission_desc_parser: shared.permission_desc_parser.clone(),
|
||||
strace_ops: shared.options.strace_ops.clone(),
|
||||
close_on_idle: args.close_on_idle,
|
||||
maybe_worker_metadata: args.maybe_worker_metadata,
|
||||
};
|
||||
|
||||
WebWorker::bootstrap_from_options(
|
||||
args.name,
|
||||
args.permissions,
|
||||
args.main_module,
|
||||
args.worker_id,
|
||||
options,
|
||||
)
|
||||
WebWorker::bootstrap_from_options(services, options)
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -841,23 +840,43 @@ fn create_web_worker_callback(
|
|||
mod tests {
|
||||
use super::*;
|
||||
use deno_core::resolve_path;
|
||||
use deno_core::FsModuleLoader;
|
||||
use deno_fs::RealFs;
|
||||
use deno_runtime::deno_permissions::Permissions;
|
||||
use deno_runtime::permissions::RuntimePermissionDescriptorParser;
|
||||
|
||||
fn create_test_worker() -> MainWorker {
|
||||
let main_module =
|
||||
resolve_path("./hello.js", &std::env::current_dir().unwrap()).unwrap();
|
||||
let permissions = PermissionsContainer::new(
|
||||
Arc::new(RuntimePermissionDescriptorParser::new(Arc::new(RealFs))),
|
||||
Permissions::none_without_prompt(),
|
||||
);
|
||||
|
||||
let fs = Arc::new(RealFs);
|
||||
let permission_desc_parser =
|
||||
Arc::new(RuntimePermissionDescriptorParser::new(fs.clone()));
|
||||
let options = WorkerOptions {
|
||||
startup_snapshot: crate::js::deno_isolate_init(),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
MainWorker::bootstrap_from_options(main_module, permissions, options)
|
||||
MainWorker::bootstrap_from_options(
|
||||
main_module,
|
||||
WorkerServiceOptions {
|
||||
module_loader: Rc::new(FsModuleLoader),
|
||||
permissions: PermissionsContainer::new(
|
||||
permission_desc_parser,
|
||||
Permissions::none_without_prompt(),
|
||||
),
|
||||
blob_store: Default::default(),
|
||||
broadcast_channel: Default::default(),
|
||||
feature_checker: Default::default(),
|
||||
node_services: Default::default(),
|
||||
npm_process_state_provider: Default::default(),
|
||||
root_cert_store_provider: Default::default(),
|
||||
shared_array_buffer_store: Default::default(),
|
||||
compiled_wasm_module_store: Default::default(),
|
||||
v8_code_cache: Default::default(),
|
||||
fs,
|
||||
},
|
||||
options,
|
||||
)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
|
|
|
@ -21,6 +21,7 @@ async-trait.workspace = true
|
|||
base32.workspace = true
|
||||
deno_core.workspace = true
|
||||
deno_io.workspace = true
|
||||
deno_path_util.workspace = true
|
||||
deno_permissions.workspace = true
|
||||
filetime.workspace = true
|
||||
libc.workspace = true
|
||||
|
|
|
@ -12,12 +12,12 @@ use std::path::PathBuf;
|
|||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_core::normalize_path;
|
||||
use deno_core::parking_lot::Mutex;
|
||||
use deno_io::fs::File;
|
||||
use deno_io::fs::FsError;
|
||||
use deno_io::fs::FsResult;
|
||||
use deno_io::fs::FsStat;
|
||||
use deno_path_util::normalize_path;
|
||||
|
||||
use crate::interface::AccessCheckCb;
|
||||
use crate::interface::FsDirEntry;
|
||||
|
@ -44,7 +44,7 @@ impl InMemoryFs {
|
|||
pub fn setup_text_files(&self, files: Vec<(String, String)>) {
|
||||
for (path, text) in files {
|
||||
let path = PathBuf::from(path);
|
||||
self.mkdir_sync(path.parent().unwrap(), true, 0).unwrap();
|
||||
self.mkdir_sync(path.parent().unwrap(), true, None).unwrap();
|
||||
self
|
||||
.write_file_sync(
|
||||
&path,
|
||||
|
@ -101,7 +101,7 @@ impl FileSystem for InMemoryFs {
|
|||
&self,
|
||||
path: &Path,
|
||||
recursive: bool,
|
||||
_mode: u32,
|
||||
_mode: Option<u32>,
|
||||
) -> FsResult<()> {
|
||||
let path = normalize_path(path);
|
||||
|
||||
|
@ -119,7 +119,7 @@ impl FileSystem for InMemoryFs {
|
|||
},
|
||||
None => {
|
||||
if recursive {
|
||||
self.mkdir_sync(parent, true, 0)?;
|
||||
self.mkdir_sync(parent, true, None)?;
|
||||
} else {
|
||||
return Err(FsError::Io(Error::new(
|
||||
ErrorKind::NotFound,
|
||||
|
@ -149,7 +149,7 @@ impl FileSystem for InMemoryFs {
|
|||
&self,
|
||||
path: PathBuf,
|
||||
recursive: bool,
|
||||
mode: u32,
|
||||
mode: Option<u32>,
|
||||
) -> FsResult<()> {
|
||||
self.mkdir_sync(&path, recursive, mode)
|
||||
}
|
||||
@ -121,13 +121,17 @@ pub trait FileSystem: std::fmt::Debug + MaybeSend + MaybeSync {
    access_check: Option<AccessCheckCb<'a>>,
  ) -> FsResult<Rc<dyn File>>;

  fn mkdir_sync(&self, path: &Path, recursive: bool, mode: u32)
    -> FsResult<()>;
  fn mkdir_sync(
    &self,
    path: &Path,
    recursive: bool,
    mode: Option<u32>,
  ) -> FsResult<()>;
  async fn mkdir_async(
    &self,
    path: PathBuf,
    recursive: bool,
    mode: u32,
    mode: Option<u32>,
  ) -> FsResult<()>;

  fn chmod_sync(&self, path: &Path, mode: u32) -> FsResult<()>;
|
|
@ -197,7 +197,7 @@ where
|
|||
.check_write(&path, "Deno.mkdirSync()")?;
|
||||
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
fs.mkdir_sync(&path, recursive, mode)
|
||||
fs.mkdir_sync(&path, recursive, Some(mode))
|
||||
.context_path("mkdir", &path)?;
|
||||
|
||||
Ok(())
|
||||
|
@ -221,7 +221,7 @@ where
|
|||
(state.borrow::<FileSystemRc>().clone(), path)
|
||||
};
|
||||
|
||||
fs.mkdir_async(path.clone(), recursive, mode)
|
||||
fs.mkdir_async(path.clone(), recursive, Some(mode))
|
||||
.await
|
||||
.context_path("mkdir", &path)?;
|
||||
|
||||
|
@ -886,7 +886,7 @@ where
|
|||
const MAX_TRIES: u32 = 10;
|
||||
for _ in 0..MAX_TRIES {
|
||||
let path = tmp_name(&mut rng, &dir, prefix.as_deref(), suffix.as_deref())?;
|
||||
match fs.mkdir_sync(&path, false, 0o700) {
|
||||
match fs.mkdir_sync(&path, false, Some(0o700)) {
|
||||
Ok(_) => {
|
||||
// PERMISSIONS: ensure the absolute path is not leaked
|
||||
let path = strip_dir_prefix(&dir, dir_arg.as_deref(), path)?;
|
||||
|
@ -928,7 +928,11 @@ where
|
|||
const MAX_TRIES: u32 = 10;
|
||||
for _ in 0..MAX_TRIES {
|
||||
let path = tmp_name(&mut rng, &dir, prefix.as_deref(), suffix.as_deref())?;
|
||||
match fs.clone().mkdir_async(path.clone(), false, 0o700).await {
|
||||
match fs
|
||||
.clone()
|
||||
.mkdir_async(path.clone(), false, Some(0o700))
|
||||
.await
|
||||
{
|
||||
Ok(_) => {
|
||||
// PERMISSIONS: ensure the absolute path is not leaked
|
||||
let path = strip_dir_prefix(&dir, dir_arg.as_deref(), path)?;
|
||||
|
|
|
@ -11,13 +11,13 @@ use std::path::Path;
|
|||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
|
||||
use deno_core::normalize_path;
|
||||
use deno_core::unsync::spawn_blocking;
|
||||
use deno_io::fs::File;
|
||||
use deno_io::fs::FsError;
|
||||
use deno_io::fs::FsResult;
|
||||
use deno_io::fs::FsStat;
|
||||
use deno_io::StdFileResourceInner;
|
||||
use deno_path_util::normalize_path;
|
||||
|
||||
use crate::interface::AccessCheckCb;
|
||||
use crate::interface::FsDirEntry;
|
||||
|
@ -101,7 +101,7 @@ impl FileSystem for RealFs {
|
|||
&self,
|
||||
path: &Path,
|
||||
recursive: bool,
|
||||
mode: u32,
|
||||
mode: Option<u32>,
|
||||
) -> FsResult<()> {
|
||||
mkdir(path, recursive, mode)
|
||||
}
|
||||
|
@ -109,7 +109,7 @@ impl FileSystem for RealFs {
|
|||
&self,
|
||||
path: PathBuf,
|
||||
recursive: bool,
|
||||
mode: u32,
|
||||
mode: Option<u32>,
|
||||
) -> FsResult<()> {
|
||||
spawn_blocking(move || mkdir(&path, recursive, mode)).await?
|
||||
}
|
||||
|
@ -407,11 +407,11 @@ impl FileSystem for RealFs {
  }
}

fn mkdir(path: &Path, recursive: bool, mode: u32) -> FsResult<()> {
fn mkdir(path: &Path, recursive: bool, mode: Option<u32>) -> FsResult<()> {
  let mut builder = fs::DirBuilder::new();
  builder.recursive(recursive);
  #[cfg(unix)]
  {
    if let Some(mode) = mode {
      use std::os::unix::fs::DirBuilderExt;
      builder.mode(mode);
    }
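For context, a minimal caller-side sketch of the new `Option<u32>` mode parameter (not taken from the patch; the paths and mode value are illustrative, and the `deno_fs::FileSystem`/`FsResult` paths are assumed from the imports shown above): `None` keeps the platform default permissions, while `Some(bits)` applies the Unix mode via `DirBuilderExt` and is ignored on Windows, matching the `#[cfg(unix)]` block above.

```rust
use std::path::Path;

// Hypothetical helper, just to show the two call shapes.
fn make_dirs(fs: &dyn deno_fs::FileSystem) -> deno_io::fs::FsResult<()> {
  // default permissions
  fs.mkdir_sync(Path::new("/tmp/example-default"), true, None)?;
  // explicit 0o700 on unix; the mode is ignored on windows
  fs.mkdir_sync(Path::new("/tmp/example-private"), true, Some(0o700))?;
  Ok(())
}
```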
|
|
@ -11,11 +11,7 @@ use deno_core::RcRef;
|
|||
use tokio::io::AsyncReadExt;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
|
||||
#[cfg(unix)]
|
||||
pub type RawBiPipeHandle = std::os::fd::RawFd;
|
||||
|
||||
#[cfg(windows)]
|
||||
pub type RawBiPipeHandle = std::os::windows::io::RawHandle;
|
||||
pub type RawBiPipeHandle = super::RawIoHandle;
|
||||
|
||||
/// One end of a bidirectional pipe. This implements the
|
||||
/// `Resource` trait.
|
||||
|
|
ext/io/lib.rs (107 changed lines)
@ -67,6 +67,7 @@ pub use pipe::AsyncPipeRead;
|
|||
pub use pipe::AsyncPipeWrite;
|
||||
pub use pipe::PipeRead;
|
||||
pub use pipe::PipeWrite;
|
||||
pub use pipe::RawPipeHandle;
|
||||
|
||||
pub use bi_pipe::bi_pipe_pair_raw;
|
||||
pub use bi_pipe::BiPipe;
|
||||
|
@ -75,6 +76,112 @@ pub use bi_pipe::BiPipeResource;
|
|||
pub use bi_pipe::BiPipeWrite;
|
||||
pub use bi_pipe::RawBiPipeHandle;
|
||||
|
||||
/// Abstraction over `AsRawFd` (unix) and `AsRawHandle` (windows)
|
||||
pub trait AsRawIoHandle {
|
||||
fn as_raw_io_handle(&self) -> RawIoHandle;
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
impl<T> AsRawIoHandle for T
|
||||
where
|
||||
T: std::os::unix::io::AsRawFd,
|
||||
{
|
||||
fn as_raw_io_handle(&self) -> RawIoHandle {
|
||||
self.as_raw_fd()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
impl<T> AsRawIoHandle for T
|
||||
where
|
||||
T: std::os::windows::io::AsRawHandle,
|
||||
{
|
||||
fn as_raw_io_handle(&self) -> RawIoHandle {
|
||||
self.as_raw_handle()
|
||||
}
|
||||
}
|
||||
|
||||
/// Abstraction over `IntoRawFd` (unix) and `IntoRawHandle` (windows)
|
||||
pub trait IntoRawIoHandle {
|
||||
fn into_raw_io_handle(self) -> RawIoHandle;
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
impl<T> IntoRawIoHandle for T
|
||||
where
|
||||
T: std::os::unix::io::IntoRawFd,
|
||||
{
|
||||
fn into_raw_io_handle(self) -> RawIoHandle {
|
||||
self.into_raw_fd()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
impl<T> IntoRawIoHandle for T
|
||||
where
|
||||
T: std::os::windows::io::IntoRawHandle,
|
||||
{
|
||||
fn into_raw_io_handle(self) -> RawIoHandle {
|
||||
self.into_raw_handle()
|
||||
}
|
||||
}
|
||||
|
||||
/// Abstraction over `FromRawFd` (unix) and `FromRawHandle` (windows)
pub trait FromRawIoHandle: Sized {
  /// Constructs a type from a raw io handle (fd/HANDLE).
  ///
  /// # Safety
  ///
  /// Refer to the standard library docs ([unix](https://doc.rust-lang.org/stable/std/os/fd/trait.FromRawFd.html#tymethod.from_raw_fd)) ([windows](https://doc.rust-lang.org/stable/std/os/windows/io/trait.FromRawHandle.html#tymethod.from_raw_handle))
  ///
  unsafe fn from_raw_io_handle(handle: RawIoHandle) -> Self;
}
|
||||
|
||||
#[cfg(unix)]
|
||||
impl<T> FromRawIoHandle for T
|
||||
where
|
||||
T: std::os::unix::io::FromRawFd,
|
||||
{
|
||||
unsafe fn from_raw_io_handle(fd: RawIoHandle) -> T {
|
||||
// SAFETY: upheld by caller
|
||||
unsafe { T::from_raw_fd(fd) }
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
impl<T> FromRawIoHandle for T
|
||||
where
|
||||
T: std::os::windows::io::FromRawHandle,
|
||||
{
|
||||
unsafe fn from_raw_io_handle(fd: RawIoHandle) -> T {
|
||||
// SAFETY: upheld by caller
|
||||
unsafe { T::from_raw_handle(fd) }
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
pub type RawIoHandle = std::os::fd::RawFd;

#[cfg(windows)]
pub type RawIoHandle = std::os::windows::io::RawHandle;

pub fn close_raw_handle(handle: RawIoHandle) {
  #[cfg(unix)]
  {
    // SAFETY: libc call
    unsafe {
      libc::close(handle);
    }
  }
  #[cfg(windows)]
  {
    // SAFETY: win32 call
    unsafe {
      windows_sys::Win32::Foundation::CloseHandle(handle as _);
    }
  }
}
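A short usage sketch of the new cross-platform abstraction (not from the patch; the function and the scenario are invented for illustration, while `IntoRawIoHandle` and `close_raw_handle` are the items added above): the same call site now works with a unix `RawFd` and a windows `RawHandle` through the shared `RawIoHandle` alias.

```rust
use deno_io::{close_raw_handle, IntoRawIoHandle};

// Hypothetical helper: take ownership of a file's fd/HANDLE and close it
// explicitly, regardless of platform.
fn detach_and_close(file: std::fs::File) {
  // Ownership of the underlying fd/HANDLE moves to the caller here...
  let raw = file.into_raw_io_handle();
  // ...so it must be closed explicitly; close_raw_handle dispatches to
  // libc::close on unix or CloseHandle on windows.
  close_raw_handle(raw);
}
```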
|
||||
// Store the stdio fd/handles in global statics in order to keep them
|
||||
// alive for the duration of the application since the last handle/fd
|
||||
// being dropped will close the corresponding pipe.
|
||||
|
|
|
@ -3,6 +3,8 @@ use std::io;
|
|||
use std::pin::Pin;
|
||||
use std::process::Stdio;
|
||||
|
||||
pub type RawPipeHandle = super::RawIoHandle;
|
||||
|
||||
// The synchronous read end of a unidirectional pipe.
|
||||
pub struct PipeRead {
|
||||
file: std::fs::File,
|
||||
|
|
|
@ -21,6 +21,7 @@ bytes.workspace = true
|
|||
chrono = { workspace = true, features = ["now"] }
|
||||
deno_core.workspace = true
|
||||
deno_fetch.workspace = true
|
||||
deno_path_util.workspace = true
|
||||
deno_permissions.workspace = true
|
||||
deno_tls.workspace = true
|
||||
denokv_proto.workspace = true
|
||||
|
|
|
@ -16,9 +16,9 @@ use std::sync::OnceLock;
|
|||
use async_trait::async_trait;
|
||||
use deno_core::error::type_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::normalize_path;
|
||||
use deno_core::unsync::spawn_blocking;
|
||||
use deno_core::OpState;
|
||||
use deno_path_util::normalize_path;
|
||||
pub use denokv_sqlite::SqliteBackendError;
|
||||
use denokv_sqlite::SqliteConfig;
|
||||
use denokv_sqlite::SqliteNotifier;
|
||||
|
|
|
@ -69,7 +69,6 @@ impl From<SocketAddr> for IpAddr {
|
|||
}
|
||||
|
||||
pub(crate) fn accept_err(e: std::io::Error) -> AnyError {
|
||||
// FIXME(bartlomieju): compatibility with current JS implementation
|
||||
if let std::io::ErrorKind::Interrupted = e.kind() {
|
||||
bad_resource("Listener has been closed")
|
||||
} else {
|
||||
|
|
|
@ -298,10 +298,10 @@ where
|
|||
.resource_table
|
||||
.take::<TcpStreamResource>(rid)?;
|
||||
// This TCP connection might be used somewhere else. If it's the case, we cannot proceed with the
|
||||
// process of starting a TLS connection on top of this TCP connection, so we just return a bad
|
||||
// resource error. See also: https://github.com/denoland/deno/pull/16242
|
||||
// process of starting a TLS connection on top of this TCP connection, so we just return a Busy error.
|
||||
// See also: https://github.com/denoland/deno/pull/16242
|
||||
let resource = Rc::try_unwrap(resource_rc)
|
||||
.map_err(|_| bad_resource("TCP stream is currently in use"))?;
|
||||
.map_err(|_| custom_error("Busy", "TCP stream is currently in use"))?;
|
||||
let (read_half, write_half) = resource.into_inner();
|
||||
let tcp_stream = read_half.reunite(write_half)?;
|
||||
|
||||
|
@ -526,7 +526,6 @@ pub async fn op_net_accept_tls(
|
|||
match listener.accept().try_or_cancel(&cancel_handle).await {
|
||||
Ok(tuple) => tuple,
|
||||
Err(err) if err.kind() == ErrorKind::Interrupted => {
|
||||
// FIXME(bartlomieju): compatibility with current JS implementation.
|
||||
return Err(bad_resource("Listener has been closed"));
|
||||
}
|
||||
Err(err) => return Err(err.into()),
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
use crate::io::TcpStreamResource;
|
||||
use crate::ops_tls::TlsStreamResource;
|
||||
use deno_core::error::bad_resource;
|
||||
use deno_core::error::bad_resource_id;
|
||||
use deno_core::error::custom_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::AsyncRefCell;
|
||||
use deno_core::CancelHandle;
|
||||
|
@ -70,7 +70,7 @@ impl<T: NetworkStreamListenerTrait + 'static> NetworkListenerResource<T> {
|
|||
) -> Result<Option<NetworkStreamListener>, AnyError> {
|
||||
if let Ok(resource_rc) = resource_table.take::<Self>(listener_rid) {
|
||||
let resource = Rc::try_unwrap(resource_rc)
|
||||
.map_err(|_| bad_resource("Listener is currently in use"))?;
|
||||
.map_err(|_| custom_error("Busy", "Listener is currently in use"))?;
|
||||
return Ok(Some(resource.listener.into_inner().into()));
|
||||
}
|
||||
Ok(None)
|
||||
|
@ -334,7 +334,7 @@ pub fn take_network_stream_resource(
|
|||
{
|
||||
// This TCP connection might be used somewhere else.
|
||||
let resource = Rc::try_unwrap(resource_rc)
|
||||
.map_err(|_| bad_resource("TCP stream is currently in use"))?;
|
||||
.map_err(|_| custom_error("Busy", "TCP stream is currently in use"))?;
|
||||
let (read_half, write_half) = resource.into_inner();
|
||||
let tcp_stream = read_half.reunite(write_half)?;
|
||||
return Ok(NetworkStream::Tcp(tcp_stream));
|
||||
|
@ -344,7 +344,7 @@ pub fn take_network_stream_resource(
|
|||
{
|
||||
// This TLS connection might be used somewhere else.
|
||||
let resource = Rc::try_unwrap(resource_rc)
|
||||
.map_err(|_| bad_resource("TLS stream is currently in use"))?;
|
||||
.map_err(|_| custom_error("Busy", "TLS stream is currently in use"))?;
|
||||
let (read_half, write_half) = resource.into_inner();
|
||||
let tls_stream = read_half.unsplit(write_half);
|
||||
return Ok(NetworkStream::Tls(tls_stream));
|
||||
|
@ -356,7 +356,7 @@ pub fn take_network_stream_resource(
|
|||
{
|
||||
// This UNIX socket might be used somewhere else.
|
||||
let resource = Rc::try_unwrap(resource_rc)
|
||||
.map_err(|_| bad_resource("UNIX stream is currently in use"))?;
|
||||
.map_err(|_| custom_error("Busy", "Unix socket is currently in use"))?;
|
||||
let (read_half, write_half) = resource.into_inner();
|
||||
let unix_stream = read_half.reunite(write_half)?;
|
||||
return Ok(NetworkStream::Unix(unix_stream));
|
||||
|
|
|
@ -34,6 +34,7 @@ deno_io.workspace = true
|
|||
deno_media_type.workspace = true
|
||||
deno_net.workspace = true
|
||||
deno_package_json.workspace = true
|
||||
deno_path_util.workspace = true
|
||||
deno_permissions.workspace = true
|
||||
deno_whoami = "0.1.0"
|
||||
der = { version = "0.7.9", features = ["derive"] }
|
||||
|
@ -87,7 +88,7 @@ sha1.workspace = true
|
|||
sha2.workspace = true
|
||||
sha3 = { version = "0.10.8", features = ["oid"] }
|
||||
signature.workspace = true
|
||||
simd-json = "0.13.4"
|
||||
simd-json = "0.14.0"
|
||||
sm3 = "0.4.2"
|
||||
spki.workspace = true
|
||||
stable_deref_trait = "1.2.0"
|
||||
|
|
|
@ -16,7 +16,6 @@ use deno_core::url::Url;
|
|||
use deno_core::v8;
|
||||
use deno_core::v8::ExternalReference;
|
||||
use deno_core::JsRuntime;
|
||||
use deno_core::OpState;
|
||||
use deno_fs::sync::MaybeSend;
|
||||
use deno_fs::sync::MaybeSync;
|
||||
use node_resolver::NpmResolverRc;
|
||||
|
@ -120,24 +119,6 @@ impl NodePermissions for deno_permissions::PermissionsContainer {
|
|||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::disallowed_types)]
pub type NpmProcessStateProviderRc =
  deno_fs::sync::MaybeArc<dyn NpmProcessStateProvider>;

pub trait NpmProcessStateProvider:
  std::fmt::Debug + MaybeSend + MaybeSync
{
  /// Gets a string containing the serialized npm state of the process.
  ///
  /// This will be set on the `DENO_DONT_USE_INTERNAL_NODE_COMPAT_STATE` environment
  /// variable when doing a `child_process.fork`. The implementor can then check this environment
  /// variable on startup to repopulate the internal npm state.
  fn get_npm_process_state(&self) -> String {
    // This method is only used in the CLI.
    String::new()
  }
}
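A hypothetical implementor sketch for the trait above (not part of the patch; the struct name and field are invented, and the trait is assumed to be in scope wherever it now lives after this move):

```rust
// Invented example type; anything Debug + Send + Sync with access to the
// serialized npm state would do.
#[derive(Debug)]
struct CliNpmProcessState {
  serialized: String,
}

impl NpmProcessStateProvider for CliNpmProcessState {
  fn get_npm_process_state(&self) -> String {
    // This string ends up in DENO_DONT_USE_INTERNAL_NODE_COMPAT_STATE for
    // child_process.fork, as described in the doc comment above.
    self.serialized.clone()
  }
}
```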
|
||||
#[allow(clippy::disallowed_types)]
|
||||
pub type NodeRequireResolverRc =
|
||||
deno_fs::sync::MaybeArc<dyn NodeRequireResolver>;
|
||||
|
@ -165,17 +146,9 @@ fn op_node_build_os() -> String {
|
|||
env!("TARGET").split('-').nth(2).unwrap().to_string()
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[string]
|
||||
fn op_npm_process_state(state: &mut OpState) -> Result<String, AnyError> {
|
||||
let npm_resolver = state.borrow_mut::<NpmProcessStateProviderRc>();
|
||||
Ok(npm_resolver.get_npm_process_state())
|
||||
}
|
||||
|
||||
pub struct NodeExtInitServices {
|
||||
pub node_require_resolver: NodeRequireResolverRc,
|
||||
pub node_resolver: NodeResolverRc,
|
||||
pub npm_process_state_provider: NpmProcessStateProviderRc,
|
||||
pub npm_resolver: NpmResolverRc,
|
||||
}
|
||||
|
||||
|
@ -375,7 +348,6 @@ deno_core::extension!(deno_node,
|
|||
ops::os::op_cpus<P>,
|
||||
ops::os::op_homedir<P>,
|
||||
op_node_build_os,
|
||||
op_npm_process_state,
|
||||
ops::require::op_require_can_parse_as_esm,
|
||||
ops::require::op_require_init_paths,
|
||||
ops::require::op_require_node_module_paths<P>,
|
||||
|
@ -663,7 +635,6 @@ deno_core::extension!(deno_node,
|
|||
state.put(init.node_require_resolver.clone());
|
||||
state.put(init.node_resolver.clone());
|
||||
state.put(init.npm_resolver.clone());
|
||||
state.put(init.npm_process_state_provider.clone());
|
||||
}
|
||||
},
|
||||
global_template_middleware = global_template_middleware,
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::normalize_path;
|
||||
use deno_core::op2;
|
||||
use deno_core::url::Url;
|
||||
use deno_core::v8;
|
||||
|
@ -12,6 +11,7 @@ use deno_core::ModuleSpecifier;
|
|||
use deno_core::OpState;
|
||||
use deno_fs::FileSystemRc;
|
||||
use deno_package_json::PackageJsonRc;
|
||||
use deno_path_util::normalize_path;
|
||||
use node_resolver::NodeModuleKind;
|
||||
use node_resolver::NodeResolutionMode;
|
||||
use node_resolver::REQUIRE_CONDITIONS;
|
||||
|
@ -104,7 +104,7 @@ where
|
|||
} else {
|
||||
let current_dir =
|
||||
&(fs.cwd().map_err(AnyError::from)).context("Unable to get CWD")?;
|
||||
deno_core::normalize_path(current_dir.join(from))
|
||||
deno_path_util::normalize_path(current_dir.join(from))
|
||||
};
|
||||
|
||||
ensure_read_permission::<P>(state, &from)?;
|
||||
|
|
|
@ -410,8 +410,8 @@ impl ContextifyContext {
|
|||
fn sandbox<'a>(
|
||||
&self,
|
||||
scope: &mut v8::HandleScope<'a>,
|
||||
) -> v8::Local<'a, v8::Object> {
|
||||
self.sandbox.get(scope).unwrap()
|
||||
) -> Option<v8::Local<'a, v8::Object>> {
|
||||
self.sandbox.get(scope)
|
||||
}
|
||||
|
||||
fn microtask_queue(&self) -> Option<&v8::MicrotaskQueue> {
|
||||
|
@ -600,7 +600,9 @@ fn property_query<'s>(
|
|||
|
||||
let context = ctx.context(scope);
|
||||
let scope = &mut v8::ContextScope::new(scope, context);
|
||||
let sandbox = ctx.sandbox(scope);
|
||||
let Some(sandbox) = ctx.sandbox(scope) else {
|
||||
return v8::Intercepted::No;
|
||||
};
|
||||
|
||||
match sandbox.has_real_named_property(scope, property) {
|
||||
None => v8::Intercepted::No,
|
||||
|
@ -645,7 +647,9 @@ fn property_getter<'s>(
|
|||
return v8::Intercepted::No;
|
||||
};
|
||||
|
||||
let sandbox = ctx.sandbox(scope);
|
||||
let Some(sandbox) = ctx.sandbox(scope) else {
|
||||
return v8::Intercepted::No;
|
||||
};
|
||||
|
||||
let tc_scope = &mut v8::TryCatch::new(scope);
|
||||
let maybe_rv = sandbox.get_real_named_property(tc_scope, key).or_else(|| {
|
||||
|
@ -689,14 +693,14 @@ fn property_setter<'s>(
|
|||
None => (v8::PropertyAttribute::NONE, false),
|
||||
};
|
||||
let mut read_only = attributes.is_read_only();
|
||||
|
||||
let (attributes, is_declared_on_sandbox) = match ctx
|
||||
.sandbox(scope)
|
||||
.get_real_named_property_attributes(scope, key)
|
||||
{
|
||||
Some(attr) => (attr, true),
|
||||
None => (v8::PropertyAttribute::NONE, false),
|
||||
let Some(sandbox) = ctx.sandbox(scope) else {
|
||||
return v8::Intercepted::No;
|
||||
};
|
||||
let (attributes, is_declared_on_sandbox) =
|
||||
match sandbox.get_real_named_property_attributes(scope, key) {
|
||||
Some(attr) => (attr, true),
|
||||
None => (v8::PropertyAttribute::NONE, false),
|
||||
};
|
||||
read_only |= attributes.is_read_only();
|
||||
|
||||
if read_only {
|
||||
|
@ -731,14 +735,12 @@ fn property_setter<'s>(
|
|||
return v8::Intercepted::No;
|
||||
};
|
||||
|
||||
if ctx.sandbox(scope).set(scope, key.into(), value).is_none() {
|
||||
if sandbox.set(scope, key.into(), value).is_none() {
|
||||
return v8::Intercepted::No;
|
||||
}
|
||||
|
||||
if is_declared_on_sandbox {
|
||||
if let Some(desc) =
|
||||
ctx.sandbox(scope).get_own_property_descriptor(scope, key)
|
||||
{
|
||||
if let Some(desc) = sandbox.get_own_property_descriptor(scope, key) {
|
||||
if !desc.is_undefined() {
|
||||
let desc_obj: v8::Local<v8::Object> = desc.try_into().unwrap();
|
||||
// We have to specify the return value for any contextual or get/set
|
||||
|
@ -774,7 +776,9 @@ fn property_descriptor<'s>(
|
|||
};
|
||||
|
||||
let context = ctx.context(scope);
|
||||
let sandbox = ctx.sandbox(scope);
|
||||
let Some(sandbox) = ctx.sandbox(scope) else {
|
||||
return v8::Intercepted::No;
|
||||
};
|
||||
let scope = &mut v8::ContextScope::new(scope, context);
|
||||
|
||||
if sandbox.has_own_property(scope, key).unwrap_or(false) {
|
||||
|
@ -818,7 +822,9 @@ fn property_definer<'s>(
|
|||
return v8::Intercepted::No;
|
||||
}
|
||||
|
||||
let sandbox = ctx.sandbox(scope);
|
||||
let Some(sandbox) = ctx.sandbox(scope) else {
|
||||
return v8::Intercepted::No;
|
||||
};
|
||||
|
||||
let define_prop_on_sandbox =
|
||||
|scope: &mut v8::HandleScope,
|
||||
|
@ -880,7 +886,10 @@ fn property_deleter<'s>(
|
|||
};
|
||||
|
||||
let context = ctx.context(scope);
|
||||
let sandbox = ctx.sandbox(scope);
|
||||
let Some(sandbox) = ctx.sandbox(scope) else {
|
||||
return v8::Intercepted::No;
|
||||
};
|
||||
|
||||
let context_scope = &mut v8::ContextScope::new(scope, context);
|
||||
if sandbox.delete(context_scope, key.into()).unwrap_or(false) {
|
||||
return v8::Intercepted::No;
|
||||
|
@ -900,7 +909,10 @@ fn property_enumerator<'s>(
|
|||
};
|
||||
|
||||
let context = ctx.context(scope);
|
||||
let sandbox = ctx.sandbox(scope);
|
||||
let Some(sandbox) = ctx.sandbox(scope) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let context_scope = &mut v8::ContextScope::new(scope, context);
|
||||
let Some(properties) = sandbox
|
||||
.get_property_names(context_scope, v8::GetPropertyNamesArgs::default())
|
||||
|
@ -921,12 +933,14 @@ fn indexed_property_enumerator<'s>(
|
|||
};
|
||||
let context = ctx.context(scope);
|
||||
let scope = &mut v8::ContextScope::new(scope, context);
|
||||
let Some(sandbox) = ctx.sandbox(scope) else {
|
||||
return;
|
||||
};
|
||||
|
||||
// By default, GetPropertyNames returns string and number property names, and
|
||||
// doesn't convert the numbers to strings.
|
||||
let Some(properties) = ctx
|
||||
.sandbox(scope)
|
||||
.get_property_names(scope, v8::GetPropertyNamesArgs::default())
|
||||
let Some(properties) =
|
||||
sandbox.get_property_names(scope, v8::GetPropertyNamesArgs::default())
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
@ -1019,7 +1033,10 @@ fn indexed_property_deleter<'s>(
|
|||
};
|
||||
|
||||
let context = ctx.context(scope);
|
||||
let sandbox = ctx.sandbox(scope);
|
||||
let Some(sandbox) = ctx.sandbox(scope) else {
|
||||
return v8::Intercepted::No;
|
||||
};
|
||||
|
||||
let context_scope = &mut v8::ContextScope::new(scope, context);
|
||||
if !sandbox.delete_index(context_scope, index).unwrap_or(false) {
|
||||
return v8::Intercepted::No;
|
||||
|
|
|
@ -10,7 +10,6 @@ import { internals } from "ext:core/mod.js";
|
|||
import {
|
||||
op_bootstrap_unstable_args,
|
||||
op_node_child_ipc_pipe,
|
||||
op_npm_process_state,
|
||||
} from "ext:core/ops";
|
||||
|
||||
import {
|
||||
|
@ -54,6 +53,7 @@ import {
|
|||
convertToValidSignal,
|
||||
kEmptyObject,
|
||||
} from "ext:deno_node/internal/util.mjs";
|
||||
import { kNeedsNpmProcessState } from "ext:runtime/40_process.js";
|
||||
|
||||
const MAX_BUFFER = 1024 * 1024;
|
||||
|
||||
|
@ -168,9 +168,8 @@ export function fork(
|
|||
options.execPath = options.execPath || Deno.execPath();
|
||||
options.shell = false;
|
||||
|
||||
Object.assign(options.env ??= {}, {
|
||||
DENO_DONT_USE_INTERNAL_NODE_COMPAT_STATE: op_npm_process_state(),
|
||||
});
|
||||
// deno-lint-ignore no-explicit-any
|
||||
(options as any)[kNeedsNpmProcessState] = true;
|
||||
|
||||
return spawn(options.execPath, args, options);
|
||||
}
|
||||
|
|
|
@ -56,7 +56,12 @@ import { StringPrototypeSlice } from "ext:deno_node/internal/primordials.mjs";
|
|||
import { StreamBase } from "ext:deno_node/internal_binding/stream_wrap.ts";
|
||||
import { Pipe, socketType } from "ext:deno_node/internal_binding/pipe_wrap.ts";
|
||||
import { Socket } from "node:net";
|
||||
import { kDetached, kExtraStdio, kIpc } from "ext:runtime/40_process.js";
|
||||
import {
|
||||
kDetached,
|
||||
kExtraStdio,
|
||||
kIpc,
|
||||
kNeedsNpmProcessState,
|
||||
} from "ext:runtime/40_process.js";
|
||||
|
||||
export function mapValues<T, O>(
|
||||
record: Readonly<Record<string, T>>,
|
||||
|
@ -281,6 +286,8 @@ export class ChildProcess extends EventEmitter {
|
|||
[kIpc]: ipc, // internal
|
||||
[kExtraStdio]: extraStdioNormalized,
|
||||
[kDetached]: detached,
|
||||
// deno-lint-ignore no-explicit-any
|
||||
[kNeedsNpmProcessState]: (options ?? {} as any)[kNeedsNpmProcessState],
|
||||
}).spawn();
|
||||
this.pid = this.#process.pid;
|
||||
|
||||
|
|
|
@ -955,7 +955,6 @@ export class Socket extends Duplex {
|
|||
*/
|
||||
override pause(): this {
|
||||
if (
|
||||
this[kBuffer] &&
|
||||
!this.connecting &&
|
||||
this._handle &&
|
||||
this._handle.reading
|
||||
|
|
|
@ -1,179 +0,0 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::path::Component;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use url::Url;
|
||||
|
||||
/// Extension to path_clean::PathClean
|
||||
pub trait PathClean<T> {
|
||||
fn clean(&self) -> T;
|
||||
}
|
||||
|
||||
impl PathClean<PathBuf> for PathBuf {
|
||||
fn clean(&self) -> PathBuf {
|
||||
fn is_clean_path(path: &Path) -> bool {
|
||||
let path = path.to_string_lossy();
|
||||
let mut current_index = 0;
|
||||
while let Some(index) = path[current_index..].find("\\.") {
|
||||
let trailing_index = index + current_index + 2;
|
||||
let mut trailing_chars = path[trailing_index..].chars();
|
||||
match trailing_chars.next() {
|
||||
Some('.') => match trailing_chars.next() {
|
||||
Some('/') | Some('\\') | None => {
|
||||
return false;
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
Some('/') | Some('\\') => {
|
||||
return false;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
current_index = trailing_index;
|
||||
}
|
||||
true
|
||||
}
|
||||
|
||||
let path = path_clean::PathClean::clean(self);
|
||||
if cfg!(windows) && !is_clean_path(&path) {
|
||||
// temporary workaround because path_clean::PathClean::clean is
|
||||
// not good enough on windows
|
||||
let mut components = Vec::new();
|
||||
|
||||
for component in path.components() {
|
||||
match component {
|
||||
Component::CurDir => {
|
||||
// skip
|
||||
}
|
||||
Component::ParentDir => {
|
||||
let maybe_last_component = components.pop();
|
||||
if !matches!(maybe_last_component, Some(Component::Normal(_))) {
|
||||
panic!("Error normalizing: {}", path.display());
|
||||
}
|
||||
}
|
||||
Component::Normal(_) | Component::RootDir | Component::Prefix(_) => {
|
||||
components.push(component);
|
||||
}
|
||||
}
|
||||
}
|
||||
components.into_iter().collect::<PathBuf>()
|
||||
} else {
|
||||
path
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn to_file_specifier(path: &Path) -> Url {
|
||||
match Url::from_file_path(path) {
|
||||
Ok(url) => url,
|
||||
Err(_) => panic!("Invalid path: {}", path.display()),
|
||||
}
|
||||
}
|
||||
|
||||
// todo(dsherret): we have the below code also in deno_core and it
|
||||
// would be good to somehow re-use it in both places (we don't want
|
||||
// to create a dependency on deno_core here)
|
||||
|
||||
#[cfg(not(windows))]
|
||||
#[inline]
|
||||
pub fn strip_unc_prefix(path: PathBuf) -> PathBuf {
|
||||
path
|
||||
}
|
||||
|
||||
/// Strips the unc prefix (ex. \\?\) from Windows paths.
|
||||
#[cfg(windows)]
|
||||
pub fn strip_unc_prefix(path: PathBuf) -> PathBuf {
|
||||
use std::path::Component;
|
||||
use std::path::Prefix;
|
||||
|
||||
let mut components = path.components();
|
||||
match components.next() {
|
||||
Some(Component::Prefix(prefix)) => {
|
||||
match prefix.kind() {
|
||||
// \\?\device
|
||||
Prefix::Verbatim(device) => {
|
||||
let mut path = PathBuf::new();
|
||||
path.push(format!(r"\\{}\", device.to_string_lossy()));
|
||||
path.extend(components.filter(|c| !matches!(c, Component::RootDir)));
|
||||
path
|
||||
}
|
||||
// \\?\c:\path
|
||||
Prefix::VerbatimDisk(_) => {
|
||||
let mut path = PathBuf::new();
|
||||
path.push(prefix.as_os_str().to_string_lossy().replace(r"\\?\", ""));
|
||||
path.extend(components);
|
||||
path
|
||||
}
|
||||
// \\?\UNC\hostname\share_name\path
|
||||
Prefix::VerbatimUNC(hostname, share_name) => {
|
||||
let mut path = PathBuf::new();
|
||||
path.push(format!(
|
||||
r"\\{}\{}\",
|
||||
hostname.to_string_lossy(),
|
||||
share_name.to_string_lossy()
|
||||
));
|
||||
path.extend(components.filter(|c| !matches!(c, Component::RootDir)));
|
||||
path
|
||||
}
|
||||
_ => path,
|
||||
}
|
||||
}
|
||||
_ => path,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
#[cfg(windows)]
|
||||
#[test]
|
||||
fn test_path_clean() {
|
||||
use super::*;
|
||||
|
||||
run_test("C:\\test\\./file.txt", "C:\\test\\file.txt");
|
||||
run_test("C:\\test\\../other/file.txt", "C:\\other\\file.txt");
|
||||
run_test("C:\\test\\../other\\file.txt", "C:\\other\\file.txt");
|
||||
|
||||
fn run_test(input: &str, expected: &str) {
|
||||
assert_eq!(PathBuf::from(input).clean(), PathBuf::from(expected));
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
#[test]
|
||||
fn test_strip_unc_prefix() {
|
||||
use std::path::PathBuf;
|
||||
|
||||
run_test(r"C:\", r"C:\");
|
||||
run_test(r"C:\test\file.txt", r"C:\test\file.txt");
|
||||
|
||||
run_test(r"\\?\C:\", r"C:\");
|
||||
run_test(r"\\?\C:\test\file.txt", r"C:\test\file.txt");
|
||||
|
||||
run_test(r"\\.\C:\", r"\\.\C:\");
|
||||
run_test(r"\\.\C:\Test\file.txt", r"\\.\C:\Test\file.txt");
|
||||
|
||||
run_test(r"\\?\UNC\localhost\", r"\\localhost");
|
||||
run_test(r"\\?\UNC\localhost\c$\", r"\\localhost\c$");
|
||||
run_test(
|
||||
r"\\?\UNC\localhost\c$\Windows\file.txt",
|
||||
r"\\localhost\c$\Windows\file.txt",
|
||||
);
|
||||
run_test(r"\\?\UNC\wsl$\deno.json", r"\\wsl$\deno.json");
|
||||
|
||||
run_test(r"\\?\server1", r"\\server1");
|
||||
run_test(r"\\?\server1\e$\", r"\\server1\e$\");
|
||||
run_test(
|
||||
r"\\?\server1\e$\test\file.txt",
|
||||
r"\\server1\e$\test\file.txt",
|
||||
);
|
||||
|
||||
fn run_test(input: &str, expected: &str) {
|
||||
assert_eq!(
|
||||
super::strip_unc_prefix(PathBuf::from(input)),
|
||||
PathBuf::from(expected)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
resolvers/deno/Cargo.toml (new file, 29 lines)
@ -0,0 +1,29 @@
|
|||
# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
[package]
|
||||
name = "deno_resolver"
|
||||
version = "0.0.1"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
readme = "README.md"
|
||||
repository.workspace = true
|
||||
description = "Deno resolution algorithm"
|
||||
|
||||
[lib]
|
||||
path = "lib.rs"
|
||||
|
||||
[features]
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
base32.workspace = true
|
||||
deno_media_type.workspace = true
|
||||
deno_package_json.workspace = true
|
||||
deno_path_util.workspace = true
|
||||
deno_semver.workspace = true
|
||||
node_resolver.workspace = true
|
||||
url.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
test_util.workspace = true
|
resolvers/deno/README.md (new file, 3 lines)
@ -0,0 +1,3 @@
|
|||
# deno_resolver
|
||||
|
||||
Deno resolution algorithm.
|
resolvers/deno/clippy.toml (new file, 52 lines)
@ -0,0 +1,52 @@
|
|||
disallowed-methods = [
|
||||
{ path = "std::env::current_dir", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::Path::canonicalize", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::Path::is_dir", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::Path::is_file", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::Path::is_symlink", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::Path::metadata", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::Path::read_dir", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::Path::read_link", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::Path::try_exists", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::PathBuf::exists", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::PathBuf::is_file", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::PathBuf::metadata", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::PathBuf::read_link", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::env::set_current_dir", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::env::temp_dir", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::canonicalize", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::copy", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::create_dir_all", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::create_dir", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::DirBuilder::new", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::hard_link", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::metadata", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::OpenOptions::new", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::read_dir", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::read_link", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::read_to_string", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::read", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::remove_dir_all", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::remove_dir", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::remove_file", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::rename", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::set_permissions", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::symlink_metadata", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::fs::write", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::Path::canonicalize", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "std::path::Path::exists", reason = "File system operations should be done using DenoResolverFs trait" },
|
||||
{ path = "url::Url::to_file_path", reason = "Use deno_path_util instead so it works in Wasm" },
|
||||
{ path = "url::Url::from_file_path", reason = "Use deno_path_util instead so it works in Wasm" },
|
||||
{ path = "url::Url::from_directory_path", reason = "Use deno_path_util instead so it works in Wasm" },
|
||||
]
|
||||
disallowed-types = [
|
||||
# todo(dsherret): consider for the future
|
||||
# { path = "std::sync::Arc", reason = "use crate::sync::MaybeArc instead" },
|
||||
]
|
resolvers/deno/fs.rs (new file, 27 lines)
@ -0,0 +1,27 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::path::Path;
use std::path::PathBuf;

pub struct DirEntry {
  pub name: String,
  pub is_file: bool,
  pub is_directory: bool,
}

pub trait DenoResolverFs {
  fn read_to_string_lossy(&self, path: &Path) -> std::io::Result<String>;
  fn realpath_sync(&self, path: &Path) -> std::io::Result<PathBuf>;
  fn is_dir_sync(&self, path: &Path) -> bool;
  fn read_dir_sync(&self, dir_path: &Path) -> std::io::Result<Vec<DirEntry>>;
}

pub(crate) struct DenoPkgJsonFsAdapter<'a, Fs: DenoResolverFs>(pub &'a Fs);

impl<'a, Fs: DenoResolverFs> deno_package_json::fs::DenoPkgJsonFs
  for DenoPkgJsonFsAdapter<'a, Fs>
{
  fn read_to_string_lossy(&self, path: &Path) -> std::io::Result<String> {
    self.0.read_to_string_lossy(path)
  }
}
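A hedged sketch of what an embedder-side implementation of `DenoResolverFs` backed by `std::fs` could look like (not in the patch; `RealSysFs` is an invented name). Direct `std::fs` use is disallowed inside `deno_resolver` by its clippy.toml, so something like this would live in the consuming crate:

```rust
use std::path::{Path, PathBuf};

use deno_resolver::fs::{DenoResolverFs, DirEntry};

#[derive(Debug, Clone)]
struct RealSysFs;

impl DenoResolverFs for RealSysFs {
  fn read_to_string_lossy(&self, path: &Path) -> std::io::Result<String> {
    Ok(String::from_utf8_lossy(&std::fs::read(path)?).into_owned())
  }

  fn realpath_sync(&self, path: &Path) -> std::io::Result<PathBuf> {
    std::fs::canonicalize(path)
  }

  fn is_dir_sync(&self, path: &Path) -> bool {
    std::fs::metadata(path).map(|m| m.is_dir()).unwrap_or(false)
  }

  fn read_dir_sync(&self, dir_path: &Path) -> std::io::Result<Vec<DirEntry>> {
    let mut entries = Vec::new();
    for entry in std::fs::read_dir(dir_path)? {
      let entry = entry?;
      let file_type = entry.file_type()?;
      entries.push(DirEntry {
        name: entry.file_name().to_string_lossy().into_owned(),
        is_file: file_type.is_file(),
        is_directory: file_type.is_dir(),
      });
    }
    Ok(entries)
  }
}
```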
resolvers/deno/lib.rs (new file, 5 lines)
@ -0,0 +1,5 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
pub mod fs;
|
||||
pub mod npm;
|
||||
pub mod sloppy_imports;
|
resolvers/deno/npm/byonm.rs (new file, 348 lines)
@ -0,0 +1,348 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::bail;
|
||||
use anyhow::Error as AnyError;
|
||||
use deno_package_json::PackageJson;
|
||||
use deno_package_json::PackageJsonDepValue;
|
||||
use deno_path_util::url_to_file_path;
|
||||
use deno_semver::package::PackageReq;
|
||||
use deno_semver::Version;
|
||||
use node_resolver::errors::PackageFolderResolveError;
|
||||
use node_resolver::errors::PackageFolderResolveIoError;
|
||||
use node_resolver::errors::PackageJsonLoadError;
|
||||
use node_resolver::errors::PackageNotFoundError;
|
||||
use node_resolver::load_pkg_json;
|
||||
use node_resolver::NpmResolver;
|
||||
use url::Url;
|
||||
|
||||
use crate::fs::DenoPkgJsonFsAdapter;
|
||||
use crate::fs::DenoResolverFs;
|
||||
|
||||
use super::local::normalize_pkg_name_for_node_modules_deno_folder;
|
||||
|
||||
pub struct ByonmNpmResolverCreateOptions<Fs: DenoResolverFs> {
|
||||
pub fs: Fs,
|
||||
// todo(dsherret): investigate removing this
|
||||
pub root_node_modules_dir: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ByonmNpmResolver<Fs: DenoResolverFs> {
|
||||
fs: Fs,
|
||||
root_node_modules_dir: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl<Fs: DenoResolverFs + Clone> Clone for ByonmNpmResolver<Fs> {
|
||||
fn clone(&self) -> Self {
|
||||
Self {
|
||||
fs: self.fs.clone(),
|
||||
root_node_modules_dir: self.root_node_modules_dir.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Fs: DenoResolverFs> ByonmNpmResolver<Fs> {
|
||||
pub fn new(options: ByonmNpmResolverCreateOptions<Fs>) -> Self {
|
||||
Self {
|
||||
fs: options.fs,
|
||||
root_node_modules_dir: options.root_node_modules_dir,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn root_node_modules_dir(&self) -> Option<&Path> {
|
||||
self.root_node_modules_dir.as_deref()
|
||||
}
|
||||
|
||||
fn load_pkg_json(
|
||||
&self,
|
||||
path: &Path,
|
||||
) -> Result<Option<Arc<PackageJson>>, PackageJsonLoadError> {
|
||||
load_pkg_json(&DenoPkgJsonFsAdapter(&self.fs), path)
|
||||
}
|
||||
|
||||
/// Finds the ancestor package.json that contains the specified dependency.
|
||||
pub fn find_ancestor_package_json_with_dep(
|
||||
&self,
|
||||
dep_name: &str,
|
||||
referrer: &Url,
|
||||
) -> Option<Arc<PackageJson>> {
|
||||
let referrer_path = url_to_file_path(referrer).ok()?;
|
||||
let mut current_folder = referrer_path.parent()?;
|
||||
loop {
|
||||
let pkg_json_path = current_folder.join("package.json");
|
||||
if let Ok(Some(pkg_json)) = self.load_pkg_json(&pkg_json_path) {
|
||||
if let Some(deps) = &pkg_json.dependencies {
|
||||
if deps.contains_key(dep_name) {
|
||||
return Some(pkg_json);
|
||||
}
|
||||
}
|
||||
if let Some(deps) = &pkg_json.dev_dependencies {
|
||||
if deps.contains_key(dep_name) {
|
||||
return Some(pkg_json);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(parent) = current_folder.parent() {
|
||||
current_folder = parent;
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_pkg_folder_from_deno_module_req(
|
||||
&self,
|
||||
req: &PackageReq,
|
||||
referrer: &Url,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
fn node_resolve_dir<Fs: DenoResolverFs>(
|
||||
fs: &Fs,
|
||||
alias: &str,
|
||||
start_dir: &Path,
|
||||
) -> Result<Option<PathBuf>, AnyError> {
|
||||
for ancestor in start_dir.ancestors() {
|
||||
let node_modules_folder = ancestor.join("node_modules");
|
||||
let sub_dir = join_package_name(&node_modules_folder, alias);
|
||||
if fs.is_dir_sync(&sub_dir) {
|
||||
return Ok(Some(deno_path_util::canonicalize_path_maybe_not_exists(
|
||||
&sub_dir,
|
||||
&|path| fs.realpath_sync(path),
|
||||
)?));
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
// now attempt to resolve if it's found in any package.json
|
||||
let maybe_pkg_json_and_alias =
|
||||
self.resolve_pkg_json_and_alias_for_req(req, referrer)?;
|
||||
match maybe_pkg_json_and_alias {
|
||||
Some((pkg_json, alias)) => {
|
||||
// now try node resolution
|
||||
if let Some(resolved) =
|
||||
node_resolve_dir(&self.fs, &alias, pkg_json.dir_path())?
|
||||
{
|
||||
return Ok(resolved);
|
||||
}
|
||||
|
||||
bail!(
|
||||
concat!(
|
||||
"Could not find \"{}\" in a node_modules folder. ",
|
||||
"Deno expects the node_modules/ directory to be up to date. ",
|
||||
"Did you forget to run `deno install`?"
|
||||
),
|
||||
alias,
|
||||
);
|
||||
}
|
||||
None => {
|
||||
// now check if node_modules/.deno/ matches this constraint
|
||||
if let Some(folder) = self.resolve_folder_in_root_node_modules(req) {
|
||||
return Ok(folder);
|
||||
}
|
||||
|
||||
bail!(
|
||||
concat!(
|
||||
"Could not find a matching package for 'npm:{}' in the node_modules ",
|
||||
"directory. Ensure you have all your JSR and npm dependencies listed ",
|
||||
"in your deno.json or package.json, then run `deno install`. Alternatively, ",
|
||||
r#"turn on auto-install by specifying `"nodeModulesDir": "auto"` in your "#,
|
||||
"deno.json file."
|
||||
),
|
||||
req,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_pkg_json_and_alias_for_req(
|
||||
&self,
|
||||
req: &PackageReq,
|
||||
referrer: &Url,
|
||||
) -> Result<Option<(Arc<PackageJson>, String)>, AnyError> {
|
||||
fn resolve_alias_from_pkg_json(
|
||||
req: &PackageReq,
|
||||
pkg_json: &PackageJson,
|
||||
) -> Option<String> {
|
||||
let deps = pkg_json.resolve_local_package_json_deps();
|
||||
for (key, value) in deps {
|
||||
if let Ok(value) = value {
|
||||
match value {
|
||||
PackageJsonDepValue::Req(dep_req) => {
|
||||
if dep_req.name == req.name
|
||||
&& dep_req.version_req.intersects(&req.version_req)
|
||||
{
|
||||
return Some(key);
|
||||
}
|
||||
}
|
||||
PackageJsonDepValue::Workspace(_workspace) => {
|
||||
if key == req.name && req.version_req.tag() == Some("workspace") {
|
||||
return Some(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
// attempt to resolve the npm specifier from the referrer's package.json,
|
||||
if let Ok(file_path) = url_to_file_path(referrer) {
|
||||
let mut current_path = file_path.as_path();
|
||||
while let Some(dir_path) = current_path.parent() {
|
||||
let package_json_path = dir_path.join("package.json");
|
||||
if let Some(pkg_json) = self.load_pkg_json(&package_json_path)? {
|
||||
if let Some(alias) =
|
||||
resolve_alias_from_pkg_json(req, pkg_json.as_ref())
|
||||
{
|
||||
return Ok(Some((pkg_json, alias)));
|
||||
}
|
||||
}
|
||||
current_path = dir_path;
|
||||
}
|
||||
}
|
||||
|
||||
// otherwise, fall back to the project's package.json
|
||||
if let Some(root_node_modules_dir) = &self.root_node_modules_dir {
|
||||
let root_pkg_json_path =
|
||||
root_node_modules_dir.parent().unwrap().join("package.json");
|
||||
if let Some(pkg_json) = self.load_pkg_json(&root_pkg_json_path)? {
|
||||
if let Some(alias) = resolve_alias_from_pkg_json(req, pkg_json.as_ref())
|
||||
{
|
||||
return Ok(Some((pkg_json, alias)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
fn resolve_folder_in_root_node_modules(
|
||||
&self,
|
||||
req: &PackageReq,
|
||||
) -> Option<PathBuf> {
|
||||
// now check if node_modules/.deno/ matches this constraint
|
||||
let root_node_modules_dir = self.root_node_modules_dir.as_ref()?;
|
||||
let node_modules_deno_dir = root_node_modules_dir.join(".deno");
|
||||
let Ok(entries) = self.fs.read_dir_sync(&node_modules_deno_dir) else {
|
||||
return None;
|
||||
};
|
||||
let search_prefix = format!(
|
||||
"{}@",
|
||||
normalize_pkg_name_for_node_modules_deno_folder(&req.name)
|
||||
);
|
||||
let mut best_version = None;
|
||||
|
||||
// example entries:
|
||||
// - @denotest+add@1.0.0
|
||||
// - @denotest+add@1.0.0_1
|
||||
for entry in entries {
|
||||
if !entry.is_directory {
|
||||
continue;
|
||||
}
|
||||
let Some(version_and_copy_idx) = entry.name.strip_prefix(&search_prefix)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
let version = version_and_copy_idx
|
||||
.rsplit_once('_')
|
||||
.map(|(v, _)| v)
|
||||
.unwrap_or(version_and_copy_idx);
|
||||
let Ok(version) = Version::parse_from_npm(version) else {
|
||||
continue;
|
||||
};
|
||||
if req.version_req.matches(&version) {
|
||||
if let Some((best_version_version, _)) = &best_version {
|
||||
if version > *best_version_version {
|
||||
best_version = Some((version, entry.name));
|
||||
}
|
||||
} else {
|
||||
best_version = Some((version, entry.name));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
best_version.map(|(_version, entry_name)| {
|
||||
join_package_name(
|
||||
&node_modules_deno_dir.join(entry_name).join("node_modules"),
|
||||
&req.name,
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<Fs: DenoResolverFs + Send + Sync + std::fmt::Debug> NpmResolver
|
||||
for ByonmNpmResolver<Fs>
|
||||
{
|
||||
fn resolve_package_folder_from_package(
|
||||
&self,
|
||||
name: &str,
|
||||
referrer: &Url,
|
||||
) -> Result<PathBuf, PackageFolderResolveError> {
|
||||
fn inner<Fs: DenoResolverFs>(
|
||||
fs: &Fs,
|
||||
name: &str,
|
||||
referrer: &Url,
|
||||
) -> Result<PathBuf, PackageFolderResolveError> {
|
||||
let maybe_referrer_file = url_to_file_path(referrer).ok();
|
||||
let maybe_start_folder =
|
||||
maybe_referrer_file.as_ref().and_then(|f| f.parent());
|
||||
if let Some(start_folder) = maybe_start_folder {
|
||||
for current_folder in start_folder.ancestors() {
|
||||
let node_modules_folder = if current_folder.ends_with("node_modules")
|
||||
{
|
||||
Cow::Borrowed(current_folder)
|
||||
} else {
|
||||
Cow::Owned(current_folder.join("node_modules"))
|
||||
};
|
||||
|
||||
let sub_dir = join_package_name(&node_modules_folder, name);
|
||||
if fs.is_dir_sync(&sub_dir) {
|
||||
return Ok(sub_dir);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err(
|
||||
PackageNotFoundError {
|
||||
package_name: name.to_string(),
|
||||
referrer: referrer.clone(),
|
||||
referrer_extra: None,
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
}
|
||||
|
||||
let path = inner(&self.fs, name, referrer)?;
|
||||
self.fs.realpath_sync(&path).map_err(|err| {
|
||||
PackageFolderResolveIoError {
|
||||
package_name: name.to_string(),
|
||||
referrer: referrer.clone(),
|
||||
source: err,
|
||||
}
|
||||
.into()
|
||||
})
|
||||
}
|
||||
|
||||
fn in_npm_package(&self, specifier: &Url) -> bool {
|
||||
specifier.scheme() == "file"
|
||||
&& specifier
|
||||
.path()
|
||||
.to_ascii_lowercase()
|
||||
.contains("/node_modules/")
|
||||
}
|
||||
}
|
||||
|
||||
fn join_package_name(path: &Path, package_name: &str) -> PathBuf {
  let mut path = path.to_path_buf();
  // ensure backslashes are used on windows
  for part in package_name.split('/') {
    path = path.join(part);
  }
  path
}
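Loosely, how an embedder might wire the new byonm resolver together — a hedged sketch reusing the hypothetical `RealSysFs` from the fs.rs example above; the project paths, package name, and the `PackageReq` parsing call are illustrative assumptions, not taken from the patch:

```rust
use std::path::PathBuf;
use std::str::FromStr;

use deno_resolver::npm::{ByonmNpmResolver, ByonmNpmResolverCreateOptions};
use deno_semver::package::PackageReq;
use url::Url;

fn resolve_chalk() -> Result<PathBuf, anyhow::Error> {
  // RealSysFs is the hypothetical DenoResolverFs impl sketched earlier.
  let resolver = ByonmNpmResolver::new(ByonmNpmResolverCreateOptions {
    fs: RealSysFs,
    root_node_modules_dir: Some(PathBuf::from("/project/node_modules")),
  });
  let req = PackageReq::from_str("chalk@5")?;
  let referrer = Url::parse("file:///project/main.ts")?;
  // Walks package.json files and node_modules folders as shown above.
  resolver.resolve_pkg_folder_from_deno_module_req(&req, &referrer)
}
```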
resolvers/deno/npm/local.rs (new file, 27 lines)
@ -0,0 +1,27 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::borrow::Cow;

/// Normalizes a package name for use at `node_modules/.deno/<pkg-name>@<version>[_<copy_index>]`
pub fn normalize_pkg_name_for_node_modules_deno_folder(name: &str) -> Cow<str> {
  let name = if name.to_lowercase() == name {
    Cow::Borrowed(name)
  } else {
    Cow::Owned(format!("_{}", mixed_case_package_name_encode(name)))
  };
  if name.starts_with('@') {
    name.replace('/', "+").into()
  } else {
    name
  }
}

fn mixed_case_package_name_encode(name: &str) -> String {
  // use base32 encoding because it's reversible and the character set
  // only includes the characters within 0-9 and A-Z so it can be lower cased
  base32::encode(
    base32::Alphabet::Rfc4648Lower { padding: false },
    name.as_bytes(),
  )
  .to_lowercase()
}
|
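A quick illustration of the normalization rules above, written as a test that would sit next to the function (not part of the patch; the package names are examples): already-lowercase names pass through, scoped names swap `/` for `+`, and mixed-case names get a reversible `_<base32>` prefix whose exact value is omitted here.

```rust
#[test]
fn normalize_pkg_name_examples() {
  assert_eq!(
    normalize_pkg_name_for_node_modules_deno_folder("@denotest/add"),
    "@denotest+add"
  );
  assert_eq!(normalize_pkg_name_for_node_modules_deno_folder("chalk"), "chalk");
  // mixed case triggers the "_<base32>" encoding
  assert!(
    normalize_pkg_name_for_node_modules_deno_folder("JSONStream").starts_with('_')
  );
}
```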
resolvers/deno/npm/mod.rs (new file, 8 lines)
@ -0,0 +1,8 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
mod byonm;
|
||||
mod local;
|
||||
|
||||
pub use byonm::ByonmNpmResolver;
|
||||
pub use byonm::ByonmNpmResolverCreateOptions;
|
||||
pub use local::normalize_pkg_name_for_node_modules_deno_folder;
|
resolvers/deno/sloppy_imports.rs (new file, 513 lines)
@ -0,0 +1,513 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use deno_media_type::MediaType;
|
||||
use deno_path_util::url_from_file_path;
|
||||
use deno_path_util::url_to_file_path;
|
||||
use url::Url;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum SloppyImportsFsEntry {
|
||||
File,
|
||||
Dir,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum SloppyImportsResolution {
|
||||
/// Ex. `./file.js` to `./file.ts`
|
||||
JsToTs(Url),
|
||||
/// Ex. `./file` to `./file.ts`
|
||||
NoExtension(Url),
|
||||
/// Ex. `./dir` to `./dir/index.ts`
|
||||
Directory(Url),
|
||||
}
|
||||
|
||||
impl SloppyImportsResolution {
|
||||
pub fn as_specifier(&self) -> &Url {
|
||||
match self {
|
||||
Self::JsToTs(specifier) => specifier,
|
||||
Self::NoExtension(specifier) => specifier,
|
||||
Self::Directory(specifier) => specifier,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_specifier(self) -> Url {
|
||||
match self {
|
||||
Self::JsToTs(specifier) => specifier,
|
||||
Self::NoExtension(specifier) => specifier,
|
||||
Self::Directory(specifier) => specifier,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_suggestion_message(&self) -> String {
|
||||
format!("Maybe {}", self.as_base_message())
|
||||
}
|
||||
|
||||
pub fn as_quick_fix_message(&self) -> String {
|
||||
let message = self.as_base_message();
|
||||
let mut chars = message.chars();
|
||||
format!(
|
||||
"{}{}.",
|
||||
chars.next().unwrap().to_uppercase(),
|
||||
chars.as_str()
|
||||
)
|
||||
}
|
||||
|
||||
fn as_base_message(&self) -> String {
|
||||
match self {
|
||||
SloppyImportsResolution::JsToTs(specifier) => {
|
||||
let media_type = MediaType::from_specifier(specifier);
|
||||
format!("change the extension to '{}'", media_type.as_ts_extension())
|
||||
}
|
||||
SloppyImportsResolution::NoExtension(specifier) => {
|
||||
let media_type = MediaType::from_specifier(specifier);
|
||||
format!("add a '{}' extension", media_type.as_ts_extension())
|
||||
}
|
||||
SloppyImportsResolution::Directory(specifier) => {
|
||||
let file_name = specifier
|
||||
.path()
|
||||
.rsplit_once('/')
|
||||
.map(|(_, file_name)| file_name)
|
||||
.unwrap_or(specifier.path());
|
||||
format!("specify path to '{}' file in directory instead", file_name)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The kind of resolution currently being done.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum SloppyImportsResolutionMode {
|
||||
/// Resolving for code that will be executed.
|
||||
Execution,
|
||||
/// Resolving for code that will be used for type information.
|
||||
Types,
|
||||
}
|
||||
|
||||
impl SloppyImportsResolutionMode {
|
||||
pub fn is_types(&self) -> bool {
|
||||
*self == SloppyImportsResolutionMode::Types
|
||||
}
|
||||
}
|
||||
|
||||
pub trait SloppyImportResolverFs {
  fn stat_sync(&self, path: &Path) -> Option<SloppyImportsFsEntry>;

  fn is_file(&self, path: &Path) -> bool {
    self.stat_sync(path) == Some(SloppyImportsFsEntry::File)
  }
}
|
||||
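A hedged sketch of a `std::fs`-backed implementation of the trait above that a consumer of the crate might supply (not in the patch; `RealSloppyImportsFs` is an invented name, and direct `std::fs` use stays outside the crate because of its clippy.toml). A resolver built as `SloppyImportsResolver::new(RealSloppyImportsFs)` could then be asked to `resolve` a file URL in either `Execution` or `Types` mode.

```rust
use std::path::Path;

use deno_resolver::sloppy_imports::{SloppyImportResolverFs, SloppyImportsFsEntry};

#[derive(Debug)]
struct RealSloppyImportsFs;

impl SloppyImportResolverFs for RealSloppyImportsFs {
  fn stat_sync(&self, path: &Path) -> Option<SloppyImportsFsEntry> {
    let metadata = std::fs::metadata(path).ok()?;
    if metadata.is_file() {
      Some(SloppyImportsFsEntry::File)
    } else if metadata.is_dir() {
      Some(SloppyImportsFsEntry::Dir)
    } else {
      // sockets, fifos, etc. are not interesting to the resolver
      None
    }
  }
}
```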
|
||||
#[derive(Debug)]
|
||||
pub struct SloppyImportsResolver<Fs: SloppyImportResolverFs> {
|
||||
fs: Fs,
|
||||
}
|
||||
|
||||
impl<Fs: SloppyImportResolverFs> SloppyImportsResolver<Fs> {
|
||||
pub fn new(fs: Fs) -> Self {
|
||||
Self { fs }
|
||||
}
|
||||
|
||||
pub fn resolve(
|
||||
&self,
|
||||
specifier: &Url,
|
||||
mode: SloppyImportsResolutionMode,
|
||||
) -> Option<SloppyImportsResolution> {
|
||||
fn path_without_ext(
|
||||
path: &Path,
|
||||
media_type: MediaType,
|
||||
) -> Option<Cow<str>> {
|
||||
let old_path_str = path.to_string_lossy();
|
||||
match media_type {
|
||||
MediaType::Unknown => Some(old_path_str),
|
||||
_ => old_path_str
|
||||
.strip_suffix(media_type.as_ts_extension())
|
||||
.map(|s| Cow::Owned(s.to_string())),
|
||||
}
|
||||
}
|
||||
|
||||
fn media_types_to_paths(
|
||||
path_no_ext: &str,
|
||||
original_media_type: MediaType,
|
||||
probe_media_type_types: Vec<MediaType>,
|
||||
reason: SloppyImportsResolutionReason,
|
||||
) -> Vec<(PathBuf, SloppyImportsResolutionReason)> {
|
||||
probe_media_type_types
|
||||
.into_iter()
|
||||
.filter(|media_type| *media_type != original_media_type)
|
||||
.map(|media_type| {
|
||||
(
|
||||
PathBuf::from(format!(
|
||||
"{}{}",
|
||||
path_no_ext,
|
||||
media_type.as_ts_extension()
|
||||
)),
|
||||
reason,
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
}
|
||||
|
||||
if specifier.scheme() != "file" {
|
||||
return None;
|
||||
}
|
||||
|
||||
let path = url_to_file_path(specifier).ok()?;
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
enum SloppyImportsResolutionReason {
|
||||
JsToTs,
|
||||
NoExtension,
|
||||
Directory,
|
||||
}
|
||||
|
||||
let probe_paths: Vec<(PathBuf, SloppyImportsResolutionReason)> =
|
||||
match self.fs.stat_sync(&path) {
|
||||
Some(SloppyImportsFsEntry::File) => {
|
||||
if mode.is_types() {
|
||||
let media_type = MediaType::from_specifier(specifier);
|
||||
// attempt to resolve the .d.ts file before the .js file
|
||||
let probe_media_type_types = match media_type {
|
||||
MediaType::JavaScript => {
|
||||
vec![(MediaType::Dts), MediaType::JavaScript]
|
||||
}
|
||||
MediaType::Mjs => {
|
||||
vec![MediaType::Dmts, MediaType::Dts, MediaType::Mjs]
|
||||
}
|
||||
MediaType::Cjs => {
|
||||
vec![MediaType::Dcts, MediaType::Dts, MediaType::Cjs]
|
||||
}
|
||||
_ => return None,
|
||||
};
|
||||
let path_no_ext = path_without_ext(&path, media_type)?;
|
||||
media_types_to_paths(
|
||||
&path_no_ext,
|
||||
media_type,
|
||||
probe_media_type_types,
|
||||
SloppyImportsResolutionReason::JsToTs,
|
||||
)
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
entry @ None | entry @ Some(SloppyImportsFsEntry::Dir) => {
|
||||
let media_type = MediaType::from_specifier(specifier);
|
||||
let probe_media_type_types = match media_type {
|
||||
MediaType::JavaScript => (
|
||||
if mode.is_types() {
|
||||
vec![MediaType::TypeScript, MediaType::Tsx, MediaType::Dts]
|
||||
} else {
|
||||
vec![MediaType::TypeScript, MediaType::Tsx]
|
||||
},
|
||||
SloppyImportsResolutionReason::JsToTs,
|
||||
),
|
||||
MediaType::Jsx => {
|
||||
(vec![MediaType::Tsx], SloppyImportsResolutionReason::JsToTs)
|
||||
}
|
||||
MediaType::Mjs => (
|
||||
if mode.is_types() {
|
||||
vec![MediaType::Mts, MediaType::Dmts, MediaType::Dts]
|
||||
} else {
|
||||
vec![MediaType::Mts]
|
||||
},
|
||||
SloppyImportsResolutionReason::JsToTs,
|
||||
),
|
||||
MediaType::Cjs => (
|
||||
if mode.is_types() {
|
||||
vec![MediaType::Cts, MediaType::Dcts, MediaType::Dts]
|
||||
} else {
|
||||
vec![MediaType::Cts]
|
||||
},
|
||||
SloppyImportsResolutionReason::JsToTs,
|
||||
),
|
||||
MediaType::TypeScript
|
||||
| MediaType::Mts
|
||||
| MediaType::Cts
|
||||
| MediaType::Dts
|
||||
| MediaType::Dmts
|
||||
| MediaType::Dcts
|
||||
| MediaType::Tsx
|
||||
| MediaType::Json
|
||||
| MediaType::Wasm
|
||||
| MediaType::TsBuildInfo
|
||||
| MediaType::SourceMap => {
|
||||
return None;
|
||||
}
|
||||
MediaType::Unknown => (
|
||||
if mode.is_types() {
|
||||
vec![
|
||||
MediaType::TypeScript,
|
||||
MediaType::Tsx,
|
||||
MediaType::Mts,
|
||||
MediaType::Dts,
|
||||
MediaType::Dmts,
|
||||
MediaType::Dcts,
|
||||
MediaType::JavaScript,
|
||||
MediaType::Jsx,
|
||||
MediaType::Mjs,
|
||||
]
|
||||
} else {
|
||||
vec![
|
||||
MediaType::TypeScript,
|
||||
MediaType::JavaScript,
|
||||
MediaType::Tsx,
|
||||
MediaType::Jsx,
|
||||
MediaType::Mts,
|
||||
MediaType::Mjs,
|
||||
]
|
||||
},
|
||||
SloppyImportsResolutionReason::NoExtension,
|
||||
),
|
||||
};
|
||||
let mut probe_paths = match path_without_ext(&path, media_type) {
|
||||
Some(path_no_ext) => media_types_to_paths(
|
||||
&path_no_ext,
|
||||
media_type,
|
||||
probe_media_type_types.0,
|
||||
probe_media_type_types.1,
|
||||
),
|
||||
None => vec![],
|
||||
};
|
||||
|
||||
if matches!(entry, Some(SloppyImportsFsEntry::Dir)) {
|
||||
// try to resolve at the index file
|
||||
if mode.is_types() {
|
||||
probe_paths.push((
|
||||
path.join("index.ts"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
|
||||
probe_paths.push((
|
||||
path.join("index.mts"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.d.ts"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.d.mts"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.js"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.mjs"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.tsx"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.jsx"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
} else {
|
||||
probe_paths.push((
|
||||
path.join("index.ts"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.mts"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.tsx"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.js"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.mjs"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
probe_paths.push((
|
||||
path.join("index.jsx"),
|
||||
SloppyImportsResolutionReason::Directory,
|
||||
));
|
||||
}
|
||||
}
|
||||
if probe_paths.is_empty() {
|
||||
return None;
|
||||
}
|
||||
probe_paths
|
||||
}
|
||||
};
|
||||
|
||||
for (probe_path, reason) in probe_paths {
|
||||
if self.fs.is_file(&probe_path) {
|
||||
if let Ok(specifier) = url_from_file_path(&probe_path) {
|
||||
match reason {
|
||||
SloppyImportsResolutionReason::JsToTs => {
|
||||
return Some(SloppyImportsResolution::JsToTs(specifier));
|
||||
}
|
||||
SloppyImportsResolutionReason::NoExtension => {
|
||||
return Some(SloppyImportsResolution::NoExtension(specifier));
|
||||
}
|
||||
SloppyImportsResolutionReason::Directory => {
|
||||
return Some(SloppyImportsResolution::Directory(specifier));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use test_util::TestContext;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_unstable_sloppy_imports() {
|
||||
fn resolve(specifier: &Url) -> Option<SloppyImportsResolution> {
|
||||
resolve_with_mode(specifier, SloppyImportsResolutionMode::Execution)
|
||||
}
|
||||
|
||||
fn resolve_types(specifier: &Url) -> Option<SloppyImportsResolution> {
|
||||
resolve_with_mode(specifier, SloppyImportsResolutionMode::Types)
|
||||
}
|
||||
|
||||
fn resolve_with_mode(
|
||||
specifier: &Url,
|
||||
mode: SloppyImportsResolutionMode,
|
||||
) -> Option<SloppyImportsResolution> {
|
||||
struct RealSloppyImportsResolverFs;
|
||||
impl SloppyImportResolverFs for RealSloppyImportsResolverFs {
|
||||
fn stat_sync(&self, path: &Path) -> Option<SloppyImportsFsEntry> {
|
||||
#[allow(clippy::disallowed_methods)]
|
||||
let stat = std::fs::metadata(path).ok()?;
|
||||
if stat.is_dir() {
|
||||
Some(SloppyImportsFsEntry::Dir)
|
||||
} else if stat.is_file() {
|
||||
Some(SloppyImportsFsEntry::File)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
SloppyImportsResolver::new(RealSloppyImportsResolverFs)
|
||||
.resolve(specifier, mode)
|
||||
}
|
||||
|
||||
let context = TestContext::default();
|
||||
let temp_dir = context.temp_dir().path();
|
||||
|
||||
// scenarios like resolving ./example.js to ./example.ts
|
||||
for (ext_from, ext_to) in [("js", "ts"), ("js", "tsx"), ("mjs", "mts")] {
|
||||
let ts_file = temp_dir.join(format!("file.{}", ext_to));
|
||||
ts_file.write("");
|
||||
assert_eq!(resolve(&ts_file.url_file()), None);
|
||||
assert_eq!(
|
||||
resolve(
|
||||
&temp_dir
|
||||
.url_dir()
|
||||
.join(&format!("file.{}", ext_from))
|
||||
.unwrap()
|
||||
),
|
||||
Some(SloppyImportsResolution::JsToTs(ts_file.url_file())),
|
||||
);
|
||||
ts_file.remove_file();
|
||||
}
|
||||
|
||||
// no extension scenarios
|
||||
for ext in ["js", "ts", "js", "tsx", "jsx", "mjs", "mts"] {
|
||||
let file = temp_dir.join(format!("file.{}", ext));
|
||||
file.write("");
|
||||
assert_eq!(
|
||||
resolve(
|
||||
&temp_dir
|
||||
.url_dir()
|
||||
.join("file") // no ext
|
||||
.unwrap()
|
||||
),
|
||||
Some(SloppyImportsResolution::NoExtension(file.url_file()))
|
||||
);
|
||||
file.remove_file();
|
||||
}
|
||||
|
||||
// .ts and .js exists, .js specified (goes to specified)
|
||||
{
|
||||
let ts_file = temp_dir.join("file.ts");
|
||||
ts_file.write("");
|
||||
let js_file = temp_dir.join("file.js");
|
||||
js_file.write("");
|
||||
assert_eq!(resolve(&js_file.url_file()), None);
|
||||
}
|
||||
|
||||
// only js exists, .js specified
|
||||
{
|
||||
let js_only_file = temp_dir.join("js_only.js");
|
||||
js_only_file.write("");
|
||||
assert_eq!(resolve(&js_only_file.url_file()), None);
|
||||
assert_eq!(resolve_types(&js_only_file.url_file()), None);
|
||||
}
|
||||
|
||||
// resolving a directory to an index file
|
||||
{
|
||||
let routes_dir = temp_dir.join("routes");
|
||||
routes_dir.create_dir_all();
|
||||
let index_file = routes_dir.join("index.ts");
|
||||
index_file.write("");
|
||||
assert_eq!(
|
||||
resolve(&routes_dir.url_file()),
|
||||
Some(SloppyImportsResolution::Directory(index_file.url_file())),
|
||||
);
|
||||
}
|
||||
|
||||
// both a directory and a file with specifier is present
|
||||
{
|
||||
let api_dir = temp_dir.join("api");
|
||||
api_dir.create_dir_all();
|
||||
let bar_file = api_dir.join("bar.ts");
|
||||
bar_file.write("");
|
||||
let api_file = temp_dir.join("api.ts");
|
||||
api_file.write("");
|
||||
assert_eq!(
|
||||
resolve(&api_dir.url_file()),
|
||||
Some(SloppyImportsResolution::NoExtension(api_file.url_file())),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sloppy_import_resolution_suggestion_message() {
|
||||
// directory
|
||||
assert_eq!(
|
||||
SloppyImportsResolution::Directory(
|
||||
Url::parse("file:///dir/index.js").unwrap()
|
||||
)
|
||||
.as_suggestion_message(),
|
||||
"Maybe specify path to 'index.js' file in directory instead"
|
||||
);
|
||||
// no ext
|
||||
assert_eq!(
|
||||
SloppyImportsResolution::NoExtension(
|
||||
Url::parse("file:///dir/index.mjs").unwrap()
|
||||
)
|
||||
.as_suggestion_message(),
|
||||
"Maybe add a '.mjs' extension"
|
||||
);
|
||||
// js to ts
|
||||
assert_eq!(
|
||||
SloppyImportsResolution::JsToTs(
|
||||
Url::parse("file:///dir/index.mts").unwrap()
|
||||
)
|
||||
.as_suggestion_message(),
|
||||
"Maybe change the extension to '.mts'"
|
||||
);
|
||||
}
|
||||
}
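Not part of the diff above: a minimal usage sketch of the `SloppyImportsResolver` this file introduces, assuming its items are in scope. The `StdFs` adapter and the probed path are hypothetical; the adapter simply mirrors the `std::fs`-backed `RealSloppyImportsResolverFs` from the test module.

// Illustrative sketch only; `StdFs` and the example path are assumptions.
use std::path::Path;
use url::Url;

struct StdFs;

impl SloppyImportResolverFs for StdFs {
  fn stat_sync(&self, path: &Path) -> Option<SloppyImportsFsEntry> {
    let meta = std::fs::metadata(path).ok()?;
    if meta.is_dir() {
      Some(SloppyImportsFsEntry::Dir)
    } else if meta.is_file() {
      Some(SloppyImportsFsEntry::File)
    } else {
      None
    }
  }
}

fn main() {
  let resolver = SloppyImportsResolver::new(StdFs);
  // e.g. `import "./utils"` where only `./utils.ts` exists on disk
  let specifier = Url::parse("file:///project/utils").unwrap();
  if let Some(resolution) =
    resolver.resolve(&specifier, SloppyImportsResolutionMode::Execution)
  {
    // e.g. "Maybe add a '.ts' extension"
    println!("{}", resolution.as_suggestion_message());
    println!("resolved to {}", resolution.into_specifier());
  }
}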
@@ -21,6 +21,7 @@ anyhow.workspace = true
async-trait.workspace = true
deno_media_type.workspace = true
deno_package_json.workspace = true
deno_path_util.workspace = true
futures.workspace = true
lazy-regex.workspace = true
once_cell.workspace = true
@@ -6,6 +6,8 @@ use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;

use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use futures::future::LocalBoxFuture;
use futures::stream::FuturesUnordered;
use futures::FutureExt;
@@ -18,7 +20,6 @@ use url::Url;

use crate::env::NodeResolverEnv;
use crate::package_json::load_pkg_json;
use crate::path::to_file_specifier;
use crate::resolution::NodeResolverRc;
use crate::NodeModuleKind;
use crate::NodeResolutionMode;
@@ -135,8 +136,7 @@ impl<TCjsCodeAnalyzer: CjsCodeAnalyzer, TNodeResolverEnv: NodeResolverEnv>

    source.push(format!(
      "const mod = require(\"{}\");",
      entry_specifier
        .to_file_path()
      url_to_file_path(entry_specifier)
        .unwrap()
        .to_str()
        .unwrap()
@@ -297,15 +297,13 @@ impl<TCjsCodeAnalyzer: CjsCodeAnalyzer, TNodeResolverEnv: NodeResolverEnv>
        todo!();
      }

      let referrer_path = referrer.to_file_path().unwrap();
      let referrer_path = url_to_file_path(referrer).unwrap();
      if specifier.starts_with("./") || specifier.starts_with("../") {
        if let Some(parent) = referrer_path.parent() {
          return Some(
            self
              .file_extension_probe(parent.join(specifier), &referrer_path)
              .map(|p| to_file_specifier(&p)),
          )
          .transpose();
          return self
            .file_extension_probe(parent.join(specifier), &referrer_path)
            .and_then(|p| url_from_file_path(&p).map_err(AnyError::from))
            .map(Some);
        } else {
          todo!();
        }
@@ -362,24 +360,22 @@ impl<TCjsCodeAnalyzer: CjsCodeAnalyzer, TNodeResolverEnv: NodeResolverEnv>
          load_pkg_json(self.env.pkg_json_fs(), &package_json_path)?;
        if let Some(package_json) = maybe_package_json {
          if let Some(main) = package_json.main(NodeModuleKind::Cjs) {
            return Ok(Some(to_file_specifier(&d.join(main).clean())));
            return Ok(Some(url_from_file_path(&d.join(main).clean())?));
          }
        }

        return Ok(Some(to_file_specifier(&d.join("index.js").clean())));
        return Ok(Some(url_from_file_path(&d.join("index.js").clean())?));
      }
      return Some(
        self
          .file_extension_probe(d, &referrer_path)
          .map(|p| to_file_specifier(&p)),
      )
      .transpose();
      return self
        .file_extension_probe(d, &referrer_path)
        .and_then(|p| url_from_file_path(&p).map_err(AnyError::from))
        .map(Some);
    } else if let Some(main) = package_json.main(NodeModuleKind::Cjs) {
      return Ok(Some(to_file_specifier(&module_dir.join(main).clean())));
      return Ok(Some(url_from_file_path(&module_dir.join(main).clean())?));
    } else {
      return Ok(Some(to_file_specifier(
      return Ok(Some(url_from_file_path(
        &module_dir.join("index.js").clean(),
      )));
      )?));
    }
  }

@@ -395,7 +391,7 @@ impl<TCjsCodeAnalyzer: CjsCodeAnalyzer, TNodeResolverEnv: NodeResolverEnv>
        parent.join("node_modules").join(specifier)
      };
      if let Ok(path) = self.file_extension_probe(path, &referrer_path) {
        return Ok(Some(to_file_specifier(&path)));
        return Ok(Some(url_from_file_path(&path)?));
      }
      last = parent;
    }
@@ -42,6 +42,9 @@ disallowed-methods = [
  { path = "std::fs::write", reason = "File system operations should be done using NodeResolverFs trait" },
  { path = "std::path::Path::canonicalize", reason = "File system operations should be done using NodeResolverFs trait" },
  { path = "std::path::Path::exists", reason = "File system operations should be done using NodeResolverFs trait" },
  { path = "url::Url::to_file_path", reason = "Use deno_path_util instead so it works in Wasm" },
  { path = "url::Url::from_file_path", reason = "Use deno_path_util instead so it works in Wasm" },
  { path = "url::Url::from_directory_path", reason = "Use deno_path_util instead so it works in Wasm" },
]
disallowed-types = [
  { path = "std::sync::Arc", reason = "use crate::sync::MaybeArc instead" },
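For context (not part of the diff): the new clippy `disallowed-methods` entries enforce the same migration the hunks above perform by hand, replacing direct `url::Url::to_file_path` / `from_file_path` calls with the `deno_path_util` helpers so the code keeps working when compiled to Wasm. A minimal sketch of the replacement pattern; the `normalize` helper is hypothetical and only the two imported functions are taken from the diff.

// Hypothetical helper illustrating the url <-> path conversion pattern.
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use url::Url;

// Convert a file URL to a path and back using deno_path_util instead of
// the now-disallowed Url::to_file_path / Url::from_file_path.
fn normalize(specifier: &Url) -> Option<Url> {
  let path = url_to_file_path(specifier).ok()?;
  url_from_file_path(&path).ok()
}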
Some files were not shown because too many files have changed in this diff.