Mirror of https://github.com/denoland/deno.git (synced 2024-11-30 16:40:57 -05:00)

commit f3456f1871
Merge branch 'main' into Fix-UNC-Path-Permissions-Issue-on-Windows

291 changed files with 3312 additions and 2923 deletions

2  .github/workflows/ci.generate.ts  (vendored)
@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 14;
+const cacheVersion = 15;

 const ubuntuX86Runner = "ubuntu-22.04";
 const ubuntuX86XlRunner = "ubuntu-22.04-xl";

8  .github/workflows/ci.yml  (vendored)
@@ -367,8 +367,8 @@ jobs:
 path: |-
 ~/.cargo/registry/index
 ~/.cargo/registry/cache
-key: '14-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-restore-keys: '14-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
+key: '15-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+restore-keys: '15-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
 if: '!(matrix.skip)'
 - name: Restore cache build output (PR)
 uses: actions/cache/restore@v4
@@ -381,7 +381,7 @@ jobs:
 !./target/*/*.zip
 !./target/*/*.tar.gz
 key: never_saved
-restore-keys: '14-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+restore-keys: '15-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
 - name: Apply and update mtime cache
 if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
 uses: ./.github/mtime_cache
@@ -670,7 +670,7 @@ jobs:
 !./target/*/gn_out
 !./target/*/*.zip
 !./target/*/*.tar.gz
-key: '14-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+key: '15-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
 publish-canary:
 name: publish canary
 runs-on: ubuntu-22.04

235  Cargo.lock  (generated)
|
@ -167,9 +167,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "anstyle"
|
||||
version = "1.0.6"
|
||||
version = "1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc"
|
||||
checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-parse"
|
||||
|
@ -465,7 +465,7 @@ dependencies = [
|
|||
"proc-macro2",
|
||||
"quote",
|
||||
"regex",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"shlex",
|
||||
"syn 2.0.72",
|
||||
"which 4.4.2",
|
||||
|
@ -693,18 +693,18 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.13"
|
||||
version = "4.5.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0fbb260a053428790f3de475e304ff84cdbc4face759ea7a3e64c1edd938a7fc"
|
||||
checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.13"
|
||||
version = "4.5.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "64b17d7ea74e9f833c7dbf2cbe4fb12ff26783eda4782a8975b72f895c9b4d99"
|
||||
checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
|
@ -715,9 +715,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap_complete"
|
||||
version = "4.5.12"
|
||||
version = "4.5.24"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a8670053e87c316345e384ca1f3eba3006fc6355ed8b8a1140d104e109e3df34"
|
||||
checksum = "6d7db6eca8c205649e8d3ccd05aa5042b1800a784e56bc7c43524fde8abbfa9b"
|
||||
dependencies = [
|
||||
"clap",
|
||||
]
|
||||
|
@ -1113,7 +1113,7 @@ dependencies = [
|
|||
"hashbrown",
|
||||
"lock_api",
|
||||
"once_cell",
|
||||
"parking_lot_core 0.9.9",
|
||||
"parking_lot_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1169,7 +1169,6 @@ dependencies = [
|
|||
"deno_config",
|
||||
"deno_core",
|
||||
"deno_doc",
|
||||
"deno_emit",
|
||||
"deno_graph",
|
||||
"deno_lint",
|
||||
"deno_lockfile",
|
||||
|
@ -1316,7 +1315,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_bench_util"
|
||||
version = "0.161.0"
|
||||
version = "0.162.0"
|
||||
dependencies = [
|
||||
"bencher",
|
||||
"deno_core",
|
||||
|
@ -1325,7 +1324,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_broadcast_channel"
|
||||
version = "0.161.0"
|
||||
version = "0.162.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"deno_core",
|
||||
|
@ -1335,7 +1334,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_cache"
|
||||
version = "0.99.0"
|
||||
version = "0.100.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"deno_core",
|
||||
|
@ -1355,7 +1354,7 @@ dependencies = [
|
|||
"indexmap",
|
||||
"log",
|
||||
"once_cell",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha2",
|
||||
|
@ -1365,7 +1364,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_canvas"
|
||||
version = "0.36.0"
|
||||
version = "0.37.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_webgpu",
|
||||
|
@ -1375,9 +1374,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_config"
|
||||
version = "0.32.0"
|
||||
version = "0.33.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c774f2e49b4ca47f1fe5c39e1775d1434280a4f168252fed8f4a3f2230868448"
|
||||
checksum = "495df7ebed4feee5c0eb7631b0b86432bb6370638cf81d5eeb5769aab55fb2de"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"deno_package_json",
|
||||
|
@ -1389,6 +1388,7 @@ dependencies = [
|
|||
"jsonc-parser",
|
||||
"log",
|
||||
"percent-encoding",
|
||||
"phf 0.11.2",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"thiserror",
|
||||
|
@ -1397,7 +1397,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_console"
|
||||
version = "0.167.0"
|
||||
version = "0.168.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
]
|
||||
|
@ -1420,7 +1420,7 @@ dependencies = [
|
|||
"futures",
|
||||
"libc",
|
||||
"memoffset 0.9.1",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"percent-encoding",
|
||||
"pin-project",
|
||||
"serde",
|
||||
|
@ -1442,7 +1442,7 @@ checksum = "a13951ea98c0a4c372f162d669193b4c9d991512de9f2381dd161027f34b26b1"
|
|||
|
||||
[[package]]
|
||||
name = "deno_cron"
|
||||
version = "0.47.0"
|
||||
version = "0.48.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
|
@ -1454,7 +1454,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_crypto"
|
||||
version = "0.181.0"
|
||||
version = "0.182.0"
|
||||
dependencies = [
|
||||
"aes",
|
||||
"aes-gcm",
|
||||
|
@ -1511,26 +1511,9 @@ dependencies = [
|
|||
"termcolor",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_emit"
|
||||
version = "0.45.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "33aca9546e36a1b85efb630add94a4c2ac13c2333bb48df4439002c002f4c5b2"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"base64 0.21.7",
|
||||
"deno_ast",
|
||||
"deno_graph",
|
||||
"escape8259",
|
||||
"futures",
|
||||
"import_map",
|
||||
"parking_lot 0.11.2",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_fetch"
|
||||
version = "0.191.0"
|
||||
version = "0.192.0"
|
||||
dependencies = [
|
||||
"base64 0.21.7",
|
||||
"bytes",
|
||||
|
@ -1562,7 +1545,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_ffi"
|
||||
version = "0.154.0"
|
||||
version = "0.155.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_permissions",
|
||||
|
@ -1579,7 +1562,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_fs"
|
||||
version = "0.77.0"
|
||||
version = "0.78.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"base32",
|
||||
|
@ -1616,7 +1599,7 @@ dependencies = [
|
|||
"log",
|
||||
"monch",
|
||||
"once_cell",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_json",
|
||||
|
@ -1628,7 +1611,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_http"
|
||||
version = "0.165.0"
|
||||
version = "0.166.0"
|
||||
dependencies = [
|
||||
"async-compression",
|
||||
"async-trait",
|
||||
|
@ -1667,7 +1650,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_io"
|
||||
version = "0.77.0"
|
||||
version = "0.78.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"deno_core",
|
||||
|
@ -1677,7 +1660,7 @@ dependencies = [
|
|||
"log",
|
||||
"once_cell",
|
||||
"os_pipe",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"pin-project",
|
||||
"rand",
|
||||
"tokio",
|
||||
|
@ -1688,7 +1671,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_kv"
|
||||
version = "0.75.0"
|
||||
version = "0.76.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
|
@ -1758,7 +1741,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_napi"
|
||||
version = "0.98.0"
|
||||
version = "0.99.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_permissions",
|
||||
|
@ -1780,7 +1763,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_net"
|
||||
version = "0.159.0"
|
||||
version = "0.160.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_permissions",
|
||||
|
@ -1796,7 +1779,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_node"
|
||||
version = "0.104.0"
|
||||
version = "0.105.0"
|
||||
dependencies = [
|
||||
"aead-gcm-stream",
|
||||
"aes",
|
||||
|
@ -1932,7 +1915,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_permissions"
|
||||
version = "0.27.0"
|
||||
version = "0.28.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_terminal 0.2.0",
|
||||
|
@ -1947,7 +1930,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_runtime"
|
||||
version = "0.176.0"
|
||||
version = "0.177.0"
|
||||
dependencies = [
|
||||
"deno_ast",
|
||||
"deno_broadcast_channel",
|
||||
|
@ -2060,7 +2043,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_tls"
|
||||
version = "0.154.0"
|
||||
version = "0.155.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_native_certs",
|
||||
|
@ -2102,13 +2085,13 @@ version = "0.4.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2f36b4ef61a04ce201b925a5dffa90f88437d37fee4836c758470dd15ba7f05e"
|
||||
dependencies = [
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_url"
|
||||
version = "0.167.0"
|
||||
version = "0.168.0"
|
||||
dependencies = [
|
||||
"deno_bench_util",
|
||||
"deno_console",
|
||||
|
@ -2119,7 +2102,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_web"
|
||||
version = "0.198.0"
|
||||
version = "0.199.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"base64-simd 0.8.0",
|
||||
|
@ -2140,7 +2123,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_webgpu"
|
||||
version = "0.134.0"
|
||||
version = "0.135.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"raw-window-handle",
|
||||
|
@ -2152,7 +2135,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_webidl"
|
||||
version = "0.167.0"
|
||||
version = "0.168.0"
|
||||
dependencies = [
|
||||
"deno_bench_util",
|
||||
"deno_core",
|
||||
|
@ -2160,7 +2143,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_websocket"
|
||||
version = "0.172.0"
|
||||
version = "0.173.0"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"deno_core",
|
||||
|
@ -2181,7 +2164,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_webstorage"
|
||||
version = "0.162.0"
|
||||
version = "0.163.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_web",
|
||||
|
@ -2475,7 +2458,7 @@ dependencies = [
|
|||
"bumpalo",
|
||||
"hashbrown",
|
||||
"indexmap",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"serde",
|
||||
"unicode-width",
|
||||
]
|
||||
|
@ -2543,7 +2526,7 @@ dependencies = [
|
|||
"dprint-core",
|
||||
"dprint-core-macros",
|
||||
"percent-encoding",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"serde",
|
||||
]
|
||||
|
||||
|
@ -2556,7 +2539,7 @@ dependencies = [
|
|||
"allocator-api2",
|
||||
"bumpalo",
|
||||
"num-bigint",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"swc_atoms",
|
||||
"swc_common",
|
||||
"swc_ecma_ast",
|
||||
|
@ -2808,15 +2791,6 @@ version = "1.0.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "31ae425815400e5ed474178a7a22e275a9687086a12ca63ec793ff292d8fdae8"
|
||||
|
||||
[[package]]
|
||||
name = "escape8259"
|
||||
version = "0.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ba4f4911e3666fcd7826997b4745c8224295a6f3072f1418c3067b97a67557ee"
|
||||
dependencies = [
|
||||
"rustversion",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "eszip"
|
||||
version = "0.78.0"
|
||||
|
@ -2956,7 +2930,7 @@ dependencies = [
|
|||
"anyhow",
|
||||
"crossbeam-channel",
|
||||
"deno_terminal 0.1.1",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"regex",
|
||||
"thiserror",
|
||||
]
|
||||
|
@ -2969,7 +2943,7 @@ checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd"
|
|||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"redox_syscall 0.4.1",
|
||||
"redox_syscall",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
|
@ -3551,7 +3525,7 @@ dependencies = [
|
|||
"new_debug_unreachable",
|
||||
"once_cell",
|
||||
"phf 0.11.2",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"triomphe",
|
||||
]
|
||||
|
||||
|
@ -3853,15 +3827,6 @@ dependencies = [
|
|||
"generic-array",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "instant"
|
||||
version = "0.1.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ipconfig"
|
||||
version = "0.3.2"
|
||||
|
@ -4523,7 +4488,7 @@ dependencies = [
|
|||
"indexmap",
|
||||
"log",
|
||||
"num-traits",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"serde",
|
||||
"spirv",
|
||||
"termcolor",
|
||||
|
@ -4548,7 +4513,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "napi_sym"
|
||||
version = "0.97.0"
|
||||
version = "0.98.0"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"serde",
|
||||
|
@ -4617,7 +4582,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "node_resolver"
|
||||
version = "0.6.0"
|
||||
version = "0.7.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
|
@ -4924,17 +4889,6 @@ dependencies = [
|
|||
"sha2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot"
|
||||
version = "0.11.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
|
||||
dependencies = [
|
||||
"instant",
|
||||
"lock_api",
|
||||
"parking_lot_core 0.8.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot"
|
||||
version = "0.12.3"
|
||||
|
@ -4942,21 +4896,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
|
||||
dependencies = [
|
||||
"lock_api",
|
||||
"parking_lot_core 0.9.9",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot_core"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"instant",
|
||||
"libc",
|
||||
"redox_syscall 0.2.16",
|
||||
"smallvec",
|
||||
"winapi",
|
||||
"parking_lot_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4967,7 +4907,7 @@ checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
|
|||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"redox_syscall 0.4.1",
|
||||
"redox_syscall",
|
||||
"smallvec",
|
||||
"windows-targets 0.48.5",
|
||||
]
|
||||
|
@ -5534,7 +5474,7 @@ dependencies = [
|
|||
"pin-project-lite",
|
||||
"quinn-proto",
|
||||
"quinn-udp",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"rustls",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
|
@ -5543,14 +5483,14 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "quinn-proto"
|
||||
version = "0.11.3"
|
||||
version = "0.11.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ddf517c03a109db8100448a4be38d498df8a210a99fe0e1b9eaf39e78c640efe"
|
||||
checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"rand",
|
||||
"ring",
|
||||
"rustc-hash",
|
||||
"rustc-hash 2.0.0",
|
||||
"rustls",
|
||||
"slab",
|
||||
"thiserror",
|
||||
|
@ -5685,15 +5625,6 @@ dependencies = [
|
|||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.2.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.4.1"
|
||||
|
@ -5881,7 +5812,7 @@ dependencies = [
|
|||
"countme",
|
||||
"hashbrown",
|
||||
"memoffset 0.9.1",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"text-size",
|
||||
]
|
||||
|
||||
|
@ -5955,6 +5886,12 @@ version = "1.1.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
|
||||
|
||||
[[package]]
|
||||
name = "rustc-hash"
|
||||
version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"
|
||||
|
||||
[[package]]
|
||||
name = "rustc_version"
|
||||
version = "0.2.3"
|
||||
|
@ -6546,7 +6483,7 @@ dependencies = [
|
|||
"data-encoding",
|
||||
"debugid",
|
||||
"if_chain",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"rustc_version 0.2.3",
|
||||
"serde",
|
||||
"serde_json",
|
||||
|
@ -6565,7 +6502,7 @@ dependencies = [
|
|||
"data-encoding",
|
||||
"debugid",
|
||||
"if_chain",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"rustc_version 0.2.3",
|
||||
"serde",
|
||||
"serde_json",
|
||||
|
@ -6631,7 +6568,7 @@ checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b"
|
|||
dependencies = [
|
||||
"new_debug_unreachable",
|
||||
"once_cell",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"phf_shared 0.10.0",
|
||||
"precomputed-hash",
|
||||
"serde",
|
||||
|
@ -6713,7 +6650,7 @@ dependencies = [
|
|||
"bumpalo",
|
||||
"hashbrown",
|
||||
"ptr_meta",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"triomphe",
|
||||
]
|
||||
|
||||
|
@ -6725,7 +6662,7 @@ checksum = "bb6567e4e67485b3e7662b486f1565bdae54bd5b9d6b16b2ba1a9babb1e42125"
|
|||
dependencies = [
|
||||
"hstr",
|
||||
"once_cell",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"serde",
|
||||
]
|
||||
|
||||
|
@ -6740,7 +6677,7 @@ dependencies = [
|
|||
"indexmap",
|
||||
"is-macro",
|
||||
"once_cell",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"petgraph",
|
||||
"radix_fmt",
|
||||
"relative-path",
|
||||
|
@ -6787,7 +6724,7 @@ dependencies = [
|
|||
"new_debug_unreachable",
|
||||
"num-bigint",
|
||||
"once_cell",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"serde",
|
||||
"siphasher",
|
||||
"sourcemap 9.0.0",
|
||||
|
@ -6922,7 +6859,7 @@ dependencies = [
|
|||
"indexmap",
|
||||
"once_cell",
|
||||
"phf 0.11.2",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"serde",
|
||||
"smallvec",
|
||||
"swc_atoms",
|
||||
|
@ -6970,7 +6907,7 @@ dependencies = [
|
|||
"indexmap",
|
||||
"once_cell",
|
||||
"petgraph",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"serde_json",
|
||||
"swc_atoms",
|
||||
"swc_common",
|
||||
|
@ -6991,7 +6928,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "79938ff510fc647febd8c6c3ef4143d099fdad87a223680e632623d056dae2dd"
|
||||
dependencies = [
|
||||
"either",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"serde",
|
||||
"smallvec",
|
||||
"swc_atoms",
|
||||
|
@ -7055,7 +6992,7 @@ dependencies = [
|
|||
"indexmap",
|
||||
"num_cpus",
|
||||
"once_cell",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"ryu-js",
|
||||
"swc_atoms",
|
||||
"swc_common",
|
||||
|
@ -7099,7 +7036,7 @@ checksum = "357e2c97bb51431d65080f25b436bc4e2fc1a7f64a643bc21a8353e478dc799f"
|
|||
dependencies = [
|
||||
"indexmap",
|
||||
"petgraph",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"swc_common",
|
||||
]
|
||||
|
||||
|
@ -7325,7 +7262,7 @@ dependencies = [
|
|||
"nix 0.26.2",
|
||||
"once_cell",
|
||||
"os_pipe",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"pretty_assertions",
|
||||
"prost",
|
||||
"prost-build",
|
||||
|
@ -7457,7 +7394,7 @@ dependencies = [
|
|||
"libc",
|
||||
"mio",
|
||||
"num_cpus",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"pin-project-lite",
|
||||
"signal-hook-registry",
|
||||
"socket2",
|
||||
|
@ -7728,7 +7665,7 @@ dependencies = [
|
|||
"ipconfig",
|
||||
"lru-cache",
|
||||
"once_cell",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"rand",
|
||||
"resolv-conf",
|
||||
"serde",
|
||||
|
@ -8201,11 +8138,11 @@ dependencies = [
|
|||
"log",
|
||||
"naga",
|
||||
"once_cell",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"profiling",
|
||||
"raw-window-handle",
|
||||
"ron",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"serde",
|
||||
"smallvec",
|
||||
"thiserror",
|
||||
|
@ -8243,11 +8180,11 @@ dependencies = [
|
|||
"ndk-sys",
|
||||
"objc",
|
||||
"once_cell",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"profiling",
|
||||
"range-alloc",
|
||||
"raw-window-handle",
|
||||
"rustc-hash",
|
||||
"rustc-hash 1.1.0",
|
||||
"smallvec",
|
||||
"thiserror",
|
||||
"wasm-bindgen",
|
||||
|
@ -8298,7 +8235,7 @@ version = "1.5.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a44ab49fad634e88f55bf8f9bb3abd2f27d7204172a112c7c9987e01c1c94ea9"
|
||||
dependencies = [
|
||||
"redox_syscall 0.4.1",
|
||||
"redox_syscall",
|
||||
"wasite",
|
||||
"web-sys",
|
||||
]
|
||||
|
@ -8735,7 +8672,7 @@ dependencies = [
|
|||
"log",
|
||||
"num-traits",
|
||||
"once_cell",
|
||||
"parking_lot 0.12.3",
|
||||
"parking_lot",
|
||||
"rand",
|
||||
"regex",
|
||||
"thiserror",
|
||||
|
|

54  Cargo.toml
@@ -47,14 +47,14 @@ repository = "https://github.com/denoland/deno"
 deno_ast = { version = "=0.42.0", features = ["transpiling"] }
 deno_core = { version = "0.307.0" }

-deno_bench_util = { version = "0.161.0", path = "./bench_util" }
+deno_bench_util = { version = "0.162.0", path = "./bench_util" }
 deno_lockfile = "=0.23.0"
 deno_media_type = { version = "0.1.4", features = ["module_specifier"] }
-deno_permissions = { version = "0.27.0", path = "./runtime/permissions" }
-deno_runtime = { version = "0.176.0", path = "./runtime" }
+deno_permissions = { version = "0.28.0", path = "./runtime/permissions" }
+deno_runtime = { version = "0.177.0", path = "./runtime" }
 deno_semver = "=0.5.13"
 deno_terminal = "0.2.0"
-napi_sym = { version = "0.97.0", path = "./cli/napi/sym" }
+napi_sym = { version = "0.98.0", path = "./cli/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }

 denokv_proto = "0.8.1"
@@ -63,29 +63,29 @@ denokv_remote = "0.8.1"
 denokv_sqlite = { default-features = false, version = "0.8.2" }

 # exts
-deno_broadcast_channel = { version = "0.161.0", path = "./ext/broadcast_channel" }
-deno_cache = { version = "0.99.0", path = "./ext/cache" }
-deno_canvas = { version = "0.36.0", path = "./ext/canvas" }
-deno_console = { version = "0.167.0", path = "./ext/console" }
-deno_cron = { version = "0.47.0", path = "./ext/cron" }
-deno_crypto = { version = "0.181.0", path = "./ext/crypto" }
-deno_fetch = { version = "0.191.0", path = "./ext/fetch" }
-deno_ffi = { version = "0.154.0", path = "./ext/ffi" }
-deno_fs = { version = "0.77.0", path = "./ext/fs" }
-deno_http = { version = "0.165.0", path = "./ext/http" }
-deno_io = { version = "0.77.0", path = "./ext/io" }
-deno_kv = { version = "0.75.0", path = "./ext/kv" }
-deno_napi = { version = "0.98.0", path = "./ext/napi" }
-deno_net = { version = "0.159.0", path = "./ext/net" }
-deno_node = { version = "0.104.0", path = "./ext/node" }
-deno_tls = { version = "0.154.0", path = "./ext/tls" }
-deno_url = { version = "0.167.0", path = "./ext/url" }
-deno_web = { version = "0.198.0", path = "./ext/web" }
-deno_webgpu = { version = "0.134.0", path = "./ext/webgpu" }
-deno_webidl = { version = "0.167.0", path = "./ext/webidl" }
-deno_websocket = { version = "0.172.0", path = "./ext/websocket" }
-deno_webstorage = { version = "0.162.0", path = "./ext/webstorage" }
-node_resolver = { version = "0.6.0", path = "./ext/node_resolver" }
+deno_broadcast_channel = { version = "0.162.0", path = "./ext/broadcast_channel" }
+deno_cache = { version = "0.100.0", path = "./ext/cache" }
+deno_canvas = { version = "0.37.0", path = "./ext/canvas" }
+deno_console = { version = "0.168.0", path = "./ext/console" }
+deno_cron = { version = "0.48.0", path = "./ext/cron" }
+deno_crypto = { version = "0.182.0", path = "./ext/crypto" }
+deno_fetch = { version = "0.192.0", path = "./ext/fetch" }
+deno_ffi = { version = "0.155.0", path = "./ext/ffi" }
+deno_fs = { version = "0.78.0", path = "./ext/fs" }
+deno_http = { version = "0.166.0", path = "./ext/http" }
+deno_io = { version = "0.78.0", path = "./ext/io" }
+deno_kv = { version = "0.76.0", path = "./ext/kv" }
+deno_napi = { version = "0.99.0", path = "./ext/napi" }
+deno_net = { version = "0.160.0", path = "./ext/net" }
+deno_node = { version = "0.105.0", path = "./ext/node" }
+deno_tls = { version = "0.155.0", path = "./ext/tls" }
+deno_url = { version = "0.168.0", path = "./ext/url" }
+deno_web = { version = "0.199.0", path = "./ext/web" }
+deno_webgpu = { version = "0.135.0", path = "./ext/webgpu" }
+deno_webidl = { version = "0.168.0", path = "./ext/webidl" }
+deno_websocket = { version = "0.173.0", path = "./ext/websocket" }
+deno_webstorage = { version = "0.163.0", path = "./ext/webstorage" }
+node_resolver = { version = "0.7.0", path = "./ext/node_resolver" }

 aes = "=0.8.3"
 anyhow = "1.0.57"

11  Releases.md
@@ -6,6 +6,17 @@ https://github.com/denoland/deno/releases
 We also have one-line install commands at:
 https://github.com/denoland/deno_install

+### 1.46.3 / 2024.09.04
+
+- feat(upgrade): print info links for Deno 2 RC releases (#25225)
+- fix(cli): Map error kind to `PermissionDenied` when symlinking fails due to
+  permissions (#25398)
+- fix(cli/tools): correct `deno init --serve` template behavior (#25318)
+- fix(ext/node): session close during stream setup (#25170)
+- fix(publish): ensure provenance is spec compliant (#25200)
+- fix(upgrade): more informative information on invalid version (#25319)
+- fix: fix jupyter display function type (#25326)
+
 ### 1.46.2 / 2024.08.29

 - Revert "feat(fetch): accept async iterables for body" (#25207)

@@ -2,7 +2,7 @@

 [package]
 name = "deno_bench_util"
-version = "0.161.0"
+version = "0.162.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true

@@ -65,10 +65,9 @@ winres.workspace = true
 [dependencies]
 deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
 deno_cache_dir = { workspace = true }
-deno_config = { version = "=0.32.0", features = ["workspace", "sync"] }
+deno_config = { version = "=0.33.1", features = ["workspace", "sync"] }
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
 deno_doc = { version = "0.148.0", features = ["html", "syntect"] }
-deno_emit = "=0.45.0"
 deno_graph = { version = "=0.82.0" }
 deno_lint = { version = "=0.64.0", features = ["docs"] }
 deno_lockfile.workspace = true
@@ -91,8 +90,8 @@ bincode = "=1.3.3"
 bytes.workspace = true
 cache_control.workspace = true
 chrono = { workspace = true, features = ["now"] }
-clap = { version = "=4.5.13", features = ["env", "string", "wrap_help"] }
-clap_complete = "=4.5.12"
+clap = { version = "=4.5.16", features = ["env", "string", "wrap_help", "error-context"] }
+clap_complete = "=4.5.24"
 clap_complete_fig = "=4.5.2"
 color-print = "0.3.5"
 console_static_text.workspace = true

@@ -2,6 +2,7 @@

 use std::collections::HashSet;

+use deno_config::deno_json::TsConfigForEmit;
 use deno_core::serde_json;
 use deno_semver::jsr::JsrDepPackageReq;
 use deno_semver::jsr::JsrPackageReqReference;
@@ -105,3 +106,18 @@ fn values_to_set<'a>(
   }
   entries
 }
+
+pub fn check_warn_tsconfig(ts_config: &TsConfigForEmit) {
+  if let Some(ignored_options) = &ts_config.maybe_ignored_options {
+    log::warn!("{}", ignored_options);
+  }
+  let serde_json::Value::Object(obj) = &ts_config.ts_config.0 else {
+    return;
+  };
+  if obj.get("experimentalDecorators") == Some(&serde_json::Value::Bool(true)) {
+    log::warn!(
+      "{} experimentalDecorators compiler option is deprecated and may be removed at any time",
+      deno_runtime::colors::yellow("Warning"),
+    );
+  }
+}
|
|

1098  cli/args/flags.rs
File diff suppressed because it is too large.
@@ -42,9 +42,10 @@ pub use deno_config::deno_json::TsConfigForEmit;
 pub use deno_config::deno_json::TsConfigType;
 pub use deno_config::deno_json::TsTypeLib;
 pub use deno_config::glob::FilePatterns;
+pub use deno_json::check_warn_tsconfig;
 pub use flags::*;
 pub use lockfile::CliLockfile;
-pub use package_json::PackageJsonInstallDepsProvider;
+pub use package_json::NpmInstallDepsProvider;

 use deno_ast::ModuleSpecifier;
 use deno_core::anyhow::bail;
@@ -1220,7 +1221,7 @@ impl CliOptions {
 if let Some(flag) = self.flags.node_modules_dir {
   return Ok(Some(flag));
 }
-self.workspace().node_modules_dir_mode().map_err(Into::into)
+self.workspace().node_modules_dir().map_err(Into::into)
 }

 pub fn vendor_dir_path(&self) -> Option<&PathBuf> {
@@ -1731,7 +1732,7 @@ fn resolve_node_modules_folder(
 Some(mode.uses_node_modules_dir())
 } else {
 workspace
-  .node_modules_dir_mode()?
+  .node_modules_dir()?
   .map(|m| m.uses_node_modules_dir())
   .or(flags.vendor)
   .or_else(|| root_folder.deno_json.as_ref().and_then(|c| c.json.vendor))
|
|
|
@ -1,17 +1,20 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use std::collections::HashSet;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_config::workspace::Workspace;
|
||||
use deno_core::serde_json;
|
||||
use deno_package_json::PackageJsonDepValue;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use deno_semver::package::PackageReq;
|
||||
|
||||
use crate::util::path::is_banned_path_char;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct InstallNpmRemotePkg {
|
||||
pub alias: String,
|
||||
// todo(24419): use this when setting up the node_modules dir
|
||||
#[allow(dead_code)]
|
||||
pub base_dir: PathBuf,
|
||||
pub req: PackageReq,
|
||||
}
|
||||
|
@ -19,74 +22,126 @@ pub struct InstallNpmRemotePkg {
|
|||
#[derive(Debug)]
|
||||
pub struct InstallNpmWorkspacePkg {
|
||||
pub alias: String,
|
||||
// todo(24419): use this when setting up the node_modules dir
|
||||
#[allow(dead_code)]
|
||||
pub base_dir: PathBuf,
|
||||
pub target_dir: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct PackageJsonInstallDepsProvider {
|
||||
pub struct NpmInstallDepsProvider {
|
||||
remote_pkgs: Vec<InstallNpmRemotePkg>,
|
||||
workspace_pkgs: Vec<InstallNpmWorkspacePkg>,
|
||||
}
|
||||
|
||||
impl PackageJsonInstallDepsProvider {
|
||||
impl NpmInstallDepsProvider {
|
||||
pub fn empty() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub fn from_workspace(workspace: &Arc<Workspace>) -> Self {
|
||||
// todo(dsherret): estimate capacity?
|
||||
let mut workspace_pkgs = Vec::new();
|
||||
let mut remote_pkgs = Vec::new();
|
||||
let workspace_npm_pkgs = workspace.npm_packages();
|
||||
for pkg_json in workspace.package_jsons() {
|
||||
let deps = pkg_json.resolve_local_package_json_deps();
|
||||
let mut pkg_pkgs = Vec::with_capacity(deps.len());
|
||||
for (alias, dep) in deps {
|
||||
let Ok(dep) = dep else {
|
||||
continue;
|
||||
};
|
||||
match dep {
|
||||
PackageJsonDepValue::Req(pkg_req) => {
|
||||
let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| {
|
||||
pkg.matches_req(&pkg_req)
|
||||
// do not resolve to the current package
|
||||
&& pkg.pkg_json.path != pkg_json.path
|
||||
});
|
||||
|
||||
for (_, folder) in workspace.config_folders() {
|
||||
let mut deno_json_aliases = HashSet::new();
|
||||
|
||||
// deal with the deno.json first because it takes precedence during resolution
|
||||
if let Some(deno_json) = &folder.deno_json {
|
||||
// don't bother with externally referenced import maps as users
|
||||
// should inline their import map to get this behaviour
|
||||
if let Some(serde_json::Value::Object(obj)) = &deno_json.json.imports {
|
||||
deno_json_aliases.reserve(obj.len());
|
||||
let mut pkg_pkgs = Vec::with_capacity(obj.len());
|
||||
for (alias, value) in obj {
|
||||
let serde_json::Value::String(specifier) = value else {
|
||||
continue;
|
||||
};
|
||||
let Ok(npm_req_ref) = NpmPackageReqReference::from_str(specifier)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
// skip any aliases with banned characters
|
||||
if alias.chars().any(|c| c == '\\' || is_banned_path_char(c)) {
|
||||
continue;
|
||||
}
|
||||
deno_json_aliases.insert(alias.to_lowercase());
|
||||
let pkg_req = npm_req_ref.into_inner().req;
|
||||
let workspace_pkg = workspace_npm_pkgs
|
||||
.iter()
|
||||
.find(|pkg| pkg.matches_req(&pkg_req));
|
||||
|
||||
if let Some(pkg) = workspace_pkg {
|
||||
workspace_pkgs.push(InstallNpmWorkspacePkg {
|
||||
alias,
|
||||
base_dir: pkg_json.dir_path().to_path_buf(),
|
||||
alias: alias.to_string(),
|
||||
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
|
||||
});
|
||||
} else {
|
||||
pkg_pkgs.push(InstallNpmRemotePkg {
|
||||
alias,
|
||||
base_dir: pkg_json.dir_path().to_path_buf(),
|
||||
alias: alias.to_string(),
|
||||
base_dir: deno_json.dir_path(),
|
||||
req: pkg_req,
|
||||
});
|
||||
}
|
||||
}
|
||||
PackageJsonDepValue::Workspace(version_req) => {
|
||||
if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
|
||||
pkg.matches_name_and_version_req(&alias, &version_req)
|
||||
}) {
|
||||
workspace_pkgs.push(InstallNpmWorkspacePkg {
|
||||
alias,
|
||||
base_dir: pkg_json.dir_path().to_path_buf(),
|
||||
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
|
||||
|
||||
// sort within each package (more like npm resolution)
|
||||
pkg_pkgs.sort_by(|a, b| a.alias.cmp(&b.alias));
|
||||
remote_pkgs.extend(pkg_pkgs);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(pkg_json) = &folder.pkg_json {
|
||||
let deps = pkg_json.resolve_local_package_json_deps();
|
||||
let mut pkg_pkgs = Vec::with_capacity(deps.len());
|
||||
for (alias, dep) in deps {
|
||||
let Ok(dep) = dep else {
|
||||
continue;
|
||||
};
|
||||
if deno_json_aliases.contains(&alias.to_lowercase()) {
|
||||
// aliases in deno.json take precedence over package.json, so
|
||||
// since this can't be resolved don't bother installing it
|
||||
continue;
|
||||
}
|
||||
match dep {
|
||||
PackageJsonDepValue::Req(pkg_req) => {
|
||||
let workspace_pkg = workspace_npm_pkgs.iter().find(|pkg| {
|
||||
pkg.matches_req(&pkg_req)
|
||||
// do not resolve to the current package
|
||||
&& pkg.pkg_json.path != pkg_json.path
|
||||
});
|
||||
|
||||
if let Some(pkg) = workspace_pkg {
|
||||
workspace_pkgs.push(InstallNpmWorkspacePkg {
|
||||
alias,
|
||||
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
|
||||
});
|
||||
} else {
|
||||
pkg_pkgs.push(InstallNpmRemotePkg {
|
||||
alias,
|
||||
base_dir: pkg_json.dir_path().to_path_buf(),
|
||||
req: pkg_req,
|
||||
});
|
||||
}
|
||||
}
|
||||
PackageJsonDepValue::Workspace(version_req) => {
|
||||
if let Some(pkg) = workspace_npm_pkgs.iter().find(|pkg| {
|
||||
pkg.matches_name_and_version_req(&alias, &version_req)
|
||||
}) {
|
||||
workspace_pkgs.push(InstallNpmWorkspacePkg {
|
||||
alias,
|
||||
target_dir: pkg.pkg_json.dir_path().to_path_buf(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// sort within each package
|
||||
pkg_pkgs.sort_by(|a, b| a.alias.cmp(&b.alias));
|
||||
|
||||
remote_pkgs.extend(pkg_pkgs);
|
||||
// sort within each package as npm does
|
||||
pkg_pkgs.sort_by(|a, b| a.alias.cmp(&b.alias));
|
||||
remote_pkgs.extend(pkg_pkgs);
|
||||
}
|
||||
}
|
||||
|
||||
remote_pkgs.shrink_to_fit();
|
||||
workspace_pkgs.shrink_to_fit();
|
||||
Self {
|
||||
|

@@ -46,8 +46,7 @@ Deno.bench("b64_rt_short", { n: 1e6 }, () => {
 const buf = new Uint8Array(100);
 const file = Deno.openSync("/dev/zero");
 Deno.bench("read_zero", { n: 5e5 }, () => {
-  // deno-lint-ignore no-deprecated-deno-api
-  Deno.readSync(file.rid, buf);
+  file.readSync(buf);
 });
 }
|
||||

@@ -1,11 +1,12 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

+use crate::args::check_warn_tsconfig;
 use crate::args::get_root_cert_store;
 use crate::args::CaData;
 use crate::args::CliOptions;
 use crate::args::DenoSubcommand;
 use crate::args::Flags;
-use crate::args::PackageJsonInstallDepsProvider;
+use crate::args::NpmInstallDepsProvider;
 use crate::args::StorageKeyResolver;
 use crate::args::TsConfigType;
 use crate::cache::Caches;
@@ -386,9 +387,7 @@ impl CliFactory {
 cache_setting: cli_options.cache_setting(),
 text_only_progress_bar: self.text_only_progress_bar().clone(),
 maybe_node_modules_path: cli_options.node_modules_dir_path().cloned(),
-package_json_deps_provider: Arc::new(PackageJsonInstallDepsProvider::from_workspace(
-  cli_options.workspace(),
-)),
+npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::from_workspace(cli_options.workspace())),
 npm_system_info: cli_options.npm_system_info(),
 npmrc: cli_options.npmrc().clone(),
 lifecycle_scripts: cli_options.lifecycle_scripts_config(),
@@ -522,9 +521,7 @@ impl CliFactory {
 let cli_options = self.cli_options()?;
 let ts_config_result =
   cli_options.resolve_ts_config_for_emit(TsConfigType::Emit)?;
-if let Some(ignored_options) = ts_config_result.maybe_ignored_options {
-  warn!("{}", ignored_options);
-}
+check_warn_tsconfig(&ts_config_result);
 let (transpile_options, emit_options) =
   crate::args::ts_config_to_transpile_and_emit_options(
     ts_config_result.ts_config,

@@ -20,7 +20,7 @@ use crate::tools::check::TypeChecker;
 use crate::util::file_watcher::WatcherCommunicator;
 use crate::util::fs::canonicalize_path;
 use deno_config::workspace::JsrPackageConfig;
-use deno_emit::LoaderChecksum;
+use deno_graph::source::LoaderChecksum;
 use deno_graph::JsrLoadError;
 use deno_graph::ModuleLoadError;
 use deno_graph::WorkspaceFastCheckOption;
@@ -724,12 +724,25 @@ impl ModuleGraphBuilder {
 pub fn enhanced_resolution_error_message(error: &ResolutionError) -> String {
   let mut message = format_deno_graph_error(error);

-  if let Some(specifier) = get_resolution_error_bare_node_specifier(error) {
+  let maybe_hint = if let Some(specifier) =
+    get_resolution_error_bare_node_specifier(error)
+  {
     if !*DENO_DISABLE_PEDANTIC_NODE_WARNINGS {
-      message.push_str(&format!(
-        "\nIf you want to use a built-in Node module, add a \"node:\" prefix (ex. \"node:{specifier}\")."
-      ));
+      Some(format!("If you want to use a built-in Node module, add a \"node:\" prefix (ex. \"node:{specifier}\")."))
+    } else {
+      None
     }
-  }
+  } else {
+    get_import_prefix_missing_error(error).map(|specifier| {
+      format!(
+        "If you want to use a JSR or npm package, try running `deno add {}`",
+        specifier
+      )
+    })
+  };
+
+  if let Some(hint) = maybe_hint {
+    message.push_str(&format!("\n {} {}", colors::cyan("hint:"), hint));
+  }

   message
@@ -864,6 +877,45 @@ fn get_resolution_error_bare_specifier(
   }
 }

+fn get_import_prefix_missing_error(error: &ResolutionError) -> Option<&str> {
+  let mut maybe_specifier = None;
+  if let ResolutionError::InvalidSpecifier {
+    error: SpecifierError::ImportPrefixMissing { specifier, .. },
+    ..
+  } = error
+  {
+    maybe_specifier = Some(specifier);
+  } else if let ResolutionError::ResolverError { error, .. } = error {
+    match error.as_ref() {
+      ResolveError::Specifier(specifier_error) => {
+        if let SpecifierError::ImportPrefixMissing { specifier, .. } =
+          specifier_error
+        {
+          maybe_specifier = Some(specifier);
+        }
+      }
+      ResolveError::Other(other_error) => {
+        if let Some(SpecifierError::ImportPrefixMissing { specifier, .. }) =
+          other_error.downcast_ref::<SpecifierError>()
+        {
+          maybe_specifier = Some(specifier);
+        }
+      }
+    }
+  }
+
+  // NOTE(bartlomieju): For now, return None if a specifier contains a dot or a space. This is because
+  // suggesting to `deno add bad-module.ts` makes no sense and is worse than not providing
+  // a suggestion at all. This should be improved further in the future
+  if let Some(specifier) = maybe_specifier {
+    if specifier.contains('.') || specifier.contains(' ') {
+      return None;
+    }
+  }
+
+  maybe_specifier.map(|s| s.as_str())
+}
+
 /// Gets if any of the specified root's "file:" dependents are in the
 /// provided changed set.
 pub fn has_graph_root_local_dependent_changed(

@@ -104,12 +104,12 @@ function bench(
 }
 if (optionsOrFn.fn != undefined) {
   throw new TypeError(
-    "Unexpected 'fn' field in options, bench function is already provided as the third argument.",
+    "Unexpected 'fn' field in options, bench function is already provided as the third argument",
   );
 }
 if (optionsOrFn.name != undefined) {
   throw new TypeError(
-    "Unexpected 'name' field in options, bench name is already provided as the first argument.",
+    "Unexpected 'name' field in options, bench name is already provided as the first argument",
   );
 }
 benchDesc = {
@@ -141,7 +141,7 @@ function bench(
 fn = optionsOrFn;
 if (nameOrFnOrOptions.fn != undefined) {
   throw new TypeError(
-    "Unexpected 'fn' field in options, bench function is already provided as the second argument.",
+    "Unexpected 'fn' field in options, bench function is already provided as the second argument",
   );
 }
 name = nameOrFnOrOptions.name ?? fn.name;
@@ -150,7 +150,7 @@ function bench(
 !nameOrFnOrOptions.fn || typeof nameOrFnOrOptions.fn !== "function"
 ) {
   throw new TypeError(
-    "Expected 'fn' field in the first argument to be a bench function.",
+    "Expected 'fn' field in the first argument to be a bench function",
   );
 }
 fn = nameOrFnOrOptions.fn;
@@ -385,12 +385,12 @@ function createBenchContext(desc) {
 start() {
   if (currentBenchId !== desc.id) {
     throw new TypeError(
-      "The benchmark which this context belongs to is not being executed.",
+      "The benchmark which this context belongs to is not being executed",
     );
   }
   if (currentBenchUserExplicitStart != null) {
     throw new TypeError(
-      "BenchContext::start() has already been invoked.",
+      "BenchContext::start() has already been invoked",
     );
   }
   currentBenchUserExplicitStart = benchNow();
@@ -399,11 +399,11 @@ function createBenchContext(desc) {
   const end = benchNow();
   if (currentBenchId !== desc.id) {
     throw new TypeError(
-      "The benchmark which this context belongs to is not being executed.",
+      "The benchmark which this context belongs to is not being executed",
     );
   }
   if (currentBenchUserExplicitEnd != null) {
-    throw new TypeError("BenchContext::end() has already been invoked.");
+    throw new TypeError("BenchContext::end() has already been invoked");
   }
   currentBenchUserExplicitEnd = end;
 },

@@ -113,7 +113,7 @@ function assertExit(fn, isTest) {
 throw new Error(
   `${
     isTest ? "Test case" : "Bench"
-  } finished with exit code set to ${exitCode}.`,
+  } finished with exit code set to ${exitCode}`,
 );
 }
 if (innerResult) {
@@ -242,12 +242,12 @@ function testInner(
 }
 if (optionsOrFn.fn != undefined) {
   throw new TypeError(
-    "Unexpected 'fn' field in options, test function is already provided as the third argument.",
+    "Unexpected 'fn' field in options, test function is already provided as the third argument",
   );
 }
 if (optionsOrFn.name != undefined) {
   throw new TypeError(
-    "Unexpected 'name' field in options, test name is already provided as the first argument.",
+    "Unexpected 'name' field in options, test name is already provided as the first argument",
   );
 }
 testDesc = {
@@ -279,7 +279,7 @@ function testInner(
 fn = optionsOrFn;
 if (nameOrFnOrOptions.fn != undefined) {
   throw new TypeError(
-    "Unexpected 'fn' field in options, test function is already provided as the second argument.",
+    "Unexpected 'fn' field in options, test function is already provided as the second argument",
   );
 }
 name = nameOrFnOrOptions.name ?? fn.name;
@@ -288,7 +288,7 @@ function testInner(
 !nameOrFnOrOptions.fn || typeof nameOrFnOrOptions.fn !== "function"
 ) {
   throw new TypeError(
-    "Expected 'fn' field in the first argument to be a test function.",
+    "Expected 'fn' field in the first argument to be a test function",
   );
 }
 fn = nameOrFnOrOptions.fn;
@@ -426,7 +426,7 @@ function createTestContext(desc) {
 let stepDesc;
 if (typeof nameOrFnOrOptions === "string") {
   if (typeof maybeFn !== "function") {
-    throw new TypeError("Expected function for second argument.");
+    throw new TypeError("Expected function for second argument");
   }
   stepDesc = {
     name: nameOrFnOrOptions,
@@ -434,7 +434,7 @@ function createTestContext(desc) {
   };
 } else if (typeof nameOrFnOrOptions === "function") {
   if (!nameOrFnOrOptions.name) {
-    throw new TypeError("The step function must have a name.");
+    throw new TypeError("The step function must have a name");
   }
   if (maybeFn != undefined) {
     throw new TypeError(
@@ -449,7 +449,7 @@ function createTestContext(desc) {
   stepDesc = nameOrFnOrOptions;
 } else {
   throw new TypeError(
-    "Expected a test definition or name and function.",
+    "Expected a test definition or name and function",
   );
 }
 stepDesc.ignore ??= false;
|
|
|
@ -1387,10 +1387,8 @@ impl ConfigData {
|
|||
}
|
||||
}
|
||||
|
||||
let node_modules_dir = member_dir
|
||||
.workspace
|
||||
.node_modules_dir_mode()
|
||||
.unwrap_or_default();
|
||||
let node_modules_dir =
|
||||
member_dir.workspace.node_modules_dir().unwrap_or_default();
|
||||
let byonm = match node_modules_dir {
|
||||
Some(mode) => mode == NodeModulesDirMode::Manual,
|
||||
None => member_dir.workspace.root_pkg_json().is_some(),
|
||||
|
@ -1697,9 +1695,14 @@ impl ConfigTree {
|
|||
}
|
||||
|
||||
pub fn is_watched_file(&self, specifier: &ModuleSpecifier) -> bool {
|
||||
if specifier.path().ends_with("/deno.json")
|
||||
|| specifier.path().ends_with("/deno.jsonc")
|
||||
|| specifier.path().ends_with("/package.json")
|
||||
let path = specifier.path();
|
||||
if path.ends_with("/deno.json")
|
||||
|| path.ends_with("/deno.jsonc")
|
||||
|| path.ends_with("/package.json")
|
||||
|| path.ends_with("/node_modules/.package-lock.json")
|
||||
|| path.ends_with("/node_modules/.yarn-integrity.json")
|
||||
|| path.ends_with("/node_modules/.modules.yaml")
|
||||
|| path.ends_with("/node_modules/.deno/.setup-cache.bin")
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
@ -1868,7 +1871,7 @@ fn resolve_node_modules_dir(
|
|||
// `nodeModulesDir: true` setting in the deno.json file. This is to
|
||||
// reduce the chance of modifying someone's node_modules directory
|
||||
// without them having asked us to do so.
|
||||
let node_modules_mode = workspace.node_modules_dir_mode().ok().flatten();
|
||||
let node_modules_mode = workspace.node_modules_dir().ok().flatten();
|
||||
let explicitly_disabled = node_modules_mode == Some(NodeModulesDirMode::None);
|
||||
if explicitly_disabled {
|
||||
return None;
|
||||
|
|
|
@ -1251,7 +1251,7 @@ impl Documents {
|
|||
/// tsc when type checking.
|
||||
pub fn resolve(
|
||||
&self,
|
||||
specifiers: &[String],
|
||||
raw_specifiers: &[String],
|
||||
referrer: &ModuleSpecifier,
|
||||
file_referrer: Option<&ModuleSpecifier>,
|
||||
) -> Vec<Option<(ModuleSpecifier, MediaType)>> {
|
||||
|
@ -1262,16 +1262,16 @@ impl Documents {
|
|||
.or(file_referrer);
|
||||
let dependencies = document.as_ref().map(|d| d.dependencies());
|
||||
let mut results = Vec::new();
|
||||
for specifier in specifiers {
|
||||
if specifier.starts_with("asset:") {
|
||||
if let Ok(specifier) = ModuleSpecifier::parse(specifier) {
|
||||
for raw_specifier in raw_specifiers {
|
||||
if raw_specifier.starts_with("asset:") {
|
||||
if let Ok(specifier) = ModuleSpecifier::parse(raw_specifier) {
|
||||
let media_type = MediaType::from_specifier(&specifier);
|
||||
results.push(Some((specifier, media_type)));
|
||||
} else {
|
||||
results.push(None);
|
||||
}
|
||||
} else if let Some(dep) =
|
||||
dependencies.as_ref().and_then(|d| d.get(specifier))
|
||||
dependencies.as_ref().and_then(|d| d.get(raw_specifier))
|
||||
{
|
||||
if let Some(specifier) = dep.maybe_type.maybe_specifier() {
|
||||
results.push(self.resolve_dependency(
|
||||
|
@ -1290,7 +1290,7 @@ impl Documents {
|
|||
}
|
||||
} else if let Ok(specifier) =
|
||||
self.resolver.as_graph_resolver(file_referrer).resolve(
|
||||
specifier,
|
||||
raw_specifier,
|
||||
&deno_graph::Range {
|
||||
specifier: referrer.clone(),
|
||||
start: deno_graph::Position::zeroed(),
|
||||
|
|
|
@@ -966,9 +966,8 @@
 .await;
 for config_file in self.config.tree.config_files() {
 (|| {
-  let compiler_options = config_file.to_compiler_options().ok()?.0;
-  let compiler_options_obj = compiler_options.as_object()?;
-  let jsx_import_source = compiler_options_obj.get("jsxImportSource")?;
+  let compiler_options = config_file.to_compiler_options().ok()?.options;
+  let jsx_import_source = compiler_options.get("jsxImportSource")?;
   let jsx_import_source = jsx_import_source.as_str()?;
   let referrer = config_file.specifier.clone();
   let specifier = Url::parse(&format!(

@@ -3,7 +3,7 @@
 use crate::args::create_default_npmrc;
 use crate::args::CacheSetting;
 use crate::args::CliLockfile;
-use crate::args::PackageJsonInstallDepsProvider;
+use crate::args::NpmInstallDepsProvider;
 use crate::graph_util::CliJsrUrlProvider;
 use crate::http_util::HttpClientProvider;
 use crate::lsp::config::Config;
@@ -474,9 +474,7 @@ async fn create_npm_resolver(
 maybe_node_modules_path: config_data
   .and_then(|d| d.node_modules_dir.clone()),
 // only used for top level install, so we can ignore this
-package_json_deps_provider: Arc::new(
-  PackageJsonInstallDepsProvider::empty(),
-),
+npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()),
 npmrc: config_data
   .and_then(|d| d.npmrc.clone())
   .unwrap_or_else(create_default_npmrc),
|
|

10  cli/main.rs
@ -32,7 +32,6 @@ mod worker;
|
|||
use crate::args::flags_from_vec;
|
||||
use crate::args::DenoSubcommand;
|
||||
use crate::args::Flags;
|
||||
use crate::graph_container::ModuleGraphContainer;
|
||||
use crate::util::display;
|
||||
use crate::util::v8::get_v8_flags_from_env;
|
||||
use crate::util::v8::init_v8_flags;
|
||||
|
@ -118,14 +117,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
|
|||
tools::run::eval_command(flags, eval_flags).await
|
||||
}),
|
||||
DenoSubcommand::Cache(cache_flags) => spawn_subcommand(async move {
|
||||
let factory = CliFactory::from_flags(flags);
|
||||
let emitter = factory.emitter()?;
|
||||
let main_graph_container =
|
||||
factory.main_module_graph_container().await?;
|
||||
main_graph_container
|
||||
.load_and_type_check_files(&cache_flags.files)
|
||||
.await?;
|
||||
emitter.cache_module_emits(&main_graph_container.graph()).await
|
||||
tools::installer::install_from_entrypoints(flags, &cache_flags.files).await
|
||||
}),
|
||||
DenoSubcommand::Check(check_flags) => spawn_subcommand(async move {
|
||||
let factory = CliFactory::from_flags(flags);
|
||||

@@ -401,7 +401,7 @@ impl<TGraphContainer: ModuleGraphContainer>

 fn inner_resolve(
   &self,
-  specifier: &str,
+  raw_specifier: &str,
   referrer: &ModuleSpecifier,
 ) -> Result<ModuleSpecifier, AnyError> {
 if self.shared.node_resolver.in_npm_package(referrer) {
@@ -409,7 +409,7 @@ impl<TGraphContainer: ModuleGraphContainer>
 self
   .shared
   .node_resolver
-  .resolve(specifier, referrer, NodeResolutionMode::Execution)?
+  .resolve(raw_specifier, referrer, NodeResolutionMode::Execution)?
   .into_url(),
 );
 }
@@ -418,7 +418,7 @@ impl<TGraphContainer: ModuleGraphContainer>
 let resolution = match graph.get(referrer) {
 Some(Module::Js(module)) => module
   .dependencies
-  .get(specifier)
+  .get(raw_specifier)
   .map(|d| &d.maybe_code)
   .unwrap_or(&Resolution::None),
 _ => &Resolution::None,
@@ -433,7 +433,7 @@ impl<TGraphContainer: ModuleGraphContainer>
 ));
 }
 Resolution::None => Cow::Owned(self.shared.resolver.resolve(
-  specifier,
+  raw_specifier,
   &deno_graph::Range {
     specifier: referrer.clone(),
     start: deno_graph::Position::zeroed(),
|
|
|
@@ -2,7 +2,7 @@

[package]
name = "napi_sym"
version = "0.97.0"
version = "0.98.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
143 cli/npm/byonm.rs
@@ -17,6 +17,7 @@ use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::deno_node::NpmProcessStateProvider;
use deno_runtime::deno_node::PackageJson;
use deno_semver::package::PackageReq;
use deno_semver::Version;
use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageJsonLoadError;

@@ -29,6 +30,7 @@ use crate::args::NpmProcessStateKind;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
use deno_runtime::fs_util::specifier_to_file_path;

use super::managed::normalize_pkg_name_for_node_modules_deno_folder;
use super::CliNpmResolver;
use super::InnerCliNpmResolverRef;
@@ -60,9 +62,7 @@ impl ByonmCliNpmResolver {
) -> Result<Option<Arc<PackageJson>>, PackageJsonLoadError> {
load_pkg_json(&DenoPkgJsonFsAdapter(self.fs.as_ref()), path)
}
}

impl ByonmCliNpmResolver {
/// Finds the ancestor package.json that contains the specified dependency.
pub fn find_ancestor_package_json_with_dep(
&self,

@@ -98,7 +98,7 @@ impl ByonmCliNpmResolver {
&self,
req: &PackageReq,
referrer: &ModuleSpecifier,
) -> Result<(Arc<PackageJson>, String), AnyError> {
) -> Result<Option<(Arc<PackageJson>, String)>, AnyError> {
fn resolve_alias_from_pkg_json(
req: &PackageReq,
pkg_json: &PackageJson,

@@ -134,7 +134,7 @@ impl ByonmCliNpmResolver {
if let Some(alias) =
resolve_alias_from_pkg_json(req, pkg_json.as_ref())
{
return Ok((pkg_json, alias));
return Ok(Some((pkg_json, alias)));
}
}
current_path = dir_path;
@@ -148,19 +148,65 @@ impl ByonmCliNpmResolver {
if let Some(pkg_json) = self.load_pkg_json(&root_pkg_json_path)? {
if let Some(alias) = resolve_alias_from_pkg_json(req, pkg_json.as_ref())
{
return Ok((pkg_json, alias));
return Ok(Some((pkg_json, alias)));
}
}
}

bail!(
concat!(
"Could not find a matching package for 'npm:{}' in a package.json file. ",
"You must specify this as a package.json dependency when the ",
"node_modules folder is not managed by Deno.",
),
req,
Ok(None)
}

fn resolve_folder_in_root_node_modules(
&self,
req: &PackageReq,
) -> Option<PathBuf> {
// now check if node_modules/.deno/ matches this constraint
let root_node_modules_dir = self.root_node_modules_dir.as_ref()?;
let node_modules_deno_dir = root_node_modules_dir.join(".deno");
let Ok(entries) = self.fs.read_dir_sync(&node_modules_deno_dir) else {
return None;
};
let search_prefix = format!(
"{}@",
normalize_pkg_name_for_node_modules_deno_folder(&req.name)
);
let mut best_version = None;

// example entries:
// - @denotest+add@1.0.0
// - @denotest+add@1.0.0_1
for entry in entries {
if !entry.is_directory {
continue;
}
let Some(version_and_copy_idx) = entry.name.strip_prefix(&search_prefix)
else {
continue;
};
let version = version_and_copy_idx
.rsplit_once('_')
.map(|(v, _)| v)
.unwrap_or(version_and_copy_idx);
let Ok(version) = Version::parse_from_npm(version) else {
continue;
};
if req.version_req.matches(&version) {
if let Some((best_version_version, _)) = &best_version {
if version > *best_version_version {
best_version = Some((version, entry.name));
}
} else {
best_version = Some((version, entry.name));
}
}
}

best_version.map(|(_version, entry_name)| {
join_package_name(
&node_modules_deno_dir.join(entry_name).join("node_modules"),
&req.name,
)
})
}
}
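As an aside (not part of the commit itself), here is a minimal runnable sketch of the entry-name parsing the resolve_folder_in_root_node_modules hunk above performs; the helper name is hypothetical and the semver matching step is omitted.

```rust
// Illustrative only: split a node_modules/.deno/ entry name into its version,
// dropping the optional "_<copy_index>" suffix, mirroring the logic above.
fn split_entry_version<'a>(entry_name: &'a str, search_prefix: &str) -> Option<&'a str> {
  let version_and_copy_idx = entry_name.strip_prefix(search_prefix)?;
  Some(
    version_and_copy_idx
      .rsplit_once('_')
      .map(|(version, _copy_idx)| version)
      .unwrap_or(version_and_copy_idx),
  )
}

fn main() {
  // the example entries from the comment in the hunk above
  assert_eq!(split_entry_version("@denotest+add@1.0.0", "@denotest+add@"), Some("1.0.0"));
  assert_eq!(split_entry_version("@denotest+add@1.0.0_1", "@denotest+add@"), Some("1.0.0"));
}
```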
@@ -288,29 +334,62 @@ impl CliNpmResolver for ByonmCliNpmResolver {
req: &PackageReq,
referrer: &ModuleSpecifier,
) -> Result<PathBuf, AnyError> {
// resolve the pkg json and alias
let (pkg_json, alias) =
self.resolve_pkg_json_and_alias_for_req(req, referrer)?;
// now try node resolution
for ancestor in pkg_json.path.parent().unwrap().ancestors() {
let node_modules_folder = ancestor.join("node_modules");
let sub_dir = join_package_name(&node_modules_folder, &alias);
if self.fs.is_dir_sync(&sub_dir) {
return Ok(canonicalize_path_maybe_not_exists_with_fs(
&sub_dir,
self.fs.as_ref(),
)?);
fn node_resolve_dir(
fs: &dyn FileSystem,
alias: &str,
start_dir: &Path,
) -> Result<Option<PathBuf>, AnyError> {
for ancestor in start_dir.ancestors() {
let node_modules_folder = ancestor.join("node_modules");
let sub_dir = join_package_name(&node_modules_folder, alias);
if fs.is_dir_sync(&sub_dir) {
return Ok(Some(canonicalize_path_maybe_not_exists_with_fs(
&sub_dir, fs,
)?));
}
}
Ok(None)
}

bail!(
concat!(
"Could not find \"{}\" in a node_modules folder. ",
"Deno expects the node_modules/ directory to be up to date. ",
"Did you forget to run `deno install`?"
),
alias,
);
// now attempt to resolve if it's found in any package.json
let maybe_pkg_json_and_alias =
self.resolve_pkg_json_and_alias_for_req(req, referrer)?;
match maybe_pkg_json_and_alias {
Some((pkg_json, alias)) => {
// now try node resolution
if let Some(resolved) =
node_resolve_dir(self.fs.as_ref(), &alias, pkg_json.dir_path())?
{
return Ok(resolved);
}

bail!(
concat!(
"Could not find \"{}\" in a node_modules folder. ",
"Deno expects the node_modules/ directory to be up to date. ",
"Did you forget to run `deno install`?"
),
alias,
);
}
None => {
// now check if node_modules/.deno/ matches this constraint
if let Some(folder) = self.resolve_folder_in_root_node_modules(req) {
return Ok(folder);
}

bail!(
concat!(
"Could not find a matching package for 'npm:{}' in the node_modules ",
"directory. Ensure you have all your JSR and npm dependencies listed ",
"in your deno.json or package.json, then run `deno install`. Alternatively, ",
r#"turn on auto-install by specifying `"nodeModulesDir": "auto"` in your "#,
"deno.json file."
),
req,
);
}
}
}

fn check_state_hash(&self) -> Option<u64> {
@ -32,9 +32,9 @@ use resolution::AddPkgReqsResult;
|
|||
|
||||
use crate::args::CliLockfile;
|
||||
use crate::args::LifecycleScriptsConfig;
|
||||
use crate::args::NpmInstallDepsProvider;
|
||||
use crate::args::NpmProcessState;
|
||||
use crate::args::NpmProcessStateKind;
|
||||
use crate::args::PackageJsonInstallDepsProvider;
|
||||
use crate::cache::FastInsecureHasher;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
|
||||
|
@ -45,6 +45,7 @@ use self::cache::NpmCache;
|
|||
use self::registry::CliNpmRegistryApi;
|
||||
use self::resolution::NpmResolution;
|
||||
use self::resolvers::create_npm_fs_resolver;
|
||||
pub use self::resolvers::normalize_pkg_name_for_node_modules_deno_folder;
|
||||
use self::resolvers::NpmPackageFsResolver;
|
||||
|
||||
use super::CliNpmResolver;
|
||||
|
@ -71,7 +72,7 @@ pub struct CliNpmResolverManagedCreateOptions {
|
|||
pub text_only_progress_bar: crate::util::progress_bar::ProgressBar,
|
||||
pub maybe_node_modules_path: Option<PathBuf>,
|
||||
pub npm_system_info: NpmSystemInfo,
|
||||
pub package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
|
||||
pub npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
|
||||
pub npmrc: Arc<ResolvedNpmRc>,
|
||||
pub lifecycle_scripts: LifecycleScriptsConfig,
|
||||
}
|
||||
|
@ -97,7 +98,7 @@ pub async fn create_managed_npm_resolver_for_lsp(
|
|||
npm_api,
|
||||
npm_cache,
|
||||
options.npmrc,
|
||||
options.package_json_deps_provider,
|
||||
options.npm_install_deps_provider,
|
||||
options.text_only_progress_bar,
|
||||
options.maybe_node_modules_path,
|
||||
options.npm_system_info,
|
||||
|
@ -122,7 +123,7 @@ pub async fn create_managed_npm_resolver(
|
|||
npm_api,
|
||||
npm_cache,
|
||||
options.npmrc,
|
||||
options.package_json_deps_provider,
|
||||
options.npm_install_deps_provider,
|
||||
options.text_only_progress_bar,
|
||||
options.maybe_node_modules_path,
|
||||
options.npm_system_info,
|
||||
|
@ -139,7 +140,7 @@ fn create_inner(
|
|||
npm_api: Arc<CliNpmRegistryApi>,
|
||||
npm_cache: Arc<NpmCache>,
|
||||
npm_rc: Arc<ResolvedNpmRc>,
|
||||
package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
|
||||
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
|
||||
text_only_progress_bar: crate::util::progress_bar::ProgressBar,
|
||||
node_modules_dir_path: Option<PathBuf>,
|
||||
npm_system_info: NpmSystemInfo,
|
||||
|
@ -161,7 +162,7 @@ fn create_inner(
|
|||
let fs_resolver = create_npm_fs_resolver(
|
||||
fs.clone(),
|
||||
npm_cache.clone(),
|
||||
&package_json_deps_provider,
|
||||
&npm_install_deps_provider,
|
||||
&text_only_progress_bar,
|
||||
resolution.clone(),
|
||||
tarball_cache.clone(),
|
||||
|
@ -175,7 +176,7 @@ fn create_inner(
|
|||
maybe_lockfile,
|
||||
npm_api,
|
||||
npm_cache,
|
||||
package_json_deps_provider,
|
||||
npm_install_deps_provider,
|
||||
resolution,
|
||||
tarball_cache,
|
||||
text_only_progress_bar,
|
||||
|
@ -261,7 +262,7 @@ pub struct ManagedCliNpmResolver {
|
|||
maybe_lockfile: Option<Arc<CliLockfile>>,
|
||||
npm_api: Arc<CliNpmRegistryApi>,
|
||||
npm_cache: Arc<NpmCache>,
|
||||
package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
|
||||
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
|
||||
resolution: Arc<NpmResolution>,
|
||||
tarball_cache: Arc<TarballCache>,
|
||||
text_only_progress_bar: ProgressBar,
|
||||
|
@ -286,7 +287,7 @@ impl ManagedCliNpmResolver {
|
|||
maybe_lockfile: Option<Arc<CliLockfile>>,
|
||||
npm_api: Arc<CliNpmRegistryApi>,
|
||||
npm_cache: Arc<NpmCache>,
|
||||
package_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
|
||||
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
|
||||
resolution: Arc<NpmResolution>,
|
||||
tarball_cache: Arc<TarballCache>,
|
||||
text_only_progress_bar: ProgressBar,
|
||||
|
@ -299,7 +300,7 @@ impl ManagedCliNpmResolver {
|
|||
maybe_lockfile,
|
||||
npm_api,
|
||||
npm_cache,
|
||||
package_json_deps_provider,
|
||||
npm_install_deps_provider,
|
||||
text_only_progress_bar,
|
||||
resolution,
|
||||
tarball_cache,
|
||||
|
@ -476,7 +477,7 @@ impl ManagedCliNpmResolver {
|
|||
if !self.top_level_install_flag.raise() {
|
||||
return Ok(false); // already did this
|
||||
}
|
||||
let pkg_json_remote_pkgs = self.package_json_deps_provider.remote_pkgs();
|
||||
let pkg_json_remote_pkgs = self.npm_install_deps_provider.remote_pkgs();
|
||||
if pkg_json_remote_pkgs.is_empty() {
|
||||
return Ok(false);
|
||||
}
|
||||
|
@ -605,7 +606,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
|
|||
create_npm_fs_resolver(
|
||||
self.fs.clone(),
|
||||
self.npm_cache.clone(),
|
||||
&self.package_json_deps_provider,
|
||||
&self.npm_install_deps_provider,
|
||||
&self.text_only_progress_bar,
|
||||
npm_resolution.clone(),
|
||||
self.tarball_cache.clone(),
|
||||
|
@ -616,7 +617,7 @@ impl CliNpmResolver for ManagedCliNpmResolver {
|
|||
self.maybe_lockfile.clone(),
|
||||
self.npm_api.clone(),
|
||||
self.npm_cache.clone(),
|
||||
self.package_json_deps_provider.clone(),
|
||||
self.npm_install_deps_provider.clone(),
|
||||
npm_resolution,
|
||||
self.tarball_cache.clone(),
|
||||
self.text_only_progress_bar.clone(),
|
||||
|
|
|
@ -41,7 +41,7 @@ use node_resolver::errors::ReferrerNotFoundError;
|
|||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::args::PackageJsonInstallDepsProvider;
|
||||
use crate::args::NpmInstallDepsProvider;
|
||||
use crate::cache::CACHE_PERM;
|
||||
use crate::npm::cache_dir::mixed_case_package_name_decode;
|
||||
use crate::npm::cache_dir::mixed_case_package_name_encode;
|
||||
|
@ -65,7 +65,7 @@ use super::common::RegistryReadPermissionChecker;
|
|||
pub struct LocalNpmPackageResolver {
|
||||
cache: Arc<NpmCache>,
|
||||
fs: Arc<dyn deno_fs::FileSystem>,
|
||||
pkg_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
|
||||
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
|
||||
progress_bar: ProgressBar,
|
||||
resolution: Arc<NpmResolution>,
|
||||
tarball_cache: Arc<TarballCache>,
|
||||
|
@ -81,7 +81,7 @@ impl LocalNpmPackageResolver {
|
|||
pub fn new(
|
||||
cache: Arc<NpmCache>,
|
||||
fs: Arc<dyn deno_fs::FileSystem>,
|
||||
pkg_json_deps_provider: Arc<PackageJsonInstallDepsProvider>,
|
||||
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
|
||||
progress_bar: ProgressBar,
|
||||
resolution: Arc<NpmResolution>,
|
||||
tarball_cache: Arc<TarballCache>,
|
||||
|
@ -92,7 +92,7 @@ impl LocalNpmPackageResolver {
|
|||
Self {
|
||||
cache,
|
||||
fs: fs.clone(),
|
||||
pkg_json_deps_provider,
|
||||
npm_install_deps_provider,
|
||||
progress_bar,
|
||||
resolution,
|
||||
tarball_cache,
|
||||
|
@ -248,7 +248,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver {
|
|||
sync_resolution_with_fs(
|
||||
&self.resolution.snapshot(),
|
||||
&self.cache,
|
||||
&self.pkg_json_deps_provider,
|
||||
&self.npm_install_deps_provider,
|
||||
&self.progress_bar,
|
||||
&self.tarball_cache,
|
||||
&self.root_node_modules_path,
|
||||
|
@ -412,14 +412,16 @@ fn has_lifecycle_scripts(
|
|||
async fn sync_resolution_with_fs(
|
||||
snapshot: &NpmResolutionSnapshot,
|
||||
cache: &Arc<NpmCache>,
|
||||
pkg_json_deps_provider: &PackageJsonInstallDepsProvider,
|
||||
npm_install_deps_provider: &NpmInstallDepsProvider,
|
||||
progress_bar: &ProgressBar,
|
||||
tarball_cache: &Arc<TarballCache>,
|
||||
root_node_modules_dir_path: &Path,
|
||||
system_info: &NpmSystemInfo,
|
||||
lifecycle_scripts: &LifecycleScriptsConfig,
|
||||
) -> Result<(), AnyError> {
|
||||
if snapshot.is_empty() && pkg_json_deps_provider.workspace_pkgs().is_empty() {
|
||||
if snapshot.is_empty()
|
||||
&& npm_install_deps_provider.workspace_pkgs().is_empty()
|
||||
{
|
||||
return Ok(()); // don't create the directory
|
||||
}
|
||||
|
||||
|
@ -620,7 +622,7 @@ async fn sync_resolution_with_fs(
|
|||
|
||||
// 4. Create symlinks for package json dependencies
|
||||
{
|
||||
for remote in pkg_json_deps_provider.remote_pkgs() {
|
||||
for remote in npm_install_deps_provider.remote_pkgs() {
|
||||
let remote_pkg = if let Ok(remote_pkg) =
|
||||
snapshot.resolve_pkg_from_pkg_req(&remote.req)
|
||||
{
|
||||
|
@ -684,7 +686,7 @@ async fn sync_resolution_with_fs(
|
|||
}
|
||||
|
||||
// 5. Create symlinks for the remaining top level packages in the node_modules folder.
|
||||
// (These may be present if they are not in the package.json dependencies, such as )
|
||||
// (These may be present if they are not in the package.json dependencies)
|
||||
// Symlink node_modules/.deno/<package_id>/node_modules/<package_name> to
|
||||
// node_modules/<package_name>
|
||||
let mut ids = snapshot
|
||||
|
@ -757,10 +759,10 @@ async fn sync_resolution_with_fs(
|
|||
|
||||
// 8. Create symlinks for the workspace packages
|
||||
{
|
||||
// todo(#24419): this is not exactly correct because it should
|
||||
// todo(dsherret): this is not exactly correct because it should
|
||||
// install correctly for a workspace (potentially in sub directories),
|
||||
// but this is good enough for a first pass
|
||||
for workspace in pkg_json_deps_provider.workspace_pkgs() {
|
||||
for workspace in npm_install_deps_provider.workspace_pkgs() {
|
||||
symlink_package_dir(
|
||||
&workspace.target_dir,
|
||||
&root_node_modules_dir_path.join(&workspace.alias),
|
||||
|
@@ -985,21 +987,31 @@ impl SetupCache {
}
}

/// Normalizes a package name for use at `node_modules/.deno/<pkg-name>@<version>[_<copy_index>]`
pub fn normalize_pkg_name_for_node_modules_deno_folder(name: &str) -> Cow<str> {
let name = if name.to_lowercase() == name {
Cow::Borrowed(name)
} else {
Cow::Owned(format!("_{}", mixed_case_package_name_encode(name)))
};
if name.starts_with('@') {
name.replace('/', "+").into()
} else {
name
}
}

fn get_package_folder_id_folder_name(
folder_id: &NpmPackageCacheFolderId,
) -> String {
let copy_str = if folder_id.copy_index == 0 {
"".to_string()
Cow::Borrowed("")
} else {
format!("_{}", folder_id.copy_index)
Cow::Owned(format!("_{}", folder_id.copy_index))
};
let nv = &folder_id.nv;
let name = if nv.name.to_lowercase() == nv.name {
Cow::Borrowed(&nv.name)
} else {
Cow::Owned(format!("_{}", mixed_case_package_name_encode(&nv.name)))
};
format!("{}@{}{}", name, nv.version, copy_str).replace('/', "+")
let name = normalize_pkg_name_for_node_modules_deno_folder(&nv.name);
format!("{}@{}{}", name, nv.version, copy_str)
}

fn get_package_folder_id_from_folder_name(
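As an aside (not part of the commit itself), a simplified runnable sketch of the folder-name format the hunk above produces under node_modules/.deno/; the mixed-case name encoding done by mixed_case_package_name_encode is deliberately omitted and the helper name is hypothetical.

```rust
// Illustrative only: "<name>@<version>" plus "_<copy_index>" when the copy
// index is non-zero, with '/' in scoped names replaced by '+'.
fn example_folder_name(name: &str, version: &str, copy_index: u32) -> String {
  let name = name.replace('/', "+"); // e.g. "@denotest/add" -> "@denotest+add"
  if copy_index == 0 {
    format!("{name}@{version}")
  } else {
    format!("{name}@{version}_{copy_index}")
  }
}

fn main() {
  assert_eq!(example_folder_name("@denotest/add", "1.0.0", 0), "@denotest+add@1.0.0");
  assert_eq!(example_folder_name("@denotest/add", "1.0.0", 1), "@denotest+add@1.0.0_1");
}
```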
@ -11,10 +11,11 @@ use deno_npm::NpmSystemInfo;
|
|||
use deno_runtime::deno_fs::FileSystem;
|
||||
|
||||
use crate::args::LifecycleScriptsConfig;
|
||||
use crate::args::PackageJsonInstallDepsProvider;
|
||||
use crate::args::NpmInstallDepsProvider;
|
||||
use crate::util::progress_bar::ProgressBar;
|
||||
|
||||
pub use self::common::NpmPackageFsResolver;
|
||||
pub use self::local::normalize_pkg_name_for_node_modules_deno_folder;
|
||||
|
||||
use self::global::GlobalNpmPackageResolver;
|
||||
use self::local::LocalNpmPackageResolver;
|
||||
|
@ -27,7 +28,7 @@ use super::resolution::NpmResolution;
|
|||
pub fn create_npm_fs_resolver(
|
||||
fs: Arc<dyn FileSystem>,
|
||||
npm_cache: Arc<NpmCache>,
|
||||
pkg_json_deps_provider: &Arc<PackageJsonInstallDepsProvider>,
|
||||
npm_install_deps_provider: &Arc<NpmInstallDepsProvider>,
|
||||
progress_bar: &ProgressBar,
|
||||
resolution: Arc<NpmResolution>,
|
||||
tarball_cache: Arc<TarballCache>,
|
||||
|
@ -39,7 +40,7 @@ pub fn create_npm_fs_resolver(
|
|||
Some(node_modules_folder) => Arc::new(LocalNpmPackageResolver::new(
|
||||
npm_cache,
|
||||
fs,
|
||||
pkg_json_deps_provider.clone(),
|
||||
npm_install_deps_provider.clone(),
|
||||
progress_bar.clone(),
|
||||
resolution,
|
||||
tarball_cache,
|
||||
|
|
|
@ -502,7 +502,7 @@ impl Resolver for CliGraphResolver {
|
|||
|
||||
fn resolve(
|
||||
&self,
|
||||
specifier: &str,
|
||||
raw_specifier: &str,
|
||||
referrer_range: &deno_graph::Range,
|
||||
mode: ResolutionMode,
|
||||
) -> Result<ModuleSpecifier, ResolveError> {
|
||||
|
@ -519,7 +519,7 @@ impl Resolver for CliGraphResolver {
|
|||
if let Some(node_resolver) = self.node_resolver.as_ref() {
|
||||
if referrer.scheme() == "file" && node_resolver.in_npm_package(referrer) {
|
||||
return node_resolver
|
||||
.resolve(specifier, referrer, to_node_mode(mode))
|
||||
.resolve(raw_specifier, referrer, to_node_mode(mode))
|
||||
.map(|res| res.into_url())
|
||||
.map_err(|e| ResolveError::Other(e.into()));
|
||||
}
|
||||
|
@ -528,7 +528,7 @@ impl Resolver for CliGraphResolver {
|
|||
// Attempt to resolve with the workspace resolver
|
||||
let result: Result<_, ResolveError> = self
|
||||
.workspace_resolver
|
||||
.resolve(specifier, referrer)
|
||||
.resolve(raw_specifier, referrer)
|
||||
.map_err(|err| match err {
|
||||
MappedResolutionError::Specifier(err) => ResolveError::Specifier(err),
|
||||
MappedResolutionError::ImportMap(err) => {
|
||||
|
@ -700,7 +700,7 @@ impl Resolver for CliGraphResolver {
|
|||
// If byonm, check if the bare specifier resolves to an npm package
|
||||
if is_byonm && referrer.scheme() == "file" {
|
||||
let maybe_resolution = node_resolver
|
||||
.resolve_if_for_npm_pkg(specifier, referrer, to_node_mode(mode))
|
||||
.resolve_if_for_npm_pkg(raw_specifier, referrer, to_node_mode(mode))
|
||||
.map_err(ResolveError::Other)?;
|
||||
if let Some(res) = maybe_resolution {
|
||||
return Ok(res.into_url());
|
||||
|
|
|
@ -622,6 +622,11 @@
|
|||
{
|
||||
"type": "object",
|
||||
"description": "A map of package exports to files in this JSR package.",
|
||||
"propertyNames": {
|
||||
"description": "Package export name",
|
||||
"examples": [".", "./foo", "./bar"],
|
||||
"pattern": "^\\.(/.*)?$"
|
||||
},
|
||||
"patternProperties": {
|
||||
"^\\.(/.*)?$": {
|
||||
"type": "string",
|
||||
|
|
|
@ -45,7 +45,7 @@ use serde::Serialize;
|
|||
use crate::args::CaData;
|
||||
use crate::args::CliOptions;
|
||||
use crate::args::CompileFlags;
|
||||
use crate::args::PackageJsonInstallDepsProvider;
|
||||
use crate::args::NpmInstallDepsProvider;
|
||||
use crate::args::PermissionFlags;
|
||||
use crate::args::UnstableConfig;
|
||||
use crate::cache::DenoDir;
|
||||
|
|
|
@ -48,7 +48,7 @@ use crate::args::get_root_cert_store;
|
|||
use crate::args::npm_pkg_req_ref_to_binary_command;
|
||||
use crate::args::CaData;
|
||||
use crate::args::CacheSetting;
|
||||
use crate::args::PackageJsonInstallDepsProvider;
|
||||
use crate::args::NpmInstallDepsProvider;
|
||||
use crate::args::StorageKeyResolver;
|
||||
use crate::cache::Caches;
|
||||
use crate::cache::DenoDirProvider;
|
||||
|
@ -138,7 +138,7 @@ pub const UNSUPPORTED_SCHEME: &str = "Unsupported scheme";
|
|||
impl ModuleLoader for EmbeddedModuleLoader {
|
||||
fn resolve(
|
||||
&self,
|
||||
specifier: &str,
|
||||
raw_specifier: &str,
|
||||
referrer: &str,
|
||||
kind: ResolutionKind,
|
||||
) -> Result<ModuleSpecifier, AnyError> {
|
||||
|
@ -162,13 +162,15 @@ impl ModuleLoader for EmbeddedModuleLoader {
|
|||
self
|
||||
.shared
|
||||
.node_resolver
|
||||
.resolve(specifier, &referrer, NodeResolutionMode::Execution)?
|
||||
.resolve(raw_specifier, &referrer, NodeResolutionMode::Execution)?
|
||||
.into_url(),
|
||||
);
|
||||
}
|
||||
|
||||
let mapped_resolution =
|
||||
self.shared.workspace_resolver.resolve(specifier, &referrer);
|
||||
let mapped_resolution = self
|
||||
.shared
|
||||
.workspace_resolver
|
||||
.resolve(raw_specifier, &referrer);
|
||||
|
||||
match mapped_resolution {
|
||||
Ok(MappedResolution::WorkspaceJsrPackage { specifier, .. }) => {
|
||||
|
@ -262,7 +264,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
|
|||
if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" =>
|
||||
{
|
||||
let maybe_res = self.shared.node_resolver.resolve_if_for_npm_pkg(
|
||||
specifier,
|
||||
raw_specifier,
|
||||
&referrer,
|
||||
NodeResolutionMode::Execution,
|
||||
)?;
|
||||
|
@ -502,9 +504,9 @@ pub async fn run(
|
|||
text_only_progress_bar: progress_bar,
|
||||
maybe_node_modules_path,
|
||||
npm_system_info: Default::default(),
|
||||
package_json_deps_provider: Arc::new(
|
||||
npm_install_deps_provider: Arc::new(
|
||||
// this is only used for installing packages, which isn't necessary with deno compile
|
||||
PackageJsonInstallDepsProvider::empty(),
|
||||
NpmInstallDepsProvider::empty(),
|
||||
),
|
||||
// create an npmrc that uses the fake npm_registry_url to resolve packages
|
||||
npmrc: Arc::new(ResolvedNpmRc {
|
||||
|
@ -554,9 +556,9 @@ pub async fn run(
|
|||
text_only_progress_bar: progress_bar,
|
||||
maybe_node_modules_path: None,
|
||||
npm_system_info: Default::default(),
|
||||
package_json_deps_provider: Arc::new(
|
||||
npm_install_deps_provider: Arc::new(
|
||||
// this is only used for installing packages, which isn't necessary with deno compile
|
||||
PackageJsonInstallDepsProvider::empty(),
|
||||
NpmInstallDepsProvider::empty(),
|
||||
),
|
||||
// Packages from different registries are already inlined in the ESZip,
|
||||
// so no need to create actual `.npmrc` configuration.
|
||||
|
|
|
@ -213,8 +213,8 @@ impl ShellCommand for NodeGypCommand {
|
|||
) -> LocalBoxFuture<'static, ExecuteResult> {
|
||||
// at the moment this shell command is just to give a warning if node-gyp is not found
|
||||
// in the future, we could try to run/install node-gyp for the user with deno
|
||||
if which::which("node-gyp").is_err() {
|
||||
log::warn!("{}: node-gyp was used in a script, but was not listed as a dependency. Either add it as a dependency or install it globally (e.g. `npm install -g node-gyp`)", crate::colors::yellow("warning"));
|
||||
if context.state.resolve_command_path("node-gyp").is_err() {
|
||||
log::warn!("{} node-gyp was used in a script, but was not listed as a dependency. Either add it as a dependency or install it globally (e.g. `npm install -g node-gyp`)", crate::colors::yellow("Warning"));
|
||||
}
|
||||
ExecutableCommand::new(
|
||||
"node-gyp".to_string(),
|
||||
|
|
|
@ -14,6 +14,7 @@ use deno_terminal::colors;
|
|||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
|
||||
use crate::args::check_warn_tsconfig;
|
||||
use crate::args::CliOptions;
|
||||
use crate::args::TsConfig;
|
||||
use crate::args::TsConfigType;
|
||||
|
@ -118,9 +119,7 @@ impl TypeChecker {
|
|||
.cli_options
|
||||
.resolve_ts_config_for_emit(TsConfigType::Check { lib: options.lib })?;
|
||||
if options.log_ignored_options {
|
||||
if let Some(ignored_options) = ts_config_result.maybe_ignored_options {
|
||||
log::warn!("{}", ignored_options);
|
||||
}
|
||||
check_warn_tsconfig(&ts_config_result);
|
||||
}
|
||||
|
||||
let type_check_mode = options.type_check_mode;
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::args::check_warn_tsconfig;
|
||||
use crate::args::CompileFlags;
|
||||
use crate::args::Flags;
|
||||
use crate::factory::CliFactory;
|
||||
|
@ -79,6 +80,7 @@ pub async fn compile(
|
|||
|
||||
let ts_config_for_emit = cli_options
|
||||
.resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?;
|
||||
check_warn_tsconfig(&ts_config_for_emit);
|
||||
let (transpile_options, emit_options) =
|
||||
crate::args::ts_config_to_transpile_and_emit_options(
|
||||
ts_config_for_emit.ts_config,
|
||||
|
|
|
@ -7,14 +7,17 @@ use crate::args::ConfigFlag;
|
|||
use crate::args::Flags;
|
||||
use crate::args::InstallFlags;
|
||||
use crate::args::InstallFlagsGlobal;
|
||||
use crate::args::InstallFlagsLocal;
|
||||
use crate::args::InstallKind;
|
||||
use crate::args::TypeCheckMode;
|
||||
use crate::args::UninstallFlags;
|
||||
use crate::args::UninstallKind;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::graph_container::ModuleGraphContainer;
|
||||
use crate::http_util::HttpClientProvider;
|
||||
use crate::util::fs::canonicalize_path_maybe_not_exists;
|
||||
|
||||
use deno_core::anyhow::bail;
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::AnyError;
|
||||
|
@ -261,26 +264,65 @@ pub fn uninstall(uninstall_flags: UninstallFlags) -> Result<(), AnyError> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) async fn install_from_entrypoints(
|
||||
flags: Arc<Flags>,
|
||||
entrypoints: &[String],
|
||||
) -> Result<(), AnyError> {
|
||||
let factory = CliFactory::from_flags(flags.clone());
|
||||
let emitter = factory.emitter()?;
|
||||
let main_graph_container = factory.main_module_graph_container().await?;
|
||||
main_graph_container
|
||||
.load_and_type_check_files(entrypoints)
|
||||
.await?;
|
||||
emitter
|
||||
.cache_module_emits(&main_graph_container.graph())
|
||||
.await
|
||||
}
|
||||
|
||||
async fn install_local(
|
||||
flags: Arc<Flags>,
|
||||
maybe_add_flags: Option<AddFlags>,
|
||||
install_flags: InstallFlagsLocal,
|
||||
) -> Result<(), AnyError> {
|
||||
if let Some(add_flags) = maybe_add_flags {
|
||||
return super::registry::add(
|
||||
flags,
|
||||
add_flags,
|
||||
super::registry::AddCommandName::Install,
|
||||
)
|
||||
.await;
|
||||
match install_flags {
|
||||
InstallFlagsLocal::Add(add_flags) => {
|
||||
super::registry::add(
|
||||
flags,
|
||||
add_flags,
|
||||
super::registry::AddCommandName::Install,
|
||||
)
|
||||
.await
|
||||
}
|
||||
InstallFlagsLocal::Entrypoints(entrypoints) => {
|
||||
install_from_entrypoints(flags, &entrypoints).await
|
||||
}
|
||||
InstallFlagsLocal::TopLevel => {
|
||||
let factory = CliFactory::from_flags(flags);
|
||||
crate::tools::registry::cache_top_level_deps(&factory, None).await?;
|
||||
|
||||
if let Some(lockfile) = factory.cli_options()?.maybe_lockfile() {
|
||||
lockfile.write_if_changed()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let factory = CliFactory::from_flags(flags);
|
||||
crate::tools::registry::cache_top_level_deps(&factory, None).await?;
|
||||
|
||||
if let Some(lockfile) = factory.cli_options()?.maybe_lockfile() {
|
||||
lockfile.write_if_changed()?;
|
||||
fn check_if_installs_a_single_package_globally(
|
||||
maybe_add_flags: Option<&AddFlags>,
|
||||
) -> Result<(), AnyError> {
|
||||
let Some(add_flags) = maybe_add_flags else {
|
||||
return Ok(());
|
||||
};
|
||||
if add_flags.packages.len() != 1 {
|
||||
return Ok(());
|
||||
}
|
||||
let Ok(url) = Url::parse(&add_flags.packages[0]) else {
|
||||
return Ok(());
|
||||
};
|
||||
if matches!(url.scheme(), "http" | "https") {
|
||||
bail!("Failed to install \"{}\" specifier. If you are trying to install {} globally, run again with `-g` flag:\n deno install -g {}", url.scheme(), url.as_str(), url.as_str());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -296,8 +338,11 @@ pub async fn install_command(
|
|||
|
||||
install_global(flags, global_flags).await
|
||||
}
|
||||
InstallKind::Local(maybe_add_flags) => {
|
||||
install_local(flags, maybe_add_flags).await
|
||||
InstallKind::Local(local_flags) => {
|
||||
if let InstallFlagsLocal::Add(add_flags) = &local_flags {
|
||||
check_if_installs_a_single_package_globally(Some(add_flags))?;
|
||||
}
|
||||
install_local(flags, local_flags).await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,6 +3,8 @@
|
|||
mod cache_deps;
|
||||
|
||||
pub use cache_deps::cache_top_level_deps;
|
||||
use deno_semver::jsr::JsrPackageReqReference;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
|
@ -501,14 +503,18 @@ impl AddPackageReq {
|
|||
|
||||
match prefix {
|
||||
Prefix::Jsr => {
|
||||
let package_req = PackageReq::from_str(entry_text)?;
|
||||
let req_ref =
|
||||
JsrPackageReqReference::from_str(&format!("jsr:{}", entry_text))?;
|
||||
let package_req = req_ref.into_inner().req;
|
||||
Ok(AddPackageReq {
|
||||
alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()),
|
||||
value: AddPackageReqValue::Jsr(package_req),
|
||||
})
|
||||
}
|
||||
Prefix::Npm => {
|
||||
let package_req = PackageReq::from_str(entry_text)?;
|
||||
let req_ref =
|
||||
NpmPackageReqReference::from_str(&format!("npm:{}", entry_text))?;
|
||||
let package_req = req_ref.into_inner().req;
|
||||
Ok(AddPackageReq {
|
||||
alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()),
|
||||
value: AddPackageReqValue::Npm(package_req),
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::args::CliOptions;
|
||||
use crate::args::Flags;
|
||||
use crate::args::TaskFlags;
|
||||
use crate::colors;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::task_runner;
|
||||
use crate::util::fs::canonicalize_path;
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use deno_config::deno_json::Task;
|
||||
use deno_config::workspace::TaskOrScript;
|
||||
use deno_config::workspace::WorkspaceDirectory;
|
||||
|
@ -18,13 +18,15 @@ use deno_core::anyhow::Context;
|
|||
use deno_core::error::AnyError;
|
||||
use deno_core::normalize_path;
|
||||
use deno_task_shell::ShellCommand;
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::args::CliOptions;
|
||||
use crate::args::Flags;
|
||||
use crate::args::TaskFlags;
|
||||
use crate::colors;
|
||||
use crate::factory::CliFactory;
|
||||
use crate::npm::CliNpmResolver;
|
||||
use crate::task_runner;
|
||||
use crate::util::fs::canonicalize_path;
|
||||
|
||||
pub async fn execute_script(
|
||||
flags: Arc<Flags>,
|
||||
|
@ -106,12 +108,9 @@ See https://docs.deno.com/go/config"#
|
|||
.await
|
||||
}
|
||||
TaskOrScript::Script(scripts, _script) => {
|
||||
// ensure the npm packages are installed if using a node_modules
|
||||
// directory and managed resolver
|
||||
if cli_options.has_node_modules_dir() {
|
||||
if let Some(npm_resolver) = npm_resolver.as_managed() {
|
||||
npm_resolver.ensure_top_level_package_json_install().await?;
|
||||
}
|
||||
// ensure the npm packages are installed if using a managed resolver
|
||||
if let Some(npm_resolver) = npm_resolver.as_managed() {
|
||||
npm_resolver.ensure_top_level_package_json_install().await?;
|
||||
}
|
||||
|
||||
let cwd = match task_flags.cwd {
|
||||
|
|
|
@@ -319,10 +319,10 @@ pub const OP_DETAILS: phf::Map<&'static str, [&'static str; 2]> = phf_map! {
"op_fs_copy_file_async" => ["copy a file", "awaiting the result of a `Deno.copyFile` call"],
"op_fs_events_poll" => ["get the next file system event", "breaking out of a for await loop looping over `Deno.FsEvents`"],
"op_fs_fdatasync_async" => ["flush pending data operations for a file to disk", "awaiting the result of a `Deno.fdatasync` or `Deno.FsFile.syncData` call"],
"op_fs_file_stat_async" => ["get file metadata", "awaiting the result of a `Deno.fstat` or `Deno.FsFile.stat` call"],
"op_fs_file_stat_async" => ["get file metadata", "awaiting the result of a `Deno.FsFile.prototype.stat` call"],
"op_fs_flock_async" => ["lock a file", "awaiting the result of a `Deno.FsFile.lock` call"],
"op_fs_fsync_async" => ["flush pending data operations for a file to disk", "awaiting the result of a `Deno.fsync` or `Deno.FsFile.sync` call"],
"op_fs_ftruncate_async" => ["truncate a file", "awaiting the result of a `Deno.ftruncate` or `Deno.FsFile.truncate` call"],
"op_fs_file_truncate_async" => ["truncate a file", "awaiting the result of a `Deno.FsFile.prototype.truncate` call"],
"op_fs_funlock_async_unstable" => ["unlock a file", "awaiting the result of a `Deno.funlock` call"],
"op_fs_funlock_async" => ["unlock a file", "awaiting the result of a `Deno.FsFile.unlock` call"],
"op_fs_link_async" => ["create a hard link", "awaiting the result of a `Deno.link` call"],
568 cli/tsc/dts/lib.deno.ns.d.ts vendored
@ -1833,27 +1833,6 @@ declare namespace Deno {
|
|||
seekSync(offset: number | bigint, whence: SeekMode): number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Copies from `src` to `dst` until either EOF (`null`) is read from `src` or
|
||||
* an error occurs. It resolves to the number of bytes copied or rejects with
|
||||
* the first error encountered while copying.
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category I/O
|
||||
*
|
||||
* @param src The source to copy from
|
||||
* @param dst The destination to copy to
|
||||
* @param options Can be used to tune size of the buffer. Default size is 32kB
|
||||
*/
|
||||
export function copy(
|
||||
src: Reader,
|
||||
dst: Writer,
|
||||
options?: { bufSize?: number },
|
||||
): Promise<number>;
|
||||
|
||||
/** Open a file and resolve to an instance of {@linkcode Deno.FsFile}. The
|
||||
* file does not need to previously exist if using the `create` or `createNew`
|
||||
* open options. The caller may have the resulting file automatically closed
|
||||
|
@ -1941,121 +1920,6 @@ declare namespace Deno {
|
|||
*/
|
||||
export function createSync(path: string | URL): FsFile;
|
||||
|
||||
/** Read from a resource ID (`rid`) into an array buffer (`buffer`).
|
||||
*
|
||||
* Resolves to either the number of bytes read during the operation or EOF
|
||||
* (`null`) if there was nothing more to read.
|
||||
*
|
||||
* It is possible for a read to successfully return with `0` bytes. This does
|
||||
* not indicate EOF.
|
||||
*
|
||||
* This function is one of the lowest level APIs and most users should not
|
||||
* work with this directly, but rather use {@linkcode ReadableStream} and
|
||||
* {@linkcode https://jsr.io/@std/streams/doc/to-array-buffer/~/toArrayBuffer | toArrayBuffer}
|
||||
* instead.
|
||||
*
|
||||
* **It is not guaranteed that the full buffer will be read in a single call.**
|
||||
*
|
||||
* ```ts
|
||||
* // if "/foo/bar.txt" contains the text "hello world":
|
||||
* using file = await Deno.open("/foo/bar.txt");
|
||||
* const buf = new Uint8Array(100);
|
||||
* const numberOfBytesRead = await Deno.read(file.rid, buf); // 11 bytes
|
||||
* const text = new TextDecoder().decode(buf); // "hello world"
|
||||
* ```
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category I/O
|
||||
*/
|
||||
export function read(rid: number, buffer: Uint8Array): Promise<number | null>;
|
||||
|
||||
/** Synchronously read from a resource ID (`rid`) into an array buffer
|
||||
* (`buffer`).
|
||||
*
|
||||
* Returns either the number of bytes read during the operation or EOF
|
||||
* (`null`) if there was nothing more to read.
|
||||
*
|
||||
* It is possible for a read to successfully return with `0` bytes. This does
|
||||
* not indicate EOF.
|
||||
*
|
||||
* This function is one of the lowest level APIs and most users should not
|
||||
* work with this directly, but rather use {@linkcode ReadableStream} and
|
||||
* {@linkcode https://jsr.io/@std/streams/doc/to-array-buffer/~/toArrayBuffer | toArrayBuffer}
|
||||
* instead.
|
||||
*
|
||||
* **It is not guaranteed that the full buffer will be read in a single
|
||||
* call.**
|
||||
*
|
||||
* ```ts
|
||||
* // if "/foo/bar.txt" contains the text "hello world":
|
||||
* using file = Deno.openSync("/foo/bar.txt");
|
||||
* const buf = new Uint8Array(100);
|
||||
* const numberOfBytesRead = Deno.readSync(file.rid, buf); // 11 bytes
|
||||
* const text = new TextDecoder().decode(buf); // "hello world"
|
||||
* ```
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category I/O
|
||||
*/
|
||||
export function readSync(rid: number, buffer: Uint8Array): number | null;
|
||||
|
||||
/** Write to the resource ID (`rid`) the contents of the array buffer (`data`).
|
||||
*
|
||||
* Resolves to the number of bytes written. This function is one of the lowest
|
||||
* level APIs and most users should not work with this directly, but rather
|
||||
* use {@linkcode WritableStream}, {@linkcode ReadableStream.from} and
|
||||
* {@linkcode ReadableStream.pipeTo}.
|
||||
*
|
||||
* **It is not guaranteed that the full buffer will be written in a single
|
||||
* call.**
|
||||
*
|
||||
* ```ts
|
||||
* const encoder = new TextEncoder();
|
||||
* const data = encoder.encode("Hello world");
|
||||
* using file = await Deno.open("/foo/bar.txt", { write: true });
|
||||
* const bytesWritten = await Deno.write(file.rid, data); // 11
|
||||
* ```
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category I/O
|
||||
*/
|
||||
export function write(rid: number, data: Uint8Array): Promise<number>;
|
||||
|
||||
/** Synchronously write to the resource ID (`rid`) the contents of the array
|
||||
* buffer (`data`).
|
||||
*
|
||||
* Returns the number of bytes written. This function is one of the lowest
|
||||
* level APIs and most users should not work with this directly, but rather
|
||||
* use {@linkcode WritableStream}, {@linkcode ReadableStream.from} and
|
||||
* {@linkcode ReadableStream.pipeTo}.
|
||||
*
|
||||
* **It is not guaranteed that the full buffer will be written in a single
|
||||
* call.**
|
||||
*
|
||||
* ```ts
|
||||
* const encoder = new TextEncoder();
|
||||
* const data = encoder.encode("Hello world");
|
||||
* using file = Deno.openSync("/foo/bar.txt", { write: true });
|
||||
* const bytesWritten = Deno.writeSync(file.rid, data); // 11
|
||||
* ```
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category I/O
|
||||
*/
|
||||
export function writeSync(rid: number, data: Uint8Array): number;
|
||||
|
||||
/** Seek a resource ID (`rid`) to the given `offset` under mode given by `whence`.
|
||||
* The call resolves to the new position within the resource (bytes from the start).
|
||||
*
|
||||
|
@ -2666,17 +2530,6 @@ declare namespace Deno {
|
|||
[Symbol.dispose](): void;
|
||||
}
|
||||
|
||||
/**
|
||||
* The Deno abstraction for reading and writing files.
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category File System
|
||||
*/
|
||||
export const File: typeof FsFile;
|
||||
|
||||
/** Gets the size of the console as columns/rows.
|
||||
*
|
||||
* ```ts
|
||||
|
@ -2896,25 +2749,6 @@ declare namespace Deno {
|
|||
signal?: AbortSignal;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a given resource id (`rid`) is a TTY (a terminal).
|
||||
*
|
||||
* ```ts
|
||||
* // This example is system and context specific
|
||||
* const nonTTYRid = Deno.openSync("my_file.txt").rid;
|
||||
* const ttyRid = Deno.openSync("/dev/tty6").rid;
|
||||
* console.log(Deno.isatty(nonTTYRid)); // false
|
||||
* console.log(Deno.isatty(ttyRid)); // true
|
||||
* ```
|
||||
*
|
||||
* @deprecated This will be soft-removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category I/O
|
||||
*/
|
||||
export function isatty(rid: number): boolean;
|
||||
|
||||
/**
|
||||
* A variable-sized buffer of bytes with `read()` and `write()` methods.
|
||||
*
|
||||
|
@ -3014,29 +2848,6 @@ declare namespace Deno {
|
|||
*/
|
||||
export function readAllSync(r: ReaderSync): Uint8Array;
|
||||
|
||||
/**
|
||||
* Write all the content of the array buffer (`arr`) to the writer (`w`).
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category I/O
|
||||
*/
|
||||
export function writeAll(w: Writer, arr: Uint8Array): Promise<void>;
|
||||
|
||||
/**
|
||||
* Synchronously write all the content of the array buffer (`arr`) to the
|
||||
* writer (`w`).
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category I/O
|
||||
*/
|
||||
export function writeAllSync(w: WriterSync, arr: Uint8Array): void;
|
||||
|
||||
/**
|
||||
* Options which can be set when using {@linkcode Deno.mkdir} and
|
||||
* {@linkcode Deno.mkdirSync}.
|
||||
|
@ -4038,14 +3849,6 @@ declare namespace Deno {
|
|||
* @category File System
|
||||
*/
|
||||
export interface FsWatcher extends AsyncIterable<FsEvent>, Disposable {
|
||||
/**
|
||||
* The resource id.
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*/
|
||||
readonly rid: number;
|
||||
/** Stops watching the file system and closes the watcher resource. */
|
||||
close(): void;
|
||||
/**
|
||||
|
@ -4103,175 +3906,6 @@ declare namespace Deno {
|
|||
options?: { recursive: boolean },
|
||||
): FsWatcher;
|
||||
|
||||
/**
|
||||
* Options which can be used with {@linkcode Deno.run}.
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category Subprocess */
|
||||
export interface RunOptions {
|
||||
/** Arguments to pass.
|
||||
*
|
||||
* _Note_: the first element needs to be a path to the executable that is
|
||||
* being run. */
|
||||
cmd: readonly string[] | [string | URL, ...string[]];
|
||||
/** The current working directory that should be used when running the
|
||||
* sub-process. */
|
||||
cwd?: string;
|
||||
/** Any environment variables to be set when running the sub-process. */
|
||||
env?: Record<string, string>;
|
||||
/** By default subprocess inherits `stdout` of parent process. To change
|
||||
* this this option can be set to a resource ID (_rid_) of an open file,
|
||||
* `"inherit"`, `"piped"`, or `"null"`:
|
||||
*
|
||||
* - _number_: the resource ID of an open file/resource. This allows you to
|
||||
* write to a file.
|
||||
* - `"inherit"`: The default if unspecified. The subprocess inherits from the
|
||||
* parent.
|
||||
* - `"piped"`: A new pipe should be arranged to connect the parent and child
|
||||
* sub-process.
|
||||
* - `"null"`: This stream will be ignored. This is the equivalent of attaching
|
||||
* the stream to `/dev/null`.
|
||||
*/
|
||||
stdout?: "inherit" | "piped" | "null" | number;
|
||||
/** By default subprocess inherits `stderr` of parent process. To change
|
||||
* this this option can be set to a resource ID (_rid_) of an open file,
|
||||
* `"inherit"`, `"piped"`, or `"null"`:
|
||||
*
|
||||
* - _number_: the resource ID of an open file/resource. This allows you to
|
||||
* write to a file.
|
||||
* - `"inherit"`: The default if unspecified. The subprocess inherits from the
|
||||
* parent.
|
||||
* - `"piped"`: A new pipe should be arranged to connect the parent and child
|
||||
* sub-process.
|
||||
* - `"null"`: This stream will be ignored. This is the equivalent of attaching
|
||||
* the stream to `/dev/null`.
|
||||
*/
|
||||
stderr?: "inherit" | "piped" | "null" | number;
|
||||
/** By default subprocess inherits `stdin` of parent process. To change
|
||||
* this this option can be set to a resource ID (_rid_) of an open file,
|
||||
* `"inherit"`, `"piped"`, or `"null"`:
|
||||
*
|
||||
* - _number_: the resource ID of an open file/resource. This allows you to
|
||||
* read from a file.
|
||||
* - `"inherit"`: The default if unspecified. The subprocess inherits from the
|
||||
* parent.
|
||||
* - `"piped"`: A new pipe should be arranged to connect the parent and child
|
||||
* sub-process.
|
||||
* - `"null"`: This stream will be ignored. This is the equivalent of attaching
|
||||
* the stream to `/dev/null`.
|
||||
*/
|
||||
stdin?: "inherit" | "piped" | "null" | number;
|
||||
}
|
||||
|
||||
/**
|
||||
* The status resolved from the `.status()` method of a
|
||||
* {@linkcode Deno.Process} instance.
|
||||
*
|
||||
* If `success` is `true`, then `code` will be `0`, but if `success` is
|
||||
* `false`, the sub-process exit code will be set in `code`.
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category Subprocess */
|
||||
export type ProcessStatus =
|
||||
| {
|
||||
success: true;
|
||||
code: 0;
|
||||
signal?: undefined;
|
||||
}
|
||||
| {
|
||||
success: false;
|
||||
code: number;
|
||||
signal?: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* Represents an instance of a sub process that is returned from
|
||||
* {@linkcode Deno.run} which can be used to manage the sub-process.
|
||||
*
|
||||
* @deprecated This will be removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @category Subprocess */
|
||||
export class Process<T extends RunOptions = RunOptions> {
|
||||
/** The resource ID of the sub-process. */
|
||||
readonly rid: number;
|
||||
/** The operating system's process ID for the sub-process. */
|
||||
readonly pid: number;
|
||||
/** A reference to the sub-processes `stdin`, which allows interacting with
|
||||
* the sub-process at a low level. */
|
||||
readonly stdin: T["stdin"] extends "piped" ? Writer & Closer & {
|
||||
writable: WritableStream<Uint8Array>;
|
||||
}
|
||||
: (Writer & Closer & { writable: WritableStream<Uint8Array> }) | null;
|
||||
/** A reference to the sub-processes `stdout`, which allows interacting with
|
||||
* the sub-process at a low level. */
|
||||
readonly stdout: T["stdout"] extends "piped" ? Reader & Closer & {
|
||||
readable: ReadableStream<Uint8Array>;
|
||||
}
|
||||
: (Reader & Closer & { readable: ReadableStream<Uint8Array> }) | null;
|
||||
/** A reference to the sub-processes `stderr`, which allows interacting with
|
||||
* the sub-process at a low level. */
|
||||
readonly stderr: T["stderr"] extends "piped" ? Reader & Closer & {
|
||||
readable: ReadableStream<Uint8Array>;
|
||||
}
|
||||
: (Reader & Closer & { readable: ReadableStream<Uint8Array> }) | null;
|
||||
/** Wait for the process to exit and return its exit status.
|
||||
*
|
||||
* Calling this function multiple times will return the same status.
|
||||
*
|
||||
* The `stdin` reference to the process will be closed before waiting to
|
||||
* avoid a deadlock.
|
||||
*
|
||||
* If `stdout` and/or `stderr` were set to `"piped"`, they must be closed
|
||||
* manually before the process can exit.
|
||||
*
|
||||
* To run process to completion and collect output from both `stdout` and
|
||||
* `stderr` use:
|
||||
*
|
||||
* ```ts
|
||||
* const p = Deno.run({ cmd: [ "echo", "hello world" ], stderr: 'piped', stdout: 'piped' });
|
||||
* const [status, stdout, stderr] = await Promise.all([
|
||||
* p.status(),
|
||||
* p.output(),
|
||||
* p.stderrOutput()
|
||||
* ]);
|
||||
* p.close();
|
||||
* ```
|
||||
*/
|
||||
status(): Promise<ProcessStatus>;
|
||||
/** Buffer the stdout until EOF and return it as `Uint8Array`.
|
||||
*
|
||||
* You must set `stdout` to `"piped"` when creating the process.
|
||||
*
|
||||
* This calls `close()` on stdout after its done. */
|
||||
output(): Promise<Uint8Array>;
|
||||
/** Buffer the stderr until EOF and return it as `Uint8Array`.
|
||||
*
|
||||
* You must set `stderr` to `"piped"` when creating the process.
|
||||
*
|
||||
* This calls `close()` on stderr after its done. */
|
||||
stderrOutput(): Promise<Uint8Array>;
|
||||
/** Clean up resources associated with the sub-process instance. */
|
||||
close(): void;
|
||||
/** Send a signal to process.
|
||||
* Default signal is `"SIGTERM"`.
|
||||
*
|
||||
* ```ts
|
||||
* const p = Deno.run({ cmd: [ "sleep", "20" ]});
|
||||
* p.kill("SIGTERM");
|
||||
* p.close();
|
||||
* ```
|
||||
*/
|
||||
kill(signo?: Signal): void;
|
||||
}
|
||||
|
||||
/** Operating signals which can be listened for or sent to sub-processes. What
|
||||
* signals and what their standard behaviors are OS dependent.
|
||||
*
|
||||
|
@ -4353,61 +3987,6 @@ declare namespace Deno {
|
|||
handler: () => void,
|
||||
): void;
|
||||
|
||||
/**
|
||||
* Spawns new subprocess. RunOptions must contain at a minimum the `opt.cmd`,
|
||||
* an array of program arguments, the first of which is the binary.
|
||||
*
|
||||
* ```ts
|
||||
* const p = Deno.run({
|
||||
* cmd: ["curl", "https://example.com"],
|
||||
* });
|
||||
* const status = await p.status();
|
||||
* ```
|
||||
*
|
||||
* Subprocess uses same working directory as parent process unless `opt.cwd`
|
||||
* is specified.
|
||||
*
|
||||
* Environmental variables from parent process can be cleared using `opt.clearEnv`.
|
||||
* Doesn't guarantee that only `opt.env` variables are present,
|
||||
* as the OS may set environmental variables for processes.
|
||||
*
|
||||
* Environmental variables for subprocess can be specified using `opt.env`
|
||||
* mapping.
|
||||
*
|
||||
* `opt.uid` sets the child process’s user ID. This translates to a setuid call
|
||||
* in the child process. Failure in the setuid call will cause the spawn to fail.
|
||||
*
|
||||
* `opt.gid` is similar to `opt.uid`, but sets the group ID of the child process.
|
||||
* This has the same semantics as the uid field.
|
||||
*
|
||||
* By default subprocess inherits stdio of parent process. To change
|
||||
* this this, `opt.stdin`, `opt.stdout`, and `opt.stderr` can be set
|
||||
* independently to a resource ID (_rid_) of an open file, `"inherit"`,
|
||||
* `"piped"`, or `"null"`:
|
||||
*
|
||||
* - _number_: the resource ID of an open file/resource. This allows you to
|
||||
* read or write to a file.
|
||||
* - `"inherit"`: The default if unspecified. The subprocess inherits from the
|
||||
* parent.
|
||||
* - `"piped"`: A new pipe should be arranged to connect the parent and child
|
||||
* sub-process.
|
||||
* - `"null"`: This stream will be ignored. This is the equivalent of attaching
|
||||
* the stream to `/dev/null`.
|
||||
*
|
||||
* Details of the spawned process are returned as an instance of
|
||||
* {@linkcode Deno.Process}.
|
||||
*
|
||||
* Requires `allow-run` permission.
|
||||
*
|
||||
* @deprecated This will be soft-removed in Deno 2.0. See the
|
||||
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
|
||||
* for migration instructions.
|
||||
*
|
||||
* @tags allow-run
|
||||
* @category Subprocess
|
||||
*/
|
||||
export function run<T extends RunOptions = RunOptions>(opt: T): Process<T>;
/** Create a child process.
*
* If any stdio options are not set to `"piped"`, accessing the corresponding

@ -5256,136 +4835,6 @@ declare namespace Deno {
options?: SymlinkOptions,
): void;

/**
* Truncates or extends the specified file stream, to reach the specified
* `len`.
*
* If `len` is not specified then the entire file contents are truncated as if
* `len` was set to `0`.
*
* If the file previously was larger than this new length, the extra data is
* lost.
*
* If the file previously was shorter, it is extended, and the extended part
* reads as null bytes ('\0').
*
* ### Truncate the entire file
*
* ```ts
* const file = await Deno.open(
*   "my_file.txt",
*   { read: true, write: true, create: true }
* );
* await Deno.ftruncate(file.rid);
* ```
*
* ### Truncate part of the file
*
* ```ts
* const file = await Deno.open(
*   "my_file.txt",
*   { read: true, write: true, create: true }
* );
* await file.write(new TextEncoder().encode("Hello World"));
* await Deno.ftruncate(file.rid, 7);
* const data = new Uint8Array(32);
* await Deno.read(file.rid, data);
* console.log(new TextDecoder().decode(data)); // Hello W
* ```
*
* @deprecated This will be removed in Deno 2.0. See the
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
* for migration instructions.
*
* @category File System
*/
export function ftruncate(rid: number, len?: number): Promise<void>;
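A minimal sketch of the handle-based replacement suggested by the migration guide, assuming `Deno.FsFile.truncate()`; the file name is illustrative:

```ts
// Hypothetical Deno 2 style: truncate through the FsFile handle
// instead of passing a resource ID to Deno.ftruncate().
const file = await Deno.open("my_file.txt", {
  read: true,
  write: true,
  create: true,
});
await file.write(new TextEncoder().encode("Hello World"));
await file.truncate(7); // the file now contains "Hello W"
file.close();
```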
/**
* Synchronously truncates or extends the specified file stream, to reach the
* specified `len`.
*
* If `len` is not specified then the entire file contents are truncated as if
* `len` was set to `0`.
*
* If the file previously was larger than this new length, the extra data is
* lost.
*
* If the file previously was shorter, it is extended, and the extended part
* reads as null bytes ('\0').
*
* ### Truncate the entire file
*
* ```ts
* const file = Deno.openSync(
*   "my_file.txt",
*   { read: true, write: true, truncate: true, create: true }
* );
* Deno.ftruncateSync(file.rid);
* ```
*
* ### Truncate part of the file
*
* ```ts
* const file = Deno.openSync(
*   "my_file.txt",
*   { read: true, write: true, create: true }
* );
* file.writeSync(new TextEncoder().encode("Hello World"));
* Deno.ftruncateSync(file.rid, 7);
* Deno.seekSync(file.rid, 0, Deno.SeekMode.Start);
* const data = new Uint8Array(32);
* Deno.readSync(file.rid, data);
* console.log(new TextDecoder().decode(data)); // Hello W
* ```
*
* @deprecated This will be removed in Deno 2.0. See the
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
* for migration instructions.
*
* @category File System
*/
export function ftruncateSync(rid: number, len?: number): void;

/**
* Returns a `Deno.FileInfo` for the given file stream.
*
* ```ts
* import { assert } from "jsr:@std/assert";
*
* const file = await Deno.open("file.txt", { read: true });
* const fileInfo = await Deno.fstat(file.rid);
* assert(fileInfo.isFile);
* ```
*
* @deprecated This will be removed in Deno 2.0. See the
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
* for migration instructions.
*
* @category File System
*/
export function fstat(rid: number): Promise<FileInfo>;

/**
* Synchronously returns a {@linkcode Deno.FileInfo} for the given file
* stream.
*
* ```ts
* import { assert } from "jsr:@std/assert";
*
* const file = Deno.openSync("file.txt", { read: true });
* const fileInfo = Deno.fstatSync(file.rid);
* assert(fileInfo.isFile);
* ```
*
* @deprecated This will be removed in Deno 2.0. See the
* {@link https://docs.deno.com/runtime/manual/advanced/migrate_deprecations | Deno 1.x to 2.x Migration Guide}
* for migration instructions.
*
* @category File System
*/
export function fstatSync(rid: number): FileInfo;
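Likewise, a minimal sketch of the handle-based replacement for the two `fstat` variants above, assuming `Deno.FsFile.stat()` as described in the migration guide:

```ts
// Hypothetical Deno 2 style: stat through the FsFile handle.
import { assert } from "jsr:@std/assert";

const file = await Deno.open("file.txt", { read: true });
const fileInfo = await file.stat();
assert(fileInfo.isFile);
file.close();
```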
/**
* Synchronously changes the access (`atime`) and modification (`mtime`) times
* of a file system object referenced by `path`. Given times are either in

@ -5616,11 +5065,10 @@ declare namespace Deno {
* Windows.
*
* ```ts
* const p = Deno.run({
*   cmd: ["sleep", "10000"]
* });
* const command = new Deno.Command("sleep", { args: ["10000"] });
* const child = command.spawn();
*
* Deno.kill(p.pid, "SIGINT");
* Deno.kill(child.pid, "SIGINT");
* ```
*
* Requires `allow-run` permission.

@ -6255,14 +5703,14 @@ declare namespace Deno {
/** Serves HTTP requests with the given option bag and handler.
*
* You can specify an object with a port and hostname option, which is the
* address to listen on. The default is port `8000` on hostname `"127.0.0.1"`.
* address to listen on. The default is port `8000` on hostname `"0.0.0.0"`.
*
* You can change the address to listen on using the `hostname` and `port`
* options. The below example serves on port `3000` and hostname `"0.0.0.0"`.
* options. The below example serves on port `3000` and hostname `"127.0.0.1"`.
*
* ```ts
* Deno.serve(
*   { port: 3000, hostname: "0.0.0.0" },
*   { port: 3000, hostname: "127.0.0.1" },
*   (_req) => new Response("Hello, world")
* );
* ```
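A minimal sketch of the documented default after this change (no options means port `8000` on hostname `"0.0.0.0"`); the behavior is taken from the updated doc comment, not verified against the implementation:

```ts
// With no hostname/port options, Deno.serve now documents listening on
// 0.0.0.0:8000 rather than 127.0.0.1:8000.
Deno.serve((_req) => new Response("Hello, world"));
```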
@ -6344,14 +5792,14 @@ declare namespace Deno {
/** Serves HTTP requests with the given option bag.
*
* You can specify an object with a port and hostname option, which is the
* address to listen on. The default is port `8000` on hostname `"127.0.0.1"`.
* address to listen on. The default is port `8000` on hostname `"0.0.0.0"`.
*
* ```ts
* const ac = new AbortController();
*
* const server = Deno.serve({
*   port: 3000,
*   hostname: "0.0.0.0",
*   hostname: "127.0.0.1",
*   handler: (_req) => new Response("Hello, world"),
*   signal: ac.signal,
*   onListen({ port, hostname }) {
74 cli/tsc/dts/lib.deno.unstable.d.ts vendored

@ -840,80 +840,6 @@ declare namespace Deno {
present(): void;
}

/** **UNSTABLE**: New API, yet to be vetted.
*
* These are unstable options which can be used with {@linkcode Deno.run}.
*
* @category Subprocess
* @experimental
*/
export interface UnstableRunOptions extends RunOptions {
/** If `true`, clears the environment variables before executing the
* sub-process.
*
* @default {false} */
clearEnv?: boolean;
/** For POSIX systems, sets the group ID for the sub process. */
gid?: number;
/** For POSIX systems, sets the user ID for the sub process. */
uid?: number;
}

/** **UNSTABLE**: New API, yet to be vetted.
*
* Spawns new subprocess. RunOptions must contain at a minimum the `opt.cmd`,
* an array of program arguments, the first of which is the binary.
*
* ```ts
* const p = Deno.run({
*   cmd: ["curl", "https://example.com"],
* });
* const status = await p.status();
* ```
*
* Subprocess uses same working directory as parent process unless `opt.cwd`
* is specified.
*
* Environmental variables from parent process can be cleared using `opt.clearEnv`.
* Doesn't guarantee that only `opt.env` variables are present,
* as the OS may set environmental variables for processes.
*
* Environmental variables for subprocess can be specified using `opt.env`
* mapping.
*
* `opt.uid` sets the child process’s user ID. This translates to a setuid call
* in the child process. Failure in the setuid call will cause the spawn to fail.
*
* `opt.gid` is similar to `opt.uid`, but sets the group ID of the child process.
* This has the same semantics as the uid field.
*
* By default subprocess inherits stdio of parent process. To change
* this, `opt.stdin`, `opt.stdout`, and `opt.stderr` can be set
* independently to a resource ID (_rid_) of an open file, `"inherit"`,
* `"piped"`, or `"null"`:
*
* - _number_: the resource ID of an open file/resource. This allows you to
*   read or write to a file.
* - `"inherit"`: The default if unspecified. The subprocess inherits from the
*   parent.
* - `"piped"`: A new pipe should be arranged to connect the parent and child
*   sub-process.
* - `"null"`: This stream will be ignored. This is the equivalent of attaching
*   the stream to `/dev/null`.
*
* Details of the spawned process are returned as an instance of
* {@linkcode Deno.Process}.
*
* Requires `allow-run` permission.
*
* @tags allow-run
* @category Subprocess
* @experimental
*/
export function run<T extends UnstableRunOptions = UnstableRunOptions>(
opt: T,
): Process<T>;

/** **UNSTABLE**: New API, yet to be vetted.
*
* A custom `HttpClient` for use with {@linkcode fetch} function. This is
@ -795,7 +795,7 @@ fn resolve_graph_specifier_types(
}

fn resolve_non_graph_specifier_types(
  specifier: &str,
  raw_specifier: &str,
  referrer: &ModuleSpecifier,
  referrer_kind: NodeModuleKind,
  state: &State,

@ -810,14 +810,16 @@ fn resolve_non_graph_specifier_types(
    Ok(Some(NodeResolution::into_specifier_and_media_type(
      node_resolver
        .resolve(
          specifier,
          raw_specifier,
          referrer,
          referrer_kind,
          NodeResolutionMode::Types,
        )
        .ok(),
    )))
  } else if let Ok(npm_req_ref) = NpmPackageReqReference::from_str(specifier) {
  } else if let Ok(npm_req_ref) =
    NpmPackageReqReference::from_str(raw_specifier)
  {
    debug_assert_eq!(referrer_kind, NodeModuleKind::Esm);
    // todo(dsherret): add support for injecting this in the graph so
    // we don't need this special code here.
@ -496,9 +496,9 @@ pub fn hard_link_dir_recursive(from: &Path, to: &Path) -> Result<(), AnyError> {
}

pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), Error> {
  let err_mapper = |err: Error| {
  let err_mapper = |err: Error, kind: Option<ErrorKind>| {
    Error::new(
      err.kind(),
      kind.unwrap_or_else(|| err.kind()),
      format!(
        "{}, symlink '{}' -> '{}'",
        err,

@ -510,12 +510,19 @@ pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), Error> {
  #[cfg(unix)]
  {
    use std::os::unix::fs::symlink;
    symlink(oldpath, newpath).map_err(err_mapper)?;
    symlink(oldpath, newpath).map_err(|e| err_mapper(e, None))?;
  }
  #[cfg(not(unix))]
  {
    use std::os::windows::fs::symlink_dir;
    symlink_dir(oldpath, newpath).map_err(err_mapper)?;
    symlink_dir(oldpath, newpath).map_err(|err| {
      if let Some(code) = err.raw_os_error() {
        if code as u32 == winapi::shared::winerror::ERROR_PRIVILEGE_NOT_HELD {
          return err_mapper(err, Some(ErrorKind::PermissionDenied));
        }
      }
      err_mapper(err, None)
    })?;
  }
  Ok(())
}
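For illustration only, a sketch of how a caller might handle a Windows symlink failure that surfaces as a permission error, analogous to the `ERROR_PRIVILEGE_NOT_HELD` mapping above; the use of `Deno.symlink` and `Deno.errors.PermissionDenied` here is an assumption for the example, not something this hunk changes:

```ts
// Illustrative only: on Windows, creating symlinks can fail when Developer
// Mode / SeCreateSymbolicLinkPrivilege is unavailable. The paths and the
// error class below are assumptions made for this sketch.
try {
  await Deno.symlink("C:\\target\\dir", "C:\\link\\dir", { type: "dir" });
} catch (err) {
  if (err instanceof Deno.errors.PermissionDenied) {
    console.error(
      "Symlink creation requires elevated privileges or Developer Mode on Windows",
    );
  } else {
    throw err;
  }
}
```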
@ -138,7 +138,6 @@ struct SharedWorkerState {
  maybe_inspector_server: Option<Arc<InspectorServer>>,
  maybe_lockfile: Option<Arc<CliLockfile>>,
  feature_checker: Arc<FeatureChecker>,
  enable_future_features: bool,
  code_cache: Option<Arc<dyn code_cache::CodeCache>>,
}

@ -453,8 +452,6 @@ impl CliMainWorkerFactory {
        maybe_inspector_server,
        maybe_lockfile,
        feature_checker,
        // TODO(2.0): remove?
        enable_future_features: true,
        code_cache,
      }),
    }

@ -591,7 +588,6 @@ impl CliMainWorkerFactory {
      argv0: shared.options.argv0.clone(),
      node_debug: shared.options.node_debug.clone(),
      node_ipc_fd: shared.options.node_ipc,
      future: shared.enable_future_features,
      mode,
      serve_port: shared.options.serve_port,
      serve_host: shared.options.serve_host.clone(),

@ -787,7 +783,6 @@ fn create_web_worker_callback(
      argv0: shared.options.argv0.clone(),
      node_debug: shared.options.node_debug.clone(),
      node_ipc_fd: None,
      future: shared.enable_future_features,
      mode: WorkerExecutionMode::Worker,
      serve_port: shared.options.serve_port,
      serve_host: shared.options.serve_host.clone(),
@ -2,7 +2,7 @@

[package]
name = "deno_broadcast_channel"
version = "0.161.0"
version = "0.162.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

2 ext/cache/01_cache.js vendored

@ -105,7 +105,7 @@ class Cache {
    const reqUrl = new URL(innerRequest.url());
    if (reqUrl.protocol !== "http:" && reqUrl.protocol !== "https:") {
      throw new TypeError(
        "Request url protocol must be 'http:' or 'https:'",
        `Request url protocol must be 'http:' or 'https:': received '${reqUrl.protocol}'`,
      );
    }
    if (innerRequest.method !== "GET") {

2 ext/cache/Cargo.toml vendored

@ -2,7 +2,7 @@

[package]
name = "deno_cache"
version = "0.99.0"
version = "0.100.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

2 ext/cache/lib.rs vendored

@ -211,7 +211,7 @@ where
    state.put(cache);
    Ok(state.borrow::<CA>().clone())
  } else {
    Err(type_error("CacheStorage is not available in this context."))
    Err(type_error("CacheStorage is not available in this context"))
  }
}
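For context, a minimal sketch of a Cache API call that can reach this error path when `CacheStorage` is not available in the current context; the cache name and URL are illustrative:

```ts
// Opens (or creates) a named cache; in contexts without CacheStorage this
// rejects with the "CacheStorage is not available in this context" error.
const cache = await caches.open("v1");
await cache.put("https://example.com/", new Response("hello"));
```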
@ -307,7 +307,9 @@ function processImage(input, width, height, sx, sy, sw, sh, options) {
  }

  if (options.colorSpaceConversion === "none") {
    throw new TypeError("options.colorSpaceConversion 'none' is not supported");
    throw new TypeError(
      "Cannot create image: invalid colorSpaceConversion option, 'none' is not supported",
    );
  }

  /*

@ -2,7 +2,7 @@

[package]
name = "deno_canvas"
version = "0.36.0"
version = "0.37.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@ -2,7 +2,7 @@

[package]
name = "deno_console"
version = "0.167.0"
version = "0.168.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@ -41,7 +41,9 @@ export function formatToCronSchedule(
  } else if (end === undefined && every !== undefined) {
    return "*/" + every;
  } else {
    throw new TypeError("Invalid cron schedule");
    throw new TypeError(
      `Invalid cron schedule: start=${start}, end=${end}, every=${every}`,
    );
  }
} else {
  if (typeof exact === "number") {

@ -103,10 +105,14 @@ function cron(
  handler2?: () => Promise<void> | void,
) {
  if (name === undefined) {
    throw new TypeError("Deno.cron requires a unique name");
    throw new TypeError(
      "Cannot create cron job, a unique name is required: received 'undefined'",
    );
  }
  if (schedule === undefined) {
    throw new TypeError("Deno.cron requires a valid schedule");
    throw new TypeError(
      "Cannot create cron job, a schedule is required: received 'undefined'",
    );
  }

  schedule = parseScheduleToString(schedule);

@ -119,13 +125,15 @@ function cron(
  if (typeof handlerOrOptions1 === "function") {
    handler = handlerOrOptions1;
    if (handler2 !== undefined) {
      throw new TypeError("Deno.cron requires a single handler");
      throw new TypeError(
        "Cannot create cron job, a single handler is required: two handlers were specified",
      );
    }
  } else if (typeof handler2 === "function") {
    handler = handler2;
    options = handlerOrOptions1;
  } else {
    throw new TypeError("Deno.cron requires a handler");
    throw new TypeError("Cannot create cron job: a handler is required");
  }

  const rid = op_cron_create(
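A minimal sketch of a `Deno.cron` call that satisfies the validation above (a defined name, a defined schedule, and exactly one handler); the job name and schedule string are illustrative:

```ts
// Passes the checks in the cron() wrapper: name and schedule are defined
// and only a single handler is supplied.
Deno.cron("sample cron job", "*/10 * * * *", () => {
  console.log("runs every 10 minutes");
});
```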
@ -2,7 +2,7 @@

[package]
name = "deno_cron"
version = "0.47.0"
version = "0.48.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@ -116,12 +116,15 @@ where

fn validate_cron_name(name: &str) -> Result<(), AnyError> {
  if name.len() > 64 {
    return Err(type_error("Cron name is too long"));
    return Err(type_error(format!(
      "Cron name cannot exceed 64 characters: current length {}",
      name.len()
    )));
  }
  if !name.chars().all(|c| {
    c.is_ascii_whitespace() || c.is_ascii_alphanumeric() || c == '_' || c == '-'
  }) {
    return Err(type_error("Invalid cron name. Only alphanumeric characters, whitespace, hyphens, and underscores are allowed"));
    return Err(type_error("Invalid cron name: only alphanumeric characters, whitespace, hyphens, and underscores are allowed"));
  }
  Ok(())
}
@ -292,7 +292,7 @@ function normalizeAlgorithm(algorithm, op) {
|
|||
normalizedAlgorithm[member] = normalizeAlgorithm(idlValue, "digest");
|
||||
} else if (idlType === "AlgorithmIdentifier") {
|
||||
// TODO(lucacasonato): implement
|
||||
throw new TypeError("unimplemented");
|
||||
throw new TypeError("Unimplemented");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -443,7 +443,7 @@ function getKeyLength(algorithm) {
|
|||
// 1.
|
||||
if (!ArrayPrototypeIncludes([128, 192, 256], algorithm.length)) {
|
||||
throw new DOMException(
|
||||
"length must be 128, 192, or 256",
|
||||
`Length must be 128, 192, or 256: received ${algorithm.length}`,
|
||||
"OperationError",
|
||||
);
|
||||
}
|
||||
|
@ -470,14 +470,14 @@ function getKeyLength(algorithm) {
|
|||
break;
|
||||
default:
|
||||
throw new DOMException(
|
||||
"Unrecognized hash algorithm",
|
||||
`Unrecognized hash algorithm: ${algorithm.hash.name}`,
|
||||
"NotSupportedError",
|
||||
);
|
||||
}
|
||||
} else if (algorithm.length !== 0) {
|
||||
length = algorithm.length;
|
||||
} else {
|
||||
throw new TypeError("Invalid length.");
|
||||
throw new TypeError(`Invalid length: ${algorithm.length}`);
|
||||
}
|
||||
|
||||
// 2.
|
||||
|
@ -492,7 +492,7 @@ function getKeyLength(algorithm) {
|
|||
return null;
|
||||
}
|
||||
default:
|
||||
throw new TypeError("unreachable");
|
||||
throw new TypeError("Unreachable");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -556,7 +556,7 @@ class SubtleCrypto {
|
|||
// 8.
|
||||
if (normalizedAlgorithm.name !== key[_algorithm].name) {
|
||||
throw new DOMException(
|
||||
"Encryption algorithm doesn't match key algorithm.",
|
||||
`Encryption algorithm '${normalizedAlgorithm.name}' does not match key algorithm`,
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
@ -564,7 +564,7 @@ class SubtleCrypto {
|
|||
// 9.
|
||||
if (!ArrayPrototypeIncludes(key[_usages], "encrypt")) {
|
||||
throw new DOMException(
|
||||
"Key does not support the 'encrypt' operation.",
|
||||
"Key does not support the 'encrypt' operation",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
@ -599,7 +599,7 @@ class SubtleCrypto {
|
|||
// 8.
|
||||
if (normalizedAlgorithm.name !== key[_algorithm].name) {
|
||||
throw new DOMException(
|
||||
"Decryption algorithm doesn't match key algorithm.",
|
||||
`Decryption algorithm "${normalizedAlgorithm.name}" does not match key algorithm`,
|
||||
"OperationError",
|
||||
);
|
||||
}
|
||||
|
@ -607,7 +607,7 @@ class SubtleCrypto {
|
|||
// 9.
|
||||
if (!ArrayPrototypeIncludes(key[_usages], "decrypt")) {
|
||||
throw new DOMException(
|
||||
"Key does not support the 'decrypt' operation.",
|
||||
"Key does not support the 'decrypt' operation",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
@ -683,7 +683,7 @@ class SubtleCrypto {
|
|||
normalizedAlgorithm.length === 0 || normalizedAlgorithm.length > 128
|
||||
) {
|
||||
throw new DOMException(
|
||||
"Counter length must not be 0 or greater than 128",
|
||||
`Counter length must not be 0 or greater than 128: received ${normalizedAlgorithm.length}`,
|
||||
"OperationError",
|
||||
);
|
||||
}
|
||||
|
@ -713,7 +713,7 @@ class SubtleCrypto {
|
|||
)
|
||||
) {
|
||||
throw new DOMException(
|
||||
"Invalid tag length",
|
||||
`Invalid tag length: ${normalizedAlgorithm.tagLength}`,
|
||||
"OperationError",
|
||||
);
|
||||
}
|
||||
|
@ -805,7 +805,7 @@ class SubtleCrypto {
|
|||
// 8.
|
||||
if (normalizedAlgorithm.name !== key[_algorithm].name) {
|
||||
throw new DOMException(
|
||||
"Signing algorithm doesn't match key algorithm.",
|
||||
"Signing algorithm does not match key algorithm",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
@ -813,7 +813,7 @@ class SubtleCrypto {
|
|||
// 9.
|
||||
if (!ArrayPrototypeIncludes(key[_usages], "sign")) {
|
||||
throw new DOMException(
|
||||
"Key does not support the 'sign' operation.",
|
||||
"Key does not support the 'sign' operation",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
@ -928,7 +928,7 @@ class SubtleCrypto {
|
|||
}
|
||||
}
|
||||
|
||||
throw new TypeError("unreachable");
|
||||
throw new TypeError("Unreachable");
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -967,11 +967,11 @@ class SubtleCrypto {
|
|||
if (ArrayBufferIsView(keyData) || isArrayBuffer(keyData)) {
|
||||
keyData = copyBuffer(keyData);
|
||||
} else {
|
||||
throw new TypeError("keyData is a JsonWebKey");
|
||||
throw new TypeError("Cannot import key: 'keyData' is a JsonWebKey");
|
||||
}
|
||||
} else {
|
||||
if (ArrayBufferIsView(keyData) || isArrayBuffer(keyData)) {
|
||||
throw new TypeError("keyData is not a JsonWebKey");
|
||||
throw new TypeError("Cannot import key: 'keyData' is not a JsonWebKey");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1156,7 +1156,7 @@ class SubtleCrypto {
|
|||
// 8.
|
||||
if (!ArrayPrototypeIncludes(baseKey[_usages], "deriveBits")) {
|
||||
throw new DOMException(
|
||||
"baseKey usages does not contain `deriveBits`",
|
||||
"'baseKey' usages does not contain 'deriveBits'",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
@ -1222,7 +1222,7 @@ class SubtleCrypto {
|
|||
// 11.
|
||||
if (normalizedAlgorithm.name !== baseKey[_algorithm].name) {
|
||||
throw new DOMException(
|
||||
"Invalid algorithm name",
|
||||
`Invalid algorithm name: ${normalizedAlgorithm.name}`,
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
@ -1230,7 +1230,7 @@ class SubtleCrypto {
|
|||
// 12.
|
||||
if (!ArrayPrototypeIncludes(baseKey[_usages], "deriveKey")) {
|
||||
throw new DOMException(
|
||||
"baseKey usages does not contain `deriveKey`",
|
||||
"'baseKey' usages does not contain 'deriveKey'",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
@ -1259,7 +1259,7 @@ class SubtleCrypto {
|
|||
ArrayPrototypeIncludes(["private", "secret"], result[_type]) &&
|
||||
keyUsages.length == 0
|
||||
) {
|
||||
throw new SyntaxError("Invalid key usages");
|
||||
throw new SyntaxError("Invalid key usage");
|
||||
}
|
||||
// 17.
|
||||
return result;
|
||||
|
@ -1298,14 +1298,14 @@ class SubtleCrypto {
|
|||
|
||||
if (normalizedAlgorithm.name !== key[_algorithm].name) {
|
||||
throw new DOMException(
|
||||
"Verifying algorithm doesn't match key algorithm.",
|
||||
"Verifying algorithm does not match key algorithm",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
||||
if (!ArrayPrototypeIncludes(key[_usages], "verify")) {
|
||||
throw new DOMException(
|
||||
"Key does not support the 'verify' operation.",
|
||||
"Key does not support the 'verify' operation",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
@ -1396,7 +1396,7 @@ class SubtleCrypto {
|
|||
}
|
||||
}
|
||||
|
||||
throw new TypeError("unreachable");
|
||||
throw new TypeError("Unreachable");
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -1435,7 +1435,7 @@ class SubtleCrypto {
|
|||
// 8.
|
||||
if (normalizedAlgorithm.name !== wrappingKey[_algorithm].name) {
|
||||
throw new DOMException(
|
||||
"Wrapping algorithm doesn't match key algorithm.",
|
||||
"Wrapping algorithm does not match key algorithm",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
@ -1443,7 +1443,7 @@ class SubtleCrypto {
|
|||
// 9.
|
||||
if (!ArrayPrototypeIncludes(wrappingKey[_usages], "wrapKey")) {
|
||||
throw new DOMException(
|
||||
"Key does not support the 'wrapKey' operation.",
|
||||
"Key does not support the 'wrapKey' operation",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
@ -1591,7 +1591,7 @@ class SubtleCrypto {
|
|||
// 11.
|
||||
if (normalizedAlgorithm.name !== unwrappingKey[_algorithm].name) {
|
||||
throw new DOMException(
|
||||
"Unwrapping algorithm doesn't match key algorithm.",
|
||||
"Unwrapping algorithm does not match key algorithm",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
@ -1599,7 +1599,7 @@ class SubtleCrypto {
|
|||
// 12.
|
||||
if (!ArrayPrototypeIncludes(unwrappingKey[_usages], "unwrapKey")) {
|
||||
throw new DOMException(
|
||||
"Key does not support the 'unwrapKey' operation.",
|
||||
"Key does not support the 'unwrapKey' operation",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
@ -1678,7 +1678,7 @@ class SubtleCrypto {
|
|||
(result[_type] == "secret" || result[_type] == "private") &&
|
||||
keyUsages.length == 0
|
||||
) {
|
||||
throw new SyntaxError("Invalid key type.");
|
||||
throw new SyntaxError("Invalid key type");
|
||||
}
|
||||
// 17.
|
||||
result[_extractable] = extractable;
|
||||
|
@ -1726,13 +1726,13 @@ class SubtleCrypto {
|
|||
if (ObjectPrototypeIsPrototypeOf(CryptoKeyPrototype, result)) {
|
||||
const type = result[_type];
|
||||
if ((type === "secret" || type === "private") && usages.length === 0) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
} else if (
|
||||
ObjectPrototypeIsPrototypeOf(CryptoKeyPrototype, result.privateKey)
|
||||
) {
|
||||
if (result.privateKey[_usages].length === 0) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1758,7 +1758,7 @@ async function generateKey(normalizedAlgorithm, extractable, usages) {
|
|||
(u) => !ArrayPrototypeIncludes(["sign", "verify"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 2.
|
||||
|
@ -1817,7 +1817,7 @@ async function generateKey(normalizedAlgorithm, extractable, usages) {
|
|||
], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 2.
|
||||
|
@ -1873,7 +1873,7 @@ async function generateKey(normalizedAlgorithm, extractable, usages) {
|
|||
(u) => !ArrayPrototypeIncludes(["sign", "verify"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 2-3.
|
||||
|
@ -1933,7 +1933,7 @@ async function generateKey(normalizedAlgorithm, extractable, usages) {
|
|||
(u) => !ArrayPrototypeIncludes(["deriveKey", "deriveBits"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 2-3.
|
||||
|
@ -1999,7 +1999,7 @@ async function generateKey(normalizedAlgorithm, extractable, usages) {
|
|||
], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
return generateKeyAES(normalizedAlgorithm, extractable, usages);
|
||||
|
@ -2012,7 +2012,7 @@ async function generateKey(normalizedAlgorithm, extractable, usages) {
|
|||
(u) => !ArrayPrototypeIncludes(["wrapKey", "unwrapKey"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
return generateKeyAES(normalizedAlgorithm, extractable, usages);
|
||||
|
@ -2024,7 +2024,7 @@ async function generateKey(normalizedAlgorithm, extractable, usages) {
|
|||
(u) => !ArrayPrototypeIncludes(["deriveKey", "deriveBits"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
const privateKeyData = new Uint8Array(32);
|
||||
const publicKeyData = new Uint8Array(32);
|
||||
|
@ -2065,7 +2065,7 @@ async function generateKey(normalizedAlgorithm, extractable, usages) {
|
|||
(u) => !ArrayPrototypeIncludes(["sign", "verify"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
const ED25519_SEED_LEN = 32;
|
||||
|
@ -2114,7 +2114,7 @@ async function generateKey(normalizedAlgorithm, extractable, usages) {
|
|||
(u) => !ArrayPrototypeIncludes(["sign", "verify"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 2.
|
||||
|
@ -2178,7 +2178,7 @@ function importKeyEd25519(
|
|||
(u) => !ArrayPrototypeIncludes(["verify"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
const handle = {};
|
||||
|
@ -2206,7 +2206,7 @@ function importKeyEd25519(
|
|||
(u) => !ArrayPrototypeIncludes(["verify"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
const publicKeyData = new Uint8Array(32);
|
||||
|
@ -2237,7 +2237,7 @@ function importKeyEd25519(
|
|||
(u) => !ArrayPrototypeIncludes(["sign"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
const privateKeyData = new Uint8Array(32);
|
||||
|
@ -2276,7 +2276,7 @@ function importKeyEd25519(
|
|||
),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
} else {
|
||||
if (
|
||||
|
@ -2289,7 +2289,7 @@ function importKeyEd25519(
|
|||
),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2349,7 +2349,7 @@ function importKeyEd25519(
|
|||
try {
|
||||
privateKeyData = op_crypto_base64url_decode(jwk.d);
|
||||
} catch (_) {
|
||||
throw new DOMException("invalid private key data", "DataError");
|
||||
throw new DOMException("Invalid private key data", "DataError");
|
||||
}
|
||||
|
||||
const handle = {};
|
||||
|
@ -2372,7 +2372,7 @@ function importKeyEd25519(
|
|||
try {
|
||||
publicKeyData = op_crypto_base64url_decode(jwk.x);
|
||||
} catch (_) {
|
||||
throw new DOMException("invalid public key data", "DataError");
|
||||
throw new DOMException("Invalid public key data", "DataError");
|
||||
}
|
||||
|
||||
const handle = {};
|
||||
|
@ -2406,7 +2406,7 @@ function importKeyX25519(
|
|||
case "raw": {
|
||||
// 1.
|
||||
if (keyUsages.length > 0) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
const handle = {};
|
||||
|
@ -2429,7 +2429,7 @@ function importKeyX25519(
|
|||
case "spki": {
|
||||
// 1.
|
||||
if (keyUsages.length > 0) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
const publicKeyData = new Uint8Array(32);
|
||||
|
@ -2460,7 +2460,7 @@ function importKeyX25519(
|
|||
(u) => !ArrayPrototypeIncludes(["deriveKey", "deriveBits"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
const privateKeyData = new Uint8Array(32);
|
||||
|
@ -2499,13 +2499,13 @@ function importKeyX25519(
|
|||
),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
}
|
||||
|
||||
// 3.
|
||||
if (jwk.d === undefined && keyUsages.length > 0) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 4.
|
||||
|
@ -2641,7 +2641,7 @@ function exportKeyAES(
|
|||
break;
|
||||
default:
|
||||
throw new DOMException(
|
||||
"Invalid key length",
|
||||
`Invalid key length: ${algorithm.length}`,
|
||||
"NotSupportedError",
|
||||
);
|
||||
}
|
||||
|
@ -2675,7 +2675,7 @@ function importKeyAES(
|
|||
(u) => !ArrayPrototypeIncludes(supportedKeyUsages, u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
const algorithmName = normalizedAlgorithm.name;
|
||||
|
@ -2731,7 +2731,10 @@ function importKeyAES(
|
|||
jwk.alg !== undefined &&
|
||||
jwk.alg !== aesJwkAlg[algorithmName][128]
|
||||
) {
|
||||
throw new DOMException("Invalid algorithm", "DataError");
|
||||
throw new DOMException(
|
||||
`Invalid algorithm: ${jwk.alg}`,
|
||||
"DataError",
|
||||
);
|
||||
}
|
||||
break;
|
||||
case 192:
|
||||
|
@ -2739,7 +2742,10 @@ function importKeyAES(
|
|||
jwk.alg !== undefined &&
|
||||
jwk.alg !== aesJwkAlg[algorithmName][192]
|
||||
) {
|
||||
throw new DOMException("Invalid algorithm", "DataError");
|
||||
throw new DOMException(
|
||||
`Invalid algorithm: ${jwk.alg}`,
|
||||
"DataError",
|
||||
);
|
||||
}
|
||||
break;
|
||||
case 256:
|
||||
|
@ -2747,7 +2753,10 @@ function importKeyAES(
|
|||
jwk.alg !== undefined &&
|
||||
jwk.alg !== aesJwkAlg[algorithmName][256]
|
||||
) {
|
||||
throw new DOMException("Invalid algorithm", "DataError");
|
||||
throw new DOMException(
|
||||
`Invalid algorithm: ${jwk.alg}`,
|
||||
"DataError",
|
||||
);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
|
@ -2761,7 +2770,7 @@ function importKeyAES(
|
|||
if (
|
||||
keyUsages.length > 0 && jwk.use !== undefined && jwk.use !== "enc"
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "DataError");
|
||||
throw new DOMException("Invalid key usage", "DataError");
|
||||
}
|
||||
|
||||
// 7.
|
||||
|
@ -2844,7 +2853,7 @@ function importKeyHMAC(
|
|||
(u) => !ArrayPrototypeIncludes(["sign", "verify"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 3.
|
||||
|
@ -2926,7 +2935,7 @@ function importKeyHMAC(
|
|||
break;
|
||||
}
|
||||
default:
|
||||
throw new TypeError("unreachable");
|
||||
throw new TypeError("Unreachable");
|
||||
}
|
||||
|
||||
// 7.
|
||||
|
@ -3059,7 +3068,7 @@ function importKeyEC(
|
|||
),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 3.
|
||||
|
@ -3100,7 +3109,7 @@ function importKeyEC(
|
|||
),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 2-9.
|
||||
|
@ -3140,7 +3149,7 @@ function importKeyEC(
|
|||
),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
} else if (keyUsages.length != 0) {
|
||||
throw new DOMException("Key usage must be empty", "SyntaxError");
|
||||
|
@ -3183,7 +3192,7 @@ function importKeyEC(
|
|||
(u) => !ArrayPrototypeIncludes(supportedUsages[keyType], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 3.
|
||||
|
@ -3391,7 +3400,7 @@ function importKeyRSA(
|
|||
),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 2-9.
|
||||
|
@ -3436,7 +3445,7 @@ function importKeyRSA(
|
|||
),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 2-9.
|
||||
|
@ -3485,7 +3494,7 @@ function importKeyRSA(
|
|||
),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
} else if (
|
||||
ArrayPrototypeFind(
|
||||
|
@ -3497,7 +3506,7 @@ function importKeyRSA(
|
|||
),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 3.
|
||||
|
@ -3579,7 +3588,7 @@ function importKeyRSA(
|
|||
break;
|
||||
default:
|
||||
throw new DOMException(
|
||||
`'alg' property of JsonWebKey must be one of 'RS1', 'RS256', 'RS384', 'RS512'`,
|
||||
`'alg' property of JsonWebKey must be one of 'RS1', 'RS256', 'RS384', 'RS512': received ${jwk.alg}`,
|
||||
"DataError",
|
||||
);
|
||||
}
|
||||
|
@ -3602,7 +3611,7 @@ function importKeyRSA(
|
|||
break;
|
||||
default:
|
||||
throw new DOMException(
|
||||
`'alg' property of JsonWebKey must be one of 'PS1', 'PS256', 'PS384', 'PS512'`,
|
||||
`'alg' property of JsonWebKey must be one of 'PS1', 'PS256', 'PS384', 'PS512': received ${jwk.alg}`,
|
||||
"DataError",
|
||||
);
|
||||
}
|
||||
|
@ -3625,7 +3634,7 @@ function importKeyRSA(
|
|||
break;
|
||||
default:
|
||||
throw new DOMException(
|
||||
`'alg' property of JsonWebKey must be one of 'RSA-OAEP', 'RSA-OAEP-256', 'RSA-OAEP-384', or 'RSA-OAEP-512'`,
|
||||
`'alg' property of JsonWebKey must be one of 'RSA-OAEP', 'RSA-OAEP-256', 'RSA-OAEP-384', or 'RSA-OAEP-512': received ${jwk.alg}`,
|
||||
"DataError",
|
||||
);
|
||||
}
|
||||
|
@ -3639,7 +3648,7 @@ function importKeyRSA(
|
|||
// 9.2.
|
||||
if (normalizedHash.name !== normalizedAlgorithm.hash.name) {
|
||||
throw new DOMException(
|
||||
`'alg' property of JsonWebKey must be '${normalizedAlgorithm.name}'`,
|
||||
`'alg' property of JsonWebKey must be '${normalizedAlgorithm.name}': received ${jwk.alg}`,
|
||||
"DataError",
|
||||
);
|
||||
}
|
||||
|
@ -3684,7 +3693,7 @@ function importKeyRSA(
|
|||
}
|
||||
} else {
|
||||
throw new DOMException(
|
||||
"only optimized private keys are supported",
|
||||
"Only optimized private keys are supported",
|
||||
"NotSupportedError",
|
||||
);
|
||||
}
|
||||
|
@ -3782,7 +3791,7 @@ function importKeyHKDF(
|
|||
(u) => !ArrayPrototypeIncludes(["deriveKey", "deriveBits"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 2.
|
||||
|
@ -3834,7 +3843,7 @@ function importKeyPBKDF2(
|
|||
(u) => !ArrayPrototypeIncludes(["deriveKey", "deriveBits"], u),
|
||||
) !== undefined
|
||||
) {
|
||||
throw new DOMException("Invalid key usages", "SyntaxError");
|
||||
throw new DOMException("Invalid key usage", "SyntaxError");
|
||||
}
|
||||
|
||||
// 3.
|
||||
|
@ -3878,7 +3887,7 @@ function exportKeyHMAC(format, key, innerKey) {
|
|||
// 3.
|
||||
case "raw": {
|
||||
const bits = innerKey.data;
|
||||
// TODO(petamoriken): Uint8Array doesn't have push method
|
||||
// TODO(petamoriken): Uint8Array does not have push method
|
||||
// for (let _i = 7 & (8 - bits.length % 8); _i > 0; _i--) {
|
||||
// bits.push(0);
|
||||
// }
|
||||
|
@ -4331,7 +4340,10 @@ async function generateKeyAES(normalizedAlgorithm, extractable, usages) {
|
|||
|
||||
// 2.
|
||||
if (!ArrayPrototypeIncludes([128, 192, 256], normalizedAlgorithm.length)) {
|
||||
throw new DOMException("Invalid key length", "OperationError");
|
||||
throw new DOMException(
|
||||
`Invalid key length: ${normalizedAlgorithm.length}`,
|
||||
"OperationError",
|
||||
);
|
||||
}
|
||||
|
||||
// 3.
|
||||
|
@ -4417,7 +4429,7 @@ async function deriveBits(normalizedAlgorithm, baseKey, length) {
|
|||
publicKey[_algorithm].namedCurve !== baseKey[_algorithm].namedCurve
|
||||
) {
|
||||
throw new DOMException(
|
||||
"namedCurve mismatch",
|
||||
"'namedCurve' mismatch",
|
||||
"InvalidAccessError",
|
||||
);
|
||||
}
|
||||
|
@ -4670,7 +4682,7 @@ async function encrypt(normalizedAlgorithm, key, data) {
|
|||
)
|
||||
) {
|
||||
throw new DOMException(
|
||||
"Invalid tag length",
|
||||
`Invalid tag length: ${normalizedAlgorithm.tagLength}`,
|
||||
"OperationError",
|
||||
);
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_crypto"
|
||||
version = "0.181.0"
|
||||
version = "0.182.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -73,7 +73,7 @@ function fillHeaders(headers, object) {
|
|||
const header = object[i];
|
||||
if (header.length !== 2) {
|
||||
throw new TypeError(
|
||||
`Invalid header. Length must be 2, but is ${header.length}`,
|
||||
`Invalid header: length must be 2, but is ${header.length}`,
|
||||
);
|
||||
}
|
||||
appendHeader(headers, header[0], header[1]);
|
||||
|
@ -133,15 +133,15 @@ function appendHeader(headers, name, value) {
|
|||
|
||||
// 2.
|
||||
if (!checkHeaderNameForHttpTokenCodePoint(name)) {
|
||||
throw new TypeError("Header name is not valid.");
|
||||
throw new TypeError(`Invalid header name: "${name}"`);
|
||||
}
|
||||
if (!checkForInvalidValueChars(value)) {
|
||||
throw new TypeError("Header value is not valid.");
|
||||
throw new TypeError(`Invalid header value: "${value}"`);
|
||||
}
|
||||
|
||||
// 3.
|
||||
if (headers[_guard] == "immutable") {
|
||||
throw new TypeError("Headers are immutable.");
|
||||
throw new TypeError("Cannot change header: headers are immutable");
|
||||
}
|
||||
|
||||
// 7.
|
||||
|
@ -330,10 +330,10 @@ class Headers {
|
|||
name = webidl.converters["ByteString"](name, prefix, "Argument 1");
|
||||
|
||||
if (!checkHeaderNameForHttpTokenCodePoint(name)) {
|
||||
throw new TypeError("Header name is not valid.");
|
||||
throw new TypeError(`Invalid header name: "${name}"`);
|
||||
}
|
||||
if (this[_guard] == "immutable") {
|
||||
throw new TypeError("Headers are immutable.");
|
||||
throw new TypeError("Cannot change headers: headers are immutable");
|
||||
}
|
||||
|
||||
const list = this[_headerList];
|
||||
|
@ -356,7 +356,7 @@ class Headers {
|
|||
name = webidl.converters["ByteString"](name, prefix, "Argument 1");
|
||||
|
||||
if (!checkHeaderNameForHttpTokenCodePoint(name)) {
|
||||
throw new TypeError("Header name is not valid.");
|
||||
throw new TypeError(`Invalid header name: "${name}"`);
|
||||
}
|
||||
|
||||
const list = this[_headerList];
|
||||
|
@ -387,7 +387,7 @@ class Headers {
|
|||
name = webidl.converters["ByteString"](name, prefix, "Argument 1");
|
||||
|
||||
if (!checkHeaderNameForHttpTokenCodePoint(name)) {
|
||||
throw new TypeError("Header name is not valid.");
|
||||
throw new TypeError(`Invalid header name: "${name}"`);
|
||||
}
|
||||
|
||||
const list = this[_headerList];
|
||||
|
@ -415,14 +415,14 @@ class Headers {
|
|||
|
||||
// 2.
|
||||
if (!checkHeaderNameForHttpTokenCodePoint(name)) {
|
||||
throw new TypeError("Header name is not valid.");
|
||||
throw new TypeError(`Invalid header name: "${name}"`);
|
||||
}
|
||||
if (!checkForInvalidValueChars(value)) {
|
||||
throw new TypeError("Header value is not valid.");
|
||||
throw new TypeError(`Invalid header value: "${value}"`);
|
||||
}
|
||||
|
||||
if (this[_guard] == "immutable") {
|
||||
throw new TypeError("Headers are immutable.");
|
||||
throw new TypeError("Cannot change headers: headers are immutable");
|
||||
}
|
||||
|
||||
const list = this[_headerList];
|
||||
|
|
|
@ -396,7 +396,9 @@ class MultipartParser {
|
|||
*/
|
||||
constructor(body, boundary) {
|
||||
if (!boundary) {
|
||||
throw new TypeError("multipart/form-data must provide a boundary");
|
||||
throw new TypeError(
|
||||
"Cannot construct MultipartParser: multipart/form-data must provide a boundary",
|
||||
);
|
||||
}
|
||||
|
||||
this.boundary = `--${boundary}`;
|
||||
|
@ -445,7 +447,7 @@ class MultipartParser {
|
|||
) {
|
||||
return new FormData();
|
||||
}
|
||||
throw new TypeError("Unable to parse body as form data.");
|
||||
throw new TypeError("Unable to parse body as form data");
|
||||
}
|
||||
|
||||
const formData = new FormData();
|
||||
|
|
|
@ -151,7 +151,7 @@ class InnerBody {
|
|||
* @returns {Promise<Uint8Array>}
|
||||
*/
|
||||
consume() {
|
||||
if (this.unusable()) throw new TypeError("Body already consumed.");
|
||||
if (this.unusable()) throw new TypeError("Body already consumed");
|
||||
if (
|
||||
ObjectPrototypeIsPrototypeOf(
|
||||
ReadableStreamPrototype,
|
||||
|
@ -372,7 +372,7 @@ function packageData(bytes, type, mimeType) {
|
|||
const boundary = mimeType.parameters.get("boundary");
|
||||
if (boundary === null) {
|
||||
throw new TypeError(
|
||||
"Missing boundary parameter in mime type of multipart formdata.",
|
||||
"Cannot turn into form data: missing boundary parameter in mime type of multipart form data",
|
||||
);
|
||||
}
|
||||
return parseFormData(chunkToU8(bytes), boundary);
|
||||
|
|
|
@ -172,7 +172,7 @@ function initializeAResponse(response, init, bodyWithType) {
|
|||
// 1.
|
||||
if ((init.status < 200 || init.status > 599) && init.status != 101) {
|
||||
throw new RangeError(
|
||||
`The status provided (${init.status}) is not equal to 101 and outside the range [200, 599].`,
|
||||
`The status provided (${init.status}) is not equal to 101 and outside the range [200, 599]`,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -181,7 +181,9 @@ function initializeAResponse(response, init, bodyWithType) {
|
|||
init.statusText &&
|
||||
RegExpPrototypeExec(REASON_PHRASE_RE, init.statusText) === null
|
||||
) {
|
||||
throw new TypeError("Status text is not valid.");
|
||||
throw new TypeError(
|
||||
`Invalid status text: "${init.statusText}"`,
|
||||
);
|
||||
}
|
||||
|
||||
// 3.
|
||||
|
@ -263,7 +265,7 @@ class Response {
|
|||
const baseURL = getLocationHref();
|
||||
const parsedURL = new URL(url, baseURL);
|
||||
if (!redirectStatus(status)) {
|
||||
throw new RangeError("Invalid redirect status code.");
|
||||
throw new RangeError(`Invalid redirect status code: ${status}`);
|
||||
}
|
||||
const inner = newInnerResponse(status);
|
||||
inner.type = "default";
|
||||
|
@ -395,7 +397,7 @@ class Response {
|
|||
clone() {
|
||||
webidl.assertBranded(this, ResponsePrototype);
|
||||
if (this[_body] && this[_body].unusable()) {
|
||||
throw new TypeError("Body is unusable.");
|
||||
throw new TypeError("Body is unusable");
|
||||
}
|
||||
const second = webidl.createBranded(Response);
|
||||
const newRes = cloneInnerResponse(this[_response]);
|
||||
|
|
|
@ -99,7 +99,7 @@ function createResponseBodyStream(responseBodyRid, terminator) {
|
|||
async function mainFetch(req, recursive, terminator) {
|
||||
if (req.blobUrlEntry !== null) {
|
||||
if (req.method !== "GET") {
|
||||
throw new TypeError("Blob URL fetch only supports GET method.");
|
||||
throw new TypeError("Blob URL fetch only supports GET method");
|
||||
}
|
||||
|
||||
const body = new InnerBody(req.blobUrlEntry.stream());
|
||||
|
@ -145,7 +145,7 @@ async function mainFetch(req, recursive, terminator) {
|
|||
reqRid = resourceForReadableStream(stream, req.body.length);
|
||||
}
|
||||
} else {
|
||||
throw new TypeError("invalid body");
|
||||
throw new TypeError("Invalid body");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -441,13 +441,15 @@ function handleWasmStreaming(source, rid) {
|
|||
typeof contentType !== "string" ||
|
||||
StringPrototypeToLowerCase(contentType) !== "application/wasm"
|
||||
) {
|
||||
throw new TypeError("Invalid WebAssembly content type.");
|
||||
throw new TypeError("Invalid WebAssembly content type");
|
||||
}
|
||||
}
|
||||
|
||||
// 2.5.
|
||||
if (!res.ok) {
|
||||
throw new TypeError(`HTTP status code ${res.status}`);
|
||||
throw new TypeError(
|
||||
`Failed to receive WebAssembly content: HTTP status code ${res.status}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Pass the resolved URL to v8.
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_fetch"
|
||||
version = "0.191.0"
|
||||
version = "0.192.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -78,6 +78,5 @@ Following ops are provided, which can be accessed through `Deno.ops`:
|
|||
|
||||
- op_fetch
|
||||
- op_fetch_send
|
||||
- op_fetch_response_upgrade
|
||||
- op_utf8_to_byte_string
|
||||
- op_fetch_custom_client
|
||||
|
|
|
@ -43,7 +43,7 @@ impl FetchHandler for FsFetchHandler {
|
|||
Ok::<_, ()>(response)
|
||||
}
|
||||
.map_err(move |_| {
|
||||
type_error("NetworkError when attempting to fetch resource.")
|
||||
type_error("NetworkError when attempting to fetch resource")
|
||||
})
|
||||
.or_cancel(&cancel_handle)
|
||||
.boxed_local();
|
||||
|
|
137
ext/fetch/lib.rs
137
ext/fetch/lib.rs
|
@ -28,7 +28,6 @@ use deno_core::futures::Stream;
|
|||
use deno_core::futures::StreamExt;
|
||||
use deno_core::futures::TryFutureExt;
|
||||
use deno_core::op2;
|
||||
use deno_core::unsync::spawn;
|
||||
use deno_core::url::Url;
|
||||
use deno_core::AsyncRefCell;
|
||||
use deno_core::AsyncResult;
|
||||
|
@ -70,12 +69,9 @@ use hyper::body::Frame;
|
|||
use hyper_util::client::legacy::connect::HttpConnector;
|
||||
use hyper_util::client::legacy::connect::HttpInfo;
|
||||
use hyper_util::rt::TokioExecutor;
|
||||
use hyper_util::rt::TokioIo;
|
||||
use hyper_util::rt::TokioTimer;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use tokio::io::AsyncReadExt;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tower::ServiceExt;
|
||||
use tower_http::decompression::Decompression;
|
||||
|
||||
|
@ -127,7 +123,6 @@ deno_core::extension!(deno_fetch,
|
|||
ops = [
|
||||
op_fetch<FP>,
|
||||
op_fetch_send,
|
||||
op_fetch_response_upgrade,
|
||||
op_utf8_to_byte_string,
|
||||
op_fetch_custom_client<FP>,
|
||||
],
|
||||
|
@ -177,7 +172,7 @@ impl FetchHandler for DefaultFileFetchHandler {
|
|||
) -> (CancelableResponseFuture, Option<Rc<CancelHandle>>) {
|
||||
let fut = async move {
|
||||
Ok(Err(type_error(
|
||||
"NetworkError when attempting to fetch resource.",
|
||||
"NetworkError when attempting to fetch resource",
|
||||
)))
|
||||
};
|
||||
(Box::pin(fut), None)
|
||||
|
@ -361,14 +356,14 @@ where
|
|||
let (request_rid, cancel_handle_rid) = match scheme {
|
||||
"file" => {
|
||||
let path = url.to_file_path().map_err(|_| {
|
||||
type_error("NetworkError when attempting to fetch resource.")
|
||||
type_error("NetworkError when attempting to fetch resource")
|
||||
})?;
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions.check_read(&path, "fetch()")?;
|
||||
|
||||
if method != Method::GET {
|
||||
return Err(type_error(format!(
|
||||
"Fetching files only supports the GET method. Received {method}."
|
||||
"Fetching files only supports the GET method: received {method}"
|
||||
)));
|
||||
}
|
||||
|
||||
|
@ -394,7 +389,7 @@ where
|
|||
let uri = url
|
||||
.as_str()
|
||||
.parse::<Uri>()
|
||||
.map_err(|_| type_error("Invalid URL"))?;
|
||||
.map_err(|_| type_error(format!("Invalid URL {url}")))?;
|
||||
|
||||
let mut con_len = None;
|
||||
let body = if has_body {
|
||||
|
@ -522,7 +517,9 @@ where
|
|||
// because the URL isn't an object URL.
|
||||
return Err(type_error("Blob for the given URL not found."));
|
||||
}
|
||||
_ => return Err(type_error(format!("scheme '{scheme}' not supported"))),
|
||||
_ => {
|
||||
return Err(type_error(format!("Url scheme '{scheme}' not supported")))
|
||||
}
|
||||
};
|
||||
|
||||
Ok(FetchReturn {
|
||||
|
@ -586,7 +583,7 @@ pub async fn op_fetch_send(
|
|||
|
||||
return Err(type_error(err.to_string()));
|
||||
}
|
||||
Err(_) => return Err(type_error("request was cancelled")),
|
||||
Err(_) => return Err(type_error("Request was cancelled")),
|
||||
};
|
||||
|
||||
let status = res.status();
|
||||
|
@@ -625,114 +622,6 @@ pub async fn op_fetch_send(
})
}

#[op2(async)]
#[smi]
pub async fn op_fetch_response_upgrade(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
) -> Result<ResourceId, AnyError> {
let raw_response = state
.borrow_mut()
.resource_table
.take::<FetchResponseResource>(rid)?;
let raw_response = Rc::try_unwrap(raw_response)
.expect("Someone is holding onto FetchResponseResource");

let (read, write) = tokio::io::duplex(1024);
let (read_rx, write_tx) = tokio::io::split(read);
let (mut write_rx, mut read_tx) = tokio::io::split(write);
let upgraded = raw_response.upgrade().await?;
{
// Stage 3: Pump the data
let (mut upgraded_rx, mut upgraded_tx) =
tokio::io::split(TokioIo::new(upgraded));

spawn(async move {
let mut buf = [0; 1024];
loop {
let read = upgraded_rx.read(&mut buf).await?;
if read == 0 {
break;
}
read_tx.write_all(&buf[..read]).await?;
}
Ok::<_, AnyError>(())
});
spawn(async move {
let mut buf = [0; 1024];
loop {
let read = write_rx.read(&mut buf).await?;
if read == 0 {
break;
}
upgraded_tx.write_all(&buf[..read]).await?;
}
Ok::<_, AnyError>(())
});
}

Ok(
state
.borrow_mut()
.resource_table
.add(UpgradeStream::new(read_rx, write_tx)),
)
}

struct UpgradeStream {
read: AsyncRefCell<tokio::io::ReadHalf<tokio::io::DuplexStream>>,
write: AsyncRefCell<tokio::io::WriteHalf<tokio::io::DuplexStream>>,
cancel_handle: CancelHandle,
}

impl UpgradeStream {
pub fn new(
read: tokio::io::ReadHalf<tokio::io::DuplexStream>,
write: tokio::io::WriteHalf<tokio::io::DuplexStream>,
) -> Self {
Self {
read: AsyncRefCell::new(read),
write: AsyncRefCell::new(write),
cancel_handle: CancelHandle::new(),
}
}

async fn read(self: Rc<Self>, buf: &mut [u8]) -> Result<usize, AnyError> {
let cancel_handle = RcRef::map(self.clone(), |this| &this.cancel_handle);
async {
let read = RcRef::map(self, |this| &this.read);
let mut read = read.borrow_mut().await;
Ok(Pin::new(&mut *read).read(buf).await?)
}
.try_or_cancel(cancel_handle)
.await
}

async fn write(self: Rc<Self>, buf: &[u8]) -> Result<usize, AnyError> {
let cancel_handle = RcRef::map(self.clone(), |this| &this.cancel_handle);
async {
let write = RcRef::map(self, |this| &this.write);
let mut write = write.borrow_mut().await;
Ok(Pin::new(&mut *write).write(buf).await?)
}
.try_or_cancel(cancel_handle)
.await
}
}

impl Resource for UpgradeStream {
fn name(&self) -> Cow<str> {
"fetchUpgradedStream".into()
}

deno_core::impl_readable_byob!();
deno_core::impl_writable!();

fn close(self: Rc<Self>) {
self.cancel_handle.cancel();
}
}

type CancelableResponseResult =
Result<Result<http::Response<ResBody>, AnyError>, Canceled>;
@@ -1016,9 +905,11 @@ pub fn create_http_client(
let mut http_connector = HttpConnector::new();
http_connector.enforce_http(false);

let user_agent = user_agent
.parse::<HeaderValue>()
.map_err(|_| type_error("illegal characters in User-Agent"))?;
let user_agent = user_agent.parse::<HeaderValue>().map_err(|_| {
type_error(format!(
"Illegal characters in User-Agent: received {user_agent}"
))
})?;

let mut builder =
hyper_util::client::legacy::Builder::new(TokioExecutor::new());

@@ -1060,7 +951,7 @@ pub fn create_http_client(
}
(true, true) => {}
(false, false) => {
return Err(type_error("Either `http1` or `http2` needs to be true"))
return Err(type_error("Cannot create Http Client: either `http1` or `http2` needs to be set to true"))
}
}
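A minimal sketch of how this reworded error surfaces to users, assuming the unstable `Deno.createHttpClient` API with `http1`/`http2` options (run with the relevant `--unstable-*` flag); the call shape here is illustrative, not part of the diff:

try {
  // Disabling both protocols should now reject with the longer message.
  const client = Deno.createHttpClient({ http1: false, http2: false });
  client.close();
} catch (err) {
  // Expected: TypeError: Cannot create Http Client: either `http1` or `http2` needs to be set to true
  console.error(err);
}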
@@ -250,7 +250,7 @@ class UnsafePointer {
}
} else {
throw new TypeError(
"Expected ArrayBuffer, ArrayBufferView or UnsafeCallbackPrototype",
`Cannot access pointer: expected 'ArrayBuffer', 'ArrayBufferView' or 'UnsafeCallbackPrototype', received ${typeof value}`,
);
}
if (pointer) {

@@ -335,7 +335,9 @@ function getTypeSizeAndAlignment(type, cache = new SafeMap()) {
const cached = cache.get(type);
if (cached !== undefined) {
if (cached === null) {
throw new TypeError("Recursive struct definition");
throw new TypeError(
"Cannot get pointer size: found recursive struct",
);
}
return cached;
}

@@ -379,7 +381,7 @@ function getTypeSizeAndAlignment(type, cache = new SafeMap()) {
case "isize":
return [8, 8];
default:
throw new TypeError(`Unsupported type: ${type}`);
throw new TypeError(`Cannot get pointer size, unsupported type: ${type}`);
}
}

@@ -395,7 +397,7 @@ class UnsafeCallback {
constructor(definition, callback) {
if (definition.nonblocking) {
throw new TypeError(
"Invalid UnsafeCallback, cannot be nonblocking",
"Cannot construct UnsafeCallback: cannot be nonblocking",
);
}
const { 0: rid, 1: pointer } = op_ffi_unsafe_callback_create(
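A hypothetical repro of the reworded `UnsafeCallback` error, assuming `deno run --allow-ffi --unstable-ffi`; the callback definition below is illustrative only:

const definition = { parameters: [], result: "void", nonblocking: true } as const;
try {
  new Deno.UnsafeCallback(definition, () => {});
} catch (err) {
  // Expected: TypeError: Cannot construct UnsafeCallback: cannot be nonblocking
  console.error(err);
}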
@@ -467,7 +469,7 @@ class DynamicLibrary {
const type = symbols[symbol].type;
if (type === "void") {
throw new TypeError(
"Foreign symbol of type 'void' is not supported.",
"Foreign symbol of type 'void' is not supported",
);
}
@@ -2,7 +2,7 @@

[package]
name = "deno_ffi"
version = "0.154.0"
version = "0.155.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -334,7 +334,9 @@ pub fn op_ffi_call_nonblocking(
let symbols = &resource.symbols;
*symbols
.get(&symbol)
.ok_or_else(|| type_error("Invalid FFI symbol name"))?
.ok_or_else(|| {
type_error(format!("Invalid FFI symbol name: '{symbol}'"))
})?
.clone()
};

@@ -174,7 +174,7 @@ unsafe extern "C" fn deno_ffi_callback(
let tc_scope = &mut TryCatch::new(scope);
args.run(tc_scope);
if tc_scope.exception().is_some() {
log::error!("Illegal unhandled exception in nonblocking callback.");
log::error!("Illegal unhandled exception in nonblocking callback");
}
});
}
@@ -19,11 +19,11 @@ import {
op_fs_fdatasync_sync,
op_fs_file_stat_async,
op_fs_file_stat_sync,
op_fs_file_truncate_async,
op_fs_flock_async,
op_fs_flock_sync,
op_fs_fsync_async,
op_fs_fsync_sync,
op_fs_ftruncate_async,
op_fs_ftruncate_sync,
op_fs_funlock_async,
op_fs_funlock_async_unstable,

@@ -395,15 +395,6 @@ function parseFileInfo(response) {
};
}

function fstatSync(rid) {
op_fs_file_stat_sync(rid, statBuf);
return statStruct(statBuf);
}

async function fstat(rid) {
return parseFileInfo(await op_fs_file_stat_async(rid));
}

async function lstat(path) {
const res = await op_fs_lstat_async(pathFromURL(path));
return parseFileInfo(res);

@@ -431,14 +422,6 @@ function coerceLen(len) {
return len;
}

function ftruncateSync(rid, len) {
op_fs_ftruncate_sync(rid, coerceLen(len));
}

async function ftruncate(rid, len) {
await op_fs_ftruncate_async(rid, coerceLen(len));
}

function truncateSync(path, len) {
op_fs_truncate_sync(path, coerceLen(len));
}

@@ -664,11 +647,11 @@ class FsFile {
}

truncate(len) {
return ftruncate(this.#rid, len);
return op_fs_file_truncate_async(this.#rid, coerceLen(len));
}

truncateSync(len) {
return ftruncateSync(this.#rid, len);
return op_fs_ftruncate_sync(this.#rid, coerceLen(len));
}

read(p) {

@@ -687,12 +670,13 @@ class FsFile {
return seekSync(this.#rid, offset, whence);
}

stat() {
return fstat(this.#rid);
async stat() {
return parseFileInfo(await op_fs_file_stat_async(this.#rid));
}

statSync() {
return fstatSync(this.#rid);
op_fs_file_stat_sync(this.#rid, statBuf);
return statStruct(statBuf);
}

async syncData() {
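A short usage sketch of the public `Deno.FsFile` methods these hunks rewire (stable APIs; `example.txt` is a hypothetical path and the script needs `--allow-read --allow-write`). Observable behavior is unchanged; the methods now call the ops directly instead of the removed `fstat`/`ftruncate` helpers:

const file = await Deno.open("example.txt", { read: true, write: true, create: true });
await file.write(new TextEncoder().encode("hello world"));
await file.truncate(5); // now backed by op_fs_file_truncate_async
const info = await file.stat(); // now backed by op_fs_file_stat_async
console.log(info.size); // 5
file.close();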
@@ -802,8 +786,6 @@ function checkOpenOptions(options) {
}
}

const File = FsFile;

function readFileSync(path) {
return op_fs_read_file_sync(pathFromURL(path));
}

@@ -966,14 +948,9 @@ export {
cwd,
fdatasync,
fdatasyncSync,
File,
FsFile,
fstat,
fstatSync,
fsync,
fsyncSync,
ftruncate,
ftruncateSync,
funlock,
funlockSync,
link,
@@ -2,7 +2,7 @@

[package]
name = "deno_fs"
version = "0.77.0"
version = "0.78.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -242,7 +242,7 @@ deno_core::extension!(deno_fs,
op_fs_funlock_async,
op_fs_funlock_sync,
op_fs_ftruncate_sync,
op_fs_ftruncate_async,
op_fs_file_truncate_async,
op_fs_futime_sync,
op_fs_futime_async,

@@ -1569,7 +1569,7 @@ pub fn op_fs_ftruncate_sync(
}

#[op2(async)]
pub async fn op_fs_ftruncate_async(
pub async fn op_fs_file_truncate_async(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
#[number] len: u64,
@@ -583,6 +583,19 @@ type RawServeOptions = {

const kLoadBalanced = Symbol("kLoadBalanced");

function mapAnyAddrToLocalhostForWindows(hostname: string) {
// If the hostname is "0.0.0.0", we display "localhost" in console
// because browsers in Windows don't resolve "0.0.0.0".
// See the discussion in https://github.com/denoland/deno_std/issues/1165
if (
(Deno.build.os === "windows") &&
(hostname == "0.0.0.0" || hostname == "::")
) {
return "localhost";
}
return hostname;
}

function serve(arg1, arg2) {
let options: RawServeOptions | undefined;
let handler: RawHandler | undefined;
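A standalone sketch of the mapping the new helper performs (the real `mapAnyAddrToLocalhostForWindows` is module-private inside 00_serve.ts; this copy is for illustration only):

function mapAnyAddrToLocalhost(hostname: string): string {
  // Wildcard addresses are displayed as "localhost" on Windows because
  // browsers there do not resolve "0.0.0.0".
  if (Deno.build.os === "windows" && (hostname === "0.0.0.0" || hostname === "::")) {
    return "localhost";
  }
  return hostname;
}

console.log(mapAnyAddrToLocalhost("0.0.0.0")); // "localhost" on Windows, "0.0.0.0" elsewhere
console.log(mapAnyAddrToLocalhost("127.0.0.1")); // unchanged everywhere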
@@ -672,22 +685,15 @@ function serve(arg1, arg2) {
}

const addr = listener.addr;
// If the hostname is "0.0.0.0", we display "localhost" in console
// because browsers in Windows don't resolve "0.0.0.0".
// See the discussion in https://github.com/denoland/deno_std/issues/1165
const hostname = (addr.hostname == "0.0.0.0" || addr.hostname == "::") &&
(Deno.build.os === "windows")
? "localhost"
: addr.hostname;
addr.hostname = hostname;

const onListen = (scheme) => {
if (options.onListen) {
options.onListen(addr);
} else {
const host = StringPrototypeIncludes(addr.hostname, ":")
? `[${addr.hostname}]`
: addr.hostname;
const hostname = mapAnyAddrToLocalhostForWindows(addr.hostname);
const host = StringPrototypeIncludes(hostname, ":")
? `[${hostname}]`
: hostname;
// deno-lint-ignore no-console
console.log(`Listening on ${scheme}${host}:${addr.port}/`);
}

@@ -862,9 +868,10 @@ function registerDeclarativeServer(exports) {
const nThreads = serveWorkerCount > 1
? ` with ${serveWorkerCount} threads`
: "";
const hostname_ = mapAnyAddrToLocalhostForWindows(hostname);
// deno-lint-ignore no-console
console.debug(
`%cdeno serve%c: Listening on %chttp://${hostname}:${port}/%c${nThreads}`,
`%cdeno serve%c: Listening on %chttp://${hostname_}:${port}/%c${nThreads}`,
"color: green",
"color: inherit",
"color: yellow",
@@ -2,7 +2,7 @@

[package]
name = "deno_http"
version = "0.165.0"
version = "0.166.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@@ -20,7 +20,6 @@ import {
writableStreamForRid,
} from "ext:deno_web/06_streams.js";

const DEFAULT_BUFFER_SIZE = 32 * 1024;
// Seek whence values.
// https://golang.org/pkg/io/#pkg-constants
const SeekMode = {

@@ -33,37 +32,6 @@ const SeekMode = {
End: 2,
};

async function copy(
src,
dst,
options,
) {
internals.warnOnDeprecatedApi(
"Deno.copy()",
new Error().stack,
"Use `copy()` from `https://jsr.io/@std/io/doc/copy/~` instead.",
);
let n = 0;
const bufSize = options?.bufSize ?? DEFAULT_BUFFER_SIZE;
const b = new Uint8Array(bufSize);
let gotEOF = false;
while (gotEOF === false) {
const result = await src.read(b);
if (result === null) {
gotEOF = true;
} else {
let nwritten = 0;
while (nwritten < result) {
nwritten += await dst.write(
TypedArrayPrototypeSubarray(b, nwritten, result),
);
}
n += nwritten;
}
}
return n;
}

function readSync(rid, buffer) {
if (buffer.length === 0) return 0;
const nread = core.readSync(rid, buffer);
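A migration sketch for the removed `Deno.copy()`, using the `copy()` utility from the standard library that the deprecation message points to (file names are hypothetical; requires `--allow-read --allow-write` and network access to fetch the JSR package):

import { copy } from "jsr:@std/io/copy";

const src = await Deno.open("input.txt", { read: true });
const dst = await Deno.open("output.txt", { write: true, create: true });
const bytesCopied = await copy(src, dst); // pumps src into dst until EOF
console.log(`copied ${bytesCopied} bytes`);
src.close();
dst.close();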
@@ -295,7 +263,6 @@ const stdout = new Stdout();
const stderr = new Stderr();

export {
copy,
read,
readAll,
readAllSync,
@@ -2,7 +2,7 @@

[package]
name = "deno_io"
version = "0.77.0"
version = "0.78.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_kv"
version = "0.75.0"
version = "0.76.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_napi"
version = "0.98.0"
version = "0.99.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_net"
version = "0.159.0"
version = "0.160.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_node"
version = "0.104.0"
version = "0.105.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@@ -54,7 +54,6 @@ const fn str_to_utf16<const N: usize>(s: &str) -> [u16; N] {
// - clearTimeout (both, but different implementation)
// - global (node only)
// - performance (both, but different implementation)
// - process (node only)
// - setImmediate (node only)
// - setInterval (both, but different implementation)
// - setTimeout (both, but different implementation)

@@ -62,7 +61,7 @@ const fn str_to_utf16<const N: usize>(s: &str) -> [u16; N] {

// UTF-16 encodings of the managed globals. THIS LIST MUST BE SORTED.
#[rustfmt::skip]
const MANAGED_GLOBALS: [&[u16]; 13] = [
const MANAGED_GLOBALS: [&[u16]; 12] = [
&str_to_utf16::<6>("Buffer"),
&str_to_utf16::<17>("WorkerGlobalScope"),
&str_to_utf16::<14>("clearImmediate"),

@@ -70,7 +69,6 @@ const MANAGED_GLOBALS: [&[u16]; 13] = [
&str_to_utf16::<12>("clearTimeout"),
&str_to_utf16::<6>("global"),
&str_to_utf16::<11>("performance"),
&str_to_utf16::<7>("process"),
&str_to_utf16::<4>("self"),
&str_to_utf16::<12>("setImmediate"),
&str_to_utf16::<11>("setInterval"),
@@ -502,6 +502,7 @@ deno_core::extension!(deno_node,
"internal/error_codes.ts",
"internal/errors.ts",
"internal/event_target.mjs",
"internal/events/abort_listener.mjs",
"internal/fixed_queue.ts",
"internal/fs/streams.mjs",
"internal/fs/utils.mjs",

@@ -272,6 +272,7 @@ pub async fn op_node_http_fetch_response_upgrade(
loop {
let read = upgraded_rx.read(&mut buf).await?;
if read == 0 {
read_tx.shutdown().await?;
break;
}
read_tx.write_all(&buf[..read]).await?;
@@ -940,12 +940,11 @@ Module.prototype.require = function (id) {

// The module wrapper looks slightly different to Node. Instead of using one
// wrapper function, we use two. The first one exists to performance optimize
// access to magic node globals, like `Buffer` or `process`. The second one
// is the actual wrapper function we run the users code in.
// The only observable difference is that in Deno `arguments.callee` is not
// null.
// access to magic node globals, like `Buffer`. The second one is the actual
// wrapper function we run the users code in. The only observable difference is
// that in Deno `arguments.callee` is not null.
Module.wrapper = [
"(function (exports, require, module, __filename, __dirname, Buffer, clearImmediate, clearInterval, clearTimeout, global, process, setImmediate, setInterval, setTimeout, performance) { (function (exports, require, module, __filename, __dirname) {",
"(function (exports, require, module, __filename, __dirname, Buffer, clearImmediate, clearInterval, clearTimeout, global, setImmediate, setInterval, setTimeout, performance) { (function (exports, require, module, __filename, __dirname) {",
"\n}).call(this, exports, require, module, __filename, __dirname); })",
];
Module.wrap = function (script) {

@@ -1030,7 +1029,6 @@ Module.prototype._compile = function (content, filename, format) {
clearInterval,
clearTimeout,
global,
process,
setImmediate,
setInterval,
setTimeout,

@@ -1049,7 +1047,6 @@ Module.prototype._compile = function (content, filename, format) {
clearInterval,
clearTimeout,
global,
process,
setImmediate,
setInterval,
setTimeout,
@@ -46,7 +46,8 @@ import {
} from "ext:deno_node/internal/validators.mjs";
import { spliceOne } from "ext:deno_node/_utils.ts";
import { nextTick } from "ext:deno_node/_process/process.ts";
import { nodeGlobals } from "ext:deno_node/00_globals.js";

export { addAbortListener } from "./internal/events/abort_listener.mjs";

const kCapture = Symbol("kCapture");
const kErrorMonitor = Symbol("events.errorMonitor");

@@ -55,6 +56,11 @@ const kMaxEventTargetListenersWarned = Symbol(
"events.maxEventTargetListenersWarned",
);

let process;
export function setProcess(p) {
process = p;
}

/**
* Creates a new `EventEmitter` instance.
* @param {{ captureRejections?: boolean; }} [opts]

@@ -469,7 +475,7 @@ function _addListener(target, type, listener, prepend) {
w.emitter = target;
w.type = type;
w.count = existing.length;
nodeGlobals.process.emitWarning(w);
process.emitWarning(w);
}
}
@@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// @deno-types="./_events.d.ts"
export {
addAbortListener,
captureRejectionSymbol,
default,
defaultMaxListeners,

44
ext/node/polyfills/internal/events/abort_listener.mjs
Normal file
@@ -0,0 +1,44 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright Joyent, Inc. and Node.js contributors. All rights reserved. MIT license.

import { primordials } from "ext:deno_node/internal/test/binding.ts";
const { queueMicrotask } = primordials;
import { SymbolDispose } from "ext:deno_web/00_infra.js";
import * as abortSignal from "ext:deno_web/03_abort_signal.js";
import { validateAbortSignal, validateFunction } from "../validators.mjs";
import { codes } from "../errors.ts";
const { ERR_INVALID_ARG_TYPE } = codes;

/**
* @param {AbortSignal} signal
* @param {EventListener} listener
* @returns {Disposable}
*/
function addAbortListener(signal, listener) {
if (signal === undefined) {
throw new ERR_INVALID_ARG_TYPE("signal", "AbortSignal", signal);
}
validateAbortSignal(signal, "signal");
validateFunction(listener, "listener");

let removeEventListener;
if (signal.aborted) {
queueMicrotask(() => listener());
} else {
signal[abortSignal.add](() => {
removeEventListener?.();
listener();
});
removeEventListener = () => {
signal[abortSignal.remove](listener);
};
}
return {
__proto__: null,
[SymbolDispose]() {
removeEventListener?.();
},
};
}

export { addAbortListener };
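A usage sketch of the polyfilled Node API (runs under Deno through the `node:` specifier); the returned object is a Disposable, which is exactly what the new abort_listener.mjs implements:

import { addAbortListener } from "node:events";

const controller = new AbortController();
const disposable = addAbortListener(controller.signal, () => {
  console.log("aborted");
});

controller.abort(); // logs "aborted"
disposable[Symbol.dispose](); // detaches the listener (a no-op here, it already fired)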
@@ -69,6 +69,7 @@ import * as constants from "ext:deno_node/internal_binding/constants.ts";
import * as uv from "ext:deno_node/internal_binding/uv.ts";
import type { BindingName } from "ext:deno_node/internal_binding/mod.ts";
import { buildAllowedFlags } from "ext:deno_node/internal/process/per_thread.mjs";
import { setProcess } from "ext:deno_node/_events.mjs";

const notImplementedEvents = [
"multipleResolves",

@@ -81,6 +82,8 @@ export const argv: string[] = ["", ""];
// And retains any value as long as it's nullish or number-ish.
let ProcessExitCode: undefined | null | string | number;

export const execArgv: string[] = [];

/** https://nodejs.org/api/process.html#process_process_exit_code */
export const exit = (code?: number | string) => {
if (code || code === 0) {
@@ -336,7 +339,20 @@ function uncaughtExceptionHandler(err: any, origin: string) {
process.emit("uncaughtException", err, origin);
}

let execPath: string | null = null;
export let execPath: string = Object.freeze({
__proto__: String.prototype,
toString() {
execPath = Deno.execPath();
return execPath;
},
get length() {
return this.toString().length;
},
[Symbol.for("Deno.customInspect")](inspect, options) {
return inspect(this.toString(), options);
},
// deno-lint-ignore no-explicit-any
}) as any as string;

// The process class needs to be an ES5 class because it can be instantiated
// in Node without the `new` keyword. It's not a true class in Node. Popular
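A small sketch of the observable effect, under the assumption drawn from this hunk: the module-level `execPath` becomes a lazy, string-like object, so `Deno.execPath()` (and its permission check) only runs once something actually stringifies it; the `process.execPath` getter below coerces it with `String(...)`, so reads through `process` still yield a plain string:

import process from "node:process";

// Coercion happens inside the getter, so this may prompt for read permission.
console.log(typeof process.execPath); // "string"
console.log(process.execPath.length > 0); // true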
@@ -424,7 +440,7 @@ Process.prototype.cwd = cwd;
Process.prototype.env = env;

/** https://nodejs.org/api/process.html#process_process_execargv */
Process.prototype.execArgv = [];
Process.prototype.execArgv = execArgv;

/** https://nodejs.org/api/process.html#process_process_exit_code */
Process.prototype.exit = exit;

@@ -703,11 +719,7 @@ Process.prototype._eval = undefined;

Object.defineProperty(Process.prototype, "execPath", {
get() {
if (execPath) {
return execPath;
}
execPath = Deno.execPath();
return execPath;
return String(execPath);
},
set(path: string) {
execPath = path;

@@ -960,4 +972,6 @@ internals.__bootstrapNodeProcess = function (
}
};

setProcess(process);

export default process;
@@ -19,6 +19,9 @@ import {
Transform,
Writable,
} from "ext:deno_node/_stream.mjs";
import {
getDefaultHighWaterMark,
} from "ext:deno_node/internal/streams/state.mjs";

export {
_isUint8Array,

@@ -26,6 +29,7 @@ export {
addAbortSignal,
Duplex,
finished,
getDefaultHighWaterMark,
PassThrough,
pipeline,
Readable,
@@ -25,9 +25,13 @@ const {
StringPrototypeIsWellFormed,
StringPrototypePadStart,
StringPrototypeToWellFormed,
PromiseResolve,
} = primordials;

import { promisify } from "ext:deno_node/internal/util.mjs";
import {
createDeferredPromise,
promisify,
} from "ext:deno_node/internal/util.mjs";
import { callbackify } from "ext:deno_node/_util/_util_callbackify.js";
import { debuglog } from "ext:deno_node/internal/util/debuglog.ts";
import {

@@ -41,8 +45,13 @@ import types from "node:util/types";
import { Buffer } from "node:buffer";
import { isDeepStrictEqual } from "ext:deno_node/internal/util/comparisons.ts";
import process from "node:process";
import { validateString } from "ext:deno_node/internal/validators.mjs";
import {
validateAbortSignal,
validateString,
} from "ext:deno_node/internal/validators.mjs";
import { parseArgs } from "ext:deno_node/internal/util/parse_args/parse_args.js";
import * as abortSignal from "ext:deno_web/03_abort_signal.js";
import { ERR_INVALID_ARG_TYPE } from "ext:deno_node/internal/errors.ts";

export {
callbackify,
@@ -288,6 +297,24 @@ export function deprecate(fn: any, msg: string, code?: any) {
return deprecated;
}

// deno-lint-ignore require-await
export async function aborted(
signal: AbortSignal,
// deno-lint-ignore no-explicit-any
_resource: any,
): Promise<void> {
if (signal === undefined) {
throw new ERR_INVALID_ARG_TYPE("signal", "AbortSignal", signal);
}
validateAbortSignal(signal, "signal");
if (signal.aborted) {
return PromiseResolve();
}
const abortPromise = createDeferredPromise();
signal[abortSignal.add](abortPromise.resolve);
return abortPromise.promise;
}

export { getSystemErrorName, isDeepStrictEqual };

export default {
@@ -311,6 +338,7 @@ export default {
isBuffer,
_extend,
getSystemErrorName,
aborted,
deprecate,
callbackify,
parseArgs,
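A usage sketch of the newly polyfilled `util.aborted()` (a real Node API; runs under Deno via `node:util`): it returns a promise that resolves once the given signal aborts:

import { aborted } from "node:util";

const controller = new AbortController();
const anyResource = {}; // in Node, the second argument ties the listener's lifetime to an object
aborted(controller.signal, anyResource).then(() => console.log("signal aborted"));
controller.abort(); // resolves the promise, logs "signal aborted"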
@@ -2,7 +2,7 @@

[package]
name = "node_resolver"
version = "0.6.0"
version = "0.7.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -18,6 +18,7 @@ pub use npm::NpmResolverRc;
pub use package_json::load_pkg_json;
pub use package_json::PackageJsonThreadLocalCache;
pub use path::PathClean;
pub use resolution::parse_npm_pkg_name;
pub use resolution::NodeModuleKind;
pub use resolution::NodeResolution;
pub use resolution::NodeResolutionMode;

@@ -367,6 +367,7 @@ impl<TEnv: NodeResolverEnv> NodeResolver<TEnv> {
pkg_json_path,
});
};
let name = name.split("/").last().unwrap();
vec![name.to_string()]
}
Some(Value::Object(o)) => {
@@ -2,7 +2,7 @@

[package]
name = "deno_tls"
version = "0.154.0"
version = "0.155.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_url"
version = "0.167.0"
version = "0.168.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@@ -71,6 +71,7 @@ class WeakRefSet {
const add = Symbol("[[add]]");
const signalAbort = Symbol("[[signalAbort]]");
const remove = Symbol("[[remove]]");
const runAbortSteps = Symbol("[[runAbortSteps]]");
const abortReason = Symbol("[[abortReason]]");
const abortAlgos = Symbol("[[abortAlgos]]");
const dependent = Symbol("[[dependent]]");
@@ -149,26 +150,43 @@ class AbortSignal extends EventTarget {
return;
}
this[abortReason] = reason;

const dependentSignalsToAbort = [];
if (this[dependentSignals] !== null) {
const dependentSignalArray = this[dependentSignals].toArray();
for (let i = 0; i < dependentSignalArray.length; ++i) {
const dependentSignal = dependentSignalArray[i];
if (dependentSignal[abortReason] === undefined) {
dependentSignal[abortReason] = this[abortReason];
ArrayPrototypePush(dependentSignalsToAbort, dependentSignal);
}
}
}

this[runAbortSteps]();

if (dependentSignalsToAbort.length !== 0) {
for (let i = 0; i < dependentSignalsToAbort.length; ++i) {
const dependentSignal = dependentSignalsToAbort[i];
dependentSignal[runAbortSteps]();
}
}
}

[runAbortSteps]() {
const algos = this[abortAlgos];
this[abortAlgos] = null;

if (listenerCount(this, "abort") > 0) {
const event = new Event("abort");
setIsTrusted(event, true);
super.dispatchEvent(event);
}
if (algos !== null) {
for (const algorithm of new SafeSetIterator(algos)) {
algorithm();
}
}

if (this[dependentSignals] !== null) {
const dependentSignalArray = this[dependentSignals].toArray();
for (let i = 0; i < dependentSignalArray.length; ++i) {
const dependentSignal = dependentSignalArray[i];
dependentSignal[signalAbort](reason);
}
if (listenerCount(this, "abort") > 0) {
const event = new Event("abort");
setIsTrusted(event, true);
super.dispatchEvent(event);
}
}
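A sketch of the ordering this restructuring appears to guarantee: with the two-phase `[runAbortSteps]` logic, every dependent signal created via `AbortSignal.any()` already has its abort reason assigned before any "abort" event fires, so listeners observe a consistent state across related signals:

const parent = new AbortController();
const dependent = AbortSignal.any([parent.signal]);

parent.signal.addEventListener("abort", () => {
  // The dependent signal is already marked aborted when this listener runs.
  console.log(dependent.aborted, dependent.reason); // true "stop"
});

parent.abort("stop");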
@@ -2,7 +2,7 @@

[package]
name = "deno_web"
version = "0.198.0"
version = "0.199.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_webgpu"
version = "0.134.0"
version = "0.135.0"
authors = ["the Deno authors"]
edition.workspace = true
license = "MIT"

@@ -2,7 +2,7 @@

[package]
name = "deno_webidl"
version = "0.167.0"
version = "0.168.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_websocket"
version = "0.172.0"
version = "0.173.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
Some files were not shown because too many files have changed in this diff