mirror of
https://github.com/denoland/deno.git
synced 2024-12-02 17:01:14 -05:00
Merge branch 'cjs_suggestions_for_mjs' of https://github.com/MohammadSu1/my_deno into cjs_suggestions_for_mjs
This commit is contained in:
commit
4db134d19e
1825 changed files with 11889 additions and 3998 deletions
|
@ -65,10 +65,14 @@
|
|||
"tests/wpt/runner/expectation.json",
|
||||
"tests/wpt/runner/manifest.json",
|
||||
"tests/wpt/suite",
|
||||
"third_party"
|
||||
"third_party",
|
||||
"tests/specs/run/shebang_with_json_imports_tsc",
|
||||
"tests/specs/run/shebang_with_json_imports_swc",
|
||||
"tests/specs/run/ext_flag_takes_precedence_over_extension",
|
||||
"tests/specs/run/error_syntax_empty_trailing_line/error_syntax_empty_trailing_line.mjs"
|
||||
],
|
||||
"plugins": [
|
||||
"https://plugins.dprint.dev/typescript-0.93.1.wasm",
|
||||
"https://plugins.dprint.dev/typescript-0.93.2.wasm",
|
||||
"https://plugins.dprint.dev/json-0.19.4.wasm",
|
||||
"https://plugins.dprint.dev/markdown-0.17.8.wasm",
|
||||
"https://plugins.dprint.dev/toml-0.6.3.wasm",
|
||||
|
|
2
.github/workflows/cargo_publish.yml
vendored
2
.github/workflows/cargo_publish.yml
vendored
|
@ -10,7 +10,7 @@ concurrency:
|
|||
jobs:
|
||||
build:
|
||||
name: cargo publish
|
||||
runs-on: ubuntu-20.04-xl
|
||||
runs-on: ubuntu-24.04-xl
|
||||
timeout-minutes: 90
|
||||
|
||||
env:
|
||||
|
|
6
.github/workflows/ci.generate.ts
vendored
6
.github/workflows/ci.generate.ts
vendored
|
@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
|
|||
// Bump this number when you want to purge the cache.
|
||||
// Note: the tools/release/01_bump_crate_versions.ts script will update this version
|
||||
// automatically via regex, so ensure that this line maintains this format.
|
||||
const cacheVersion = 23;
|
||||
const cacheVersion = 24;
|
||||
|
||||
const ubuntuX86Runner = "ubuntu-24.04";
|
||||
const ubuntuX86XlRunner = "ubuntu-24.04-xl";
|
||||
|
@ -14,6 +14,7 @@ const windowsX86Runner = "windows-2022";
|
|||
const windowsX86XlRunner = "windows-2022-xl";
|
||||
const macosX86Runner = "macos-13";
|
||||
const macosArmRunner = "macos-14";
|
||||
const selfHostedMacosArmRunner = "self-hosted";
|
||||
|
||||
const Runners = {
|
||||
linuxX86: {
|
||||
|
@ -40,7 +41,8 @@ const Runners = {
|
|||
macosArm: {
|
||||
os: "macos",
|
||||
arch: "aarch64",
|
||||
runner: macosArmRunner,
|
||||
runner:
|
||||
`\${{ github.repository == 'denoland/deno' && startsWith(github.ref, 'refs/tags/') && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`,
|
||||
},
|
||||
windowsX86: {
|
||||
os: "windows",
|
||||
|
|
12
.github/workflows/ci.yml
vendored
12
.github/workflows/ci.yml
vendored
|
@ -68,12 +68,12 @@ jobs:
|
|||
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
|
||||
- os: macos
|
||||
arch: aarch64
|
||||
runner: macos-14
|
||||
runner: '${{ github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
|
||||
job: test
|
||||
profile: debug
|
||||
- os: macos
|
||||
arch: aarch64
|
||||
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || ''macos-14'' }}'
|
||||
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
|
||||
job: test
|
||||
profile: release
|
||||
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
|
||||
|
@ -361,8 +361,8 @@ jobs:
|
|||
path: |-
|
||||
~/.cargo/registry/index
|
||||
~/.cargo/registry/cache
|
||||
key: '23-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
|
||||
restore-keys: '23-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
|
||||
key: '24-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
|
||||
restore-keys: '24-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
|
||||
if: '!(matrix.skip)'
|
||||
- name: Restore cache build output (PR)
|
||||
uses: actions/cache/restore@v4
|
||||
|
@ -375,7 +375,7 @@ jobs:
|
|||
!./target/*/*.zip
|
||||
!./target/*/*.tar.gz
|
||||
key: never_saved
|
||||
restore-keys: '23-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
|
||||
restore-keys: '24-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
|
||||
- name: Apply and update mtime cache
|
||||
if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
|
||||
uses: ./.github/mtime_cache
|
||||
|
@ -685,7 +685,7 @@ jobs:
|
|||
!./target/*/*.zip
|
||||
!./target/*/*.sha256sum
|
||||
!./target/*/*.tar.gz
|
||||
key: '23-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
|
||||
key: '24-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
|
||||
publish-canary:
|
||||
name: publish canary
|
||||
runs-on: ubuntu-24.04
|
||||
|
|
2
.github/workflows/post_publish.yml
vendored
2
.github/workflows/post_publish.yml
vendored
|
@ -7,7 +7,7 @@ on:
|
|||
jobs:
|
||||
update-dl-version:
|
||||
name: update dl.deno.land version
|
||||
runs-on: ubuntu-22.04
|
||||
runs-on: ubuntu-24.04
|
||||
if: github.repository == 'denoland/deno'
|
||||
steps:
|
||||
- name: Authenticate with Google Cloud
|
||||
|
|
2
.github/workflows/start_release.yml
vendored
2
.github/workflows/start_release.yml
vendored
|
@ -16,7 +16,7 @@ on:
|
|||
jobs:
|
||||
build:
|
||||
name: start release
|
||||
runs-on: ubuntu-22.04
|
||||
runs-on: ubuntu-24.04
|
||||
timeout-minutes: 30
|
||||
|
||||
env:
|
||||
|
|
2
.github/workflows/version_bump.yml
vendored
2
.github/workflows/version_bump.yml
vendored
|
@ -16,7 +16,7 @@ on:
|
|||
jobs:
|
||||
build:
|
||||
name: version bump
|
||||
runs-on: ubuntu-22.04
|
||||
runs-on: ubuntu-24.04
|
||||
timeout-minutes: 90
|
||||
|
||||
env:
|
||||
|
|
2
.github/workflows/wpt_epoch.yml
vendored
2
.github/workflows/wpt_epoch.yml
vendored
|
@ -20,7 +20,7 @@ jobs:
|
|||
fail-fast: false
|
||||
matrix:
|
||||
deno-version: [v1.x, canary]
|
||||
os: [ubuntu-22.04-xl]
|
||||
os: [ubuntu-24.04-xl]
|
||||
|
||||
steps:
|
||||
- name: Clone repository
|
||||
|
|
332
Cargo.lock
generated
332
Cargo.lock
generated
|
@ -765,6 +765,8 @@ dependencies = [
|
|||
"fastwebsockets",
|
||||
"file_test_runner",
|
||||
"flaky_test",
|
||||
"hickory-client",
|
||||
"hickory-server",
|
||||
"http 1.1.0",
|
||||
"http-body-util",
|
||||
"hyper 1.4.1",
|
||||
|
@ -778,8 +780,6 @@ dependencies = [
|
|||
"serde",
|
||||
"test_server",
|
||||
"tokio",
|
||||
"trust-dns-client",
|
||||
"trust-dns-server",
|
||||
"url",
|
||||
"uuid",
|
||||
"zeromq",
|
||||
|
@ -1154,7 +1154,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno"
|
||||
version = "2.0.4"
|
||||
version = "2.0.5"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"async-trait",
|
||||
|
@ -1323,7 +1323,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_bench_util"
|
||||
version = "0.169.0"
|
||||
version = "0.170.0"
|
||||
dependencies = [
|
||||
"bencher",
|
||||
"deno_core",
|
||||
|
@ -1332,7 +1332,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_broadcast_channel"
|
||||
version = "0.169.0"
|
||||
version = "0.170.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"deno_core",
|
||||
|
@ -1343,7 +1343,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_cache"
|
||||
version = "0.107.0"
|
||||
version = "0.108.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"deno_core",
|
||||
|
@ -1376,7 +1376,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_canvas"
|
||||
version = "0.44.0"
|
||||
version = "0.45.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_webgpu",
|
||||
|
@ -1387,9 +1387,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_config"
|
||||
version = "0.37.2"
|
||||
version = "0.38.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5900bfb37538d83b19ba0b157cdc785770e38422ee4632411e3bd3d90ac0f537"
|
||||
checksum = "966825073480a6ac7e01977a3879d13edc8d6ea2d65ea164b37156a5fb206e9a"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"deno_package_json",
|
||||
|
@ -1411,16 +1411,16 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_console"
|
||||
version = "0.175.0"
|
||||
version = "0.176.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_core"
|
||||
version = "0.316.0"
|
||||
version = "0.318.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "94f68061c88ced959c6b0417f0f0d0b3dbeaeb18013b55f86c505e9fba705cf8"
|
||||
checksum = "10cae2393219ff9278123f7b24799cdfab37c7d6561b69ca06ced115cac92111"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bincode",
|
||||
|
@ -1456,7 +1456,7 @@ checksum = "a13951ea98c0a4c372f162d669193b4c9d991512de9f2381dd161027f34b26b1"
|
|||
|
||||
[[package]]
|
||||
name = "deno_cron"
|
||||
version = "0.55.0"
|
||||
version = "0.56.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
|
@ -1469,7 +1469,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_crypto"
|
||||
version = "0.189.0"
|
||||
version = "0.190.0"
|
||||
dependencies = [
|
||||
"aes",
|
||||
"aes-gcm",
|
||||
|
@ -1531,7 +1531,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_fetch"
|
||||
version = "0.199.0"
|
||||
version = "0.200.0"
|
||||
dependencies = [
|
||||
"base64 0.21.7",
|
||||
"bytes",
|
||||
|
@ -1564,7 +1564,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_ffi"
|
||||
version = "0.162.0"
|
||||
version = "0.163.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_permissions",
|
||||
|
@ -1584,7 +1584,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_fs"
|
||||
version = "0.85.0"
|
||||
version = "0.86.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"base32",
|
||||
|
@ -1635,7 +1635,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_http"
|
||||
version = "0.173.0"
|
||||
version = "0.174.0"
|
||||
dependencies = [
|
||||
"async-compression",
|
||||
"async-trait",
|
||||
|
@ -1674,7 +1674,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_io"
|
||||
version = "0.85.0"
|
||||
version = "0.86.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"deno_core",
|
||||
|
@ -1695,7 +1695,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_kv"
|
||||
version = "0.83.0"
|
||||
version = "0.84.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
|
@ -1767,7 +1767,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_napi"
|
||||
version = "0.106.0"
|
||||
version = "0.107.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_permissions",
|
||||
|
@ -1795,24 +1795,24 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_net"
|
||||
version = "0.167.0"
|
||||
version = "0.168.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_permissions",
|
||||
"deno_tls",
|
||||
"hickory-proto",
|
||||
"hickory-resolver",
|
||||
"pin-project",
|
||||
"rustls-tokio-stream",
|
||||
"serde",
|
||||
"socket2",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"trust-dns-proto",
|
||||
"trust-dns-resolver",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_node"
|
||||
version = "0.112.0"
|
||||
version = "0.113.0"
|
||||
dependencies = [
|
||||
"aead-gcm-stream",
|
||||
"aes",
|
||||
|
@ -1921,9 +1921,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_ops"
|
||||
version = "0.192.0"
|
||||
version = "0.194.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bdb7096887508456349d7e7e09e326d157d4dba46ef1f5849bc544592ea3042a"
|
||||
checksum = "f760b492bd638c1dc3e992d11672c259fbe9a233162099a8347591c9e22d0391"
|
||||
dependencies = [
|
||||
"proc-macro-rules",
|
||||
"proc-macro2",
|
||||
|
@ -1961,7 +1961,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_permissions"
|
||||
version = "0.35.0"
|
||||
version = "0.36.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_path_util",
|
||||
|
@ -1972,13 +1972,14 @@ dependencies = [
|
|||
"once_cell",
|
||||
"percent-encoding",
|
||||
"serde",
|
||||
"thiserror",
|
||||
"which 4.4.2",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deno_resolver"
|
||||
version = "0.7.0"
|
||||
version = "0.8.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"base32",
|
||||
|
@ -1994,7 +1995,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_runtime"
|
||||
version = "0.184.0"
|
||||
version = "0.185.0"
|
||||
dependencies = [
|
||||
"color-print",
|
||||
"deno_ast",
|
||||
|
@ -2112,7 +2113,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_tls"
|
||||
version = "0.162.0"
|
||||
version = "0.163.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_native_certs",
|
||||
|
@ -2161,7 +2162,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_url"
|
||||
version = "0.175.0"
|
||||
version = "0.176.0"
|
||||
dependencies = [
|
||||
"deno_bench_util",
|
||||
"deno_console",
|
||||
|
@ -2173,7 +2174,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_web"
|
||||
version = "0.206.0"
|
||||
version = "0.207.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"base64-simd 0.8.0",
|
||||
|
@ -2195,7 +2196,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_webgpu"
|
||||
version = "0.142.0"
|
||||
version = "0.143.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"raw-window-handle",
|
||||
|
@ -2208,7 +2209,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_webidl"
|
||||
version = "0.175.0"
|
||||
version = "0.176.0"
|
||||
dependencies = [
|
||||
"deno_bench_util",
|
||||
"deno_core",
|
||||
|
@ -2216,7 +2217,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_websocket"
|
||||
version = "0.180.0"
|
||||
version = "0.181.0"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"deno_core",
|
||||
|
@ -2238,7 +2239,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "deno_webstorage"
|
||||
version = "0.170.0"
|
||||
version = "0.171.0"
|
||||
dependencies = [
|
||||
"deno_core",
|
||||
"deno_web",
|
||||
|
@ -2608,9 +2609,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "dprint-plugin-typescript"
|
||||
version = "0.93.1"
|
||||
version = "0.93.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5abfd78fe3cde4f5a6699d65f760c8d44da130cf446b6f80a7a9bc6580e156ab"
|
||||
checksum = "3ff29fd136541e59d51946f0d2d353fefc886776f61a799ebfb5838b06cef13b"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"deno_ast",
|
||||
|
@ -2638,15 +2639,6 @@ dependencies = [
|
|||
"text_lines",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "drain"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9d105028bd2b5dfcb33318fd79a445001ead36004dd8dffef1bdd7e493d8bc1e"
|
||||
dependencies = [
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dsa"
|
||||
version = "0.6.3"
|
||||
|
@ -3544,6 +3536,92 @@ version = "0.2.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dfa686283ad6dd069f105e5ab091b04c62850d3e4cf5d67debad1933f55023df"
|
||||
|
||||
[[package]]
|
||||
name = "hickory-client"
|
||||
version = "0.24.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bab9683b08d8f8957a857b0236455d80e1886eaa8c6178af556aa7871fb61b55"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"data-encoding",
|
||||
"futures-channel",
|
||||
"futures-util",
|
||||
"hickory-proto",
|
||||
"once_cell",
|
||||
"radix_trie",
|
||||
"rand",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hickory-proto"
|
||||
version = "0.24.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "07698b8420e2f0d6447a436ba999ec85d8fbf2a398bbd737b82cac4a2e96e512"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"cfg-if",
|
||||
"data-encoding",
|
||||
"enum-as-inner",
|
||||
"futures-channel",
|
||||
"futures-io",
|
||||
"futures-util",
|
||||
"idna 0.4.0",
|
||||
"ipnet",
|
||||
"once_cell",
|
||||
"rand",
|
||||
"serde",
|
||||
"thiserror",
|
||||
"tinyvec",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hickory-resolver"
|
||||
version = "0.24.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "28757f23aa75c98f254cf0405e6d8c25b831b32921b050a66692427679b1f243"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"futures-util",
|
||||
"hickory-proto",
|
||||
"ipconfig",
|
||||
"lru-cache",
|
||||
"once_cell",
|
||||
"parking_lot",
|
||||
"rand",
|
||||
"resolv-conf",
|
||||
"serde",
|
||||
"smallvec",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hickory-server"
|
||||
version = "0.24.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9be0e43c556b9b3fdb6c7c71a9a32153a2275d02419e3de809e520bfcfe40c37"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"bytes",
|
||||
"cfg-if",
|
||||
"enum-as-inner",
|
||||
"futures-util",
|
||||
"hickory-proto",
|
||||
"serde",
|
||||
"thiserror",
|
||||
"time",
|
||||
"tokio",
|
||||
"tokio-util",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hkdf"
|
||||
version = "0.12.4"
|
||||
|
@ -4194,9 +4272,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "libsui"
|
||||
version = "0.4.0"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "205eca4e7beaad637dcd38fe41292065894ee7f498077cf3c135d5f7252b9f27"
|
||||
checksum = "89795977654ad6250d6c0915411b622bac22f9efb4f852af94b2e00964cab832"
|
||||
dependencies = [
|
||||
"editpe",
|
||||
"libc",
|
||||
|
@ -4483,7 +4561,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "napi_sym"
|
||||
version = "0.105.0"
|
||||
version = "0.106.0"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"serde",
|
||||
|
@ -4538,7 +4616,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "node_resolver"
|
||||
version = "0.14.0"
|
||||
version = "0.15.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
|
@ -6146,15 +6224,6 @@ dependencies = [
|
|||
"syn 2.0.72",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_spanned"
|
||||
version = "0.6.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_urlencoded"
|
||||
version = "0.7.1"
|
||||
|
@ -6169,9 +6238,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde_v8"
|
||||
version = "0.225.0"
|
||||
version = "0.227.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ce4b71200ef49a9e629edaea3d13fc98c25ede07e1496558df7f09354e37976f"
|
||||
checksum = "0a8294c2223c53bed343be8b80564ece4dc0d03b643b06fa86c4ccc0e064eda0"
|
||||
dependencies = [
|
||||
"num-bigint",
|
||||
"serde",
|
||||
|
@ -7121,6 +7190,7 @@ dependencies = [
|
|||
"console_static_text",
|
||||
"deno_unsync",
|
||||
"denokv_proto",
|
||||
"faster-hex",
|
||||
"fastwebsockets",
|
||||
"flate2",
|
||||
"futures",
|
||||
|
@ -7368,40 +7438,6 @@ dependencies = [
|
|||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml"
|
||||
version = "0.7.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257"
|
||||
dependencies = [
|
||||
"serde",
|
||||
"serde_spanned",
|
||||
"toml_datetime",
|
||||
"toml_edit",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml_datetime"
|
||||
version = "0.6.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml_edit"
|
||||
version = "0.19.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
"serde",
|
||||
"serde_spanned",
|
||||
"toml_datetime",
|
||||
"winnow 0.5.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tower"
|
||||
version = "0.4.13"
|
||||
|
@ -7491,95 +7527,6 @@ dependencies = [
|
|||
"stable_deref_trait",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "trust-dns-client"
|
||||
version = "0.23.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "14135e72c7e6d4c9b6902d4437881a8598f0145dbb2e3f86f92dbad845b61e63"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"data-encoding",
|
||||
"futures-channel",
|
||||
"futures-util",
|
||||
"once_cell",
|
||||
"radix_trie",
|
||||
"rand",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"trust-dns-proto",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "trust-dns-proto"
|
||||
version = "0.23.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3119112651c157f4488931a01e586aa459736e9d6046d3bd9105ffb69352d374"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"cfg-if",
|
||||
"data-encoding",
|
||||
"enum-as-inner",
|
||||
"futures-channel",
|
||||
"futures-io",
|
||||
"futures-util",
|
||||
"idna 0.4.0",
|
||||
"ipnet",
|
||||
"once_cell",
|
||||
"rand",
|
||||
"serde",
|
||||
"smallvec",
|
||||
"thiserror",
|
||||
"tinyvec",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "trust-dns-resolver"
|
||||
version = "0.23.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "10a3e6c3aff1718b3c73e395d1f35202ba2ffa847c6a62eea0db8fb4cfe30be6"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"futures-util",
|
||||
"ipconfig",
|
||||
"lru-cache",
|
||||
"once_cell",
|
||||
"parking_lot",
|
||||
"rand",
|
||||
"resolv-conf",
|
||||
"serde",
|
||||
"smallvec",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"trust-dns-proto",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "trust-dns-server"
|
||||
version = "0.23.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c540f73c2b2ec2f6c54eabd0900e7aafb747a820224b742f556e8faabb461bc7"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"bytes",
|
||||
"cfg-if",
|
||||
"drain",
|
||||
"enum-as-inner",
|
||||
"futures-executor",
|
||||
"futures-util",
|
||||
"serde",
|
||||
"thiserror",
|
||||
"time",
|
||||
"tokio",
|
||||
"toml 0.7.8",
|
||||
"tracing",
|
||||
"trust-dns-proto",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "try-lock"
|
||||
version = "0.2.5"
|
||||
|
@ -8329,15 +8276,6 @@ version = "0.52.4"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"
|
||||
|
||||
[[package]]
|
||||
name = "winnow"
|
||||
version = "0.5.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winnow"
|
||||
version = "0.6.15"
|
||||
|
@ -8373,7 +8311,7 @@ version = "0.1.12"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b68db261ef59e9e52806f688020631e987592bd83619edccda9c47d42cde4f6c"
|
||||
dependencies = [
|
||||
"toml 0.5.11",
|
||||
"toml",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -8450,7 +8388,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "2a6a39b6b5ba0d02c910d05d7fbc366a4befb8901ea107dcde9c1c97acb8a366"
|
||||
dependencies = [
|
||||
"rowan",
|
||||
"winnow 0.6.15",
|
||||
"winnow",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
58
Cargo.toml
58
Cargo.toml
|
@ -46,18 +46,18 @@ repository = "https://github.com/denoland/deno"
|
|||
|
||||
[workspace.dependencies]
|
||||
deno_ast = { version = "=0.43.3", features = ["transpiling"] }
|
||||
deno_core = { version = "0.316.0" }
|
||||
deno_core = { version = "0.318.0" }
|
||||
|
||||
deno_bench_util = { version = "0.169.0", path = "./bench_util" }
|
||||
deno_bench_util = { version = "0.170.0", path = "./bench_util" }
|
||||
deno_lockfile = "=0.23.1"
|
||||
deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
|
||||
deno_npm = "=0.25.4"
|
||||
deno_path_util = "=0.2.1"
|
||||
deno_permissions = { version = "0.35.0", path = "./runtime/permissions" }
|
||||
deno_runtime = { version = "0.184.0", path = "./runtime" }
|
||||
deno_permissions = { version = "0.36.0", path = "./runtime/permissions" }
|
||||
deno_runtime = { version = "0.185.0", path = "./runtime" }
|
||||
deno_semver = "=0.5.16"
|
||||
deno_terminal = "0.2.0"
|
||||
napi_sym = { version = "0.105.0", path = "./ext/napi/sym" }
|
||||
napi_sym = { version = "0.106.0", path = "./ext/napi/sym" }
|
||||
test_util = { package = "test_server", path = "./tests/util/server" }
|
||||
|
||||
denokv_proto = "0.8.1"
|
||||
|
@ -66,32 +66,32 @@ denokv_remote = "0.8.1"
|
|||
denokv_sqlite = { default-features = false, version = "0.8.2" }
|
||||
|
||||
# exts
|
||||
deno_broadcast_channel = { version = "0.169.0", path = "./ext/broadcast_channel" }
|
||||
deno_cache = { version = "0.107.0", path = "./ext/cache" }
|
||||
deno_canvas = { version = "0.44.0", path = "./ext/canvas" }
|
||||
deno_console = { version = "0.175.0", path = "./ext/console" }
|
||||
deno_cron = { version = "0.55.0", path = "./ext/cron" }
|
||||
deno_crypto = { version = "0.189.0", path = "./ext/crypto" }
|
||||
deno_fetch = { version = "0.199.0", path = "./ext/fetch" }
|
||||
deno_ffi = { version = "0.162.0", path = "./ext/ffi" }
|
||||
deno_fs = { version = "0.85.0", path = "./ext/fs" }
|
||||
deno_http = { version = "0.173.0", path = "./ext/http" }
|
||||
deno_io = { version = "0.85.0", path = "./ext/io" }
|
||||
deno_kv = { version = "0.83.0", path = "./ext/kv" }
|
||||
deno_napi = { version = "0.106.0", path = "./ext/napi" }
|
||||
deno_net = { version = "0.167.0", path = "./ext/net" }
|
||||
deno_node = { version = "0.112.0", path = "./ext/node" }
|
||||
deno_tls = { version = "0.162.0", path = "./ext/tls" }
|
||||
deno_url = { version = "0.175.0", path = "./ext/url" }
|
||||
deno_web = { version = "0.206.0", path = "./ext/web" }
|
||||
deno_webgpu = { version = "0.142.0", path = "./ext/webgpu" }
|
||||
deno_webidl = { version = "0.175.0", path = "./ext/webidl" }
|
||||
deno_websocket = { version = "0.180.0", path = "./ext/websocket" }
|
||||
deno_webstorage = { version = "0.170.0", path = "./ext/webstorage" }
|
||||
deno_broadcast_channel = { version = "0.170.0", path = "./ext/broadcast_channel" }
|
||||
deno_cache = { version = "0.108.0", path = "./ext/cache" }
|
||||
deno_canvas = { version = "0.45.0", path = "./ext/canvas" }
|
||||
deno_console = { version = "0.176.0", path = "./ext/console" }
|
||||
deno_cron = { version = "0.56.0", path = "./ext/cron" }
|
||||
deno_crypto = { version = "0.190.0", path = "./ext/crypto" }
|
||||
deno_fetch = { version = "0.200.0", path = "./ext/fetch" }
|
||||
deno_ffi = { version = "0.163.0", path = "./ext/ffi" }
|
||||
deno_fs = { version = "0.86.0", path = "./ext/fs" }
|
||||
deno_http = { version = "0.174.0", path = "./ext/http" }
|
||||
deno_io = { version = "0.86.0", path = "./ext/io" }
|
||||
deno_kv = { version = "0.84.0", path = "./ext/kv" }
|
||||
deno_napi = { version = "0.107.0", path = "./ext/napi" }
|
||||
deno_net = { version = "0.168.0", path = "./ext/net" }
|
||||
deno_node = { version = "0.113.0", path = "./ext/node" }
|
||||
deno_tls = { version = "0.163.0", path = "./ext/tls" }
|
||||
deno_url = { version = "0.176.0", path = "./ext/url" }
|
||||
deno_web = { version = "0.207.0", path = "./ext/web" }
|
||||
deno_webgpu = { version = "0.143.0", path = "./ext/webgpu" }
|
||||
deno_webidl = { version = "0.176.0", path = "./ext/webidl" }
|
||||
deno_websocket = { version = "0.181.0", path = "./ext/websocket" }
|
||||
deno_webstorage = { version = "0.171.0", path = "./ext/webstorage" }
|
||||
|
||||
# resolvers
|
||||
deno_resolver = { version = "0.7.0", path = "./resolvers/deno" }
|
||||
node_resolver = { version = "0.14.0", path = "./resolvers/node" }
|
||||
deno_resolver = { version = "0.8.0", path = "./resolvers/deno" }
|
||||
node_resolver = { version = "0.15.0", path = "./resolvers/node" }
|
||||
|
||||
aes = "=0.8.3"
|
||||
anyhow = "1.0.57"
|
||||
|
|
36
Releases.md
36
Releases.md
|
@ -6,6 +6,42 @@ https://github.com/denoland/deno/releases
|
|||
We also have one-line install commands at:
|
||||
https://github.com/denoland/deno_install
|
||||
|
||||
### 2.0.5 / 2024.11.05
|
||||
|
||||
- fix(add): better error message when adding package that only has pre-release
|
||||
versions (#26724)
|
||||
- fix(add): only add npm deps to package.json if it's at least as close as
|
||||
deno.json (#26683)
|
||||
- fix(cli): set `npm_config_user_agent` when running npm packages or tasks
|
||||
(#26639)
|
||||
- fix(coverage): exclude comment lines from coverage reports (#25939)
|
||||
- fix(ext/node): add `findSourceMap` to the default export of `node:module`
|
||||
(#26720)
|
||||
- fix(ext/node): convert errors from `fs.readFile/fs.readFileSync` to node
|
||||
format (#26632)
|
||||
- fix(ext/node): resolve exports even if parent module filename isn't present
|
||||
(#26553)
|
||||
- fix(ext/node): return `this` from `http.Server.ref/unref()` (#26647)
|
||||
- fix(fmt): do not panic for jsx ignore container followed by jsx text (#26723)
|
||||
- fix(fmt): fix several HTML and components issues (#26654)
|
||||
- fix(fmt): ignore file directive for YAML files (#26717)
|
||||
- fix(install): handle invalid function error, and fallback to junctions
|
||||
regardless of the error (#26730)
|
||||
- fix(lsp): include unstable features from editor settings (#26655)
|
||||
- fix(lsp): scope attribution for lazily loaded assets (#26699)
|
||||
- fix(node): Implement `os.userInfo` properly, add missing `toPrimitive`
|
||||
(#24702)
|
||||
- fix(serve): support serve hmr (#26078)
|
||||
- fix(types): missing `import` permission on `PermissionOptionsObject` (#26627)
|
||||
- fix(workspace): support wildcard packages (#26568)
|
||||
- fix: clamp smi in fast calls by default (#26506)
|
||||
- fix: improved support for cjs and cts modules (#26558)
|
||||
- fix: op_run_microtasks crash (#26718)
|
||||
- fix: panic_hook hangs without procfs (#26732)
|
||||
- fix: remove permission check in op_require_node_module_paths (#26645)
|
||||
- fix: surface package.json location on dep parse failure (#26665)
|
||||
- perf(lsp): don't walk coverage directory (#26715)
|
||||
|
||||
### 2.0.4 / 2024.10.29
|
||||
|
||||
- Revert "fix(ext/node): fix dns.lookup result ordering (#26264)" (#26621)
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_bench_util"
|
||||
version = "0.169.0"
|
||||
version = "0.170.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno"
|
||||
version = "2.0.4"
|
||||
version = "2.0.5"
|
||||
authors.workspace = true
|
||||
default-run = "deno"
|
||||
edition.workspace = true
|
||||
|
@ -70,7 +70,7 @@ winres.workspace = true
|
|||
[dependencies]
|
||||
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
|
||||
deno_cache_dir = { workspace = true }
|
||||
deno_config = { version = "=0.37.2", features = ["workspace", "sync"] }
|
||||
deno_config = { version = "=0.38.2", features = ["workspace", "sync"] }
|
||||
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
|
||||
deno_doc = { version = "0.156.0", default-features = false, features = ["rust", "html", "syntect"] }
|
||||
deno_graph = { version = "=0.84.1" }
|
||||
|
@ -84,7 +84,7 @@ deno_runtime = { workspace = true, features = ["include_js_files_for_snapshottin
|
|||
deno_semver.workspace = true
|
||||
deno_task_shell = "=0.18.1"
|
||||
deno_terminal.workspace = true
|
||||
libsui = "0.4.0"
|
||||
libsui = "0.5.0"
|
||||
node_resolver.workspace = true
|
||||
|
||||
anstream = "0.6.14"
|
||||
|
@ -107,7 +107,7 @@ dotenvy = "0.15.7"
|
|||
dprint-plugin-json = "=0.19.4"
|
||||
dprint-plugin-jupyter = "=0.1.5"
|
||||
dprint-plugin-markdown = "=0.17.8"
|
||||
dprint-plugin-typescript = "=0.93.1"
|
||||
dprint-plugin-typescript = "=0.93.2"
|
||||
env_logger = "=0.10.0"
|
||||
fancy-regex = "=0.10.0"
|
||||
faster-hex.workspace = true
|
||||
|
|
|
@ -3388,8 +3388,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
|
|||
.value_name("IP_OR_HOSTNAME")
|
||||
.help("Allow network access. Optionally specify allowed IP addresses and host names, with ports as necessary")
|
||||
.value_parser(flags_net::validator)
|
||||
.hide(true)
|
||||
;
|
||||
.hide(true);
|
||||
if let Some(requires) = requires {
|
||||
arg = arg.requires(requires)
|
||||
}
|
||||
|
|
|
@ -51,7 +51,7 @@ pub fn parse(paths: Vec<String>) -> clap::error::Result<Vec<String>> {
|
|||
}
|
||||
} else {
|
||||
NetDescriptor::parse(&host_and_port).map_err(|e| {
|
||||
clap::Error::raw(clap::error::ErrorKind::InvalidValue, format!("{e:?}"))
|
||||
clap::Error::raw(clap::error::ErrorKind::InvalidValue, e.to_string())
|
||||
})?;
|
||||
out.push(host_and_port)
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
// deno-lint-ignore-file no-console
|
||||
// deno-lint-ignore-file no-console no-process-globals
|
||||
|
||||
let [total, count] = typeof Deno !== "undefined"
|
||||
? Deno.args
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
// deno-lint-ignore-file no-console
|
||||
// deno-lint-ignore-file no-console no-process-globals
|
||||
|
||||
let [total, count] = typeof Deno !== "undefined"
|
||||
? Deno.args
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
// deno-lint-ignore-file no-console
|
||||
// deno-lint-ignore-file no-console no-process-globals
|
||||
|
||||
const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
|
||||
let [total, count] = typeof Deno !== "undefined"
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
// deno-lint-ignore-file no-console
|
||||
// deno-lint-ignore-file no-console no-process-globals
|
||||
|
||||
let [total, count] = typeof Deno !== "undefined"
|
||||
? Deno.args
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
// deno-lint-ignore-file no-console
|
||||
// deno-lint-ignore-file no-console no-process-globals
|
||||
|
||||
const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
|
||||
let [total, count] = typeof Deno !== "undefined"
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
// deno-lint-ignore-file no-console
|
||||
// deno-lint-ignore-file no-console no-process-globals
|
||||
|
||||
const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
|
||||
let [total, count] = typeof Deno !== "undefined"
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
// deno-lint-ignore-file no-console
|
||||
// deno-lint-ignore-file no-console no-process-globals
|
||||
|
||||
const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
|
||||
let [total, count] = typeof Deno !== "undefined"
|
||||
|
|
|
@ -88,6 +88,10 @@ fn get_resolution_error_class(err: &ResolutionError) -> &'static str {
|
|||
}
|
||||
}
|
||||
|
||||
fn get_try_from_int_error_class(_: &std::num::TryFromIntError) -> &'static str {
|
||||
"TypeError"
|
||||
}
|
||||
|
||||
pub fn get_error_class_name(e: &AnyError) -> &'static str {
|
||||
deno_runtime::errors::get_error_class_name(e)
|
||||
.or_else(|| {
|
||||
|
@ -106,5 +110,9 @@ pub fn get_error_class_name(e: &AnyError) -> &'static str {
|
|||
e.downcast_ref::<ResolutionError>()
|
||||
.map(get_resolution_error_class)
|
||||
})
|
||||
.or_else(|| {
|
||||
e.downcast_ref::<std::num::TryFromIntError>()
|
||||
.map(get_try_from_int_error_class)
|
||||
})
|
||||
.unwrap_or("Error")
|
||||
}
|
||||
|
|
|
@ -12,7 +12,9 @@ use super::urls::url_to_uri;
|
|||
use crate::args::jsr_url;
|
||||
use crate::lsp::search::PackageSearchApi;
|
||||
use crate::tools::lint::CliLinter;
|
||||
use crate::util::path::relative_specifier;
|
||||
use deno_config::workspace::MappedResolution;
|
||||
use deno_graph::source::ResolutionMode;
|
||||
use deno_lint::diagnostic::LintDiagnosticRange;
|
||||
|
||||
use deno_ast::SourceRange;
|
||||
|
@ -228,6 +230,7 @@ pub struct TsResponseImportMapper<'a> {
|
|||
documents: &'a Documents,
|
||||
maybe_import_map: Option<&'a ImportMap>,
|
||||
resolver: &'a LspResolver,
|
||||
tsc_specifier_map: &'a tsc::TscSpecifierMap,
|
||||
file_referrer: ModuleSpecifier,
|
||||
}
|
||||
|
||||
|
@ -236,12 +239,14 @@ impl<'a> TsResponseImportMapper<'a> {
|
|||
documents: &'a Documents,
|
||||
maybe_import_map: Option<&'a ImportMap>,
|
||||
resolver: &'a LspResolver,
|
||||
tsc_specifier_map: &'a tsc::TscSpecifierMap,
|
||||
file_referrer: &ModuleSpecifier,
|
||||
) -> Self {
|
||||
Self {
|
||||
documents,
|
||||
maybe_import_map,
|
||||
resolver,
|
||||
tsc_specifier_map,
|
||||
file_referrer: file_referrer.clone(),
|
||||
}
|
||||
}
|
||||
|
@ -387,6 +392,11 @@ impl<'a> TsResponseImportMapper<'a> {
|
|||
}
|
||||
}
|
||||
}
|
||||
} else if let Some(dep_name) = self
|
||||
.resolver
|
||||
.file_url_to_package_json_dep(specifier, Some(&self.file_referrer))
|
||||
{
|
||||
return Some(dep_name);
|
||||
}
|
||||
|
||||
// check if the import map has this specifier
|
||||
|
@ -457,19 +467,36 @@ impl<'a> TsResponseImportMapper<'a> {
|
|||
specifier: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
) -> Option<String> {
|
||||
if let Ok(specifier) = referrer.join(specifier) {
|
||||
if let Some(specifier) = self.check_specifier(&specifier, referrer) {
|
||||
return Some(specifier);
|
||||
}
|
||||
}
|
||||
let specifier = specifier.strip_suffix(".js").unwrap_or(specifier);
|
||||
for ext in SUPPORTED_EXTENSIONS {
|
||||
let specifier_with_ext = format!("{specifier}{ext}");
|
||||
if self
|
||||
.documents
|
||||
.contains_import(&specifier_with_ext, referrer)
|
||||
let specifier_stem = specifier.strip_suffix(".js").unwrap_or(specifier);
|
||||
let specifiers = std::iter::once(Cow::Borrowed(specifier)).chain(
|
||||
SUPPORTED_EXTENSIONS
|
||||
.iter()
|
||||
.map(|ext| Cow::Owned(format!("{specifier_stem}{ext}"))),
|
||||
);
|
||||
for specifier in specifiers {
|
||||
if let Some(specifier) = self
|
||||
.resolver
|
||||
.as_graph_resolver(Some(&self.file_referrer))
|
||||
.resolve(
|
||||
&specifier,
|
||||
&deno_graph::Range {
|
||||
specifier: referrer.clone(),
|
||||
start: deno_graph::Position::zeroed(),
|
||||
end: deno_graph::Position::zeroed(),
|
||||
},
|
||||
ResolutionMode::Types,
|
||||
)
|
||||
.ok()
|
||||
.and_then(|s| self.tsc_specifier_map.normalize(s.as_str()).ok())
|
||||
.filter(|s| self.documents.exists(s, Some(&self.file_referrer)))
|
||||
{
|
||||
return Some(specifier_with_ext);
|
||||
if let Some(specifier) = self
|
||||
.check_specifier(&specifier, referrer)
|
||||
.or_else(|| relative_specifier(referrer, &specifier))
|
||||
.filter(|s| !s.contains("/node_modules/"))
|
||||
{
|
||||
return Some(specifier);
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
|
@ -559,8 +586,9 @@ fn try_reverse_map_package_json_exports(
|
|||
pub fn fix_ts_import_changes(
|
||||
referrer: &ModuleSpecifier,
|
||||
changes: &[tsc::FileTextChanges],
|
||||
import_mapper: &TsResponseImportMapper,
|
||||
language_server: &language_server::Inner,
|
||||
) -> Result<Vec<tsc::FileTextChanges>, AnyError> {
|
||||
let import_mapper = language_server.get_ts_response_import_mapper(referrer);
|
||||
let mut r = Vec::new();
|
||||
for change in changes {
|
||||
let mut text_changes = Vec::new();
|
||||
|
@ -605,7 +633,7 @@ pub fn fix_ts_import_changes(
|
|||
fn fix_ts_import_action<'a>(
|
||||
referrer: &ModuleSpecifier,
|
||||
action: &'a tsc::CodeFixAction,
|
||||
import_mapper: &TsResponseImportMapper,
|
||||
language_server: &language_server::Inner,
|
||||
) -> Option<Cow<'a, tsc::CodeFixAction>> {
|
||||
if !matches!(
|
||||
action.fix_name.as_str(),
|
||||
|
@ -621,6 +649,7 @@ fn fix_ts_import_action<'a>(
|
|||
let Some(specifier) = specifier else {
|
||||
return Some(Cow::Borrowed(action));
|
||||
};
|
||||
let import_mapper = language_server.get_ts_response_import_mapper(referrer);
|
||||
if let Some(new_specifier) =
|
||||
import_mapper.check_unresolved_specifier(specifier, referrer)
|
||||
{
|
||||
|
@ -728,7 +757,7 @@ pub fn ts_changes_to_edit(
|
|||
}))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CodeActionData {
|
||||
pub specifier: ModuleSpecifier,
|
||||
|
@ -998,11 +1027,8 @@ impl CodeActionCollection {
|
|||
"The action returned from TypeScript is unsupported.",
|
||||
));
|
||||
}
|
||||
let Some(action) = fix_ts_import_action(
|
||||
specifier,
|
||||
action,
|
||||
&language_server.get_ts_response_import_mapper(specifier),
|
||||
) else {
|
||||
let Some(action) = fix_ts_import_action(specifier, action, language_server)
|
||||
else {
|
||||
return Ok(());
|
||||
};
|
||||
let edit = ts_changes_to_edit(&action.changes, language_server)?;
|
||||
|
@ -1051,10 +1077,12 @@ impl CodeActionCollection {
|
|||
specifier: &ModuleSpecifier,
|
||||
diagnostic: &lsp::Diagnostic,
|
||||
) {
|
||||
let data = Some(json!({
|
||||
"specifier": specifier,
|
||||
"fixId": action.fix_id,
|
||||
}));
|
||||
let data = action.fix_id.as_ref().map(|fix_id| {
|
||||
json!(CodeActionData {
|
||||
specifier: specifier.clone(),
|
||||
fix_id: fix_id.clone(),
|
||||
})
|
||||
});
|
||||
let title = if let Some(description) = &action.fix_all_description {
|
||||
description.clone()
|
||||
} else {
|
||||
|
|
|
@ -1059,34 +1059,6 @@ impl Documents {
|
|||
self.cache.is_valid_file_referrer(specifier)
|
||||
}
|
||||
|
||||
/// Return `true` if the provided specifier can be resolved to a document,
|
||||
/// otherwise `false`.
|
||||
pub fn contains_import(
|
||||
&self,
|
||||
specifier: &str,
|
||||
referrer: &ModuleSpecifier,
|
||||
) -> bool {
|
||||
let file_referrer = self.get_file_referrer(referrer);
|
||||
let maybe_specifier = self
|
||||
.resolver
|
||||
.as_graph_resolver(file_referrer.as_deref())
|
||||
.resolve(
|
||||
specifier,
|
||||
&deno_graph::Range {
|
||||
specifier: referrer.clone(),
|
||||
start: deno_graph::Position::zeroed(),
|
||||
end: deno_graph::Position::zeroed(),
|
||||
},
|
||||
ResolutionMode::Types,
|
||||
)
|
||||
.ok();
|
||||
if let Some(import_specifier) = maybe_specifier {
|
||||
self.exists(&import_specifier, file_referrer.as_deref())
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_document_specifier(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
|
|
|
@ -863,7 +863,10 @@ impl Inner {
|
|||
// We ignore these directories by default because there is a
|
||||
// high likelihood they aren't relevant. Someone can opt-into
|
||||
// them by specifying one of them as an enabled path.
|
||||
if matches!(dir_name.as_str(), "vendor" | "node_modules" | ".git") {
|
||||
if matches!(
|
||||
dir_name.as_str(),
|
||||
"vendor" | "coverage" | "node_modules" | ".git"
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
// ignore cargo target directories for anyone using Deno with Rust
|
||||
|
@ -1834,7 +1837,7 @@ impl Inner {
|
|||
fix_ts_import_changes(
|
||||
&code_action_data.specifier,
|
||||
&combined_code_actions.changes,
|
||||
&self.get_ts_response_import_mapper(&code_action_data.specifier),
|
||||
self,
|
||||
)
|
||||
.map_err(|err| {
|
||||
error!("Unable to remap changes: {:#}", err);
|
||||
|
@ -1887,7 +1890,7 @@ impl Inner {
|
|||
refactor_edit_info.edits = fix_ts_import_changes(
|
||||
&action_data.specifier,
|
||||
&refactor_edit_info.edits,
|
||||
&self.get_ts_response_import_mapper(&action_data.specifier),
|
||||
self,
|
||||
)
|
||||
.map_err(|err| {
|
||||
error!("Unable to remap changes: {:#}", err);
|
||||
|
@ -1918,7 +1921,8 @@ impl Inner {
|
|||
// todo(dsherret): this should probably just take the resolver itself
|
||||
// as the import map is an implementation detail
|
||||
.and_then(|d| d.resolver.maybe_import_map()),
|
||||
self.resolver.as_ref(),
|
||||
&self.resolver,
|
||||
&self.ts_server.specifier_map,
|
||||
file_referrer,
|
||||
)
|
||||
}
|
||||
|
@ -2281,7 +2285,11 @@ impl Inner {
|
|||
.into(),
|
||||
scope.cloned(),
|
||||
)
|
||||
.await;
|
||||
.await
|
||||
.unwrap_or_else(|err| {
|
||||
error!("Unable to get completion info from TypeScript: {:#}", err);
|
||||
None
|
||||
});
|
||||
|
||||
if let Some(completions) = maybe_completion_info {
|
||||
response = Some(
|
||||
|
@ -3944,7 +3952,9 @@ mod tests {
|
|||
fn test_walk_workspace() {
|
||||
let temp_dir = TempDir::new();
|
||||
temp_dir.create_dir_all("root1/vendor/");
|
||||
temp_dir.create_dir_all("root1/coverage/");
|
||||
temp_dir.write("root1/vendor/mod.ts", ""); // no, vendor
|
||||
temp_dir.write("root1/coverage/mod.ts", ""); // no, coverage
|
||||
|
||||
temp_dir.create_dir_all("root1/node_modules/");
|
||||
temp_dir.write("root1/node_modules/mod.ts", ""); // no, node_modules
|
||||
|
|
|
@ -74,6 +74,7 @@ struct LspScopeResolver {
|
|||
pkg_json_resolver: Option<Arc<PackageJsonResolver>>,
|
||||
redirect_resolver: Option<Arc<RedirectResolver>>,
|
||||
graph_imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>,
|
||||
package_json_deps_by_resolution: Arc<IndexMap<ModuleSpecifier, String>>,
|
||||
config_data: Option<Arc<ConfigData>>,
|
||||
}
|
||||
|
||||
|
@ -88,6 +89,7 @@ impl Default for LspScopeResolver {
|
|||
pkg_json_resolver: None,
|
||||
redirect_resolver: None,
|
||||
graph_imports: Default::default(),
|
||||
package_json_deps_by_resolution: Default::default(),
|
||||
config_data: None,
|
||||
}
|
||||
}
|
||||
|
@ -165,6 +167,33 @@ impl LspScopeResolver {
|
|||
)
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let package_json_deps_by_resolution = (|| {
|
||||
let node_resolver = node_resolver.as_ref()?;
|
||||
let package_json = config_data?.maybe_pkg_json()?;
|
||||
let referrer = package_json.specifier();
|
||||
let dependencies = package_json.dependencies.as_ref()?;
|
||||
let result = dependencies
|
||||
.iter()
|
||||
.flat_map(|(name, _)| {
|
||||
let req_ref =
|
||||
NpmPackageReqReference::from_str(&format!("npm:{name}")).ok()?;
|
||||
let specifier = into_specifier_and_media_type(Some(
|
||||
node_resolver
|
||||
.resolve_req_reference(
|
||||
&req_ref,
|
||||
&referrer,
|
||||
NodeResolutionMode::Types,
|
||||
)
|
||||
.ok()?,
|
||||
))
|
||||
.0;
|
||||
Some((specifier, name.clone()))
|
||||
})
|
||||
.collect();
|
||||
Some(result)
|
||||
})();
|
||||
let package_json_deps_by_resolution =
|
||||
Arc::new(package_json_deps_by_resolution.unwrap_or_default());
|
||||
Self {
|
||||
cjs_tracker: lsp_cjs_tracker,
|
||||
graph_resolver,
|
||||
|
@ -174,6 +203,7 @@ impl LspScopeResolver {
|
|||
pkg_json_resolver: Some(pkg_json_resolver),
|
||||
redirect_resolver,
|
||||
graph_imports,
|
||||
package_json_deps_by_resolution,
|
||||
config_data: config_data.cloned(),
|
||||
}
|
||||
}
|
||||
|
@ -216,6 +246,9 @@ impl LspScopeResolver {
|
|||
redirect_resolver: self.redirect_resolver.clone(),
|
||||
pkg_json_resolver: Some(pkg_json_resolver),
|
||||
graph_imports: self.graph_imports.clone(),
|
||||
package_json_deps_by_resolution: self
|
||||
.package_json_deps_by_resolution
|
||||
.clone(),
|
||||
config_data: self.config_data.clone(),
|
||||
})
|
||||
}
|
||||
|
@ -407,6 +440,18 @@ impl LspResolver {
|
|||
)))
|
||||
}
|
||||
|
||||
pub fn file_url_to_package_json_dep(
|
||||
&self,
|
||||
specifier: &ModuleSpecifier,
|
||||
file_referrer: Option<&ModuleSpecifier>,
|
||||
) -> Option<String> {
|
||||
let resolver = self.get_scope_resolver(file_referrer);
|
||||
resolver
|
||||
.package_json_deps_by_resolution
|
||||
.get(specifier)
|
||||
.cloned()
|
||||
}
|
||||
|
||||
pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool {
|
||||
fn has_node_modules_dir(specifier: &ModuleSpecifier) -> bool {
|
||||
// consider any /node_modules/ directory as being in the node_modules
|
||||
|
|
102
cli/lsp/tsc.rs
102
cli/lsp/tsc.rs
|
@ -236,7 +236,7 @@ pub struct TsServer {
|
|||
performance: Arc<Performance>,
|
||||
sender: mpsc::UnboundedSender<Request>,
|
||||
receiver: Mutex<Option<mpsc::UnboundedReceiver<Request>>>,
|
||||
specifier_map: Arc<TscSpecifierMap>,
|
||||
pub specifier_map: Arc<TscSpecifierMap>,
|
||||
inspector_server: Mutex<Option<Arc<InspectorServer>>>,
|
||||
pending_change: Mutex<Option<PendingChange>>,
|
||||
}
|
||||
|
@ -882,20 +882,22 @@ impl TsServer {
|
|||
options: GetCompletionsAtPositionOptions,
|
||||
format_code_settings: FormatCodeSettings,
|
||||
scope: Option<ModuleSpecifier>,
|
||||
) -> Option<CompletionInfo> {
|
||||
) -> Result<Option<CompletionInfo>, AnyError> {
|
||||
let req = TscRequest::GetCompletionsAtPosition(Box::new((
|
||||
self.specifier_map.denormalize(&specifier),
|
||||
position,
|
||||
options,
|
||||
format_code_settings,
|
||||
)));
|
||||
match self.request(snapshot, req, scope).await {
|
||||
Ok(maybe_info) => maybe_info,
|
||||
Err(err) => {
|
||||
log::error!("Unable to get completion info from TypeScript: {:#}", err);
|
||||
None
|
||||
}
|
||||
}
|
||||
self
|
||||
.request::<Option<CompletionInfo>>(snapshot, req, scope)
|
||||
.await
|
||||
.map(|mut info| {
|
||||
if let Some(info) = &mut info {
|
||||
info.normalize(&self.specifier_map);
|
||||
}
|
||||
info
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_completion_details(
|
||||
|
@ -3642,6 +3644,12 @@ pub struct CompletionInfo {
|
|||
}
|
||||
|
||||
impl CompletionInfo {
|
||||
fn normalize(&mut self, specifier_map: &TscSpecifierMap) {
|
||||
for entry in &mut self.entries {
|
||||
entry.normalize(specifier_map);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_completion_response(
|
||||
&self,
|
||||
line_index: Arc<LineIndex>,
|
||||
|
@ -3703,11 +3711,17 @@ pub struct CompletionItemData {
|
|||
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct CompletionEntryDataImport {
|
||||
struct CompletionEntryDataAutoImport {
|
||||
module_specifier: String,
|
||||
file_name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct CompletionNormalizedAutoImportData {
|
||||
raw: CompletionEntryDataAutoImport,
|
||||
normalized: ModuleSpecifier,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CompletionEntry {
|
||||
|
@ -3740,9 +3754,28 @@ pub struct CompletionEntry {
|
|||
is_import_statement_completion: Option<bool>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
data: Option<Value>,
|
||||
/// This is not from tsc, we add it for convenience during normalization.
|
||||
/// Represents `self.data.file_name`, but normalized.
|
||||
#[serde(skip)]
|
||||
auto_import_data: Option<CompletionNormalizedAutoImportData>,
|
||||
}
|
||||
|
||||
impl CompletionEntry {
|
||||
fn normalize(&mut self, specifier_map: &TscSpecifierMap) {
|
||||
let Some(data) = &self.data else {
|
||||
return;
|
||||
};
|
||||
let Ok(raw) =
|
||||
serde_json::from_value::<CompletionEntryDataAutoImport>(data.clone())
|
||||
else {
|
||||
return;
|
||||
};
|
||||
if let Ok(normalized) = specifier_map.normalize(&raw.file_name) {
|
||||
self.auto_import_data =
|
||||
Some(CompletionNormalizedAutoImportData { raw, normalized });
|
||||
}
|
||||
}
|
||||
|
||||
fn get_commit_characters(
|
||||
&self,
|
||||
info: &CompletionInfo,
|
||||
|
@ -3891,25 +3924,24 @@ impl CompletionEntry {
|
|||
|
||||
if let Some(source) = &self.source {
|
||||
let mut display_source = source.clone();
|
||||
if let Some(data) = &self.data {
|
||||
if let Ok(import_data) =
|
||||
serde_json::from_value::<CompletionEntryDataImport>(data.clone())
|
||||
if let Some(import_data) = &self.auto_import_data {
|
||||
if let Some(new_module_specifier) = language_server
|
||||
.get_ts_response_import_mapper(specifier)
|
||||
.check_specifier(&import_data.normalized, specifier)
|
||||
.or_else(|| relative_specifier(specifier, &import_data.normalized))
|
||||
{
|
||||
if let Ok(import_specifier) = resolve_url(&import_data.file_name) {
|
||||
if let Some(new_module_specifier) = language_server
|
||||
.get_ts_response_import_mapper(specifier)
|
||||
.check_specifier(&import_specifier, specifier)
|
||||
.or_else(|| relative_specifier(specifier, &import_specifier))
|
||||
{
|
||||
display_source.clone_from(&new_module_specifier);
|
||||
if new_module_specifier != import_data.module_specifier {
|
||||
specifier_rewrite =
|
||||
Some((import_data.module_specifier, new_module_specifier));
|
||||
}
|
||||
} else if source.starts_with(jsr_url().as_str()) {
|
||||
return None;
|
||||
}
|
||||
if new_module_specifier.contains("/node_modules/") {
|
||||
return None;
|
||||
}
|
||||
display_source.clone_from(&new_module_specifier);
|
||||
if new_module_specifier != import_data.raw.module_specifier {
|
||||
specifier_rewrite = Some((
|
||||
import_data.raw.module_specifier.clone(),
|
||||
new_module_specifier,
|
||||
));
|
||||
}
|
||||
} else if source.starts_with(jsr_url().as_str()) {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
// We want relative or bare (import-mapped or otherwise) specifiers to
|
||||
|
@ -4212,6 +4244,13 @@ impl TscSpecifierMap {
|
|||
return specifier.to_string();
|
||||
}
|
||||
let mut specifier = original.to_string();
|
||||
if specifier.contains("/node_modules/.deno/")
|
||||
&& !specifier.contains("/node_modules/@types/node/")
|
||||
{
|
||||
// The ts server doesn't give completions from files in
|
||||
// `node_modules/.deno/`. We work around it like this.
|
||||
specifier = specifier.replace("/node_modules/", "/$node_modules/");
|
||||
}
|
||||
let media_type = MediaType::from_specifier(original);
|
||||
// If the URL-inferred media type doesn't correspond to tsc's path-inferred
|
||||
// media type, force it to be the same by appending an extension.
|
||||
|
@ -4329,7 +4368,7 @@ fn op_is_cancelled(state: &mut OpState) -> bool {
|
|||
fn op_is_node_file(state: &mut OpState, #[string] path: String) -> bool {
|
||||
let state = state.borrow::<State>();
|
||||
let mark = state.performance.mark("tsc.op.op_is_node_file");
|
||||
let r = match ModuleSpecifier::parse(&path) {
|
||||
let r = match state.specifier_map.normalize(path) {
|
||||
Ok(specifier) => state.state_snapshot.resolver.in_node_modules(&specifier),
|
||||
Err(_) => false,
|
||||
};
|
||||
|
@ -4609,7 +4648,10 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
|
|||
for doc in &docs {
|
||||
let specifier = doc.specifier();
|
||||
let is_open = doc.is_open();
|
||||
if is_open || specifier.scheme() == "file" {
|
||||
if is_open
|
||||
|| (specifier.scheme() == "file"
|
||||
&& !state.state_snapshot.resolver.in_node_modules(specifier))
|
||||
{
|
||||
let script_names = doc
|
||||
.scope()
|
||||
.and_then(|s| result.by_scope.get_mut(s))
|
||||
|
@ -6035,6 +6077,7 @@ mod tests {
|
|||
Some(temp_dir.url()),
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
assert_eq!(info.entries.len(), 22);
|
||||
let details = ts_server
|
||||
|
@ -6194,6 +6237,7 @@ mod tests {
|
|||
Some(temp_dir.url()),
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
let entry = info
|
||||
.entries
|
||||
|
|
|
@ -89,7 +89,7 @@ impl CliNpmResolver for CliByonmNpmResolver {
|
|||
.components()
|
||||
.any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules")
|
||||
{
|
||||
permissions.check_read_path(path)
|
||||
permissions.check_read_path(path).map_err(Into::into)
|
||||
} else {
|
||||
Ok(Cow::Borrowed(path))
|
||||
}
|
||||
|
|
|
@ -133,7 +133,7 @@ impl RegistryReadPermissionChecker {
|
|||
}
|
||||
}
|
||||
|
||||
permissions.check_read_path(path)
|
||||
permissions.check_read_path(path).map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1035,12 +1035,18 @@ fn junction_or_symlink_dir(
|
|||
if symlink_err.kind() == std::io::ErrorKind::PermissionDenied =>
|
||||
{
|
||||
USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed);
|
||||
junction::create(old_path, new_path).map_err(Into::into)
|
||||
junction::create(old_path, new_path)
|
||||
.context("Failed creating junction in node_modules folder")
|
||||
}
|
||||
Err(symlink_err) => {
|
||||
log::warn!(
|
||||
"{} Unexpected error symlinking node_modules: {symlink_err}",
|
||||
colors::yellow("Warning")
|
||||
);
|
||||
USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed);
|
||||
junction::create(old_path, new_path)
|
||||
.context("Failed creating junction in node_modules folder")
|
||||
}
|
||||
Err(symlink_err) => Err(
|
||||
AnyError::from(symlink_err)
|
||||
.context("Failed creating symlink in node_modules folder"),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -56,7 +56,7 @@ struct PermissionsHolder(Uuid, PermissionsContainer);
|
|||
pub fn op_pledge_test_permissions(
|
||||
state: &mut OpState,
|
||||
#[serde] args: ChildPermissionsArg,
|
||||
) -> Result<Uuid, AnyError> {
|
||||
) -> Result<Uuid, deno_runtime::deno_permissions::ChildPermissionError> {
|
||||
let token = Uuid::new_v4();
|
||||
let parent_permissions = state.borrow_mut::<PermissionsContainer>();
|
||||
let worker_permissions = parent_permissions.create_child_permissions(args)?;
|
||||
|
@ -147,7 +147,7 @@ fn op_dispatch_bench_event(state: &mut OpState, #[serde] event: BenchEvent) {
|
|||
|
||||
#[op2(fast)]
|
||||
#[number]
|
||||
fn op_bench_now(state: &mut OpState) -> Result<u64, AnyError> {
|
||||
fn op_bench_now(state: &mut OpState) -> Result<u64, std::num::TryFromIntError> {
|
||||
let ns = state.borrow::<time::Instant>().elapsed().as_nanos();
|
||||
let ns_u64 = u64::try_from(ns)?;
|
||||
Ok(ns_u64)
|
||||
|
|
|
@ -46,7 +46,7 @@ pub fn op_jupyter_input(
|
|||
state: &mut OpState,
|
||||
#[string] prompt: String,
|
||||
is_password: bool,
|
||||
) -> Result<Option<String>, AnyError> {
|
||||
) -> Option<String> {
|
||||
let (last_execution_request, stdin_connection_proxy) = {
|
||||
(
|
||||
state.borrow::<Arc<Mutex<Option<JupyterMessage>>>>().clone(),
|
||||
|
@ -58,11 +58,11 @@ pub fn op_jupyter_input(
|
|||
if let Some(last_request) = maybe_last_request {
|
||||
let JupyterMessageContent::ExecuteRequest(msg) = &last_request.content
|
||||
else {
|
||||
return Ok(None);
|
||||
return None;
|
||||
};
|
||||
|
||||
if !msg.allow_stdin {
|
||||
return Ok(None);
|
||||
return None;
|
||||
}
|
||||
|
||||
let content = InputRequest {
|
||||
|
@ -73,7 +73,7 @@ pub fn op_jupyter_input(
|
|||
let msg = JupyterMessage::new(content, Some(&last_request));
|
||||
|
||||
let Ok(()) = stdin_connection_proxy.lock().tx.send(msg) else {
|
||||
return Ok(None);
|
||||
return None;
|
||||
};
|
||||
|
||||
// Need to spawn a separate thread here, because `blocking_recv()` can't
|
||||
|
@ -82,17 +82,25 @@ pub fn op_jupyter_input(
|
|||
stdin_connection_proxy.lock().rx.blocking_recv()
|
||||
});
|
||||
let Ok(Some(response)) = join_handle.join() else {
|
||||
return Ok(None);
|
||||
return None;
|
||||
};
|
||||
|
||||
let JupyterMessageContent::InputReply(msg) = response.content else {
|
||||
return Ok(None);
|
||||
return None;
|
||||
};
|
||||
|
||||
return Ok(Some(msg.value));
|
||||
return Some(msg.value);
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
None
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum JupyterBroadcastError {
|
||||
#[error(transparent)]
|
||||
SerdeJson(serde_json::Error),
|
||||
#[error(transparent)]
|
||||
ZeroMq(AnyError),
|
||||
}
|
||||
|
||||
#[op2(async)]
|
||||
|
@ -102,7 +110,7 @@ pub async fn op_jupyter_broadcast(
|
|||
#[serde] content: serde_json::Value,
|
||||
#[serde] metadata: serde_json::Value,
|
||||
#[serde] buffers: Vec<deno_core::JsBuffer>,
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), JupyterBroadcastError> {
|
||||
let (iopub_connection, last_execution_request) = {
|
||||
let s = state.borrow();
|
||||
|
||||
|
@ -125,36 +133,35 @@ pub async fn op_jupyter_broadcast(
|
|||
content,
|
||||
err
|
||||
);
|
||||
err
|
||||
JupyterBroadcastError::SerdeJson(err)
|
||||
})?;
|
||||
|
||||
let jupyter_message = JupyterMessage::new(content, Some(&last_request))
|
||||
.with_metadata(metadata)
|
||||
.with_buffers(buffers.into_iter().map(|b| b.to_vec().into()).collect());
|
||||
|
||||
iopub_connection.lock().send(jupyter_message).await?;
|
||||
iopub_connection
|
||||
.lock()
|
||||
.send(jupyter_message)
|
||||
.await
|
||||
.map_err(JupyterBroadcastError::ZeroMq)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[op2(fast)]
|
||||
pub fn op_print(
|
||||
state: &mut OpState,
|
||||
#[string] msg: &str,
|
||||
is_err: bool,
|
||||
) -> Result<(), AnyError> {
|
||||
pub fn op_print(state: &mut OpState, #[string] msg: &str, is_err: bool) {
|
||||
let sender = state.borrow_mut::<mpsc::UnboundedSender<StreamContent>>();
|
||||
|
||||
if is_err {
|
||||
if let Err(err) = sender.send(StreamContent::stderr(msg)) {
|
||||
log::error!("Failed to send stderr message: {}", err);
|
||||
}
|
||||
return Ok(());
|
||||
return;
|
||||
}
|
||||
|
||||
if let Err(err) = sender.send(StreamContent::stdout(msg)) {
|
||||
log::error!("Failed to send stdout message: {}", err);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -51,7 +51,7 @@ struct PermissionsHolder(Uuid, PermissionsContainer);
|
|||
pub fn op_pledge_test_permissions(
|
||||
state: &mut OpState,
|
||||
#[serde] args: ChildPermissionsArg,
|
||||
) -> Result<Uuid, AnyError> {
|
||||
) -> Result<Uuid, deno_runtime::deno_permissions::ChildPermissionError> {
|
||||
let token = Uuid::new_v4();
|
||||
let parent_permissions = state.borrow_mut::<PermissionsContainer>();
|
||||
let worker_permissions = parent_permissions.create_child_permissions(args)?;
|
||||
|
@ -150,7 +150,7 @@ fn op_register_test_step(
|
|||
#[smi] parent_id: usize,
|
||||
#[smi] root_id: usize,
|
||||
#[string] root_name: String,
|
||||
) -> Result<usize, AnyError> {
|
||||
) -> usize {
|
||||
let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
|
||||
let origin = state.borrow::<ModuleSpecifier>().to_string();
|
||||
let description = TestStepDescription {
|
||||
|
@ -169,7 +169,7 @@ fn op_register_test_step(
|
|||
};
|
||||
let sender = state.borrow_mut::<TestEventSender>();
|
||||
sender.send(TestEvent::StepRegister(description)).ok();
|
||||
Ok(id)
|
||||
id
|
||||
}
|
||||
|
||||
#[op2(fast)]
|
||||
|
|
|
@ -353,6 +353,21 @@ fn format_yaml(
|
|||
file_text: &str,
|
||||
fmt_options: &FmtOptionsConfig,
|
||||
) -> Result<Option<String>, AnyError> {
|
||||
let ignore_file = file_text
|
||||
.lines()
|
||||
.take_while(|line| line.starts_with('#'))
|
||||
.any(|line| {
|
||||
line
|
||||
.strip_prefix('#')
|
||||
.unwrap()
|
||||
.trim()
|
||||
.starts_with("deno-fmt-ignore-file")
|
||||
});
|
||||
|
||||
if ignore_file {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let formatted_str =
|
||||
pretty_yaml::format_text(file_text, &get_resolved_yaml_config(fmt_options))
|
||||
.map_err(AnyError::from)?;
|
||||
|
@ -1017,7 +1032,7 @@ fn get_resolved_markup_fmt_config(
|
|||
max_attrs_per_line: None,
|
||||
prefer_attrs_single_line: false,
|
||||
html_normal_self_closing: None,
|
||||
html_void_self_closing: Some(true),
|
||||
html_void_self_closing: None,
|
||||
component_self_closing: None,
|
||||
svg_self_closing: None,
|
||||
mathml_self_closing: None,
|
||||
|
|
|
@ -645,10 +645,12 @@ impl<'a> GraphDisplayContext<'a> {
|
|||
let message = match err {
|
||||
HttpsChecksumIntegrity(_) => "(checksum integrity error)",
|
||||
Decode(_) => "(loading decode error)",
|
||||
Loader(err) => match deno_core::error::get_custom_error_class(err) {
|
||||
Some("NotCapable") => "(not capable, requires --allow-import)",
|
||||
_ => "(loading error)",
|
||||
},
|
||||
Loader(err) => {
|
||||
match deno_runtime::errors::get_error_class_name(err) {
|
||||
Some("NotCapable") => "(not capable, requires --allow-import)",
|
||||
_ => "(loading error)",
|
||||
}
|
||||
}
|
||||
Jsr(_) => "(loading error)",
|
||||
NodeUnknownBuiltinModule(_) => "(unknown node built-in error)",
|
||||
Npm(_) => "(npm loading error)",
|
||||
|
|
|
@ -12,7 +12,9 @@ use deno_core::futures::StreamExt;
|
|||
use deno_path_util::url_to_file_path;
|
||||
use deno_semver::jsr::JsrPackageReqReference;
|
||||
use deno_semver::npm::NpmPackageReqReference;
|
||||
use deno_semver::package::PackageNv;
|
||||
use deno_semver::package::PackageReq;
|
||||
use deno_semver::Version;
|
||||
use deno_semver::VersionReq;
|
||||
use jsonc_parser::cst::CstObject;
|
||||
use jsonc_parser::cst::CstObjectProp;
|
||||
|
@ -455,15 +457,32 @@ pub async fn add(
|
|||
match package_and_version {
|
||||
PackageAndVersion::NotFound {
|
||||
package: package_name,
|
||||
found_npm_package,
|
||||
help,
|
||||
package_req,
|
||||
} => {
|
||||
if found_npm_package {
|
||||
bail!("{} was not found, but a matching npm package exists. Did you mean `{}`?", crate::colors::red(package_name), crate::colors::yellow(format!("deno {cmd_name} npm:{package_req}")));
|
||||
} else {
|
||||
bail!("{} was not found.", crate::colors::red(package_name));
|
||||
} => match help {
|
||||
Some(NotFoundHelp::NpmPackage) => {
|
||||
bail!(
|
||||
"{} was not found, but a matching npm package exists. Did you mean `{}`?",
|
||||
crate::colors::red(package_name),
|
||||
crate::colors::yellow(format!("deno {cmd_name} npm:{package_req}"))
|
||||
);
|
||||
}
|
||||
}
|
||||
Some(NotFoundHelp::JsrPackage) => {
|
||||
bail!(
|
||||
"{} was not found, but a matching jsr package exists. Did you mean `{}`?",
|
||||
crate::colors::red(package_name),
|
||||
crate::colors::yellow(format!("deno {cmd_name} jsr:{package_req}"))
|
||||
)
|
||||
}
|
||||
Some(NotFoundHelp::PreReleaseVersion(version)) => {
|
||||
bail!(
|
||||
"{} has only pre-release versions available. Try specifying a version: `{}`",
|
||||
crate::colors::red(&package_name),
|
||||
crate::colors::yellow(format!("deno {cmd_name} {package_name}@^{version}"))
|
||||
)
|
||||
}
|
||||
None => bail!("{} was not found.", crate::colors::red(package_name)),
|
||||
},
|
||||
PackageAndVersion::Selected(selected) => {
|
||||
selected_packages.push(selected);
|
||||
}
|
||||
|
@ -511,76 +530,144 @@ struct SelectedPackage {
|
|||
selected_version: String,
|
||||
}
|
||||
|
||||
enum NotFoundHelp {
|
||||
NpmPackage,
|
||||
JsrPackage,
|
||||
PreReleaseVersion(Version),
|
||||
}
|
||||
|
||||
enum PackageAndVersion {
|
||||
NotFound {
|
||||
package: String,
|
||||
found_npm_package: bool,
|
||||
package_req: PackageReq,
|
||||
help: Option<NotFoundHelp>,
|
||||
},
|
||||
Selected(SelectedPackage),
|
||||
}
|
||||
|
||||
fn best_version<'a>(
|
||||
versions: impl Iterator<Item = &'a Version>,
|
||||
) -> Option<&'a Version> {
|
||||
let mut maybe_best_version: Option<&Version> = None;
|
||||
for version in versions {
|
||||
let is_best_version = maybe_best_version
|
||||
.as_ref()
|
||||
.map(|best_version| (*best_version).cmp(version).is_lt())
|
||||
.unwrap_or(true);
|
||||
if is_best_version {
|
||||
maybe_best_version = Some(version);
|
||||
}
|
||||
}
|
||||
maybe_best_version
|
||||
}
|
||||
|
||||
trait PackageInfoProvider {
|
||||
const SPECIFIER_PREFIX: &str;
|
||||
/// The help to return if a package is found by this provider
|
||||
const HELP: NotFoundHelp;
|
||||
async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv>;
|
||||
async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version>;
|
||||
}
|
||||
|
||||
impl PackageInfoProvider for Arc<JsrFetchResolver> {
|
||||
const HELP: NotFoundHelp = NotFoundHelp::JsrPackage;
|
||||
const SPECIFIER_PREFIX: &str = "jsr";
|
||||
async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> {
|
||||
(**self).req_to_nv(req).await
|
||||
}
|
||||
|
||||
async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version> {
|
||||
let info = self.package_info(&req.name).await?;
|
||||
best_version(
|
||||
info
|
||||
.versions
|
||||
.iter()
|
||||
.filter(|(_, version_info)| !version_info.yanked)
|
||||
.map(|(version, _)| version),
|
||||
)
|
||||
.cloned()
|
||||
}
|
||||
}
|
||||
|
||||
impl PackageInfoProvider for Arc<NpmFetchResolver> {
|
||||
const HELP: NotFoundHelp = NotFoundHelp::NpmPackage;
|
||||
const SPECIFIER_PREFIX: &str = "npm";
|
||||
async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> {
|
||||
(**self).req_to_nv(req).await
|
||||
}
|
||||
|
||||
async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version> {
|
||||
let info = self.package_info(&req.name).await?;
|
||||
best_version(info.versions.keys()).cloned()
|
||||
}
|
||||
}
|
||||
|
||||
async fn find_package_and_select_version_for_req(
|
||||
jsr_resolver: Arc<JsrFetchResolver>,
|
||||
npm_resolver: Arc<NpmFetchResolver>,
|
||||
add_package_req: AddRmPackageReq,
|
||||
) -> Result<PackageAndVersion, AnyError> {
|
||||
match add_package_req.value {
|
||||
AddRmPackageReqValue::Jsr(req) => {
|
||||
let jsr_prefixed_name = format!("jsr:{}", &req.name);
|
||||
let Some(nv) = jsr_resolver.req_to_nv(&req).await else {
|
||||
if npm_resolver.req_to_nv(&req).await.is_some() {
|
||||
async fn select<T: PackageInfoProvider, S: PackageInfoProvider>(
|
||||
main_resolver: T,
|
||||
fallback_resolver: S,
|
||||
add_package_req: AddRmPackageReq,
|
||||
) -> Result<PackageAndVersion, AnyError> {
|
||||
let req = match &add_package_req.value {
|
||||
AddRmPackageReqValue::Jsr(req) => req,
|
||||
AddRmPackageReqValue::Npm(req) => req,
|
||||
};
|
||||
let prefixed_name = format!("{}:{}", T::SPECIFIER_PREFIX, req.name);
|
||||
let help_if_found_in_fallback = S::HELP;
|
||||
let Some(nv) = main_resolver.req_to_nv(req).await else {
|
||||
if fallback_resolver.req_to_nv(req).await.is_some() {
|
||||
// it's in the other registry
|
||||
return Ok(PackageAndVersion::NotFound {
|
||||
package: prefixed_name,
|
||||
help: Some(help_if_found_in_fallback),
|
||||
package_req: req.clone(),
|
||||
});
|
||||
}
|
||||
if req.version_req.version_text() == "*" {
|
||||
if let Some(pre_release_version) =
|
||||
main_resolver.latest_version(req).await
|
||||
{
|
||||
return Ok(PackageAndVersion::NotFound {
|
||||
package: jsr_prefixed_name,
|
||||
found_npm_package: true,
|
||||
package_req: req,
|
||||
package: prefixed_name,
|
||||
package_req: req.clone(),
|
||||
help: Some(NotFoundHelp::PreReleaseVersion(
|
||||
pre_release_version.clone(),
|
||||
)),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return Ok(PackageAndVersion::NotFound {
|
||||
package: jsr_prefixed_name,
|
||||
found_npm_package: false,
|
||||
package_req: req,
|
||||
});
|
||||
};
|
||||
let range_symbol = if req.version_req.version_text().starts_with('~') {
|
||||
"~"
|
||||
} else if req.version_req.version_text() == nv.version.to_string() {
|
||||
""
|
||||
} else {
|
||||
"^"
|
||||
};
|
||||
Ok(PackageAndVersion::Selected(SelectedPackage {
|
||||
import_name: add_package_req.alias,
|
||||
package_name: jsr_prefixed_name,
|
||||
version_req: format!("{}{}", range_symbol, &nv.version),
|
||||
selected_version: nv.version.to_string(),
|
||||
}))
|
||||
return Ok(PackageAndVersion::NotFound {
|
||||
package: prefixed_name,
|
||||
help: None,
|
||||
package_req: req.clone(),
|
||||
});
|
||||
};
|
||||
let range_symbol = if req.version_req.version_text().starts_with('~') {
|
||||
"~"
|
||||
} else if req.version_req.version_text() == nv.version.to_string() {
|
||||
""
|
||||
} else {
|
||||
"^"
|
||||
};
|
||||
Ok(PackageAndVersion::Selected(SelectedPackage {
|
||||
import_name: add_package_req.alias,
|
||||
package_name: prefixed_name,
|
||||
version_req: format!("{}{}", range_symbol, &nv.version),
|
||||
selected_version: nv.version.to_string(),
|
||||
}))
|
||||
}
|
||||
|
||||
match &add_package_req.value {
|
||||
AddRmPackageReqValue::Jsr(_) => {
|
||||
select(jsr_resolver, npm_resolver, add_package_req).await
|
||||
}
|
||||
AddRmPackageReqValue::Npm(req) => {
|
||||
let npm_prefixed_name = format!("npm:{}", &req.name);
|
||||
let Some(nv) = npm_resolver.req_to_nv(&req).await else {
|
||||
return Ok(PackageAndVersion::NotFound {
|
||||
package: npm_prefixed_name,
|
||||
found_npm_package: false,
|
||||
package_req: req,
|
||||
});
|
||||
};
|
||||
|
||||
let range_symbol = if req.version_req.version_text().starts_with('~') {
|
||||
"~"
|
||||
} else if req.version_req.version_text() == nv.version.to_string() {
|
||||
""
|
||||
} else {
|
||||
"^"
|
||||
};
|
||||
|
||||
Ok(PackageAndVersion::Selected(SelectedPackage {
|
||||
import_name: add_package_req.alias,
|
||||
package_name: npm_prefixed_name,
|
||||
version_req: format!("{}{}", range_symbol, &nv.version),
|
||||
selected_version: nv.version.to_string(),
|
||||
}))
|
||||
AddRmPackageReqValue::Npm(_) => {
|
||||
select(npm_resolver, jsr_resolver, add_package_req).await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -801,13 +801,18 @@ delete Object.prototype.__proto__;
|
|||
if (logDebug) {
|
||||
debug(`host.getScriptSnapshot("${specifier}")`);
|
||||
}
|
||||
const sourceFile = sourceFileCache.get(specifier);
|
||||
if (sourceFile) {
|
||||
if (!assetScopes.has(specifier)) {
|
||||
assetScopes.set(specifier, lastRequestScope);
|
||||
if (specifier.startsWith(ASSETS_URL_PREFIX)) {
|
||||
const sourceFile = this.getSourceFile(
|
||||
specifier,
|
||||
ts.ScriptTarget.ESNext,
|
||||
);
|
||||
if (sourceFile) {
|
||||
if (!assetScopes.has(specifier)) {
|
||||
assetScopes.set(specifier, lastRequestScope);
|
||||
}
|
||||
// This case only occurs for assets.
|
||||
return ts.ScriptSnapshot.fromString(sourceFile.text);
|
||||
}
|
||||
// This case only occurs for assets.
|
||||
return ts.ScriptSnapshot.fromString(sourceFile.text);
|
||||
}
|
||||
let sourceText = sourceTextCache.get(specifier);
|
||||
if (sourceText == undefined) {
|
||||
|
|
37
cli/tsc/dts/lib.deno.ns.d.ts
vendored
37
cli/tsc/dts/lib.deno.ns.d.ts
vendored
|
@ -556,14 +556,23 @@ declare namespace Deno {
|
|||
*/
|
||||
env?: "inherit" | boolean | string[];
|
||||
|
||||
/** Specifies if the `sys` permission should be requested or revoked.
|
||||
* If set to `"inherit"`, the current `sys` permission will be inherited.
|
||||
* If set to `true`, the global `sys` permission will be requested.
|
||||
* If set to `false`, the global `sys` permission will be revoked.
|
||||
/** Specifies if the `ffi` permission should be requested or revoked.
|
||||
* If set to `"inherit"`, the current `ffi` permission will be inherited.
|
||||
* If set to `true`, the global `ffi` permission will be requested.
|
||||
* If set to `false`, the global `ffi` permission will be revoked.
|
||||
*
|
||||
* @default {false}
|
||||
*/
|
||||
sys?: "inherit" | boolean | string[];
|
||||
ffi?: "inherit" | boolean | Array<string | URL>;
|
||||
|
||||
/** Specifies if the `import` permission should be requested or revoked.
|
||||
* If set to `"inherit"` the current `import` permission will be inherited.
|
||||
* If set to `true`, the global `import` permission will be requested.
|
||||
* If set to `false`, the global `import` permission will be revoked.
|
||||
* If set to `Array<string>`, the `import` permissions will be requested with the
|
||||
* specified domains.
|
||||
*/
|
||||
import?: "inherit" | boolean | Array<string>;
|
||||
|
||||
/** Specifies if the `net` permission should be requested or revoked.
|
||||
* if set to `"inherit"`, the current `net` permission will be inherited.
|
||||
|
@ -638,15 +647,6 @@ declare namespace Deno {
|
|||
*/
|
||||
net?: "inherit" | boolean | string[];
|
||||
|
||||
/** Specifies if the `ffi` permission should be requested or revoked.
|
||||
* If set to `"inherit"`, the current `ffi` permission will be inherited.
|
||||
* If set to `true`, the global `ffi` permission will be requested.
|
||||
* If set to `false`, the global `ffi` permission will be revoked.
|
||||
*
|
||||
* @default {false}
|
||||
*/
|
||||
ffi?: "inherit" | boolean | Array<string | URL>;
|
||||
|
||||
/** Specifies if the `read` permission should be requested or revoked.
|
||||
* If set to `"inherit"`, the current `read` permission will be inherited.
|
||||
* If set to `true`, the global `read` permission will be requested.
|
||||
|
@ -667,6 +667,15 @@ declare namespace Deno {
|
|||
*/
|
||||
run?: "inherit" | boolean | Array<string | URL>;
|
||||
|
||||
/** Specifies if the `sys` permission should be requested or revoked.
|
||||
* If set to `"inherit"`, the current `sys` permission will be inherited.
|
||||
* If set to `true`, the global `sys` permission will be requested.
|
||||
* If set to `false`, the global `sys` permission will be revoked.
|
||||
*
|
||||
* @default {false}
|
||||
*/
|
||||
sys?: "inherit" | boolean | string[];
|
||||
|
||||
/** Specifies if the `write` permission should be requested or revoked.
|
||||
* If set to `"inherit"`, the current `write` permission will be inherited.
|
||||
* If set to `true`, the global `write` permission will be requested.
|
||||
|
|
|
@ -565,7 +565,9 @@ pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), Error> {
|
|||
use std::os::windows::fs::symlink_dir;
|
||||
symlink_dir(oldpath, newpath).map_err(|err| {
|
||||
if let Some(code) = err.raw_os_error() {
|
||||
if code as u32 == winapi::shared::winerror::ERROR_PRIVILEGE_NOT_HELD {
|
||||
if code as u32 == winapi::shared::winerror::ERROR_PRIVILEGE_NOT_HELD
|
||||
|| code as u32 == winapi::shared::winerror::ERROR_INVALID_FUNCTION
|
||||
{
|
||||
return err_mapper(err, Some(ErrorKind::PermissionDenied));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_broadcast_channel"
|
||||
version = "0.169.0"
|
||||
version = "0.170.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
2
ext/cache/Cargo.toml
vendored
2
ext/cache/Cargo.toml
vendored
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_cache"
|
||||
version = "0.107.0"
|
||||
version = "0.108.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
6
ext/cache/lib.rs
vendored
6
ext/cache/lib.rs
vendored
|
@ -33,7 +33,9 @@ pub enum CacheError {
|
|||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct CreateCache<C: Cache + 'static>(pub Arc<dyn Fn() -> C>);
|
||||
pub struct CreateCache<C: Cache + 'static>(
|
||||
pub Arc<dyn Fn() -> Result<C, CacheError>>,
|
||||
);
|
||||
|
||||
deno_core::extension!(deno_cache,
|
||||
deps = [ deno_webidl, deno_web, deno_url, deno_fetch ],
|
||||
|
@ -231,7 +233,7 @@ where
|
|||
if let Some(cache) = state.try_borrow::<CA>() {
|
||||
Ok(cache.clone())
|
||||
} else if let Some(create_cache) = state.try_borrow::<CreateCache<CA>>() {
|
||||
let cache = create_cache.0();
|
||||
let cache = create_cache.0()?;
|
||||
state.put(cache);
|
||||
Ok(state.borrow::<CA>().clone())
|
||||
} else {
|
||||
|
|
23
ext/cache/sqlite.rs
vendored
23
ext/cache/sqlite.rs
vendored
|
@ -42,7 +42,7 @@ pub struct SqliteBackedCache {
|
|||
}
|
||||
|
||||
impl SqliteBackedCache {
|
||||
pub fn new(cache_storage_dir: PathBuf) -> Self {
|
||||
pub fn new(cache_storage_dir: PathBuf) -> Result<Self, CacheError> {
|
||||
{
|
||||
std::fs::create_dir_all(&cache_storage_dir)
|
||||
.expect("failed to create cache dir");
|
||||
|
@ -57,18 +57,14 @@ impl SqliteBackedCache {
|
|||
PRAGMA synchronous=NORMAL;
|
||||
PRAGMA optimize;
|
||||
";
|
||||
connection
|
||||
.execute_batch(initial_pragmas)
|
||||
.expect("failed to execute pragmas");
|
||||
connection
|
||||
.execute(
|
||||
"CREATE TABLE IF NOT EXISTS cache_storage (
|
||||
connection.execute_batch(initial_pragmas)?;
|
||||
connection.execute(
|
||||
"CREATE TABLE IF NOT EXISTS cache_storage (
|
||||
id INTEGER PRIMARY KEY,
|
||||
cache_name TEXT NOT NULL UNIQUE
|
||||
)",
|
||||
(),
|
||||
)
|
||||
.expect("failed to create cache_storage table");
|
||||
(),
|
||||
)?;
|
||||
connection
|
||||
.execute(
|
||||
"CREATE TABLE IF NOT EXISTS request_response_list (
|
||||
|
@ -86,12 +82,11 @@ impl SqliteBackedCache {
|
|||
UNIQUE (cache_id, request_url)
|
||||
)",
|
||||
(),
|
||||
)
|
||||
.expect("failed to create request_response_list table");
|
||||
SqliteBackedCache {
|
||||
)?;
|
||||
Ok(SqliteBackedCache {
|
||||
connection: Arc::new(Mutex::new(connection)),
|
||||
cache_storage_dir,
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_canvas"
|
||||
version = "0.44.0"
|
||||
version = "0.45.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_console"
|
||||
version = "0.175.0"
|
||||
version = "0.176.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_cron"
|
||||
version = "0.55.0"
|
||||
version = "0.56.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_crypto"
|
||||
version = "0.189.0"
|
||||
version = "0.190.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -269,12 +269,6 @@ class Request {
|
|||
/** @type {AbortSignal} */
|
||||
get [_signal]() {
|
||||
const signal = this[_signalCache];
|
||||
// This signal not been created yet, and the request is still in progress
|
||||
if (signal === undefined) {
|
||||
const signal = newSignal();
|
||||
this[_signalCache] = signal;
|
||||
return signal;
|
||||
}
|
||||
// This signal has not been created yet, but the request has already completed
|
||||
if (signal === false) {
|
||||
const signal = newSignal();
|
||||
|
@ -282,6 +276,18 @@ class Request {
|
|||
signal[signalAbort](signalAbortError);
|
||||
return signal;
|
||||
}
|
||||
|
||||
// This signal not been created yet, and the request is still in progress
|
||||
if (signal === undefined) {
|
||||
const signal = newSignal();
|
||||
this[_signalCache] = signal;
|
||||
this[_request].onCancel?.(() => {
|
||||
signal[signalAbort](signalAbortError);
|
||||
});
|
||||
|
||||
return signal;
|
||||
}
|
||||
|
||||
return signal;
|
||||
}
|
||||
get [_mimeType]() {
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_fetch"
|
||||
version = "0.199.0"
|
||||
version = "0.200.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -39,6 +39,7 @@ use deno_core::OpState;
|
|||
use deno_core::RcRef;
|
||||
use deno_core::Resource;
|
||||
use deno_core::ResourceId;
|
||||
use deno_permissions::PermissionCheckError;
|
||||
use deno_tls::rustls::RootCertStore;
|
||||
use deno_tls::Proxy;
|
||||
use deno_tls::RootCertStoreProvider;
|
||||
|
@ -149,7 +150,7 @@ pub enum FetchError {
|
|||
#[error(transparent)]
|
||||
Resource(deno_core::error::AnyError),
|
||||
#[error(transparent)]
|
||||
Permission(deno_core::error::AnyError),
|
||||
Permission(#[from] PermissionCheckError),
|
||||
#[error("NetworkError when attempting to fetch resource")]
|
||||
NetworkError,
|
||||
#[error("Fetching files only supports the GET method: received {0}")]
|
||||
|
@ -346,13 +347,13 @@ pub trait FetchPermissions {
|
|||
&mut self,
|
||||
url: &Url,
|
||||
api_name: &str,
|
||||
) -> Result<(), deno_core::error::AnyError>;
|
||||
) -> Result<(), PermissionCheckError>;
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn check_read<'a>(
|
||||
&mut self,
|
||||
p: &'a Path,
|
||||
api_name: &str,
|
||||
) -> Result<Cow<'a, Path>, deno_core::error::AnyError>;
|
||||
) -> Result<Cow<'a, Path>, PermissionCheckError>;
|
||||
}
|
||||
|
||||
impl FetchPermissions for deno_permissions::PermissionsContainer {
|
||||
|
@ -361,7 +362,7 @@ impl FetchPermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
url: &Url,
|
||||
api_name: &str,
|
||||
) -> Result<(), deno_core::error::AnyError> {
|
||||
) -> Result<(), PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_net_url(self, url, api_name)
|
||||
}
|
||||
|
||||
|
@ -370,7 +371,7 @@ impl FetchPermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
path: &'a Path,
|
||||
api_name: &str,
|
||||
) -> Result<Cow<'a, Path>, deno_core::error::AnyError> {
|
||||
) -> Result<Cow<'a, Path>, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_read_path(
|
||||
self,
|
||||
path,
|
||||
|
@ -414,9 +415,7 @@ where
|
|||
"file" => {
|
||||
let path = url.to_file_path().map_err(|_| FetchError::NetworkError)?;
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
let path = permissions
|
||||
.check_read(&path, "fetch()")
|
||||
.map_err(FetchError::Permission)?;
|
||||
let path = permissions.check_read(&path, "fetch()")?;
|
||||
let url = match path {
|
||||
Cow::Owned(path) => Url::from_file_path(path).unwrap(),
|
||||
Cow::Borrowed(_) => url,
|
||||
|
@ -442,9 +441,7 @@ where
|
|||
}
|
||||
"http" | "https" => {
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_net_url(&url, "fetch()")
|
||||
.map_err(FetchError::Resource)?;
|
||||
permissions.check_net_url(&url, "fetch()")?;
|
||||
|
||||
let maybe_authority = extract_authority(&mut url);
|
||||
let uri = url
|
||||
|
@ -863,9 +860,7 @@ where
|
|||
if let Some(proxy) = args.proxy.clone() {
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
let url = Url::parse(&proxy.url)?;
|
||||
permissions
|
||||
.check_net_url(&url, "Deno.createHttpClient()")
|
||||
.map_err(FetchError::Permission)?;
|
||||
permissions.check_net_url(&url, "Deno.createHttpClient()")?;
|
||||
}
|
||||
|
||||
let options = state.borrow::<Options>();
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_ffi"
|
||||
version = "0.162.0"
|
||||
version = "0.163.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -32,7 +32,9 @@ pub enum CallError {
|
|||
#[error("Invalid FFI symbol name: '{0}'")]
|
||||
InvalidSymbol(String),
|
||||
#[error(transparent)]
|
||||
Permission(deno_core::error::AnyError),
|
||||
Permission(#[from] deno_permissions::PermissionCheckError),
|
||||
#[error(transparent)]
|
||||
Resource(deno_core::error::AnyError),
|
||||
#[error(transparent)]
|
||||
Callback(#[from] super::CallbackError),
|
||||
}
|
||||
|
@ -301,9 +303,7 @@ where
|
|||
{
|
||||
let mut state = state.borrow_mut();
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(CallError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
};
|
||||
|
||||
let symbol = PtrSymbol::new(pointer, &def)?;
|
||||
|
@ -347,7 +347,7 @@ pub fn op_ffi_call_nonblocking(
|
|||
let resource = state
|
||||
.resource_table
|
||||
.get::<DynamicLibraryResource>(rid)
|
||||
.map_err(CallError::Permission)?;
|
||||
.map_err(CallError::Resource)?;
|
||||
let symbols = &resource.symbols;
|
||||
*symbols
|
||||
.get(&symbol)
|
||||
|
@ -401,9 +401,7 @@ where
|
|||
{
|
||||
let mut state = state.borrow_mut();
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(CallError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
};
|
||||
|
||||
let symbol = PtrSymbol::new(pointer, &def)?;
|
||||
|
|
|
@ -38,7 +38,7 @@ pub enum CallbackError {
|
|||
#[error(transparent)]
|
||||
Resource(deno_core::error::AnyError),
|
||||
#[error(transparent)]
|
||||
Permission(deno_core::error::AnyError),
|
||||
Permission(#[from] deno_permissions::PermissionCheckError),
|
||||
#[error(transparent)]
|
||||
Other(deno_core::error::AnyError),
|
||||
}
|
||||
|
@ -572,9 +572,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(CallbackError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
let thread_id: u32 = LOCAL_THREAD_ID.with(|s| {
|
||||
let value = *s.borrow();
|
||||
|
|
|
@ -30,7 +30,7 @@ pub enum DlfcnError {
|
|||
#[error(transparent)]
|
||||
Dlopen(#[from] dlopen2::Error),
|
||||
#[error(transparent)]
|
||||
Permission(deno_core::error::AnyError),
|
||||
Permission(#[from] deno_permissions::PermissionCheckError),
|
||||
#[error(transparent)]
|
||||
Other(deno_core::error::AnyError),
|
||||
}
|
||||
|
@ -133,9 +133,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
let path = permissions
|
||||
.check_partial_with_path(&args.path)
|
||||
.map_err(DlfcnError::Permission)?;
|
||||
let path = permissions.check_partial_with_path(&args.path)?;
|
||||
|
||||
let lib = Library::open(&path).map_err(|e| {
|
||||
dlopen2::Error::OpeningLibraryError(std::io::Error::new(
|
||||
|
|
|
@ -1,7 +1,5 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
|
||||
use std::mem::size_of;
|
||||
use std::os::raw::c_char;
|
||||
use std::os::raw::c_short;
|
||||
|
@ -31,6 +29,7 @@ use symbol::Symbol;
|
|||
|
||||
pub use call::CallError;
|
||||
pub use callback::CallbackError;
|
||||
use deno_permissions::PermissionCheckError;
|
||||
pub use dlfcn::DlfcnError;
|
||||
pub use ir::IRError;
|
||||
pub use r#static::StaticError;
|
||||
|
@ -48,17 +47,17 @@ const _: () = {
|
|||
pub const UNSTABLE_FEATURE_NAME: &str = "ffi";
|
||||
|
||||
pub trait FfiPermissions {
|
||||
fn check_partial_no_path(&mut self) -> Result<(), AnyError>;
|
||||
fn check_partial_no_path(&mut self) -> Result<(), PermissionCheckError>;
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn check_partial_with_path(
|
||||
&mut self,
|
||||
path: &str,
|
||||
) -> Result<PathBuf, AnyError>;
|
||||
) -> Result<PathBuf, PermissionCheckError>;
|
||||
}
|
||||
|
||||
impl FfiPermissions for deno_permissions::PermissionsContainer {
|
||||
#[inline(always)]
|
||||
fn check_partial_no_path(&mut self) -> Result<(), AnyError> {
|
||||
fn check_partial_no_path(&mut self) -> Result<(), PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_ffi_partial_no_path(self)
|
||||
}
|
||||
|
||||
|
@ -66,7 +65,7 @@ impl FfiPermissions for deno_permissions::PermissionsContainer {
|
|||
fn check_partial_with_path(
|
||||
&mut self,
|
||||
path: &str,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
) -> Result<PathBuf, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_ffi_partial_with_path(
|
||||
self, path,
|
||||
)
|
||||
|
|
|
@ -46,7 +46,7 @@ pub enum ReprError {
|
|||
#[error("Invalid pointer pointer, pointer is null")]
|
||||
InvalidPointer,
|
||||
#[error(transparent)]
|
||||
Permission(deno_core::error::AnyError),
|
||||
Permission(#[from] deno_permissions::PermissionCheckError),
|
||||
}
|
||||
|
||||
#[op2(fast)]
|
||||
|
@ -58,9 +58,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
Ok(ptr_number as *mut c_void)
|
||||
}
|
||||
|
@ -75,9 +73,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
Ok(a == b)
|
||||
}
|
||||
|
@ -91,9 +87,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
Ok(buf as *mut c_void)
|
||||
}
|
||||
|
@ -107,9 +101,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
let Some(buf) = buf.get_backing_store() else {
|
||||
return Ok(0 as _);
|
||||
|
@ -130,9 +122,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidOffset);
|
||||
|
@ -162,9 +152,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
Ok(ptr as usize)
|
||||
}
|
||||
|
@ -181,9 +169,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidArrayBuffer);
|
||||
|
@ -215,9 +201,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if src.is_null() {
|
||||
Err(ReprError::InvalidArrayBuffer)
|
||||
|
@ -246,9 +230,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidCString);
|
||||
|
@ -272,9 +254,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidBool);
|
||||
|
@ -294,9 +274,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidU8);
|
||||
|
@ -318,9 +296,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidI8);
|
||||
|
@ -342,9 +318,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidU16);
|
||||
|
@ -366,9 +340,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidI16);
|
||||
|
@ -390,9 +362,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidU32);
|
||||
|
@ -412,9 +382,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidI32);
|
||||
|
@ -437,9 +405,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidU64);
|
||||
|
@ -465,9 +431,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidI64);
|
||||
|
@ -490,9 +454,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidF32);
|
||||
|
@ -512,9 +474,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidF64);
|
||||
|
@ -534,9 +494,7 @@ where
|
|||
FP: FfiPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<FP>();
|
||||
permissions
|
||||
.check_partial_no_path()
|
||||
.map_err(ReprError::Permission)?;
|
||||
permissions.check_partial_no_path()?;
|
||||
|
||||
if ptr.is_null() {
|
||||
return Err(ReprError::InvalidPointer);
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_fs"
|
||||
version = "0.85.0"
|
||||
version = "0.86.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -22,8 +22,8 @@ pub use crate::sync::MaybeSync;
|
|||
|
||||
use crate::ops::*;
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_io::fs::FsError;
|
||||
use deno_permissions::PermissionCheckError;
|
||||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
@ -42,45 +42,51 @@ pub trait FsPermissions {
|
|||
&mut self,
|
||||
path: &str,
|
||||
api_name: &str,
|
||||
) -> Result<PathBuf, AnyError>;
|
||||
) -> Result<PathBuf, PermissionCheckError>;
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn check_read_path<'a>(
|
||||
&mut self,
|
||||
path: &'a Path,
|
||||
api_name: &str,
|
||||
) -> Result<Cow<'a, Path>, AnyError>;
|
||||
fn check_read_all(&mut self, api_name: &str) -> Result<(), AnyError>;
|
||||
) -> Result<Cow<'a, Path>, PermissionCheckError>;
|
||||
fn check_read_all(
|
||||
&mut self,
|
||||
api_name: &str,
|
||||
) -> Result<(), PermissionCheckError>;
|
||||
fn check_read_blind(
|
||||
&mut self,
|
||||
p: &Path,
|
||||
display: &str,
|
||||
api_name: &str,
|
||||
) -> Result<(), AnyError>;
|
||||
) -> Result<(), PermissionCheckError>;
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn check_write(
|
||||
&mut self,
|
||||
path: &str,
|
||||
api_name: &str,
|
||||
) -> Result<PathBuf, AnyError>;
|
||||
) -> Result<PathBuf, PermissionCheckError>;
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn check_write_path<'a>(
|
||||
&mut self,
|
||||
path: &'a Path,
|
||||
api_name: &str,
|
||||
) -> Result<Cow<'a, Path>, AnyError>;
|
||||
) -> Result<Cow<'a, Path>, PermissionCheckError>;
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn check_write_partial(
|
||||
&mut self,
|
||||
path: &str,
|
||||
api_name: &str,
|
||||
) -> Result<PathBuf, AnyError>;
|
||||
fn check_write_all(&mut self, api_name: &str) -> Result<(), AnyError>;
|
||||
) -> Result<PathBuf, PermissionCheckError>;
|
||||
fn check_write_all(
|
||||
&mut self,
|
||||
api_name: &str,
|
||||
) -> Result<(), PermissionCheckError>;
|
||||
fn check_write_blind(
|
||||
&mut self,
|
||||
p: &Path,
|
||||
display: &str,
|
||||
api_name: &str,
|
||||
) -> Result<(), AnyError>;
|
||||
) -> Result<(), PermissionCheckError>;
|
||||
|
||||
fn check<'a>(
|
||||
&mut self,
|
||||
|
@ -140,7 +146,7 @@ impl FsPermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
path: &str,
|
||||
api_name: &str,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
) -> Result<PathBuf, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_read(self, path, api_name)
|
||||
}
|
||||
|
||||
|
@ -148,7 +154,7 @@ impl FsPermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
path: &'a Path,
|
||||
api_name: &str,
|
||||
) -> Result<Cow<'a, Path>, AnyError> {
|
||||
) -> Result<Cow<'a, Path>, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_read_path(
|
||||
self,
|
||||
path,
|
||||
|
@ -160,7 +166,7 @@ impl FsPermissions for deno_permissions::PermissionsContainer {
|
|||
path: &Path,
|
||||
display: &str,
|
||||
api_name: &str,
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_read_blind(
|
||||
self, path, display, api_name,
|
||||
)
|
||||
|
@ -170,7 +176,7 @@ impl FsPermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
path: &str,
|
||||
api_name: &str,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
) -> Result<PathBuf, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_write(self, path, api_name)
|
||||
}
|
||||
|
||||
|
@ -178,7 +184,7 @@ impl FsPermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
path: &'a Path,
|
||||
api_name: &str,
|
||||
) -> Result<Cow<'a, Path>, AnyError> {
|
||||
) -> Result<Cow<'a, Path>, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_write_path(
|
||||
self, path, api_name,
|
||||
)
|
||||
|
@ -188,7 +194,7 @@ impl FsPermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
path: &str,
|
||||
api_name: &str,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
) -> Result<PathBuf, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_write_partial(
|
||||
self, path, api_name,
|
||||
)
|
||||
|
@ -199,17 +205,23 @@ impl FsPermissions for deno_permissions::PermissionsContainer {
|
|||
p: &Path,
|
||||
display: &str,
|
||||
api_name: &str,
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_write_blind(
|
||||
self, p, display, api_name,
|
||||
)
|
||||
}
|
||||
|
||||
fn check_read_all(&mut self, api_name: &str) -> Result<(), AnyError> {
|
||||
fn check_read_all(
|
||||
&mut self,
|
||||
api_name: &str,
|
||||
) -> Result<(), PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_read_all(self, api_name)
|
||||
}
|
||||
|
||||
fn check_write_all(&mut self, api_name: &str) -> Result<(), AnyError> {
|
||||
fn check_write_all(
|
||||
&mut self,
|
||||
api_name: &str,
|
||||
) -> Result<(), PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_write_all(self, api_name)
|
||||
}
|
||||
}
|
||||
|
|
217
ext/fs/ops.rs
217
ext/fs/ops.rs
|
@ -10,6 +10,12 @@ use std::path::PathBuf;
|
|||
use std::path::StripPrefixError;
|
||||
use std::rc::Rc;
|
||||
|
||||
use crate::interface::AccessCheckFn;
|
||||
use crate::interface::FileSystemRc;
|
||||
use crate::interface::FsDirEntry;
|
||||
use crate::interface::FsFileType;
|
||||
use crate::FsPermissions;
|
||||
use crate::OpenOptions;
|
||||
use deno_core::op2;
|
||||
use deno_core::CancelFuture;
|
||||
use deno_core::CancelHandle;
|
||||
|
@ -20,18 +26,12 @@ use deno_core::ToJsBuffer;
|
|||
use deno_io::fs::FileResource;
|
||||
use deno_io::fs::FsError;
|
||||
use deno_io::fs::FsStat;
|
||||
use deno_permissions::PermissionCheckError;
|
||||
use rand::rngs::ThreadRng;
|
||||
use rand::thread_rng;
|
||||
use rand::Rng;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::interface::AccessCheckFn;
|
||||
use crate::interface::FileSystemRc;
|
||||
use crate::interface::FsDirEntry;
|
||||
use crate::interface::FsFileType;
|
||||
use crate::FsPermissions;
|
||||
use crate::OpenOptions;
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum FsOpsError {
|
||||
#[error("{0}")]
|
||||
|
@ -39,7 +39,7 @@ pub enum FsOpsError {
|
|||
#[error("{0}")]
|
||||
OperationError(#[source] OperationError),
|
||||
#[error(transparent)]
|
||||
Permission(deno_core::error::AnyError),
|
||||
Permission(#[from] PermissionCheckError),
|
||||
#[error(transparent)]
|
||||
Resource(deno_core::error::AnyError),
|
||||
#[error("File name or path {0:?} is not valid UTF-8")]
|
||||
|
@ -150,8 +150,7 @@ where
|
|||
let path = fs.cwd()?;
|
||||
state
|
||||
.borrow_mut::<P>()
|
||||
.check_read_blind(&path, "CWD", "Deno.cwd()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_read_blind(&path, "CWD", "Deno.cwd()")?;
|
||||
let path_str = path_into_string(path.into_os_string())?;
|
||||
Ok(path_str)
|
||||
}
|
||||
|
@ -166,8 +165,7 @@ where
|
|||
{
|
||||
let d = state
|
||||
.borrow_mut::<P>()
|
||||
.check_read(directory, "Deno.chdir()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_read(directory, "Deno.chdir()")?;
|
||||
state
|
||||
.borrow::<FileSystemRc>()
|
||||
.chdir(&d)
|
||||
|
@ -253,8 +251,7 @@ where
|
|||
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write(&path, "Deno.mkdirSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_write(&path, "Deno.mkdirSync()")?;
|
||||
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
fs.mkdir_sync(&path, recursive, Some(mode))
|
||||
|
@ -277,10 +274,7 @@ where
|
|||
|
||||
let (fs, path) = {
|
||||
let mut state = state.borrow_mut();
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write(&path, "Deno.mkdir()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let path = state.borrow_mut::<P>().check_write(&path, "Deno.mkdir()")?;
|
||||
(state.borrow::<FileSystemRc>().clone(), path)
|
||||
};
|
||||
|
||||
|
@ -302,8 +296,7 @@ where
|
|||
{
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write(&path, "Deno.chmodSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_write(&path, "Deno.chmodSync()")?;
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
fs.chmod_sync(&path, mode).context_path("chmod", &path)?;
|
||||
Ok(())
|
||||
|
@ -320,10 +313,7 @@ where
|
|||
{
|
||||
let (fs, path) = {
|
||||
let mut state = state.borrow_mut();
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write(&path, "Deno.chmod()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let path = state.borrow_mut::<P>().check_write(&path, "Deno.chmod()")?;
|
||||
(state.borrow::<FileSystemRc>().clone(), path)
|
||||
};
|
||||
fs.chmod_async(path.clone(), mode)
|
||||
|
@ -344,8 +334,7 @@ where
|
|||
{
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write(&path, "Deno.chownSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_write(&path, "Deno.chownSync()")?;
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
fs.chown_sync(&path, uid, gid)
|
||||
.context_path("chown", &path)?;
|
||||
|
@ -364,10 +353,7 @@ where
|
|||
{
|
||||
let (fs, path) = {
|
||||
let mut state = state.borrow_mut();
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write(&path, "Deno.chown()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let path = state.borrow_mut::<P>().check_write(&path, "Deno.chown()")?;
|
||||
(state.borrow::<FileSystemRc>().clone(), path)
|
||||
};
|
||||
fs.chown_async(path.clone(), uid, gid)
|
||||
|
@ -387,8 +373,7 @@ where
|
|||
{
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write(path, "Deno.removeSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_write(path, "Deno.removeSync()")?;
|
||||
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
fs.remove_sync(&path, recursive)
|
||||
|
@ -411,13 +396,11 @@ where
|
|||
let path = if recursive {
|
||||
state
|
||||
.borrow_mut::<P>()
|
||||
.check_write(&path, "Deno.remove()")
|
||||
.map_err(FsOpsError::Permission)?
|
||||
.check_write(&path, "Deno.remove()")?
|
||||
} else {
|
||||
state
|
||||
.borrow_mut::<P>()
|
||||
.check_write_partial(&path, "Deno.remove()")
|
||||
.map_err(FsOpsError::Permission)?
|
||||
.check_write_partial(&path, "Deno.remove()")?
|
||||
};
|
||||
|
||||
(state.borrow::<FileSystemRc>().clone(), path)
|
||||
|
@ -440,12 +423,8 @@ where
|
|||
P: FsPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
let from = permissions
|
||||
.check_read(from, "Deno.copyFileSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let to = permissions
|
||||
.check_write(to, "Deno.copyFileSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let from = permissions.check_read(from, "Deno.copyFileSync()")?;
|
||||
let to = permissions.check_write(to, "Deno.copyFileSync()")?;
|
||||
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
fs.copy_file_sync(&from, &to)
|
||||
|
@ -466,12 +445,8 @@ where
|
|||
let (fs, from, to) = {
|
||||
let mut state = state.borrow_mut();
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
let from = permissions
|
||||
.check_read(&from, "Deno.copyFile()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let to = permissions
|
||||
.check_write(&to, "Deno.copyFile()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let from = permissions.check_read(&from, "Deno.copyFile()")?;
|
||||
let to = permissions.check_write(&to, "Deno.copyFile()")?;
|
||||
(state.borrow::<FileSystemRc>().clone(), from, to)
|
||||
};
|
||||
|
||||
|
@ -493,8 +468,7 @@ where
|
|||
{
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_read(&path, "Deno.statSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_read(&path, "Deno.statSync()")?;
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
let stat = fs.stat_sync(&path).context_path("stat", &path)?;
|
||||
let serializable_stat = SerializableStat::from(stat);
|
||||
|
@ -514,9 +488,7 @@ where
|
|||
let (fs, path) = {
|
||||
let mut state = state.borrow_mut();
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
let path = permissions
|
||||
.check_read(&path, "Deno.stat()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let path = permissions.check_read(&path, "Deno.stat()")?;
|
||||
(state.borrow::<FileSystemRc>().clone(), path)
|
||||
};
|
||||
let stat = fs
|
||||
|
@ -537,8 +509,7 @@ where
|
|||
{
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_read(&path, "Deno.lstatSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_read(&path, "Deno.lstatSync()")?;
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
let stat = fs.lstat_sync(&path).context_path("lstat", &path)?;
|
||||
let serializable_stat = SerializableStat::from(stat);
|
||||
|
@ -558,9 +529,7 @@ where
|
|||
let (fs, path) = {
|
||||
let mut state = state.borrow_mut();
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
let path = permissions
|
||||
.check_read(&path, "Deno.lstat()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let path = permissions.check_read(&path, "Deno.lstat()")?;
|
||||
(state.borrow::<FileSystemRc>().clone(), path)
|
||||
};
|
||||
let stat = fs
|
||||
|
@ -581,13 +550,9 @@ where
|
|||
{
|
||||
let fs = state.borrow::<FileSystemRc>().clone();
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
let path = permissions
|
||||
.check_read(&path, "Deno.realPathSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let path = permissions.check_read(&path, "Deno.realPathSync()")?;
|
||||
if path.is_relative() {
|
||||
permissions
|
||||
.check_read_blind(&fs.cwd()?, "CWD", "Deno.realPathSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
permissions.check_read_blind(&fs.cwd()?, "CWD", "Deno.realPathSync()")?;
|
||||
}
|
||||
|
||||
let resolved_path =
|
||||
|
@ -610,13 +575,9 @@ where
|
|||
let mut state = state.borrow_mut();
|
||||
let fs = state.borrow::<FileSystemRc>().clone();
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
let path = permissions
|
||||
.check_read(&path, "Deno.realPath()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let path = permissions.check_read(&path, "Deno.realPath()")?;
|
||||
if path.is_relative() {
|
||||
permissions
|
||||
.check_read_blind(&fs.cwd()?, "CWD", "Deno.realPath()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
permissions.check_read_blind(&fs.cwd()?, "CWD", "Deno.realPath()")?;
|
||||
}
|
||||
(fs, path)
|
||||
};
|
||||
|
@ -640,8 +601,7 @@ where
|
|||
{
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_read(&path, "Deno.readDirSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_read(&path, "Deno.readDirSync()")?;
|
||||
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
let entries = fs.read_dir_sync(&path).context_path("readdir", &path)?;
|
||||
|
@ -662,8 +622,7 @@ where
|
|||
let mut state = state.borrow_mut();
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_read(&path, "Deno.readDir()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_read(&path, "Deno.readDir()")?;
|
||||
(state.borrow::<FileSystemRc>().clone(), path)
|
||||
};
|
||||
|
||||
|
@ -685,15 +644,9 @@ where
|
|||
P: FsPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
let _ = permissions
|
||||
.check_read(&oldpath, "Deno.renameSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let oldpath = permissions
|
||||
.check_write(&oldpath, "Deno.renameSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let newpath = permissions
|
||||
.check_write(&newpath, "Deno.renameSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let _ = permissions.check_read(&oldpath, "Deno.renameSync()")?;
|
||||
let oldpath = permissions.check_write(&oldpath, "Deno.renameSync()")?;
|
||||
let newpath = permissions.check_write(&newpath, "Deno.renameSync()")?;
|
||||
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
fs.rename_sync(&oldpath, &newpath)
|
||||
|
@ -714,15 +667,9 @@ where
|
|||
let (fs, oldpath, newpath) = {
|
||||
let mut state = state.borrow_mut();
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
_ = permissions
|
||||
.check_read(&oldpath, "Deno.rename()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let oldpath = permissions
|
||||
.check_write(&oldpath, "Deno.rename()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let newpath = permissions
|
||||
.check_write(&newpath, "Deno.rename()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
_ = permissions.check_read(&oldpath, "Deno.rename()")?;
|
||||
let oldpath = permissions.check_write(&oldpath, "Deno.rename()")?;
|
||||
let newpath = permissions.check_write(&newpath, "Deno.rename()")?;
|
||||
(state.borrow::<FileSystemRc>().clone(), oldpath, newpath)
|
||||
};
|
||||
|
||||
|
@ -743,18 +690,10 @@ where
|
|||
P: FsPermissions + 'static,
|
||||
{
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
_ = permissions
|
||||
.check_read(oldpath, "Deno.linkSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let oldpath = permissions
|
||||
.check_write(oldpath, "Deno.linkSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
_ = permissions
|
||||
.check_read(newpath, "Deno.linkSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let newpath = permissions
|
||||
.check_write(newpath, "Deno.linkSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
_ = permissions.check_read(oldpath, "Deno.linkSync()")?;
|
||||
let oldpath = permissions.check_write(oldpath, "Deno.linkSync()")?;
|
||||
_ = permissions.check_read(newpath, "Deno.linkSync()")?;
|
||||
let newpath = permissions.check_write(newpath, "Deno.linkSync()")?;
|
||||
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
fs.link_sync(&oldpath, &newpath)
|
||||
|
@ -775,18 +714,10 @@ where
|
|||
let (fs, oldpath, newpath) = {
|
||||
let mut state = state.borrow_mut();
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
_ = permissions
|
||||
.check_read(&oldpath, "Deno.link()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let oldpath = permissions
|
||||
.check_write(&oldpath, "Deno.link()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
_ = permissions
|
||||
.check_read(&newpath, "Deno.link()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let newpath = permissions
|
||||
.check_write(&newpath, "Deno.link()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
_ = permissions.check_read(&oldpath, "Deno.link()")?;
|
||||
let oldpath = permissions.check_write(&oldpath, "Deno.link()")?;
|
||||
_ = permissions.check_read(&newpath, "Deno.link()")?;
|
||||
let newpath = permissions.check_write(&newpath, "Deno.link()")?;
|
||||
(state.borrow::<FileSystemRc>().clone(), oldpath, newpath)
|
||||
};
|
||||
|
||||
|
@ -811,12 +742,8 @@ where
|
|||
let newpath = PathBuf::from(newpath);
|
||||
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
permissions
|
||||
.check_write_all("Deno.symlinkSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
permissions
|
||||
.check_read_all("Deno.symlinkSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
permissions.check_write_all("Deno.symlinkSync()")?;
|
||||
permissions.check_read_all("Deno.symlinkSync()")?;
|
||||
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
fs.symlink_sync(&oldpath, &newpath, file_type)
|
||||
|
@ -841,12 +768,8 @@ where
|
|||
let fs = {
|
||||
let mut state = state.borrow_mut();
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
permissions
|
||||
.check_write_all("Deno.symlink()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
permissions
|
||||
.check_read_all("Deno.symlink()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
permissions.check_write_all("Deno.symlink()")?;
|
||||
permissions.check_read_all("Deno.symlink()")?;
|
||||
state.borrow::<FileSystemRc>().clone()
|
||||
};
|
||||
|
||||
|
@ -868,8 +791,7 @@ where
|
|||
{
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_read(&path, "Deno.readLink()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_read(&path, "Deno.readLink()")?;
|
||||
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
|
||||
|
@ -891,8 +813,7 @@ where
|
|||
let mut state = state.borrow_mut();
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_read(&path, "Deno.readLink()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_read(&path, "Deno.readLink()")?;
|
||||
(state.borrow::<FileSystemRc>().clone(), path)
|
||||
};
|
||||
|
||||
|
@ -915,8 +836,7 @@ where
|
|||
{
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write(path, "Deno.truncateSync()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_write(path, "Deno.truncateSync()")?;
|
||||
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
fs.truncate_sync(&path, len)
|
||||
|
@ -938,8 +858,7 @@ where
|
|||
let mut state = state.borrow_mut();
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write(&path, "Deno.truncate()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_write(&path, "Deno.truncate()")?;
|
||||
(state.borrow::<FileSystemRc>().clone(), path)
|
||||
};
|
||||
|
||||
|
@ -962,10 +881,7 @@ pub fn op_fs_utime_sync<P>(
|
|||
where
|
||||
P: FsPermissions + 'static,
|
||||
{
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write(path, "Deno.utime()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let path = state.borrow_mut::<P>().check_write(path, "Deno.utime()")?;
|
||||
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
fs.utime_sync(&path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)
|
||||
|
@ -988,10 +904,7 @@ where
|
|||
{
|
||||
let (fs, path) = {
|
||||
let mut state = state.borrow_mut();
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write(&path, "Deno.utime()")
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
let path = state.borrow_mut::<P>().check_write(&path, "Deno.utime()")?;
|
||||
(state.borrow::<FileSystemRc>().clone(), path)
|
||||
};
|
||||
|
||||
|
@ -1219,16 +1132,12 @@ where
|
|||
{
|
||||
let fs = state.borrow::<FileSystemRc>().clone();
|
||||
let dir = match dir {
|
||||
Some(dir) => state
|
||||
.borrow_mut::<P>()
|
||||
.check_write(dir, api_name)
|
||||
.map_err(FsOpsError::Permission)?,
|
||||
Some(dir) => state.borrow_mut::<P>().check_write(dir, api_name)?,
|
||||
None => {
|
||||
let dir = fs.tmp_dir().context("tmpdir")?;
|
||||
state
|
||||
.borrow_mut::<P>()
|
||||
.check_write_blind(&dir, "TMP", api_name)
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_write_blind(&dir, "TMP", api_name)?;
|
||||
dir
|
||||
}
|
||||
};
|
||||
|
@ -1246,16 +1155,12 @@ where
|
|||
let mut state = state.borrow_mut();
|
||||
let fs = state.borrow::<FileSystemRc>().clone();
|
||||
let dir = match dir {
|
||||
Some(dir) => state
|
||||
.borrow_mut::<P>()
|
||||
.check_write(dir, api_name)
|
||||
.map_err(FsOpsError::Permission)?,
|
||||
Some(dir) => state.borrow_mut::<P>().check_write(dir, api_name)?,
|
||||
None => {
|
||||
let dir = fs.tmp_dir().context("tmpdir")?;
|
||||
state
|
||||
.borrow_mut::<P>()
|
||||
.check_write_blind(&dir, "TMP", api_name)
|
||||
.map_err(FsOpsError::Permission)?;
|
||||
.check_write_blind(&dir, "TMP", api_name)?;
|
||||
dir
|
||||
}
|
||||
};
|
||||
|
|
|
@ -14,6 +14,7 @@ import {
|
|||
op_http_get_request_headers,
|
||||
op_http_get_request_method_and_url,
|
||||
op_http_read_request_body,
|
||||
op_http_request_on_cancel,
|
||||
op_http_serve,
|
||||
op_http_serve_on,
|
||||
op_http_set_promise_complete,
|
||||
|
@ -373,6 +374,18 @@ class InnerRequest {
|
|||
get external() {
|
||||
return this.#external;
|
||||
}
|
||||
|
||||
onCancel(callback) {
|
||||
if (this.#external === null) {
|
||||
callback();
|
||||
return;
|
||||
}
|
||||
|
||||
PromisePrototypeThen(
|
||||
op_http_request_on_cancel(this.#external),
|
||||
callback,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
class CallbackContext {
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_http"
|
||||
version = "0.173.0"
|
||||
version = "0.174.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -700,6 +700,27 @@ fn set_response(
|
|||
http.complete();
|
||||
}
|
||||
|
||||
#[op2(fast)]
|
||||
pub fn op_http_get_request_cancelled(external: *const c_void) -> bool {
|
||||
let http =
|
||||
// SAFETY: op is called with external.
|
||||
unsafe { clone_external!(external, "op_http_get_request_cancelled") };
|
||||
http.cancelled()
|
||||
}
|
||||
|
||||
#[op2(async)]
|
||||
pub async fn op_http_request_on_cancel(external: *const c_void) {
|
||||
let http =
|
||||
// SAFETY: op is called with external.
|
||||
unsafe { clone_external!(external, "op_http_request_on_cancel") };
|
||||
let (tx, rx) = tokio::sync::oneshot::channel();
|
||||
|
||||
http.on_cancel(tx);
|
||||
drop(http);
|
||||
|
||||
rx.await.ok();
|
||||
}
|
||||
|
||||
/// Returned promise resolves when body streaming finishes.
|
||||
/// Call [`op_http_close_after_finish`] when done with the external.
|
||||
#[op2(async)]
|
||||
|
|
|
@ -112,7 +112,9 @@ deno_core::extension!(
|
|||
http_next::op_http_close_after_finish,
|
||||
http_next::op_http_get_request_header,
|
||||
http_next::op_http_get_request_headers,
|
||||
http_next::op_http_request_on_cancel,
|
||||
http_next::op_http_get_request_method_and_url<HTTP>,
|
||||
http_next::op_http_get_request_cancelled,
|
||||
http_next::op_http_read_request_body,
|
||||
http_next::op_http_serve_on<HTTP>,
|
||||
http_next::op_http_serve<HTTP>,
|
||||
|
|
|
@ -27,6 +27,7 @@ use std::rc::Rc;
|
|||
use std::task::Context;
|
||||
use std::task::Poll;
|
||||
use std::task::Waker;
|
||||
use tokio::sync::oneshot;
|
||||
|
||||
pub type Request = hyper::Request<Incoming>;
|
||||
pub type Response = hyper::Response<HttpRecordResponse>;
|
||||
|
@ -211,6 +212,7 @@ pub struct UpgradeUnavailableError;
|
|||
|
||||
struct HttpRecordInner {
|
||||
server_state: SignallingRc<HttpServerState>,
|
||||
closed_channel: Option<oneshot::Sender<()>>,
|
||||
request_info: HttpConnectionProperties,
|
||||
request_parts: http::request::Parts,
|
||||
request_body: Option<RequestBodyState>,
|
||||
|
@ -276,6 +278,7 @@ impl HttpRecord {
|
|||
response_body_finished: false,
|
||||
response_body_waker: None,
|
||||
trailers: None,
|
||||
closed_channel: None,
|
||||
been_dropped: false,
|
||||
finished: false,
|
||||
needs_close_after_finish: false,
|
||||
|
@ -312,6 +315,10 @@ impl HttpRecord {
|
|||
RefMut::map(self.self_mut(), |inner| &mut inner.needs_close_after_finish)
|
||||
}
|
||||
|
||||
pub fn on_cancel(&self, sender: oneshot::Sender<()>) {
|
||||
self.self_mut().closed_channel = Some(sender);
|
||||
}
|
||||
|
||||
fn recycle(self: Rc<Self>) {
|
||||
assert!(
|
||||
Rc::strong_count(&self) == 1,
|
||||
|
@ -390,6 +397,9 @@ impl HttpRecord {
|
|||
inner.been_dropped = true;
|
||||
// The request body might include actual resources.
|
||||
inner.request_body.take();
|
||||
if let Some(closed_channel) = inner.closed_channel.take() {
|
||||
let _ = closed_channel.send(());
|
||||
}
|
||||
}
|
||||
|
||||
/// Complete this record, potentially expunging it if it is fully complete (ie: cancelled as well).
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_io"
|
||||
version = "0.85.0"
|
||||
version = "0.86.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_kv"
|
||||
version = "0.83.0"
|
||||
version = "0.84.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -15,6 +15,7 @@ use deno_core::futures::Stream;
|
|||
use deno_core::OpState;
|
||||
use deno_fetch::create_http_client;
|
||||
use deno_fetch::CreateHttpClientOptions;
|
||||
use deno_permissions::PermissionCheckError;
|
||||
use deno_tls::rustls::RootCertStore;
|
||||
use deno_tls::Proxy;
|
||||
use deno_tls::RootCertStoreProvider;
|
||||
|
@ -45,17 +46,17 @@ impl HttpOptions {
|
|||
}
|
||||
|
||||
pub trait RemoteDbHandlerPermissions {
|
||||
fn check_env(&mut self, var: &str) -> Result<(), AnyError>;
|
||||
fn check_env(&mut self, var: &str) -> Result<(), PermissionCheckError>;
|
||||
fn check_net_url(
|
||||
&mut self,
|
||||
url: &Url,
|
||||
api_name: &str,
|
||||
) -> Result<(), AnyError>;
|
||||
) -> Result<(), PermissionCheckError>;
|
||||
}
|
||||
|
||||
impl RemoteDbHandlerPermissions for deno_permissions::PermissionsContainer {
|
||||
#[inline(always)]
|
||||
fn check_env(&mut self, var: &str) -> Result<(), AnyError> {
|
||||
fn check_env(&mut self, var: &str) -> Result<(), PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_env(self, var)
|
||||
}
|
||||
|
||||
|
@ -64,7 +65,7 @@ impl RemoteDbHandlerPermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
url: &Url,
|
||||
api_name: &str,
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_net_url(self, url, api_name)
|
||||
}
|
||||
}
|
||||
|
@ -103,7 +104,9 @@ impl<P: RemoteDbHandlerPermissions + 'static> denokv_remote::RemotePermissions
|
|||
fn check_net_url(&self, url: &Url) -> Result<(), anyhow::Error> {
|
||||
let mut state = self.state.borrow_mut();
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
permissions.check_net_url(url, "Deno.openKv")
|
||||
permissions
|
||||
.check_net_url(url, "Deno.openKv")
|
||||
.map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -13,20 +13,20 @@ use std::sync::Arc;
|
|||
use std::sync::Mutex;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use crate::DatabaseHandler;
|
||||
use async_trait::async_trait;
|
||||
use deno_core::error::type_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::unsync::spawn_blocking;
|
||||
use deno_core::OpState;
|
||||
use deno_path_util::normalize_path;
|
||||
use deno_permissions::PermissionCheckError;
|
||||
pub use denokv_sqlite::SqliteBackendError;
|
||||
use denokv_sqlite::SqliteConfig;
|
||||
use denokv_sqlite::SqliteNotifier;
|
||||
use rand::SeedableRng;
|
||||
use rusqlite::OpenFlags;
|
||||
|
||||
use crate::DatabaseHandler;
|
||||
|
||||
static SQLITE_NOTIFIERS_MAP: OnceLock<Mutex<HashMap<PathBuf, SqliteNotifier>>> =
|
||||
OnceLock::new();
|
||||
|
||||
|
@ -42,13 +42,13 @@ pub trait SqliteDbHandlerPermissions {
|
|||
&mut self,
|
||||
p: &str,
|
||||
api_name: &str,
|
||||
) -> Result<PathBuf, AnyError>;
|
||||
) -> Result<PathBuf, PermissionCheckError>;
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn check_write<'a>(
|
||||
&mut self,
|
||||
p: &'a Path,
|
||||
api_name: &str,
|
||||
) -> Result<Cow<'a, Path>, AnyError>;
|
||||
) -> Result<Cow<'a, Path>, PermissionCheckError>;
|
||||
}
|
||||
|
||||
impl SqliteDbHandlerPermissions for deno_permissions::PermissionsContainer {
|
||||
|
@ -57,7 +57,7 @@ impl SqliteDbHandlerPermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
p: &str,
|
||||
api_name: &str,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
) -> Result<PathBuf, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_read(self, p, api_name)
|
||||
}
|
||||
|
||||
|
@ -66,7 +66,7 @@ impl SqliteDbHandlerPermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
p: &'a Path,
|
||||
api_name: &str,
|
||||
) -> Result<Cow<'a, Path>, AnyError> {
|
||||
) -> Result<Cow<'a, Path>, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_write_path(self, p, api_name)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_napi"
|
||||
version = "0.106.0"
|
||||
version = "0.107.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -43,7 +43,7 @@ pub enum NApiError {
|
|||
#[error("Unable to find register Node-API module at {}", .0.display())]
|
||||
ModuleNotFound(PathBuf),
|
||||
#[error(transparent)]
|
||||
Permission(deno_core::error::AnyError),
|
||||
Permission(#[from] PermissionCheckError),
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
|
@ -55,6 +55,7 @@ use libloading::os::windows::*;
|
|||
// Expose common stuff for ease of use.
|
||||
// `use deno_napi::*`
|
||||
pub use deno_core::v8;
|
||||
use deno_permissions::PermissionCheckError;
|
||||
pub use std::ffi::CStr;
|
||||
pub use std::os::raw::c_char;
|
||||
pub use std::os::raw::c_void;
|
||||
|
@ -508,20 +509,14 @@ deno_core::extension!(deno_napi,
|
|||
|
||||
pub trait NapiPermissions {
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn check(
|
||||
&mut self,
|
||||
path: &str,
|
||||
) -> Result<PathBuf, deno_core::error::AnyError>;
|
||||
fn check(&mut self, path: &str) -> Result<PathBuf, PermissionCheckError>;
|
||||
}
|
||||
|
||||
// NOTE(bartlomieju): for now, NAPI uses `--allow-ffi` flag, but that might
|
||||
// change in the future.
|
||||
impl NapiPermissions for deno_permissions::PermissionsContainer {
|
||||
#[inline(always)]
|
||||
fn check(
|
||||
&mut self,
|
||||
path: &str,
|
||||
) -> Result<PathBuf, deno_core::error::AnyError> {
|
||||
fn check(&mut self, path: &str) -> Result<PathBuf, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_ffi(self, path)
|
||||
}
|
||||
}
|
||||
|
@ -553,7 +548,7 @@ where
|
|||
let (async_work_sender, cleanup_hooks, external_ops_tracker, path) = {
|
||||
let mut op_state = op_state.borrow_mut();
|
||||
let permissions = op_state.borrow_mut::<NP>();
|
||||
let path = permissions.check(&path).map_err(NApiError::Permission)?;
|
||||
let path = permissions.check(&path)?;
|
||||
let napi_state = op_state.borrow::<NapiState>();
|
||||
(
|
||||
op_state.borrow::<V8CrossThreadTaskSpawner>().clone(),
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "napi_sym"
|
||||
version = "0.105.0"
|
||||
version = "0.106.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_net"
|
||||
version = "0.167.0"
|
||||
version = "0.168.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
@ -17,11 +17,11 @@ path = "lib.rs"
|
|||
deno_core.workspace = true
|
||||
deno_permissions.workspace = true
|
||||
deno_tls.workspace = true
|
||||
hickory-proto = "0.24"
|
||||
hickory-resolver = { version = "0.24", features = ["tokio-runtime", "serde-config"] }
|
||||
pin-project.workspace = true
|
||||
rustls-tokio-stream.workspace = true
|
||||
serde.workspace = true
|
||||
socket2.workspace = true
|
||||
thiserror.workspace = true
|
||||
tokio.workspace = true
|
||||
trust-dns-proto = "0.23"
|
||||
trust-dns-resolver = { version = "0.23", features = ["tokio-runtime", "serde-config"] }
|
||||
|
|
|
@ -11,6 +11,7 @@ mod tcp;
|
|||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::OpState;
|
||||
use deno_permissions::PermissionCheckError;
|
||||
use deno_tls::rustls::RootCertStore;
|
||||
use deno_tls::RootCertStoreProvider;
|
||||
use std::borrow::Cow;
|
||||
|
@ -25,25 +26,25 @@ pub trait NetPermissions {
|
|||
&mut self,
|
||||
host: &(T, Option<u16>),
|
||||
api_name: &str,
|
||||
) -> Result<(), AnyError>;
|
||||
) -> Result<(), PermissionCheckError>;
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn check_read(
|
||||
&mut self,
|
||||
p: &str,
|
||||
api_name: &str,
|
||||
) -> Result<PathBuf, AnyError>;
|
||||
) -> Result<PathBuf, PermissionCheckError>;
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn check_write(
|
||||
&mut self,
|
||||
p: &str,
|
||||
api_name: &str,
|
||||
) -> Result<PathBuf, AnyError>;
|
||||
) -> Result<PathBuf, PermissionCheckError>;
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn check_write_path<'a>(
|
||||
&mut self,
|
||||
p: &'a Path,
|
||||
api_name: &str,
|
||||
) -> Result<Cow<'a, Path>, AnyError>;
|
||||
) -> Result<Cow<'a, Path>, PermissionCheckError>;
|
||||
}
|
||||
|
||||
impl NetPermissions for deno_permissions::PermissionsContainer {
|
||||
|
@ -52,7 +53,7 @@ impl NetPermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
host: &(T, Option<u16>),
|
||||
api_name: &str,
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_net(self, host, api_name)
|
||||
}
|
||||
|
||||
|
@ -61,7 +62,7 @@ impl NetPermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
path: &str,
|
||||
api_name: &str,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
) -> Result<PathBuf, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_read(self, path, api_name)
|
||||
}
|
||||
|
||||
|
@ -70,7 +71,7 @@ impl NetPermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
path: &str,
|
||||
api_name: &str,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
) -> Result<PathBuf, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_write(self, path, api_name)
|
||||
}
|
||||
|
||||
|
@ -79,7 +80,7 @@ impl NetPermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
path: &'a Path,
|
||||
api_name: &str,
|
||||
) -> Result<Cow<'a, Path>, AnyError> {
|
||||
) -> Result<Cow<'a, Path>, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_write_path(
|
||||
self, path, api_name,
|
||||
)
|
||||
|
|
|
@ -18,6 +18,16 @@ use deno_core::OpState;
|
|||
use deno_core::RcRef;
|
||||
use deno_core::Resource;
|
||||
use deno_core::ResourceId;
|
||||
use hickory_proto::rr::rdata::caa::Value;
|
||||
use hickory_proto::rr::record_data::RData;
|
||||
use hickory_proto::rr::record_type::RecordType;
|
||||
use hickory_resolver::config::NameServerConfigGroup;
|
||||
use hickory_resolver::config::ResolverConfig;
|
||||
use hickory_resolver::config::ResolverOpts;
|
||||
use hickory_resolver::error::ResolveError;
|
||||
use hickory_resolver::error::ResolveErrorKind;
|
||||
use hickory_resolver::system_conf;
|
||||
use hickory_resolver::AsyncResolver;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use socket2::Domain;
|
||||
|
@ -33,16 +43,6 @@ use std::rc::Rc;
|
|||
use std::str::FromStr;
|
||||
use tokio::net::TcpStream;
|
||||
use tokio::net::UdpSocket;
|
||||
use trust_dns_proto::rr::rdata::caa::Value;
|
||||
use trust_dns_proto::rr::record_data::RData;
|
||||
use trust_dns_proto::rr::record_type::RecordType;
|
||||
use trust_dns_resolver::config::NameServerConfigGroup;
|
||||
use trust_dns_resolver::config::ResolverConfig;
|
||||
use trust_dns_resolver::config::ResolverOpts;
|
||||
use trust_dns_resolver::error::ResolveError;
|
||||
use trust_dns_resolver::error::ResolveErrorKind;
|
||||
use trust_dns_resolver::system_conf;
|
||||
use trust_dns_resolver::AsyncResolver;
|
||||
|
||||
#[derive(Serialize, Clone, Debug)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
|
@ -81,8 +81,8 @@ pub enum NetError {
|
|||
Io(#[from] std::io::Error),
|
||||
#[error("Another accept task is ongoing")]
|
||||
AcceptTaskOngoing,
|
||||
#[error("{0}")]
|
||||
Permission(deno_core::error::AnyError),
|
||||
#[error(transparent)]
|
||||
Permission(#[from] deno_permissions::PermissionCheckError),
|
||||
#[error("{0}")]
|
||||
Resource(deno_core::error::AnyError),
|
||||
#[error("No resolved address found")]
|
||||
|
@ -195,12 +195,10 @@ where
|
|||
{
|
||||
{
|
||||
let mut s = state.borrow_mut();
|
||||
s.borrow_mut::<NP>()
|
||||
.check_net(
|
||||
&(&addr.hostname, Some(addr.port)),
|
||||
"Deno.DatagramConn.send()",
|
||||
)
|
||||
.map_err(NetError::Permission)?;
|
||||
s.borrow_mut::<NP>().check_net(
|
||||
&(&addr.hostname, Some(addr.port)),
|
||||
"Deno.DatagramConn.send()",
|
||||
)?;
|
||||
}
|
||||
let addr = resolve_addr(&addr.hostname, addr.port)
|
||||
.await?
|
||||
|
@ -369,8 +367,7 @@ where
|
|||
let mut state_ = state.borrow_mut();
|
||||
state_
|
||||
.borrow_mut::<NP>()
|
||||
.check_net(&(&addr.hostname, Some(addr.port)), "Deno.connect()")
|
||||
.map_err(NetError::Permission)?;
|
||||
.check_net(&(&addr.hostname, Some(addr.port)), "Deno.connect()")?;
|
||||
}
|
||||
|
||||
let addr = resolve_addr(&addr.hostname, addr.port)
|
||||
|
@ -420,8 +417,7 @@ where
|
|||
}
|
||||
state
|
||||
.borrow_mut::<NP>()
|
||||
.check_net(&(&addr.hostname, Some(addr.port)), "Deno.listen()")
|
||||
.map_err(NetError::Permission)?;
|
||||
.check_net(&(&addr.hostname, Some(addr.port)), "Deno.listen()")?;
|
||||
let addr = resolve_addr_sync(&addr.hostname, addr.port)?
|
||||
.next()
|
||||
.ok_or_else(|| NetError::NoResolvedAddress)?;
|
||||
|
@ -449,8 +445,7 @@ where
|
|||
{
|
||||
state
|
||||
.borrow_mut::<NP>()
|
||||
.check_net(&(&addr.hostname, Some(addr.port)), "Deno.listenDatagram()")
|
||||
.map_err(NetError::Permission)?;
|
||||
.check_net(&(&addr.hostname, Some(addr.port)), "Deno.listenDatagram()")?;
|
||||
let addr = resolve_addr_sync(&addr.hostname, addr.port)?
|
||||
.next()
|
||||
.ok_or_else(|| NetError::NoResolvedAddress)?;
|
||||
|
@ -647,9 +642,7 @@ where
|
|||
let socker_addr = &ns.socket_addr;
|
||||
let ip = socker_addr.ip().to_string();
|
||||
let port = socker_addr.port();
|
||||
perm
|
||||
.check_net(&(ip, Some(port)), "Deno.resolveDns()")
|
||||
.map_err(NetError::Permission)?;
|
||||
perm.check_net(&(ip, Some(port)), "Deno.resolveDns()")?;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -834,6 +827,22 @@ mod tests {
|
|||
use deno_core::futures::FutureExt;
|
||||
use deno_core::JsRuntime;
|
||||
use deno_core::RuntimeOptions;
|
||||
use deno_permissions::PermissionCheckError;
|
||||
use hickory_proto::rr::rdata::a::A;
|
||||
use hickory_proto::rr::rdata::aaaa::AAAA;
|
||||
use hickory_proto::rr::rdata::caa::KeyValue;
|
||||
use hickory_proto::rr::rdata::caa::CAA;
|
||||
use hickory_proto::rr::rdata::mx::MX;
|
||||
use hickory_proto::rr::rdata::name::ANAME;
|
||||
use hickory_proto::rr::rdata::name::CNAME;
|
||||
use hickory_proto::rr::rdata::name::NS;
|
||||
use hickory_proto::rr::rdata::name::PTR;
|
||||
use hickory_proto::rr::rdata::naptr::NAPTR;
|
||||
use hickory_proto::rr::rdata::srv::SRV;
|
||||
use hickory_proto::rr::rdata::txt::TXT;
|
||||
use hickory_proto::rr::rdata::SOA;
|
||||
use hickory_proto::rr::record_data::RData;
|
||||
use hickory_proto::rr::Name;
|
||||
use socket2::SockRef;
|
||||
use std::net::Ipv4Addr;
|
||||
use std::net::Ipv6Addr;
|
||||
|
@ -842,21 +851,6 @@ mod tests {
|
|||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
use trust_dns_proto::rr::rdata::a::A;
|
||||
use trust_dns_proto::rr::rdata::aaaa::AAAA;
|
||||
use trust_dns_proto::rr::rdata::caa::KeyValue;
|
||||
use trust_dns_proto::rr::rdata::caa::CAA;
|
||||
use trust_dns_proto::rr::rdata::mx::MX;
|
||||
use trust_dns_proto::rr::rdata::name::ANAME;
|
||||
use trust_dns_proto::rr::rdata::name::CNAME;
|
||||
use trust_dns_proto::rr::rdata::name::NS;
|
||||
use trust_dns_proto::rr::rdata::name::PTR;
|
||||
use trust_dns_proto::rr::rdata::naptr::NAPTR;
|
||||
use trust_dns_proto::rr::rdata::srv::SRV;
|
||||
use trust_dns_proto::rr::rdata::txt::TXT;
|
||||
use trust_dns_proto::rr::rdata::SOA;
|
||||
use trust_dns_proto::rr::record_data::RData;
|
||||
use trust_dns_proto::rr::Name;
|
||||
|
||||
#[test]
|
||||
fn rdata_to_return_record_a() {
|
||||
|
@ -1041,7 +1035,7 @@ mod tests {
|
|||
&mut self,
|
||||
_host: &(T, Option<u16>),
|
||||
_api_name: &str,
|
||||
) -> Result<(), deno_core::error::AnyError> {
|
||||
) -> Result<(), PermissionCheckError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -1049,7 +1043,7 @@ mod tests {
|
|||
&mut self,
|
||||
p: &str,
|
||||
_api_name: &str,
|
||||
) -> Result<PathBuf, deno_core::error::AnyError> {
|
||||
) -> Result<PathBuf, PermissionCheckError> {
|
||||
Ok(PathBuf::from(p))
|
||||
}
|
||||
|
||||
|
@ -1057,7 +1051,7 @@ mod tests {
|
|||
&mut self,
|
||||
p: &str,
|
||||
_api_name: &str,
|
||||
) -> Result<PathBuf, deno_core::error::AnyError> {
|
||||
) -> Result<PathBuf, PermissionCheckError> {
|
||||
Ok(PathBuf::from(p))
|
||||
}
|
||||
|
||||
|
@ -1065,7 +1059,7 @@ mod tests {
|
|||
&mut self,
|
||||
p: &'a Path,
|
||||
_api_name: &str,
|
||||
) -> Result<Cow<'a, Path>, deno_core::error::AnyError> {
|
||||
) -> Result<Cow<'a, Path>, PermissionCheckError> {
|
||||
Ok(Cow::Borrowed(p))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
[package]
|
||||
name = "deno_node"
|
||||
version = "0.112.0"
|
||||
version = "0.113.0"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
|
|
@ -24,6 +24,7 @@ pub mod ops;
|
|||
mod polyfill;
|
||||
|
||||
pub use deno_package_json::PackageJson;
|
||||
use deno_permissions::PermissionCheckError;
|
||||
pub use node_resolver::PathClean;
|
||||
pub use ops::ipc::ChildPipeFd;
|
||||
pub use ops::ipc::IpcJsonStreamResource;
|
||||
|
@ -45,10 +46,18 @@ pub trait NodePermissions {
|
|||
&mut self,
|
||||
url: &Url,
|
||||
api_name: &str,
|
||||
) -> Result<(), AnyError>;
|
||||
) -> Result<(), PermissionCheckError>;
|
||||
fn check_net(
|
||||
&mut self,
|
||||
host: (&str, Option<u16>),
|
||||
api_name: &str,
|
||||
) -> Result<(), PermissionCheckError>;
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
#[inline(always)]
|
||||
fn check_read(&mut self, path: &str) -> Result<PathBuf, AnyError> {
|
||||
fn check_read(
|
||||
&mut self,
|
||||
path: &str,
|
||||
) -> Result<PathBuf, PermissionCheckError> {
|
||||
self.check_read_with_api_name(path, None)
|
||||
}
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
|
@ -56,20 +65,24 @@ pub trait NodePermissions {
|
|||
&mut self,
|
||||
path: &str,
|
||||
api_name: Option<&str>,
|
||||
) -> Result<PathBuf, AnyError>;
|
||||
) -> Result<PathBuf, PermissionCheckError>;
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn check_read_path<'a>(
|
||||
&mut self,
|
||||
path: &'a Path,
|
||||
) -> Result<Cow<'a, Path>, AnyError>;
|
||||
) -> Result<Cow<'a, Path>, PermissionCheckError>;
|
||||
fn query_read_all(&mut self) -> bool;
|
||||
fn check_sys(&mut self, kind: &str, api_name: &str) -> Result<(), AnyError>;
|
||||
fn check_sys(
|
||||
&mut self,
|
||||
kind: &str,
|
||||
api_name: &str,
|
||||
) -> Result<(), PermissionCheckError>;
|
||||
#[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
|
||||
fn check_write_with_api_name(
|
||||
&mut self,
|
||||
path: &str,
|
||||
api_name: Option<&str>,
|
||||
) -> Result<PathBuf, AnyError>;
|
||||
) -> Result<PathBuf, PermissionCheckError>;
|
||||
}
|
||||
|
||||
impl NodePermissions for deno_permissions::PermissionsContainer {
|
||||
|
@ -78,16 +91,24 @@ impl NodePermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
url: &Url,
|
||||
api_name: &str,
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_net_url(self, url, api_name)
|
||||
}
|
||||
|
||||
fn check_net(
|
||||
&mut self,
|
||||
host: (&str, Option<u16>),
|
||||
api_name: &str,
|
||||
) -> Result<(), PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_net(self, &host, api_name)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn check_read_with_api_name(
|
||||
&mut self,
|
||||
path: &str,
|
||||
api_name: Option<&str>,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
) -> Result<PathBuf, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_read_with_api_name(
|
||||
self, path, api_name,
|
||||
)
|
||||
|
@ -96,7 +117,7 @@ impl NodePermissions for deno_permissions::PermissionsContainer {
|
|||
fn check_read_path<'a>(
|
||||
&mut self,
|
||||
path: &'a Path,
|
||||
) -> Result<Cow<'a, Path>, AnyError> {
|
||||
) -> Result<Cow<'a, Path>, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_read_path(self, path, None)
|
||||
}
|
||||
|
||||
|
@ -109,13 +130,17 @@ impl NodePermissions for deno_permissions::PermissionsContainer {
|
|||
&mut self,
|
||||
path: &str,
|
||||
api_name: Option<&str>,
|
||||
) -> Result<PathBuf, AnyError> {
|
||||
) -> Result<PathBuf, PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_write_with_api_name(
|
||||
self, path, api_name,
|
||||
)
|
||||
}
|
||||
|
||||
fn check_sys(&mut self, kind: &str, api_name: &str) -> Result<(), AnyError> {
|
||||
fn check_sys(
|
||||
&mut self,
|
||||
kind: &str,
|
||||
api_name: &str,
|
||||
) -> Result<(), PermissionCheckError> {
|
||||
deno_permissions::PermissionsContainer::check_sys(self, kind, api_name)
|
||||
}
|
||||
}
|
||||
|
@ -386,6 +411,15 @@ deno_core::extension!(deno_node,
|
|||
ops::process::op_node_process_kill,
|
||||
ops::process::op_process_abort,
|
||||
ops::tls::op_get_root_certificates,
|
||||
ops::inspector::op_inspector_open<P>,
|
||||
ops::inspector::op_inspector_close,
|
||||
ops::inspector::op_inspector_url,
|
||||
ops::inspector::op_inspector_wait,
|
||||
ops::inspector::op_inspector_connect<P>,
|
||||
ops::inspector::op_inspector_dispatch,
|
||||
ops::inspector::op_inspector_disconnect,
|
||||
ops::inspector::op_inspector_emit_protocol_event,
|
||||
ops::inspector::op_inspector_enabled,
|
||||
],
|
||||
esm_entry_point = "ext:deno_node/02_init.js",
|
||||
esm = [
|
||||
|
@ -594,8 +628,8 @@ deno_core::extension!(deno_node,
|
|||
"node:http" = "http.ts",
|
||||
"node:http2" = "http2.ts",
|
||||
"node:https" = "https.ts",
|
||||
"node:inspector" = "inspector.ts",
|
||||
"node:inspector/promises" = "inspector.ts",
|
||||
"node:inspector" = "inspector.js",
|
||||
"node:inspector/promises" = "inspector/promises.js",
|
||||
"node:module" = "01_require.js",
|
||||
"node:net" = "net.ts",
|
||||
"node:os" = "os.ts",
|
||||
|
|
|
@ -4,9 +4,6 @@ use aes::cipher::block_padding::Pkcs7;
|
|||
use aes::cipher::BlockDecryptMut;
|
||||
use aes::cipher::BlockEncryptMut;
|
||||
use aes::cipher::KeyIvInit;
|
||||
use deno_core::error::range_error;
|
||||
use deno_core::error::type_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::Resource;
|
||||
use digest::generic_array::GenericArray;
|
||||
use digest::KeyInit;
|
||||
|
@ -50,8 +47,22 @@ pub struct DecipherContext {
|
|||
decipher: Rc<RefCell<Decipher>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum CipherContextError {
|
||||
#[error("Cipher context is already in use")]
|
||||
ContextInUse,
|
||||
#[error("{0}")]
|
||||
Resource(deno_core::error::AnyError),
|
||||
#[error(transparent)]
|
||||
Cipher(#[from] CipherError),
|
||||
}
|
||||
|
||||
impl CipherContext {
|
||||
pub fn new(algorithm: &str, key: &[u8], iv: &[u8]) -> Result<Self, AnyError> {
|
||||
pub fn new(
|
||||
algorithm: &str,
|
||||
key: &[u8],
|
||||
iv: &[u8],
|
||||
) -> Result<Self, CipherContextError> {
|
||||
Ok(Self {
|
||||
cipher: Rc::new(RefCell::new(Cipher::new(algorithm, key, iv)?)),
|
||||
})
|
||||
|
@ -74,16 +85,31 @@ impl CipherContext {
|
|||
auto_pad: bool,
|
||||
input: &[u8],
|
||||
output: &mut [u8],
|
||||
) -> Result<Tag, AnyError> {
|
||||
) -> Result<Tag, CipherContextError> {
|
||||
Rc::try_unwrap(self.cipher)
|
||||
.map_err(|_| type_error("Cipher context is already in use"))?
|
||||
.map_err(|_| CipherContextError::ContextInUse)?
|
||||
.into_inner()
|
||||
.r#final(auto_pad, input, output)
|
||||
.map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum DecipherContextError {
|
||||
#[error("Decipher context is already in use")]
|
||||
ContextInUse,
|
||||
#[error("{0}")]
|
||||
Resource(deno_core::error::AnyError),
|
||||
#[error(transparent)]
|
||||
Decipher(#[from] DecipherError),
|
||||
}
|
||||
|
||||
impl DecipherContext {
|
||||
pub fn new(algorithm: &str, key: &[u8], iv: &[u8]) -> Result<Self, AnyError> {
|
||||
pub fn new(
|
||||
algorithm: &str,
|
||||
key: &[u8],
|
||||
iv: &[u8],
|
||||
) -> Result<Self, DecipherContextError> {
|
||||
Ok(Self {
|
||||
decipher: Rc::new(RefCell::new(Decipher::new(algorithm, key, iv)?)),
|
||||
})
|
||||
|
@ -103,11 +129,12 @@ impl DecipherContext {
|
|||
input: &[u8],
|
||||
output: &mut [u8],
|
||||
auth_tag: &[u8],
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), DecipherContextError> {
|
||||
Rc::try_unwrap(self.decipher)
|
||||
.map_err(|_| type_error("Decipher context is already in use"))?
|
||||
.map_err(|_| DecipherContextError::ContextInUse)?
|
||||
.into_inner()
|
||||
.r#final(auto_pad, input, output, auth_tag)
|
||||
.map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -123,12 +150,26 @@ impl Resource for DecipherContext {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum CipherError {
|
||||
#[error("IV length must be 12 bytes")]
|
||||
InvalidIvLength,
|
||||
#[error("Invalid key length")]
|
||||
InvalidKeyLength,
|
||||
#[error("Invalid initialization vector")]
|
||||
InvalidInitializationVector,
|
||||
#[error("Cannot pad the input data")]
|
||||
CannotPadInputData,
|
||||
#[error("Unknown cipher {0}")]
|
||||
UnknownCipher(String),
|
||||
}
|
||||
|
||||
impl Cipher {
|
||||
fn new(
|
||||
algorithm_name: &str,
|
||||
key: &[u8],
|
||||
iv: &[u8],
|
||||
) -> Result<Self, AnyError> {
|
||||
) -> Result<Self, CipherError> {
|
||||
use Cipher::*;
|
||||
Ok(match algorithm_name {
|
||||
"aes-128-cbc" => {
|
||||
|
@ -139,7 +180,7 @@ impl Cipher {
|
|||
"aes-256-ecb" => Aes256Ecb(Box::new(ecb::Encryptor::new(key.into()))),
|
||||
"aes-128-gcm" => {
|
||||
if iv.len() != 12 {
|
||||
return Err(type_error("IV length must be 12 bytes"));
|
||||
return Err(CipherError::InvalidIvLength);
|
||||
}
|
||||
|
||||
let cipher =
|
||||
|
@ -149,7 +190,7 @@ impl Cipher {
|
|||
}
|
||||
"aes-256-gcm" => {
|
||||
if iv.len() != 12 {
|
||||
return Err(type_error("IV length must be 12 bytes"));
|
||||
return Err(CipherError::InvalidIvLength);
|
||||
}
|
||||
|
||||
let cipher =
|
||||
|
@ -159,15 +200,15 @@ impl Cipher {
|
|||
}
|
||||
"aes256" | "aes-256-cbc" => {
|
||||
if key.len() != 32 {
|
||||
return Err(range_error("Invalid key length"));
|
||||
return Err(CipherError::InvalidKeyLength);
|
||||
}
|
||||
if iv.len() != 16 {
|
||||
return Err(type_error("Invalid initialization vector"));
|
||||
return Err(CipherError::InvalidInitializationVector);
|
||||
}
|
||||
|
||||
Aes256Cbc(Box::new(cbc::Encryptor::new(key.into(), iv.into())))
|
||||
}
|
||||
_ => return Err(type_error(format!("Unknown cipher {algorithm_name}"))),
|
||||
_ => return Err(CipherError::UnknownCipher(algorithm_name.to_string())),
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -235,14 +276,14 @@ impl Cipher {
|
|||
auto_pad: bool,
|
||||
input: &[u8],
|
||||
output: &mut [u8],
|
||||
) -> Result<Tag, AnyError> {
|
||||
) -> Result<Tag, CipherError> {
|
||||
assert!(input.len() < 16);
|
||||
use Cipher::*;
|
||||
match (self, auto_pad) {
|
||||
(Aes128Cbc(encryptor), true) => {
|
||||
let _ = (*encryptor)
|
||||
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
|
||||
.map_err(|_| type_error("Cannot pad the input data"))?;
|
||||
.map_err(|_| CipherError::CannotPadInputData)?;
|
||||
Ok(None)
|
||||
}
|
||||
(Aes128Cbc(mut encryptor), false) => {
|
||||
|
@ -255,7 +296,7 @@ impl Cipher {
|
|||
(Aes128Ecb(encryptor), true) => {
|
||||
let _ = (*encryptor)
|
||||
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
|
||||
.map_err(|_| type_error("Cannot pad the input data"))?;
|
||||
.map_err(|_| CipherError::CannotPadInputData)?;
|
||||
Ok(None)
|
||||
}
|
||||
(Aes128Ecb(mut encryptor), false) => {
|
||||
|
@ -268,7 +309,7 @@ impl Cipher {
|
|||
(Aes192Ecb(encryptor), true) => {
|
||||
let _ = (*encryptor)
|
||||
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
|
||||
.map_err(|_| type_error("Cannot pad the input data"))?;
|
||||
.map_err(|_| CipherError::CannotPadInputData)?;
|
||||
Ok(None)
|
||||
}
|
||||
(Aes192Ecb(mut encryptor), false) => {
|
||||
|
@ -281,7 +322,7 @@ impl Cipher {
|
|||
(Aes256Ecb(encryptor), true) => {
|
||||
let _ = (*encryptor)
|
||||
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
|
||||
.map_err(|_| type_error("Cannot pad the input data"))?;
|
||||
.map_err(|_| CipherError::CannotPadInputData)?;
|
||||
Ok(None)
|
||||
}
|
||||
(Aes256Ecb(mut encryptor), false) => {
|
||||
|
@ -296,7 +337,7 @@ impl Cipher {
|
|||
(Aes256Cbc(encryptor), true) => {
|
||||
let _ = (*encryptor)
|
||||
.encrypt_padded_b2b_mut::<Pkcs7>(input, output)
|
||||
.map_err(|_| type_error("Cannot pad the input data"))?;
|
||||
.map_err(|_| CipherError::CannotPadInputData)?;
|
||||
Ok(None)
|
||||
}
|
||||
(Aes256Cbc(mut encryptor), false) => {
|
||||
|
@ -319,12 +360,32 @@ impl Cipher {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum DecipherError {
|
||||
#[error("IV length must be 12 bytes")]
|
||||
InvalidIvLength,
|
||||
#[error("Invalid key length")]
|
||||
InvalidKeyLength,
|
||||
#[error("Invalid initialization vector")]
|
||||
InvalidInitializationVector,
|
||||
#[error("Cannot unpad the input data")]
|
||||
CannotUnpadInputData,
|
||||
#[error("Failed to authenticate data")]
|
||||
DataAuthenticationFailed,
|
||||
#[error("setAutoPadding(false) not supported for Aes128Gcm yet")]
|
||||
SetAutoPaddingFalseAes128GcmUnsupported,
|
||||
#[error("setAutoPadding(false) not supported for Aes256Gcm yet")]
|
||||
SetAutoPaddingFalseAes256GcmUnsupported,
|
||||
#[error("Unknown cipher {0}")]
|
||||
UnknownCipher(String),
|
||||
}
|
||||
|
||||
impl Decipher {
|
||||
fn new(
|
||||
algorithm_name: &str,
|
||||
key: &[u8],
|
||||
iv: &[u8],
|
||||
) -> Result<Self, AnyError> {
|
||||
) -> Result<Self, DecipherError> {
|
||||
use Decipher::*;
|
||||
Ok(match algorithm_name {
|
||||
"aes-128-cbc" => {
|
||||
|
@ -335,7 +396,7 @@ impl Decipher {
|
|||
"aes-256-ecb" => Aes256Ecb(Box::new(ecb::Decryptor::new(key.into()))),
|
||||
"aes-128-gcm" => {
|
||||
if iv.len() != 12 {
|
||||
return Err(type_error("IV length must be 12 bytes"));
|
||||
return Err(DecipherError::InvalidIvLength);
|
||||
}
|
||||
|
||||
let decipher =
|
||||
|
@ -345,7 +406,7 @@ impl Decipher {
|
|||
}
|
||||
"aes-256-gcm" => {
|
||||
if iv.len() != 12 {
|
||||
return Err(type_error("IV length must be 12 bytes"));
|
||||
return Err(DecipherError::InvalidIvLength);
|
||||
}
|
||||
|
||||
let decipher =
|
||||
|
@ -355,15 +416,17 @@ impl Decipher {
|
|||
}
|
||||
"aes256" | "aes-256-cbc" => {
|
||||
if key.len() != 32 {
|
||||
return Err(range_error("Invalid key length"));
|
||||
return Err(DecipherError::InvalidKeyLength);
|
||||
}
|
||||
if iv.len() != 16 {
|
||||
return Err(type_error("Invalid initialization vector"));
|
||||
return Err(DecipherError::InvalidInitializationVector);
|
||||
}
|
||||
|
||||
Aes256Cbc(Box::new(cbc::Decryptor::new(key.into(), iv.into())))
|
||||
}
|
||||
_ => return Err(type_error(format!("Unknown cipher {algorithm_name}"))),
|
||||
_ => {
|
||||
return Err(DecipherError::UnknownCipher(algorithm_name.to_string()))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -432,14 +495,14 @@ impl Decipher {
|
|||
input: &[u8],
|
||||
output: &mut [u8],
|
||||
auth_tag: &[u8],
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), DecipherError> {
|
||||
use Decipher::*;
|
||||
match (self, auto_pad) {
|
||||
(Aes128Cbc(decryptor), true) => {
|
||||
assert!(input.len() == 16);
|
||||
let _ = (*decryptor)
|
||||
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
|
||||
.map_err(|_| type_error("Cannot unpad the input data"))?;
|
||||
.map_err(|_| DecipherError::CannotUnpadInputData)?;
|
||||
Ok(())
|
||||
}
|
||||
(Aes128Cbc(mut decryptor), false) => {
|
||||
|
@ -453,7 +516,7 @@ impl Decipher {
|
|||
assert!(input.len() == 16);
|
||||
let _ = (*decryptor)
|
||||
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
|
||||
.map_err(|_| type_error("Cannot unpad the input data"))?;
|
||||
.map_err(|_| DecipherError::CannotUnpadInputData)?;
|
||||
Ok(())
|
||||
}
|
||||
(Aes128Ecb(mut decryptor), false) => {
|
||||
|
@ -467,7 +530,7 @@ impl Decipher {
|
|||
assert!(input.len() == 16);
|
||||
let _ = (*decryptor)
|
||||
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
|
||||
.map_err(|_| type_error("Cannot unpad the input data"))?;
|
||||
.map_err(|_| DecipherError::CannotUnpadInputData)?;
|
||||
Ok(())
|
||||
}
|
||||
(Aes192Ecb(mut decryptor), false) => {
|
||||
|
@ -481,7 +544,7 @@ impl Decipher {
|
|||
assert!(input.len() == 16);
|
||||
let _ = (*decryptor)
|
||||
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
|
||||
.map_err(|_| type_error("Cannot unpad the input data"))?;
|
||||
.map_err(|_| DecipherError::CannotUnpadInputData)?;
|
||||
Ok(())
|
||||
}
|
||||
(Aes256Ecb(mut decryptor), false) => {
|
||||
|
@ -496,28 +559,28 @@ impl Decipher {
|
|||
if tag.as_slice() == auth_tag {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(type_error("Failed to authenticate data"))
|
||||
Err(DecipherError::DataAuthenticationFailed)
|
||||
}
|
||||
}
|
||||
(Aes128Gcm(_), false) => Err(type_error(
|
||||
"setAutoPadding(false) not supported for Aes256Gcm yet",
|
||||
)),
|
||||
(Aes128Gcm(_), false) => {
|
||||
Err(DecipherError::SetAutoPaddingFalseAes128GcmUnsupported)
|
||||
}
|
||||
(Aes256Gcm(decipher), true) => {
|
||||
let tag = decipher.finish();
|
||||
if tag.as_slice() == auth_tag {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(type_error("Failed to authenticate data"))
|
||||
Err(DecipherError::DataAuthenticationFailed)
|
||||
}
|
||||
}
|
||||
(Aes256Gcm(_), false) => Err(type_error(
|
||||
"setAutoPadding(false) not supported for Aes256Gcm yet",
|
||||
)),
|
||||
(Aes256Gcm(_), false) => {
|
||||
Err(DecipherError::SetAutoPaddingFalseAes256GcmUnsupported)
|
||||
}
|
||||
(Aes256Cbc(decryptor), true) => {
|
||||
assert!(input.len() == 16);
|
||||
let _ = (*decryptor)
|
||||
.decrypt_padded_b2b_mut::<Pkcs7>(input, output)
|
||||
.map_err(|_| type_error("Cannot unpad the input data"))?;
|
||||
.map_err(|_| DecipherError::CannotUnpadInputData)?;
|
||||
Ok(())
|
||||
}
|
||||
(Aes256Cbc(mut decryptor), false) => {
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::GarbageCollected;
|
||||
use digest::Digest;
|
||||
use digest::DynDigest;
|
||||
|
@ -19,7 +17,7 @@ impl Hasher {
|
|||
pub fn new(
|
||||
algorithm: &str,
|
||||
output_length: Option<usize>,
|
||||
) -> Result<Self, AnyError> {
|
||||
) -> Result<Self, HashError> {
|
||||
let hash = Hash::new(algorithm, output_length)?;
|
||||
|
||||
Ok(Self {
|
||||
|
@ -44,7 +42,7 @@ impl Hasher {
|
|||
pub fn clone_inner(
|
||||
&self,
|
||||
output_length: Option<usize>,
|
||||
) -> Result<Option<Self>, AnyError> {
|
||||
) -> Result<Option<Self>, HashError> {
|
||||
let hash = self.hash.borrow();
|
||||
let Some(hash) = hash.as_ref() else {
|
||||
return Ok(None);
|
||||
|
@ -184,11 +182,19 @@ pub enum Hash {
|
|||
|
||||
use Hash::*;
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum HashError {
|
||||
#[error("Output length mismatch for non-extendable algorithm")]
|
||||
OutputLengthMismatch,
|
||||
#[error("Digest method not supported: {0}")]
|
||||
DigestMethodUnsupported(String),
|
||||
}
|
||||
|
||||
impl Hash {
|
||||
pub fn new(
|
||||
algorithm_name: &str,
|
||||
output_length: Option<usize>,
|
||||
) -> Result<Self, AnyError> {
|
||||
) -> Result<Self, HashError> {
|
||||
match algorithm_name {
|
||||
"shake128" => return Ok(Shake128(Default::default(), output_length)),
|
||||
"shake256" => return Ok(Shake256(Default::default(), output_length)),
|
||||
|
@ -201,17 +207,13 @@ impl Hash {
|
|||
let digest: D = Digest::new();
|
||||
if let Some(length) = output_length {
|
||||
if length != digest.output_size() {
|
||||
return Err(generic_error(
|
||||
"Output length mismatch for non-extendable algorithm",
|
||||
));
|
||||
return Err(HashError::OutputLengthMismatch);
|
||||
}
|
||||
}
|
||||
FixedSize(Box::new(digest))
|
||||
},
|
||||
_ => {
|
||||
return Err(generic_error(format!(
|
||||
"Digest method not supported: {algorithm_name}"
|
||||
)))
|
||||
return Err(HashError::DigestMethodUnsupported(algorithm_name.to_string()))
|
||||
}
|
||||
);
|
||||
|
||||
|
@ -243,14 +245,12 @@ impl Hash {
|
|||
pub fn clone_hash(
|
||||
&self,
|
||||
output_length: Option<usize>,
|
||||
) -> Result<Self, AnyError> {
|
||||
) -> Result<Self, HashError> {
|
||||
let hash = match self {
|
||||
FixedSize(context) => {
|
||||
if let Some(length) = output_length {
|
||||
if length != context.output_size() {
|
||||
return Err(generic_error(
|
||||
"Output length mismatch for non-extendable algorithm",
|
||||
));
|
||||
return Err(HashError::OutputLengthMismatch);
|
||||
}
|
||||
}
|
||||
FixedSize(context.box_clone())
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,7 +1,6 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::type_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::op2;
|
||||
use deno_core::unsync::spawn_blocking;
|
||||
use deno_core::JsBuffer;
|
||||
|
@ -34,14 +33,14 @@ use rsa::Pkcs1v15Encrypt;
|
|||
use rsa::RsaPrivateKey;
|
||||
use rsa::RsaPublicKey;
|
||||
|
||||
mod cipher;
|
||||
pub mod cipher;
|
||||
mod dh;
|
||||
mod digest;
|
||||
pub mod digest;
|
||||
pub mod keys;
|
||||
mod md5_sha1;
|
||||
mod pkcs3;
|
||||
mod primes;
|
||||
mod sign;
|
||||
pub mod sign;
|
||||
pub mod x509;
|
||||
|
||||
use self::digest::match_fixed_digest_with_eager_block_buffer;
|
||||
|
@ -58,38 +57,31 @@ pub fn op_node_check_prime(
|
|||
pub fn op_node_check_prime_bytes(
|
||||
#[anybuffer] bytes: &[u8],
|
||||
#[number] checks: usize,
|
||||
) -> Result<bool, AnyError> {
|
||||
) -> bool {
|
||||
let candidate = BigInt::from_bytes_be(num_bigint::Sign::Plus, bytes);
|
||||
Ok(primes::is_probably_prime(&candidate, checks))
|
||||
primes::is_probably_prime(&candidate, checks)
|
||||
}
|
||||
|
||||
#[op2(async)]
|
||||
pub async fn op_node_check_prime_async(
|
||||
#[bigint] num: i64,
|
||||
#[number] checks: usize,
|
||||
) -> Result<bool, AnyError> {
|
||||
) -> Result<bool, tokio::task::JoinError> {
|
||||
// TODO(@littledivy): use rayon for CPU-bound tasks
|
||||
Ok(
|
||||
spawn_blocking(move || {
|
||||
primes::is_probably_prime(&BigInt::from(num), checks)
|
||||
})
|
||||
.await?,
|
||||
)
|
||||
spawn_blocking(move || primes::is_probably_prime(&BigInt::from(num), checks))
|
||||
.await
|
||||
}
|
||||
|
||||
#[op2(async)]
|
||||
pub fn op_node_check_prime_bytes_async(
|
||||
#[anybuffer] bytes: &[u8],
|
||||
#[number] checks: usize,
|
||||
) -> Result<impl Future<Output = Result<bool, AnyError>>, AnyError> {
|
||||
) -> impl Future<Output = Result<bool, tokio::task::JoinError>> {
|
||||
let candidate = BigInt::from_bytes_be(num_bigint::Sign::Plus, bytes);
|
||||
// TODO(@littledivy): use rayon for CPU-bound tasks
|
||||
Ok(async move {
|
||||
Ok(
|
||||
spawn_blocking(move || primes::is_probably_prime(&candidate, checks))
|
||||
.await?,
|
||||
)
|
||||
})
|
||||
async move {
|
||||
spawn_blocking(move || primes::is_probably_prime(&candidate, checks)).await
|
||||
}
|
||||
}
|
||||
|
||||
#[op2]
|
||||
|
@ -97,7 +89,7 @@ pub fn op_node_check_prime_bytes_async(
|
|||
pub fn op_node_create_hash(
|
||||
#[string] algorithm: &str,
|
||||
output_length: Option<u32>,
|
||||
) -> Result<digest::Hasher, AnyError> {
|
||||
) -> Result<digest::Hasher, digest::HashError> {
|
||||
digest::Hasher::new(algorithm, output_length.map(|l| l as usize))
|
||||
}
|
||||
|
||||
|
@ -145,17 +137,31 @@ pub fn op_node_hash_digest_hex(
|
|||
pub fn op_node_hash_clone(
|
||||
#[cppgc] hasher: &digest::Hasher,
|
||||
output_length: Option<u32>,
|
||||
) -> Result<Option<digest::Hasher>, AnyError> {
|
||||
) -> Result<Option<digest::Hasher>, digest::HashError> {
|
||||
hasher.clone_inner(output_length.map(|l| l as usize))
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum PrivateEncryptDecryptError {
|
||||
#[error(transparent)]
|
||||
Pkcs8(#[from] pkcs8::Error),
|
||||
#[error(transparent)]
|
||||
Spki(#[from] spki::Error),
|
||||
#[error(transparent)]
|
||||
Utf8(#[from] std::str::Utf8Error),
|
||||
#[error(transparent)]
|
||||
Rsa(#[from] rsa::Error),
|
||||
#[error("Unknown padding")]
|
||||
UnknownPadding,
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[serde]
|
||||
pub fn op_node_private_encrypt(
|
||||
#[serde] key: StringOrBuffer,
|
||||
#[serde] msg: StringOrBuffer,
|
||||
#[smi] padding: u32,
|
||||
) -> Result<ToJsBuffer, AnyError> {
|
||||
) -> Result<ToJsBuffer, PrivateEncryptDecryptError> {
|
||||
let key = RsaPrivateKey::from_pkcs8_pem((&key).try_into()?)?;
|
||||
|
||||
let mut rng = rand::thread_rng();
|
||||
|
@ -172,7 +178,7 @@ pub fn op_node_private_encrypt(
|
|||
.encrypt(&mut rng, Oaep::new::<sha1::Sha1>(), &msg)?
|
||||
.into(),
|
||||
),
|
||||
_ => Err(type_error("Unknown padding")),
|
||||
_ => Err(PrivateEncryptDecryptError::UnknownPadding),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -182,13 +188,13 @@ pub fn op_node_private_decrypt(
|
|||
#[serde] key: StringOrBuffer,
|
||||
#[serde] msg: StringOrBuffer,
|
||||
#[smi] padding: u32,
|
||||
) -> Result<ToJsBuffer, AnyError> {
|
||||
) -> Result<ToJsBuffer, PrivateEncryptDecryptError> {
|
||||
let key = RsaPrivateKey::from_pkcs8_pem((&key).try_into()?)?;
|
||||
|
||||
match padding {
|
||||
1 => Ok(key.decrypt(Pkcs1v15Encrypt, &msg)?.into()),
|
||||
4 => Ok(key.decrypt(Oaep::new::<sha1::Sha1>(), &msg)?.into()),
|
||||
_ => Err(type_error("Unknown padding")),
|
||||
_ => Err(PrivateEncryptDecryptError::UnknownPadding),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -198,7 +204,7 @@ pub fn op_node_public_encrypt(
|
|||
#[serde] key: StringOrBuffer,
|
||||
#[serde] msg: StringOrBuffer,
|
||||
#[smi] padding: u32,
|
||||
) -> Result<ToJsBuffer, AnyError> {
|
||||
) -> Result<ToJsBuffer, PrivateEncryptDecryptError> {
|
||||
let key = RsaPublicKey::from_public_key_pem((&key).try_into()?)?;
|
||||
|
||||
let mut rng = rand::thread_rng();
|
||||
|
@ -209,7 +215,7 @@ pub fn op_node_public_encrypt(
|
|||
.encrypt(&mut rng, Oaep::new::<sha1::Sha1>(), &msg)?
|
||||
.into(),
|
||||
),
|
||||
_ => Err(type_error("Unknown padding")),
|
||||
_ => Err(PrivateEncryptDecryptError::UnknownPadding),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -220,7 +226,7 @@ pub fn op_node_create_cipheriv(
|
|||
#[string] algorithm: &str,
|
||||
#[buffer] key: &[u8],
|
||||
#[buffer] iv: &[u8],
|
||||
) -> Result<u32, AnyError> {
|
||||
) -> Result<u32, cipher::CipherContextError> {
|
||||
let context = cipher::CipherContext::new(algorithm, key, iv)?;
|
||||
Ok(state.resource_table.add(context))
|
||||
}
|
||||
|
@ -262,11 +268,14 @@ pub fn op_node_cipheriv_final(
|
|||
auto_pad: bool,
|
||||
#[buffer] input: &[u8],
|
||||
#[anybuffer] output: &mut [u8],
|
||||
) -> Result<Option<Vec<u8>>, AnyError> {
|
||||
let context = state.resource_table.take::<cipher::CipherContext>(rid)?;
|
||||
) -> Result<Option<Vec<u8>>, cipher::CipherContextError> {
|
||||
let context = state
|
||||
.resource_table
|
||||
.take::<cipher::CipherContext>(rid)
|
||||
.map_err(cipher::CipherContextError::Resource)?;
|
||||
let context = Rc::try_unwrap(context)
|
||||
.map_err(|_| type_error("Cipher context is already in use"))?;
|
||||
context.r#final(auto_pad, input, output)
|
||||
.map_err(|_| cipher::CipherContextError::ContextInUse)?;
|
||||
context.r#final(auto_pad, input, output).map_err(Into::into)
|
||||
}
|
||||
|
||||
#[op2]
|
||||
|
@ -274,10 +283,13 @@ pub fn op_node_cipheriv_final(
|
|||
pub fn op_node_cipheriv_take(
|
||||
state: &mut OpState,
|
||||
#[smi] rid: u32,
|
||||
) -> Result<Option<Vec<u8>>, AnyError> {
|
||||
let context = state.resource_table.take::<cipher::CipherContext>(rid)?;
|
||||
) -> Result<Option<Vec<u8>>, cipher::CipherContextError> {
|
||||
let context = state
|
||||
.resource_table
|
||||
.take::<cipher::CipherContext>(rid)
|
||||
.map_err(cipher::CipherContextError::Resource)?;
|
||||
let context = Rc::try_unwrap(context)
|
||||
.map_err(|_| type_error("Cipher context is already in use"))?;
|
||||
.map_err(|_| cipher::CipherContextError::ContextInUse)?;
|
||||
Ok(context.take_tag())
|
||||
}
|
||||
|
||||
|
@ -288,7 +300,7 @@ pub fn op_node_create_decipheriv(
|
|||
#[string] algorithm: &str,
|
||||
#[buffer] key: &[u8],
|
||||
#[buffer] iv: &[u8],
|
||||
) -> Result<u32, AnyError> {
|
||||
) -> Result<u32, cipher::DecipherContextError> {
|
||||
let context = cipher::DecipherContext::new(algorithm, key, iv)?;
|
||||
Ok(state.resource_table.add(context))
|
||||
}
|
||||
|
@ -326,10 +338,13 @@ pub fn op_node_decipheriv_decrypt(
|
|||
pub fn op_node_decipheriv_take(
|
||||
state: &mut OpState,
|
||||
#[smi] rid: u32,
|
||||
) -> Result<(), AnyError> {
|
||||
let context = state.resource_table.take::<cipher::DecipherContext>(rid)?;
|
||||
) -> Result<(), cipher::DecipherContextError> {
|
||||
let context = state
|
||||
.resource_table
|
||||
.take::<cipher::DecipherContext>(rid)
|
||||
.map_err(cipher::DecipherContextError::Resource)?;
|
||||
Rc::try_unwrap(context)
|
||||
.map_err(|_| type_error("Cipher context is already in use"))?;
|
||||
.map_err(|_| cipher::DecipherContextError::ContextInUse)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -341,11 +356,16 @@ pub fn op_node_decipheriv_final(
|
|||
#[buffer] input: &[u8],
|
||||
#[anybuffer] output: &mut [u8],
|
||||
#[buffer] auth_tag: &[u8],
|
||||
) -> Result<(), AnyError> {
|
||||
let context = state.resource_table.take::<cipher::DecipherContext>(rid)?;
|
||||
) -> Result<(), cipher::DecipherContextError> {
|
||||
let context = state
|
||||
.resource_table
|
||||
.take::<cipher::DecipherContext>(rid)
|
||||
.map_err(cipher::DecipherContextError::Resource)?;
|
||||
let context = Rc::try_unwrap(context)
|
||||
.map_err(|_| type_error("Cipher context is already in use"))?;
|
||||
context.r#final(auto_pad, input, output, auth_tag)
|
||||
.map_err(|_| cipher::DecipherContextError::ContextInUse)?;
|
||||
context
|
||||
.r#final(auto_pad, input, output, auth_tag)
|
||||
.map_err(Into::into)
|
||||
}
|
||||
|
||||
#[op2]
|
||||
|
@ -356,7 +376,7 @@ pub fn op_node_sign(
|
|||
#[string] digest_type: &str,
|
||||
#[smi] pss_salt_length: Option<u32>,
|
||||
#[smi] dsa_signature_encoding: u32,
|
||||
) -> Result<Box<[u8]>, AnyError> {
|
||||
) -> Result<Box<[u8]>, sign::KeyObjectHandlePrehashedSignAndVerifyError> {
|
||||
handle.sign_prehashed(
|
||||
digest_type,
|
||||
digest,
|
||||
|
@ -373,7 +393,7 @@ pub fn op_node_verify(
|
|||
#[buffer] signature: &[u8],
|
||||
#[smi] pss_salt_length: Option<u32>,
|
||||
#[smi] dsa_signature_encoding: u32,
|
||||
) -> Result<bool, AnyError> {
|
||||
) -> Result<bool, sign::KeyObjectHandlePrehashedSignAndVerifyError> {
|
||||
handle.verify_prehashed(
|
||||
digest_type,
|
||||
digest,
|
||||
|
@ -383,13 +403,21 @@ pub fn op_node_verify(
|
|||
)
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum Pbkdf2Error {
|
||||
#[error("unsupported digest: {0}")]
|
||||
UnsupportedDigest(String),
|
||||
#[error(transparent)]
|
||||
Join(#[from] tokio::task::JoinError),
|
||||
}
|
||||
|
||||
fn pbkdf2_sync(
|
||||
password: &[u8],
|
||||
salt: &[u8],
|
||||
iterations: u32,
|
||||
algorithm_name: &str,
|
||||
derived_key: &mut [u8],
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), Pbkdf2Error> {
|
||||
match_fixed_digest_with_eager_block_buffer!(
|
||||
algorithm_name,
|
||||
fn <D>() {
|
||||
|
@ -397,10 +425,7 @@ fn pbkdf2_sync(
|
|||
Ok(())
|
||||
},
|
||||
_ => {
|
||||
Err(type_error(format!(
|
||||
"unsupported digest: {}",
|
||||
algorithm_name
|
||||
)))
|
||||
Err(Pbkdf2Error::UnsupportedDigest(algorithm_name.to_string()))
|
||||
}
|
||||
)
|
||||
}
|
||||
|
@ -424,7 +449,7 @@ pub async fn op_node_pbkdf2_async(
|
|||
#[smi] iterations: u32,
|
||||
#[string] digest: String,
|
||||
#[number] keylen: usize,
|
||||
) -> Result<ToJsBuffer, AnyError> {
|
||||
) -> Result<ToJsBuffer, Pbkdf2Error> {
|
||||
spawn_blocking(move || {
|
||||
let mut derived_key = vec![0; keylen];
|
||||
pbkdf2_sync(&password, &salt, iterations, &digest, &mut derived_key)
|
||||
|
@ -450,15 +475,27 @@ pub async fn op_node_fill_random_async(#[smi] len: i32) -> ToJsBuffer {
|
|||
.unwrap()
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum HkdfError {
|
||||
#[error("expected secret key")]
|
||||
ExpectedSecretKey,
|
||||
#[error("HKDF-Expand failed")]
|
||||
HkdfExpandFailed,
|
||||
#[error("Unsupported digest: {0}")]
|
||||
UnsupportedDigest(String),
|
||||
#[error(transparent)]
|
||||
Join(#[from] tokio::task::JoinError),
|
||||
}
|
||||
|
||||
fn hkdf_sync(
|
||||
digest_algorithm: &str,
|
||||
handle: &KeyObjectHandle,
|
||||
salt: &[u8],
|
||||
info: &[u8],
|
||||
okm: &mut [u8],
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), HkdfError> {
|
||||
let Some(ikm) = handle.as_secret_key() else {
|
||||
return Err(type_error("expected secret key"));
|
||||
return Err(HkdfError::ExpectedSecretKey);
|
||||
};
|
||||
|
||||
match_fixed_digest_with_eager_block_buffer!(
|
||||
|
@ -466,10 +503,10 @@ fn hkdf_sync(
|
|||
fn <D>() {
|
||||
let hk = Hkdf::<D>::new(Some(salt), ikm);
|
||||
hk.expand(info, okm)
|
||||
.map_err(|_| type_error("HKDF-Expand failed"))
|
||||
.map_err(|_| HkdfError::HkdfExpandFailed)
|
||||
},
|
||||
_ => {
|
||||
Err(type_error(format!("Unsupported digest: {}", digest_algorithm)))
|
||||
Err(HkdfError::UnsupportedDigest(digest_algorithm.to_string()))
|
||||
}
|
||||
)
|
||||
}
|
||||
|
@ -481,7 +518,7 @@ pub fn op_node_hkdf(
|
|||
#[buffer] salt: &[u8],
|
||||
#[buffer] info: &[u8],
|
||||
#[buffer] okm: &mut [u8],
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), HkdfError> {
|
||||
hkdf_sync(digest_algorithm, handle, salt, info, okm)
|
||||
}
|
||||
|
||||
|
@ -493,7 +530,7 @@ pub async fn op_node_hkdf_async(
|
|||
#[buffer] salt: JsBuffer,
|
||||
#[buffer] info: JsBuffer,
|
||||
#[number] okm_len: usize,
|
||||
) -> Result<ToJsBuffer, AnyError> {
|
||||
) -> Result<ToJsBuffer, HkdfError> {
|
||||
let handle = handle.clone();
|
||||
spawn_blocking(move || {
|
||||
let mut okm = vec![0u8; okm_len];
|
||||
|
@ -509,27 +546,24 @@ pub fn op_node_dh_compute_secret(
|
|||
#[buffer] prime: JsBuffer,
|
||||
#[buffer] private_key: JsBuffer,
|
||||
#[buffer] their_public_key: JsBuffer,
|
||||
) -> Result<ToJsBuffer, AnyError> {
|
||||
) -> ToJsBuffer {
|
||||
let pubkey: BigUint = BigUint::from_bytes_be(their_public_key.as_ref());
|
||||
let privkey: BigUint = BigUint::from_bytes_be(private_key.as_ref());
|
||||
let primei: BigUint = BigUint::from_bytes_be(prime.as_ref());
|
||||
let shared_secret: BigUint = pubkey.modpow(&privkey, &primei);
|
||||
|
||||
Ok(shared_secret.to_bytes_be().into())
|
||||
shared_secret.to_bytes_be().into()
|
||||
}
|
||||
|
||||
#[op2(fast)]
|
||||
#[number]
|
||||
pub fn op_node_random_int(
|
||||
#[number] min: i64,
|
||||
#[number] max: i64,
|
||||
) -> Result<i64, AnyError> {
|
||||
pub fn op_node_random_int(#[number] min: i64, #[number] max: i64) -> i64 {
|
||||
let mut rng = rand::thread_rng();
|
||||
// Uniform distribution is required to avoid Modulo Bias
|
||||
// https://en.wikipedia.org/wiki/Fisher–Yates_shuffle#Modulo_bias
|
||||
let dist = Uniform::from(min..max);
|
||||
|
||||
Ok(dist.sample(&mut rng))
|
||||
dist.sample(&mut rng)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
|
@ -542,7 +576,7 @@ fn scrypt(
|
|||
parallelization: u32,
|
||||
_maxmem: u32,
|
||||
output_buffer: &mut [u8],
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), deno_core::error::AnyError> {
|
||||
// Construct Params
|
||||
let params = scrypt::Params::new(
|
||||
cost as u8,
|
||||
|
@ -573,7 +607,7 @@ pub fn op_node_scrypt_sync(
|
|||
#[smi] parallelization: u32,
|
||||
#[smi] maxmem: u32,
|
||||
#[anybuffer] output_buffer: &mut [u8],
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), deno_core::error::AnyError> {
|
||||
scrypt(
|
||||
password,
|
||||
salt,
|
||||
|
@ -586,6 +620,14 @@ pub fn op_node_scrypt_sync(
|
|||
)
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum ScryptAsyncError {
|
||||
#[error(transparent)]
|
||||
Join(#[from] tokio::task::JoinError),
|
||||
#[error(transparent)]
|
||||
Other(deno_core::error::AnyError),
|
||||
}
|
||||
|
||||
#[op2(async)]
|
||||
#[serde]
|
||||
pub async fn op_node_scrypt_async(
|
||||
|
@ -596,10 +638,11 @@ pub async fn op_node_scrypt_async(
|
|||
#[smi] block_size: u32,
|
||||
#[smi] parallelization: u32,
|
||||
#[smi] maxmem: u32,
|
||||
) -> Result<ToJsBuffer, AnyError> {
|
||||
) -> Result<ToJsBuffer, ScryptAsyncError> {
|
||||
spawn_blocking(move || {
|
||||
let mut output_buffer = vec![0u8; keylen as usize];
|
||||
let res = scrypt(
|
||||
|
||||
scrypt(
|
||||
password,
|
||||
salt,
|
||||
keylen,
|
||||
|
@ -608,25 +651,30 @@ pub async fn op_node_scrypt_async(
|
|||
parallelization,
|
||||
maxmem,
|
||||
&mut output_buffer,
|
||||
);
|
||||
|
||||
if res.is_ok() {
|
||||
Ok(output_buffer.into())
|
||||
} else {
|
||||
// TODO(lev): rethrow the error?
|
||||
Err(generic_error("scrypt failure"))
|
||||
}
|
||||
)
|
||||
.map(|_| output_buffer.into())
|
||||
.map_err(ScryptAsyncError::Other)
|
||||
})
|
||||
.await?
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum EcdhEncodePubKey {
|
||||
#[error("Invalid public key")]
|
||||
InvalidPublicKey,
|
||||
#[error("Unsupported curve")]
|
||||
UnsupportedCurve,
|
||||
#[error(transparent)]
|
||||
Sec1(#[from] sec1::Error),
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[buffer]
|
||||
pub fn op_node_ecdh_encode_pubkey(
|
||||
#[string] curve: &str,
|
||||
#[buffer] pubkey: &[u8],
|
||||
compress: bool,
|
||||
) -> Result<Vec<u8>, AnyError> {
|
||||
) -> Result<Vec<u8>, EcdhEncodePubKey> {
|
||||
use elliptic_curve::sec1::FromEncodedPoint;
|
||||
|
||||
match curve {
|
||||
|
@ -639,7 +687,7 @@ pub fn op_node_ecdh_encode_pubkey(
|
|||
);
|
||||
// CtOption does not expose its variants.
|
||||
if pubkey.is_none().into() {
|
||||
return Err(type_error("Invalid public key"));
|
||||
return Err(EcdhEncodePubKey::InvalidPublicKey);
|
||||
}
|
||||
|
||||
let pubkey = pubkey.unwrap();
|
||||
|
@ -652,7 +700,7 @@ pub fn op_node_ecdh_encode_pubkey(
|
|||
);
|
||||
// CtOption does not expose its variants.
|
||||
if pubkey.is_none().into() {
|
||||
return Err(type_error("Invalid public key"));
|
||||
return Err(EcdhEncodePubKey::InvalidPublicKey);
|
||||
}
|
||||
|
||||
let pubkey = pubkey.unwrap();
|
||||
|
@ -665,7 +713,7 @@ pub fn op_node_ecdh_encode_pubkey(
|
|||
);
|
||||
// CtOption does not expose its variants.
|
||||
if pubkey.is_none().into() {
|
||||
return Err(type_error("Invalid public key"));
|
||||
return Err(EcdhEncodePubKey::InvalidPublicKey);
|
||||
}
|
||||
|
||||
let pubkey = pubkey.unwrap();
|
||||
|
@ -678,14 +726,14 @@ pub fn op_node_ecdh_encode_pubkey(
|
|||
);
|
||||
// CtOption does not expose its variants.
|
||||
if pubkey.is_none().into() {
|
||||
return Err(type_error("Invalid public key"));
|
||||
return Err(EcdhEncodePubKey::InvalidPublicKey);
|
||||
}
|
||||
|
||||
let pubkey = pubkey.unwrap();
|
||||
|
||||
Ok(pubkey.to_encoded_point(compress).as_ref().to_vec())
|
||||
}
|
||||
&_ => Err(type_error("Unsupported curve")),
|
||||
&_ => Err(EcdhEncodePubKey::UnsupportedCurve),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -695,7 +743,7 @@ pub fn op_node_ecdh_generate_keys(
|
|||
#[buffer] pubbuf: &mut [u8],
|
||||
#[buffer] privbuf: &mut [u8],
|
||||
#[string] format: &str,
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), deno_core::error::AnyError> {
|
||||
let mut rng = rand::thread_rng();
|
||||
let compress = format == "compressed";
|
||||
match curve {
|
||||
|
@ -742,7 +790,7 @@ pub fn op_node_ecdh_compute_secret(
|
|||
#[buffer] this_priv: Option<JsBuffer>,
|
||||
#[buffer] their_pub: &mut [u8],
|
||||
#[buffer] secret: &mut [u8],
|
||||
) -> Result<(), AnyError> {
|
||||
) {
|
||||
match curve {
|
||||
"secp256k1" => {
|
||||
let their_public_key =
|
||||
|
@ -760,8 +808,6 @@ pub fn op_node_ecdh_compute_secret(
|
|||
their_public_key.as_affine(),
|
||||
);
|
||||
secret.copy_from_slice(shared_secret.raw_secret_bytes());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
"prime256v1" | "secp256r1" => {
|
||||
let their_public_key =
|
||||
|
@ -776,8 +822,6 @@ pub fn op_node_ecdh_compute_secret(
|
|||
their_public_key.as_affine(),
|
||||
);
|
||||
secret.copy_from_slice(shared_secret.raw_secret_bytes());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
"secp384r1" => {
|
||||
let their_public_key =
|
||||
|
@ -792,8 +836,6 @@ pub fn op_node_ecdh_compute_secret(
|
|||
their_public_key.as_affine(),
|
||||
);
|
||||
secret.copy_from_slice(shared_secret.raw_secret_bytes());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
"secp224r1" => {
|
||||
let their_public_key =
|
||||
|
@ -808,8 +850,6 @@ pub fn op_node_ecdh_compute_secret(
|
|||
their_public_key.as_affine(),
|
||||
);
|
||||
secret.copy_from_slice(shared_secret.raw_secret_bytes());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
&_ => todo!(),
|
||||
}
|
||||
|
@ -820,7 +860,7 @@ pub fn op_node_ecdh_compute_public_key(
|
|||
#[string] curve: &str,
|
||||
#[buffer] privkey: &[u8],
|
||||
#[buffer] pubkey: &mut [u8],
|
||||
) -> Result<(), AnyError> {
|
||||
) {
|
||||
match curve {
|
||||
"secp256k1" => {
|
||||
let this_private_key =
|
||||
|
@ -828,8 +868,6 @@ pub fn op_node_ecdh_compute_public_key(
|
|||
.expect("bad private key");
|
||||
let public_key = this_private_key.public_key();
|
||||
pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
"prime256v1" | "secp256r1" => {
|
||||
let this_private_key =
|
||||
|
@ -837,7 +875,6 @@ pub fn op_node_ecdh_compute_public_key(
|
|||
.expect("bad private key");
|
||||
let public_key = this_private_key.public_key();
|
||||
pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
|
||||
Ok(())
|
||||
}
|
||||
"secp384r1" => {
|
||||
let this_private_key =
|
||||
|
@ -845,7 +882,6 @@ pub fn op_node_ecdh_compute_public_key(
|
|||
.expect("bad private key");
|
||||
let public_key = this_private_key.public_key();
|
||||
pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
|
||||
Ok(())
|
||||
}
|
||||
"secp224r1" => {
|
||||
let this_private_key =
|
||||
|
@ -853,7 +889,6 @@ pub fn op_node_ecdh_compute_public_key(
|
|||
.expect("bad private key");
|
||||
let public_key = this_private_key.public_key();
|
||||
pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref());
|
||||
Ok(())
|
||||
}
|
||||
&_ => todo!(),
|
||||
}
|
||||
|
@ -874,8 +909,20 @@ pub fn op_node_gen_prime(#[number] size: usize) -> ToJsBuffer {
|
|||
#[serde]
|
||||
pub async fn op_node_gen_prime_async(
|
||||
#[number] size: usize,
|
||||
) -> Result<ToJsBuffer, AnyError> {
|
||||
Ok(spawn_blocking(move || gen_prime(size)).await?)
|
||||
) -> Result<ToJsBuffer, tokio::task::JoinError> {
|
||||
spawn_blocking(move || gen_prime(size)).await
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum DiffieHellmanError {
|
||||
#[error("Expected private key")]
|
||||
ExpectedPrivateKey,
|
||||
#[error("Expected public key")]
|
||||
ExpectedPublicKey,
|
||||
#[error("DH parameters mismatch")]
|
||||
DhParametersMismatch,
|
||||
#[error("Unsupported key type for diffie hellman, or key type mismatch")]
|
||||
UnsupportedKeyTypeForDiffieHellmanOrKeyTypeMismatch,
|
||||
}
|
||||
|
||||
#[op2]
|
||||
|
@ -883,117 +930,134 @@ pub async fn op_node_gen_prime_async(
|
|||
pub fn op_node_diffie_hellman(
|
||||
#[cppgc] private: &KeyObjectHandle,
|
||||
#[cppgc] public: &KeyObjectHandle,
|
||||
) -> Result<Box<[u8]>, AnyError> {
|
||||
) -> Result<Box<[u8]>, DiffieHellmanError> {
|
||||
let private = private
|
||||
.as_private_key()
|
||||
.ok_or_else(|| type_error("Expected private key"))?;
|
||||
.ok_or(DiffieHellmanError::ExpectedPrivateKey)?;
|
||||
let public = public
|
||||
.as_public_key()
|
||||
.ok_or_else(|| type_error("Expected public key"))?;
|
||||
.ok_or(DiffieHellmanError::ExpectedPublicKey)?;
|
||||
|
||||
let res = match (private, &*public) {
|
||||
(
|
||||
AsymmetricPrivateKey::Ec(EcPrivateKey::P224(private)),
|
||||
AsymmetricPublicKey::Ec(EcPublicKey::P224(public)),
|
||||
) => p224::ecdh::diffie_hellman(
|
||||
private.to_nonzero_scalar(),
|
||||
public.as_affine(),
|
||||
)
|
||||
.raw_secret_bytes()
|
||||
.to_vec()
|
||||
.into_boxed_slice(),
|
||||
(
|
||||
AsymmetricPrivateKey::Ec(EcPrivateKey::P256(private)),
|
||||
AsymmetricPublicKey::Ec(EcPublicKey::P256(public)),
|
||||
) => p256::ecdh::diffie_hellman(
|
||||
private.to_nonzero_scalar(),
|
||||
public.as_affine(),
|
||||
)
|
||||
.raw_secret_bytes()
|
||||
.to_vec()
|
||||
.into_boxed_slice(),
|
||||
(
|
||||
AsymmetricPrivateKey::Ec(EcPrivateKey::P384(private)),
|
||||
AsymmetricPublicKey::Ec(EcPublicKey::P384(public)),
|
||||
) => p384::ecdh::diffie_hellman(
|
||||
private.to_nonzero_scalar(),
|
||||
public.as_affine(),
|
||||
)
|
||||
.raw_secret_bytes()
|
||||
.to_vec()
|
||||
.into_boxed_slice(),
|
||||
(
|
||||
AsymmetricPrivateKey::X25519(private),
|
||||
AsymmetricPublicKey::X25519(public),
|
||||
) => private
|
||||
.diffie_hellman(public)
|
||||
.to_bytes()
|
||||
.into_iter()
|
||||
.collect(),
|
||||
(AsymmetricPrivateKey::Dh(private), AsymmetricPublicKey::Dh(public)) => {
|
||||
if private.params.prime != public.params.prime
|
||||
|| private.params.base != public.params.base
|
||||
{
|
||||
return Err(type_error("DH parameters mismatch"));
|
||||
let res =
|
||||
match (private, &*public) {
|
||||
(
|
||||
AsymmetricPrivateKey::Ec(EcPrivateKey::P224(private)),
|
||||
AsymmetricPublicKey::Ec(EcPublicKey::P224(public)),
|
||||
) => p224::ecdh::diffie_hellman(
|
||||
private.to_nonzero_scalar(),
|
||||
public.as_affine(),
|
||||
)
|
||||
.raw_secret_bytes()
|
||||
.to_vec()
|
||||
.into_boxed_slice(),
|
||||
(
|
||||
AsymmetricPrivateKey::Ec(EcPrivateKey::P256(private)),
|
||||
AsymmetricPublicKey::Ec(EcPublicKey::P256(public)),
|
||||
) => p256::ecdh::diffie_hellman(
|
||||
private.to_nonzero_scalar(),
|
||||
public.as_affine(),
|
||||
)
|
||||
.raw_secret_bytes()
|
||||
.to_vec()
|
||||
.into_boxed_slice(),
|
||||
(
|
||||
AsymmetricPrivateKey::Ec(EcPrivateKey::P384(private)),
|
||||
AsymmetricPublicKey::Ec(EcPublicKey::P384(public)),
|
||||
) => p384::ecdh::diffie_hellman(
|
||||
private.to_nonzero_scalar(),
|
||||
public.as_affine(),
|
||||
)
|
||||
.raw_secret_bytes()
|
||||
.to_vec()
|
||||
.into_boxed_slice(),
|
||||
(
|
||||
AsymmetricPrivateKey::X25519(private),
|
||||
AsymmetricPublicKey::X25519(public),
|
||||
) => private
|
||||
.diffie_hellman(public)
|
||||
.to_bytes()
|
||||
.into_iter()
|
||||
.collect(),
|
||||
(AsymmetricPrivateKey::Dh(private), AsymmetricPublicKey::Dh(public)) => {
|
||||
if private.params.prime != public.params.prime
|
||||
|| private.params.base != public.params.base
|
||||
{
|
||||
return Err(DiffieHellmanError::DhParametersMismatch);
|
||||
}
|
||||
|
||||
// OSIP - Octet-String-to-Integer primitive
|
||||
let public_key = public.key.clone().into_vec();
|
||||
let pubkey = BigUint::from_bytes_be(&public_key);
|
||||
|
||||
// Exponentiation (z = y^x mod p)
|
||||
let prime = BigUint::from_bytes_be(private.params.prime.as_bytes());
|
||||
let private_key = private.key.clone().into_vec();
|
||||
let private_key = BigUint::from_bytes_be(&private_key);
|
||||
let shared_secret = pubkey.modpow(&private_key, &prime);
|
||||
|
||||
shared_secret.to_bytes_be().into()
|
||||
}
|
||||
|
||||
// OSIP - Octet-String-to-Integer primitive
|
||||
let public_key = public.key.clone().into_vec();
|
||||
let pubkey = BigUint::from_bytes_be(&public_key);
|
||||
|
||||
// Exponentiation (z = y^x mod p)
|
||||
let prime = BigUint::from_bytes_be(private.params.prime.as_bytes());
|
||||
let private_key = private.key.clone().into_vec();
|
||||
let private_key = BigUint::from_bytes_be(&private_key);
|
||||
let shared_secret = pubkey.modpow(&private_key, &prime);
|
||||
|
||||
shared_secret.to_bytes_be().into()
|
||||
}
|
||||
_ => {
|
||||
return Err(type_error(
|
||||
"Unsupported key type for diffie hellman, or key type mismatch",
|
||||
))
|
||||
}
|
||||
};
|
||||
_ => return Err(
|
||||
DiffieHellmanError::UnsupportedKeyTypeForDiffieHellmanOrKeyTypeMismatch,
|
||||
),
|
||||
};
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum SignEd25519Error {
|
||||
#[error("Expected private key")]
|
||||
ExpectedPrivateKey,
|
||||
#[error("Expected Ed25519 private key")]
|
||||
ExpectedEd25519PrivateKey,
|
||||
#[error("Invalid Ed25519 private key")]
|
||||
InvalidEd25519PrivateKey,
|
||||
}
|
||||
|
||||
#[op2(fast)]
|
||||
pub fn op_node_sign_ed25519(
|
||||
#[cppgc] key: &KeyObjectHandle,
|
||||
#[buffer] data: &[u8],
|
||||
#[buffer] signature: &mut [u8],
|
||||
) -> Result<(), AnyError> {
|
||||
) -> Result<(), SignEd25519Error> {
|
||||
let private = key
|
||||
.as_private_key()
|
||||
.ok_or_else(|| type_error("Expected private key"))?;
|
||||
.ok_or(SignEd25519Error::ExpectedPrivateKey)?;
|
||||
|
||||
let ed25519 = match private {
|
||||
AsymmetricPrivateKey::Ed25519(private) => private,
|
||||
_ => return Err(type_error("Expected Ed25519 private key")),
|
||||
_ => return Err(SignEd25519Error::ExpectedEd25519PrivateKey),
|
||||
};
|
||||
|
||||
let pair = Ed25519KeyPair::from_seed_unchecked(ed25519.as_bytes().as_slice())
|
||||
.map_err(|_| type_error("Invalid Ed25519 private key"))?;
|
||||
.map_err(|_| SignEd25519Error::InvalidEd25519PrivateKey)?;
|
||||
signature.copy_from_slice(pair.sign(data).as_ref());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum VerifyEd25519Error {
|
||||
#[error("Expected public key")]
|
||||
ExpectedPublicKey,
|
||||
#[error("Expected Ed25519 public key")]
|
||||
ExpectedEd25519PublicKey,
|
||||
}
|
||||
|
||||
#[op2(fast)]
|
||||
pub fn op_node_verify_ed25519(
|
||||
#[cppgc] key: &KeyObjectHandle,
|
||||
#[buffer] data: &[u8],
|
||||
#[buffer] signature: &[u8],
|
||||
) -> Result<bool, AnyError> {
|
||||
) -> Result<bool, VerifyEd25519Error> {
|
||||
let public = key
|
||||
.as_public_key()
|
||||
.ok_or_else(|| type_error("Expected public key"))?;
|
||||
.ok_or(VerifyEd25519Error::ExpectedPublicKey)?;
|
||||
|
||||
let ed25519 = match &*public {
|
||||
AsymmetricPublicKey::Ed25519(public) => public,
|
||||
_ => return Err(type_error("Expected Ed25519 public key")),
|
||||
_ => return Err(VerifyEd25519Error::ExpectedEd25519PublicKey),
|
||||
};
|
||||
|
||||
let verified = ring::signature::UnparsedPublicKey::new(
|
||||
|
|
|
@ -1,7 +1,4 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::type_error;
|
||||
use deno_core::error::AnyError;
|
||||
use rand::rngs::OsRng;
|
||||
use rsa::signature::hazmat::PrehashSigner as _;
|
||||
use rsa::signature::hazmat::PrehashVerifier as _;
|
||||
|
@ -26,7 +23,7 @@ use elliptic_curve::FieldBytesSize;
|
|||
fn dsa_signature<C: elliptic_curve::PrimeCurve>(
|
||||
encoding: u32,
|
||||
signature: ecdsa::Signature<C>,
|
||||
) -> Result<Box<[u8]>, AnyError>
|
||||
) -> Result<Box<[u8]>, KeyObjectHandlePrehashedSignAndVerifyError>
|
||||
where
|
||||
MaxSize<C>: ArrayLength<u8>,
|
||||
<FieldBytesSize<C> as Add>::Output: Add<MaxOverhead> + ArrayLength<u8>,
|
||||
|
@ -36,10 +33,54 @@ where
|
|||
0 => Ok(signature.to_der().to_bytes().to_vec().into_boxed_slice()),
|
||||
// IEEE P1363
|
||||
1 => Ok(signature.to_bytes().to_vec().into_boxed_slice()),
|
||||
_ => Err(type_error("invalid DSA signature encoding")),
|
||||
_ => Err(
|
||||
KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignatureEncoding,
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum KeyObjectHandlePrehashedSignAndVerifyError {
|
||||
#[error("invalid DSA signature encoding")]
|
||||
InvalidDsaSignatureEncoding,
|
||||
#[error("key is not a private key")]
|
||||
KeyIsNotPrivate,
|
||||
#[error("digest not allowed for RSA signature: {0}")]
|
||||
DigestNotAllowedForRsaSignature(String),
|
||||
#[error("failed to sign digest with RSA")]
|
||||
FailedToSignDigestWithRsa,
|
||||
#[error("digest not allowed for RSA-PSS signature: {0}")]
|
||||
DigestNotAllowedForRsaPssSignature(String),
|
||||
#[error("failed to sign digest with RSA-PSS")]
|
||||
FailedToSignDigestWithRsaPss,
|
||||
#[error("failed to sign digest with DSA")]
|
||||
FailedToSignDigestWithDsa,
|
||||
#[error("rsa-pss with different mf1 hash algorithm and hash algorithm is not supported")]
|
||||
RsaPssHashAlgorithmUnsupported,
|
||||
#[error(
|
||||
"private key does not allow {actual} to be used, expected {expected}"
|
||||
)]
|
||||
PrivateKeyDisallowsUsage { actual: String, expected: String },
|
||||
#[error("failed to sign digest")]
|
||||
FailedToSignDigest,
|
||||
#[error("x25519 key cannot be used for signing")]
|
||||
X25519KeyCannotBeUsedForSigning,
|
||||
#[error("Ed25519 key cannot be used for prehashed signing")]
|
||||
Ed25519KeyCannotBeUsedForPrehashedSigning,
|
||||
#[error("DH key cannot be used for signing")]
|
||||
DhKeyCannotBeUsedForSigning,
|
||||
#[error("key is not a public or private key")]
|
||||
KeyIsNotPublicOrPrivate,
|
||||
#[error("Invalid DSA signature")]
|
||||
InvalidDsaSignature,
|
||||
#[error("x25519 key cannot be used for verification")]
|
||||
X25519KeyCannotBeUsedForVerification,
|
||||
#[error("Ed25519 key cannot be used for prehashed verification")]
|
||||
Ed25519KeyCannotBeUsedForPrehashedVerification,
|
||||
#[error("DH key cannot be used for verification")]
|
||||
DhKeyCannotBeUsedForVerification,
|
||||
}
|
||||
|
||||
impl KeyObjectHandle {
|
||||
pub fn sign_prehashed(
|
||||
&self,
|
||||
|
@ -47,10 +88,10 @@ impl KeyObjectHandle {
|
|||
digest: &[u8],
|
||||
pss_salt_length: Option<u32>,
|
||||
dsa_signature_encoding: u32,
|
||||
) -> Result<Box<[u8]>, AnyError> {
|
||||
) -> Result<Box<[u8]>, KeyObjectHandlePrehashedSignAndVerifyError> {
|
||||
let private_key = self
|
||||
.as_private_key()
|
||||
.ok_or_else(|| type_error("key is not a private key"))?;
|
||||
.ok_or(KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPrivate)?;
|
||||
|
||||
match private_key {
|
||||
AsymmetricPrivateKey::Rsa(key) => {
|
||||
|
@ -63,17 +104,14 @@ impl KeyObjectHandle {
|
|||
rsa::pkcs1v15::Pkcs1v15Sign::new::<D>()
|
||||
},
|
||||
_ => {
|
||||
return Err(type_error(format!(
|
||||
"digest not allowed for RSA signature: {}",
|
||||
digest_type
|
||||
)))
|
||||
return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string()))
|
||||
}
|
||||
)
|
||||
};
|
||||
|
||||
let signature = signer
|
||||
.sign(Some(&mut OsRng), key, digest)
|
||||
.map_err(|_| generic_error("failed to sign digest with RSA"))?;
|
||||
.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsa)?;
|
||||
Ok(signature.into())
|
||||
}
|
||||
AsymmetricPrivateKey::RsaPss(key) => {
|
||||
|
@ -81,9 +119,7 @@ impl KeyObjectHandle {
|
|||
let mut salt_length = None;
|
||||
if let Some(details) = &key.details {
|
||||
if details.hash_algorithm != details.mf1_hash_algorithm {
|
||||
return Err(type_error(
|
||||
"rsa-pss with different mf1 hash algorithm and hash algorithm is not supported",
|
||||
));
|
||||
return Err(KeyObjectHandlePrehashedSignAndVerifyError::RsaPssHashAlgorithmUnsupported);
|
||||
}
|
||||
hash_algorithm = Some(details.hash_algorithm);
|
||||
salt_length = Some(details.salt_length as usize);
|
||||
|
@ -96,10 +132,10 @@ impl KeyObjectHandle {
|
|||
fn <D>(algorithm: Option<RsaPssHashAlgorithm>) {
|
||||
if let Some(hash_algorithm) = hash_algorithm.take() {
|
||||
if Some(hash_algorithm) != algorithm {
|
||||
return Err(type_error(format!(
|
||||
"private key does not allow {} to be used, expected {}",
|
||||
digest_type, hash_algorithm.as_str()
|
||||
)));
|
||||
return Err(KeyObjectHandlePrehashedSignAndVerifyError::PrivateKeyDisallowsUsage {
|
||||
actual: digest_type.to_string(),
|
||||
expected: hash_algorithm.as_str().to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
if let Some(salt_length) = salt_length {
|
||||
|
@ -109,15 +145,12 @@ impl KeyObjectHandle {
|
|||
}
|
||||
},
|
||||
_ => {
|
||||
return Err(type_error(format!(
|
||||
"digest not allowed for RSA-PSS signature: {}",
|
||||
digest_type
|
||||
)))
|
||||
return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaPssSignature(digest_type.to_string()));
|
||||
}
|
||||
);
|
||||
let signature = pss
|
||||
.sign(Some(&mut OsRng), &key.key, digest)
|
||||
.map_err(|_| generic_error("failed to sign digest with RSA-PSS"))?;
|
||||
.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsaPss)?;
|
||||
Ok(signature.into())
|
||||
}
|
||||
AsymmetricPrivateKey::Dsa(key) => {
|
||||
|
@ -127,15 +160,12 @@ impl KeyObjectHandle {
|
|||
key.sign_prehashed_rfc6979::<D>(digest)
|
||||
},
|
||||
_ => {
|
||||
return Err(type_error(format!(
|
||||
"digest not allowed for RSA signature: {}",
|
||||
digest_type
|
||||
)))
|
||||
return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string()))
|
||||
}
|
||||
);
|
||||
|
||||
let signature =
|
||||
res.map_err(|_| generic_error("failed to sign digest with DSA"))?;
|
||||
res.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithDsa)?;
|
||||
Ok(signature.into())
|
||||
}
|
||||
AsymmetricPrivateKey::Ec(key) => match key {
|
||||
|
@ -143,7 +173,7 @@ impl KeyObjectHandle {
|
|||
let signing_key = p224::ecdsa::SigningKey::from(key);
|
||||
let signature: p224::ecdsa::Signature = signing_key
|
||||
.sign_prehash(digest)
|
||||
.map_err(|_| type_error("failed to sign digest"))?;
|
||||
.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?;
|
||||
|
||||
dsa_signature(dsa_signature_encoding, signature)
|
||||
}
|
||||
|
@ -151,7 +181,7 @@ impl KeyObjectHandle {
|
|||
let signing_key = p256::ecdsa::SigningKey::from(key);
|
||||
let signature: p256::ecdsa::Signature = signing_key
|
||||
.sign_prehash(digest)
|
||||
.map_err(|_| type_error("failed to sign digest"))?;
|
||||
.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?;
|
||||
|
||||
dsa_signature(dsa_signature_encoding, signature)
|
||||
}
|
||||
|
@ -159,19 +189,17 @@ impl KeyObjectHandle {
|
|||
let signing_key = p384::ecdsa::SigningKey::from(key);
|
||||
let signature: p384::ecdsa::Signature = signing_key
|
||||
.sign_prehash(digest)
|
||||
.map_err(|_| type_error("failed to sign digest"))?;
|
||||
.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?;
|
||||
|
||||
dsa_signature(dsa_signature_encoding, signature)
|
||||
}
|
||||
},
|
||||
AsymmetricPrivateKey::X25519(_) => {
|
||||
Err(type_error("x25519 key cannot be used for signing"))
|
||||
Err(KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForSigning)
|
||||
}
|
||||
AsymmetricPrivateKey::Ed25519(_) => Err(type_error(
|
||||
"Ed25519 key cannot be used for prehashed signing",
|
||||
)),
|
||||
AsymmetricPrivateKey::Ed25519(_) => Err(KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedSigning),
|
||||
AsymmetricPrivateKey::Dh(_) => {
|
||||
Err(type_error("DH key cannot be used for signing"))
|
||||
Err(KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForSigning)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -183,10 +211,10 @@ impl KeyObjectHandle {
|
|||
signature: &[u8],
|
||||
pss_salt_length: Option<u32>,
|
||||
dsa_signature_encoding: u32,
|
||||
) -> Result<bool, AnyError> {
|
||||
let public_key = self
|
||||
.as_public_key()
|
||||
.ok_or_else(|| type_error("key is not a public or private key"))?;
|
||||
) -> Result<bool, KeyObjectHandlePrehashedSignAndVerifyError> {
|
||||
let public_key = self.as_public_key().ok_or(
|
||||
KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPublicOrPrivate,
|
||||
)?;
|
||||
|
||||
match &*public_key {
|
||||
AsymmetricPublicKey::Rsa(key) => {
|
||||
|
@ -199,10 +227,7 @@ impl KeyObjectHandle {
|
|||
rsa::pkcs1v15::Pkcs1v15Sign::new::<D>()
|
||||
},
|
||||
_ => {
|
||||
return Err(type_error(format!(
|
||||
"digest not allowed for RSA signature: {}",
|
||||
digest_type
|
||||
)))
|
||||
return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string()))
|
||||
}
|
||||
)
|
||||
};
|
||||
|
@ -214,9 +239,7 @@ impl KeyObjectHandle {
|
|||
let mut salt_length = None;
|
||||
if let Some(details) = &key.details {
|
||||
if details.hash_algorithm != details.mf1_hash_algorithm {
|
||||
return Err(type_error(
|
||||
"rsa-pss with different mf1 hash algorithm and hash algorithm is not supported",
|
||||
));
|
||||
return Err(KeyObjectHandlePrehashedSignAndVerifyError::RsaPssHashAlgorithmUnsupported);
|
||||
}
|
||||
hash_algorithm = Some(details.hash_algorithm);
|
||||
salt_length = Some(details.salt_length as usize);
|
||||
|
@ -229,10 +252,10 @@ impl KeyObjectHandle {
|
|||
fn <D>(algorithm: Option<RsaPssHashAlgorithm>) {
|
||||
if let Some(hash_algorithm) = hash_algorithm.take() {
|
||||
if Some(hash_algorithm) != algorithm {
|
||||
return Err(type_error(format!(
|
||||
"private key does not allow {} to be used, expected {}",
|
||||
digest_type, hash_algorithm.as_str()
|
||||
)));
|
||||
return Err(KeyObjectHandlePrehashedSignAndVerifyError::PrivateKeyDisallowsUsage {
|
||||
actual: digest_type.to_string(),
|
||||
expected: hash_algorithm.as_str().to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
if let Some(salt_length) = salt_length {
|
||||
|
@ -242,17 +265,14 @@ impl KeyObjectHandle {
|
|||
}
|
||||
},
|
||||
_ => {
|
||||
return Err(type_error(format!(
|
||||
"digest not allowed for RSA-PSS signature: {}",
|
||||
digest_type
|
||||
)))
|
||||
return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaPssSignature(digest_type.to_string()));
|
||||
}
|
||||
);
|
||||
Ok(pss.verify(&key.key, digest, signature).is_ok())
|
||||
}
|
||||
AsymmetricPublicKey::Dsa(key) => {
|
||||
let signature = dsa::Signature::from_der(signature)
|
||||
.map_err(|_| type_error("Invalid DSA signature"))?;
|
||||
.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignature)?;
|
||||
Ok(key.verify_prehash(digest, &signature).is_ok())
|
||||
}
|
||||
AsymmetricPublicKey::Ec(key) => match key {
|
||||
|
@ -294,13 +314,11 @@ impl KeyObjectHandle {
|
|||
}
|
||||
},
|
||||
AsymmetricPublicKey::X25519(_) => {
|
||||
Err(type_error("x25519 key cannot be used for verification"))
|
||||
Err(KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForVerification)
|
||||
}
|
||||
AsymmetricPublicKey::Ed25519(_) => Err(type_error(
|
||||
"Ed25519 key cannot be used for prehashed verification",
|
||||
)),
|
||||
AsymmetricPublicKey::Ed25519(_) => Err(KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedVerification),
|
||||
AsymmetricPublicKey::Dh(_) => {
|
||||
Err(type_error("DH key cannot be used for verification"))
|
||||
Err(KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForVerification)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::op2;
|
||||
|
||||
use x509_parser::der_parser::asn1_rs::Any;
|
||||
use x509_parser::der_parser::asn1_rs::Tag;
|
||||
use x509_parser::der_parser::oid::Oid;
|
||||
pub use x509_parser::error::X509Error;
|
||||
use x509_parser::extensions;
|
||||
use x509_parser::pem;
|
||||
use x509_parser::prelude::*;
|
||||
|
@ -65,7 +65,7 @@ impl<'a> Deref for CertificateView<'a> {
|
|||
#[cppgc]
|
||||
pub fn op_node_x509_parse(
|
||||
#[buffer] buf: &[u8],
|
||||
) -> Result<Certificate, AnyError> {
|
||||
) -> Result<Certificate, X509Error> {
|
||||
let source = match pem::parse_x509_pem(buf) {
|
||||
Ok((_, pem)) => CertificateSources::Pem(pem),
|
||||
Err(_) => CertificateSources::Der(buf.to_vec().into_boxed_slice()),
|
||||
|
@ -81,7 +81,7 @@ pub fn op_node_x509_parse(
|
|||
X509Certificate::from_der(buf).map(|(_, cert)| cert)?
|
||||
}
|
||||
};
|
||||
Ok::<_, AnyError>(CertificateView { cert })
|
||||
Ok::<_, X509Error>(CertificateView { cert })
|
||||
},
|
||||
)?;
|
||||
|
||||
|
@ -89,23 +89,23 @@ pub fn op_node_x509_parse(
|
|||
}
|
||||
|
||||
#[op2(fast)]
|
||||
pub fn op_node_x509_ca(#[cppgc] cert: &Certificate) -> Result<bool, AnyError> {
|
||||
pub fn op_node_x509_ca(#[cppgc] cert: &Certificate) -> bool {
|
||||
let cert = cert.inner.get().deref();
|
||||
Ok(cert.is_ca())
|
||||
cert.is_ca()
|
||||
}
|
||||
|
||||
#[op2(fast)]
|
||||
pub fn op_node_x509_check_email(
|
||||
#[cppgc] cert: &Certificate,
|
||||
#[string] email: &str,
|
||||
) -> Result<bool, AnyError> {
|
||||
) -> bool {
|
||||
let cert = cert.inner.get().deref();
|
||||
let subject = cert.subject();
|
||||
if subject
|
||||
.iter_email()
|
||||
.any(|e| e.as_str().unwrap_or("") == email)
|
||||
{
|
||||
return Ok(true);
|
||||
return true;
|
||||
}
|
||||
|
||||
let subject_alt = cert
|
||||
|
@ -121,62 +121,60 @@ pub fn op_node_x509_check_email(
|
|||
for name in &subject_alt.general_names {
|
||||
if let extensions::GeneralName::RFC822Name(n) = name {
|
||||
if *n == email {
|
||||
return Ok(true);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
false
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[string]
|
||||
pub fn op_node_x509_fingerprint(
|
||||
#[cppgc] cert: &Certificate,
|
||||
) -> Result<Option<String>, AnyError> {
|
||||
Ok(cert.fingerprint::<sha1::Sha1>())
|
||||
pub fn op_node_x509_fingerprint(#[cppgc] cert: &Certificate) -> Option<String> {
|
||||
cert.fingerprint::<sha1::Sha1>()
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[string]
|
||||
pub fn op_node_x509_fingerprint256(
|
||||
#[cppgc] cert: &Certificate,
|
||||
) -> Result<Option<String>, AnyError> {
|
||||
Ok(cert.fingerprint::<sha2::Sha256>())
|
||||
) -> Option<String> {
|
||||
cert.fingerprint::<sha2::Sha256>()
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[string]
|
||||
pub fn op_node_x509_fingerprint512(
|
||||
#[cppgc] cert: &Certificate,
|
||||
) -> Result<Option<String>, AnyError> {
|
||||
Ok(cert.fingerprint::<sha2::Sha512>())
|
||||
) -> Option<String> {
|
||||
cert.fingerprint::<sha2::Sha512>()
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[string]
|
||||
pub fn op_node_x509_get_issuer(
|
||||
#[cppgc] cert: &Certificate,
|
||||
) -> Result<String, AnyError> {
|
||||
) -> Result<String, X509Error> {
|
||||
let cert = cert.inner.get().deref();
|
||||
Ok(x509name_to_string(cert.issuer(), oid_registry())?)
|
||||
x509name_to_string(cert.issuer(), oid_registry())
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[string]
|
||||
pub fn op_node_x509_get_subject(
|
||||
#[cppgc] cert: &Certificate,
|
||||
) -> Result<String, AnyError> {
|
||||
) -> Result<String, X509Error> {
|
||||
let cert = cert.inner.get().deref();
|
||||
Ok(x509name_to_string(cert.subject(), oid_registry())?)
|
||||
x509name_to_string(cert.subject(), oid_registry())
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[cppgc]
|
||||
pub fn op_node_x509_public_key(
|
||||
#[cppgc] cert: &Certificate,
|
||||
) -> Result<KeyObjectHandle, AnyError> {
|
||||
) -> Result<KeyObjectHandle, super::keys::X509PublicKeyError> {
|
||||
let cert = cert.inner.get().deref();
|
||||
let public_key = &cert.tbs_certificate.subject_pki;
|
||||
|
||||
|
@ -245,37 +243,29 @@ fn x509name_to_string(
|
|||
|
||||
#[op2]
|
||||
#[string]
|
||||
pub fn op_node_x509_get_valid_from(
|
||||
#[cppgc] cert: &Certificate,
|
||||
) -> Result<String, AnyError> {
|
||||
pub fn op_node_x509_get_valid_from(#[cppgc] cert: &Certificate) -> String {
|
||||
let cert = cert.inner.get().deref();
|
||||
Ok(cert.validity().not_before.to_string())
|
||||
cert.validity().not_before.to_string()
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[string]
|
||||
pub fn op_node_x509_get_valid_to(
|
||||
#[cppgc] cert: &Certificate,
|
||||
) -> Result<String, AnyError> {
|
||||
pub fn op_node_x509_get_valid_to(#[cppgc] cert: &Certificate) -> String {
|
||||
let cert = cert.inner.get().deref();
|
||||
Ok(cert.validity().not_after.to_string())
|
||||
cert.validity().not_after.to_string()
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[string]
|
||||
pub fn op_node_x509_get_serial_number(
|
||||
#[cppgc] cert: &Certificate,
|
||||
) -> Result<String, AnyError> {
|
||||
pub fn op_node_x509_get_serial_number(#[cppgc] cert: &Certificate) -> String {
|
||||
let cert = cert.inner.get().deref();
|
||||
let mut s = cert.serial.to_str_radix(16);
|
||||
s.make_ascii_uppercase();
|
||||
Ok(s)
|
||||
s
|
||||
}
|
||||
|
||||
#[op2(fast)]
|
||||
pub fn op_node_x509_key_usage(
|
||||
#[cppgc] cert: &Certificate,
|
||||
) -> Result<u16, AnyError> {
|
||||
pub fn op_node_x509_key_usage(#[cppgc] cert: &Certificate) -> u16 {
|
||||
let cert = cert.inner.get().deref();
|
||||
let key_usage = cert
|
||||
.extensions()
|
||||
|
@ -286,5 +276,5 @@ pub fn op_node_x509_key_usage(
|
|||
_ => None,
|
||||
});
|
||||
|
||||
Ok(key_usage.map(|k| k.flags).unwrap_or(0))
|
||||
key_usage.map(|k| k.flags).unwrap_or(0)
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@ use crate::NodePermissions;
|
|||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum FsError {
|
||||
#[error(transparent)]
|
||||
Permission(deno_core::error::AnyError),
|
||||
Permission(#[from] deno_permissions::PermissionCheckError),
|
||||
#[error("{0}")]
|
||||
Io(#[from] std::io::Error),
|
||||
#[cfg(windows)]
|
||||
|
@ -53,8 +53,7 @@ where
|
|||
let mut state = state.borrow_mut();
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_read_with_api_name(&path, Some("node:fs.exists()"))
|
||||
.map_err(FsError::Permission)?;
|
||||
.check_read_with_api_name(&path, Some("node:fs.exists()"))?;
|
||||
(state.borrow::<FileSystemRc>().clone(), path)
|
||||
};
|
||||
|
||||
|
@ -72,12 +71,10 @@ where
|
|||
{
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_read_with_api_name(path, Some("node:fs.cpSync"))
|
||||
.map_err(FsError::Permission)?;
|
||||
.check_read_with_api_name(path, Some("node:fs.cpSync"))?;
|
||||
let new_path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write_with_api_name(new_path, Some("node:fs.cpSync"))
|
||||
.map_err(FsError::Permission)?;
|
||||
.check_write_with_api_name(new_path, Some("node:fs.cpSync"))?;
|
||||
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
fs.cp_sync(&path, &new_path)?;
|
||||
|
@ -97,12 +94,10 @@ where
|
|||
let mut state = state.borrow_mut();
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_read_with_api_name(&path, Some("node:fs.cpSync"))
|
||||
.map_err(FsError::Permission)?;
|
||||
.check_read_with_api_name(&path, Some("node:fs.cpSync"))?;
|
||||
let new_path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write_with_api_name(&new_path, Some("node:fs.cpSync"))
|
||||
.map_err(FsError::Permission)?;
|
||||
.check_write_with_api_name(&new_path, Some("node:fs.cpSync"))?;
|
||||
(state.borrow::<FileSystemRc>().clone(), path, new_path)
|
||||
};
|
||||
|
||||
|
@ -136,12 +131,10 @@ where
|
|||
let mut state = state.borrow_mut();
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_read_with_api_name(&path, Some("node:fs.statfs"))
|
||||
.map_err(FsError::Permission)?;
|
||||
.check_read_with_api_name(&path, Some("node:fs.statfs"))?;
|
||||
state
|
||||
.borrow_mut::<P>()
|
||||
.check_sys("statfs", "node:fs.statfs")
|
||||
.map_err(FsError::Permission)?;
|
||||
.check_sys("statfs", "node:fs.statfs")?;
|
||||
path
|
||||
};
|
||||
#[cfg(unix)]
|
||||
|
@ -279,8 +272,7 @@ where
|
|||
{
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write_with_api_name(path, Some("node:fs.lutimes"))
|
||||
.map_err(FsError::Permission)?;
|
||||
.check_write_with_api_name(path, Some("node:fs.lutimes"))?;
|
||||
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
fs.lutime_sync(&path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)?;
|
||||
|
@ -303,8 +295,7 @@ where
|
|||
let mut state = state.borrow_mut();
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write_with_api_name(&path, Some("node:fs.lutimesSync"))
|
||||
.map_err(FsError::Permission)?;
|
||||
.check_write_with_api_name(&path, Some("node:fs.lutimesSync"))?;
|
||||
(state.borrow::<FileSystemRc>().clone(), path)
|
||||
};
|
||||
|
||||
|
@ -326,8 +317,7 @@ where
|
|||
{
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write_with_api_name(&path, Some("node:fs.lchownSync"))
|
||||
.map_err(FsError::Permission)?;
|
||||
.check_write_with_api_name(&path, Some("node:fs.lchownSync"))?;
|
||||
let fs = state.borrow::<FileSystemRc>();
|
||||
fs.lchown_sync(&path, uid, gid)?;
|
||||
Ok(())
|
||||
|
@ -347,8 +337,7 @@ where
|
|||
let mut state = state.borrow_mut();
|
||||
let path = state
|
||||
.borrow_mut::<P>()
|
||||
.check_write_with_api_name(&path, Some("node:fs.lchown"))
|
||||
.map_err(FsError::Permission)?;
|
||||
.check_write_with_api_name(&path, Some("node:fs.lchown"))?;
|
||||
(state.borrow::<FileSystemRc>().clone(), path)
|
||||
};
|
||||
fs.lchown_async(path, uid, gid).await?;
|
||||
|
|
|
@ -78,9 +78,7 @@ where
|
|||
|
||||
{
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
permissions
|
||||
.check_net_url(&url, "ClientRequest")
|
||||
.map_err(FetchError::Permission)?;
|
||||
permissions.check_net_url(&url, "ClientRequest")?;
|
||||
}
|
||||
|
||||
let mut header_map = HeaderMap::new();
|
||||
|
|
161
ext/node/ops/inspector.rs
Normal file
161
ext/node/ops/inspector.rs
Normal file
|
@ -0,0 +1,161 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use crate::NodePermissions;
|
||||
use deno_core::anyhow::Error;
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::futures::channel::mpsc;
|
||||
use deno_core::op2;
|
||||
use deno_core::v8;
|
||||
use deno_core::GarbageCollected;
|
||||
use deno_core::InspectorSessionKind;
|
||||
use deno_core::InspectorSessionOptions;
|
||||
use deno_core::JsRuntimeInspector;
|
||||
use deno_core::OpState;
|
||||
use std::cell::RefCell;
|
||||
use std::rc::Rc;
|
||||
|
||||
#[op2(fast)]
|
||||
pub fn op_inspector_enabled() -> bool {
|
||||
// TODO: hook up to InspectorServer
|
||||
false
|
||||
}
|
||||
|
||||
#[op2]
|
||||
pub fn op_inspector_open<P>(
|
||||
_state: &mut OpState,
|
||||
_port: Option<u16>,
|
||||
#[string] _host: Option<String>,
|
||||
) -> Result<(), Error>
|
||||
where
|
||||
P: NodePermissions + 'static,
|
||||
{
|
||||
// TODO: hook up to InspectorServer
|
||||
/*
|
||||
let server = state.borrow_mut::<InspectorServer>();
|
||||
if let Some(host) = host {
|
||||
server.set_host(host);
|
||||
}
|
||||
if let Some(port) = port {
|
||||
server.set_port(port);
|
||||
}
|
||||
state
|
||||
.borrow_mut::<P>()
|
||||
.check_net((server.host(), Some(server.port())), "inspector.open")?;
|
||||
*/
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[op2(fast)]
|
||||
pub fn op_inspector_close() {
|
||||
// TODO: hook up to InspectorServer
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[string]
|
||||
pub fn op_inspector_url() -> Option<String> {
|
||||
// TODO: hook up to InspectorServer
|
||||
None
|
||||
}
|
||||
|
||||
#[op2(fast)]
|
||||
pub fn op_inspector_wait(state: &OpState) -> bool {
|
||||
match state.try_borrow::<Rc<RefCell<JsRuntimeInspector>>>() {
|
||||
Some(inspector) => {
|
||||
inspector
|
||||
.borrow_mut()
|
||||
.wait_for_session_and_break_on_next_statement();
|
||||
true
|
||||
}
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[op2(fast)]
|
||||
pub fn op_inspector_emit_protocol_event(
|
||||
#[string] _event_name: String,
|
||||
#[string] _params: String,
|
||||
) {
|
||||
// TODO: inspector channel & protocol notifications
|
||||
}
|
||||
|
||||
struct JSInspectorSession {
|
||||
tx: RefCell<Option<mpsc::UnboundedSender<String>>>,
|
||||
}
|
||||
|
||||
impl GarbageCollected for JSInspectorSession {}
|
||||
|
||||
#[op2]
|
||||
#[cppgc]
|
||||
pub fn op_inspector_connect<'s, P>(
|
||||
isolate: *mut v8::Isolate,
|
||||
scope: &mut v8::HandleScope<'s>,
|
||||
state: &mut OpState,
|
||||
connect_to_main_thread: bool,
|
||||
callback: v8::Local<'s, v8::Function>,
|
||||
) -> Result<JSInspectorSession, Error>
|
||||
where
|
||||
P: NodePermissions + 'static,
|
||||
{
|
||||
state
|
||||
.borrow_mut::<P>()
|
||||
.check_sys("inspector", "inspector.Session.connect")?;
|
||||
|
||||
if connect_to_main_thread {
|
||||
return Err(generic_error("connectToMainThread not supported"));
|
||||
}
|
||||
|
||||
let context = scope.get_current_context();
|
||||
let context = v8::Global::new(scope, context);
|
||||
let callback = v8::Global::new(scope, callback);
|
||||
|
||||
let inspector = state
|
||||
.borrow::<Rc<RefCell<JsRuntimeInspector>>>()
|
||||
.borrow_mut();
|
||||
|
||||
let tx = inspector.create_raw_session(
|
||||
InspectorSessionOptions {
|
||||
kind: InspectorSessionKind::NonBlocking {
|
||||
wait_for_disconnect: false,
|
||||
},
|
||||
},
|
||||
// The inspector connection does not keep the event loop alive but
|
||||
// when the inspector sends a message to the frontend, the JS that
|
||||
// that runs may keep the event loop alive so we have to call back
|
||||
// synchronously, instead of using the usual LocalInspectorSession
|
||||
// UnboundedReceiver<InspectorMsg> API.
|
||||
Box::new(move |message| {
|
||||
// SAFETY: This function is called directly by the inspector, so
|
||||
// 1) The isolate is still valid
|
||||
// 2) We are on the same thread as the Isolate
|
||||
let scope = unsafe { &mut v8::CallbackScope::new(&mut *isolate) };
|
||||
let context = v8::Local::new(scope, context.clone());
|
||||
let scope = &mut v8::ContextScope::new(scope, context);
|
||||
let scope = &mut v8::TryCatch::new(scope);
|
||||
let recv = v8::undefined(scope);
|
||||
if let Some(message) = v8::String::new(scope, &message.content) {
|
||||
let callback = v8::Local::new(scope, callback.clone());
|
||||
callback.call(scope, recv.into(), &[message.into()]);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
Ok(JSInspectorSession {
|
||||
tx: RefCell::new(Some(tx)),
|
||||
})
|
||||
}
|
||||
|
||||
#[op2(fast)]
|
||||
pub fn op_inspector_dispatch(
|
||||
#[cppgc] session: &JSInspectorSession,
|
||||
#[string] message: String,
|
||||
) {
|
||||
if let Some(tx) = &*session.tx.borrow() {
|
||||
let _ = tx.unbounded_send(message);
|
||||
}
|
||||
}
|
||||
|
||||
#[op2(fast)]
|
||||
pub fn op_inspector_disconnect(#[cppgc] session: &JSInspectorSession) {
|
||||
drop(session.tx.borrow_mut().take());
|
||||
}
|
|
@ -7,6 +7,7 @@ pub mod fs;
|
|||
pub mod http;
|
||||
pub mod http2;
|
||||
pub mod idna;
|
||||
pub mod inspector;
|
||||
pub mod ipc;
|
||||
pub mod os;
|
||||
pub mod process;
|
||||
|
|
|
@ -14,7 +14,7 @@ pub enum OsError {
|
|||
#[error(transparent)]
|
||||
Priority(priority::PriorityError),
|
||||
#[error(transparent)]
|
||||
Permission(deno_core::error::AnyError),
|
||||
Permission(#[from] deno_permissions::PermissionCheckError),
|
||||
#[error("Failed to get cpu info")]
|
||||
FailedToGetCpuInfo,
|
||||
#[error("Failed to get user info")]
|
||||
|
@ -31,9 +31,7 @@ where
|
|||
{
|
||||
{
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
permissions
|
||||
.check_sys("getPriority", "node:os.getPriority()")
|
||||
.map_err(OsError::Permission)?;
|
||||
permissions.check_sys("getPriority", "node:os.getPriority()")?;
|
||||
}
|
||||
|
||||
priority::get_priority(pid).map_err(OsError::Priority)
|
||||
|
@ -50,9 +48,7 @@ where
|
|||
{
|
||||
{
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
permissions
|
||||
.check_sys("setPriority", "node:os.setPriority()")
|
||||
.map_err(OsError::Permission)?;
|
||||
permissions.check_sys("setPriority", "node:os.setPriority()")?;
|
||||
}
|
||||
|
||||
priority::set_priority(pid, priority).map_err(OsError::Priority)
|
||||
|
@ -266,9 +262,7 @@ where
|
|||
{
|
||||
{
|
||||
let permissions = state.borrow_mut::<P>();
|
||||
permissions
|
||||
.check_sys("cpus", "node:os.cpus()")
|
||||
.map_err(OsError::Permission)?;
|
||||
permissions.check_sys("cpus", "node:os.cpus()")?;
|
||||
}
|
||||
|
||||
cpus::cpu_info().ok_or(OsError::FailedToGetCpuInfo)
|
||||
|
|
|
@ -1312,6 +1312,8 @@ export function findSourceMap(_path) {
|
|||
return undefined;
|
||||
}
|
||||
|
||||
Module.findSourceMap = findSourceMap;
|
||||
|
||||
/**
|
||||
* @param {string | URL} _specifier
|
||||
* @param {string | URL} _parentUrl
|
||||
|
|
|
@ -20,6 +20,7 @@ import {
|
|||
notImplemented,
|
||||
TextEncodings,
|
||||
} from "ext:deno_node/_utils.ts";
|
||||
import { type Buffer } from "node:buffer";
|
||||
|
||||
export type CallbackWithError = (err: ErrnoException | null) => void;
|
||||
|
||||
|
|
|
@ -147,8 +147,8 @@ export function open(
|
|||
|
||||
export function openPromise(
|
||||
path: string | Buffer | URL,
|
||||
flags?: openFlags = "r",
|
||||
mode? = 0o666,
|
||||
flags: openFlags = "r",
|
||||
mode = 0o666,
|
||||
): Promise<FileHandle> {
|
||||
return new Promise((resolve, reject) => {
|
||||
open(path, flags, mode, (err, fd) => {
|
||||
|
|
|
@ -15,6 +15,7 @@ import { maybeCallback } from "ext:deno_node/_fs/_fs_common.ts";
|
|||
import { validateInteger } from "ext:deno_node/internal/validators.mjs";
|
||||
import * as io from "ext:deno_io/12_io.js";
|
||||
import { op_fs_seek_async, op_fs_seek_sync } from "ext:core/ops";
|
||||
import process from "node:process";
|
||||
|
||||
type Callback = (
|
||||
err: ErrnoException | null,
|
||||
|
|
|
@ -14,6 +14,7 @@ import { nextTick } from "ext:deno_node/_next_tick.ts";
|
|||
import {
|
||||
isAnyArrayBuffer,
|
||||
isArrayBufferView,
|
||||
isUint8Array,
|
||||
} from "ext:deno_node/internal/util/types.ts";
|
||||
|
||||
var kRangeErrorMessage = "Cannot create final Buffer. It would be larger " +
|
||||
|
@ -158,6 +159,12 @@ export const inflateRawSync = function (buffer, opts) {
|
|||
function sanitizeInput(input) {
|
||||
if (typeof input === "string") input = Buffer.from(input);
|
||||
|
||||
if (isArrayBufferView(input) && !isUint8Array(input)) {
|
||||
input = Buffer.from(input.buffer, input.byteOffset, input.byteLength);
|
||||
} else if (isAnyArrayBuffer(input)) {
|
||||
input = Buffer.from(input);
|
||||
}
|
||||
|
||||
if (
|
||||
!Buffer.isBuffer(input) &&
|
||||
(input.buffer && !input.buffer.constructor === ArrayBuffer)
|
||||
|
|
210
ext/node/polyfills/inspector.js
Normal file
210
ext/node/polyfills/inspector.js
Normal file
|
@ -0,0 +1,210 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
|
||||
|
||||
import process from "node:process";
import { EventEmitter } from "node:events";
import { isMainThread } from "node:worker_threads";
import { primordials } from "ext:core/mod.js";
import {
  op_get_extras_binding_object,
  op_inspector_close,
  op_inspector_connect,
  op_inspector_disconnect,
  op_inspector_dispatch,
  op_inspector_emit_protocol_event,
  op_inspector_enabled,
  op_inspector_open,
  op_inspector_url,
  op_inspector_wait,
} from "ext:core/ops";
import {
  isUint32,
  validateFunction,
  validateInt32,
  validateObject,
  validateString,
} from "ext:deno_node/internal/validators.mjs";
import {
  ERR_INSPECTOR_ALREADY_ACTIVATED,
  ERR_INSPECTOR_ALREADY_CONNECTED,
  ERR_INSPECTOR_CLOSED,
  ERR_INSPECTOR_COMMAND,
  ERR_INSPECTOR_NOT_ACTIVE,
  ERR_INSPECTOR_NOT_CONNECTED,
  ERR_INSPECTOR_NOT_WORKER,
} from "ext:deno_node/internal/errors.ts";
|
||||
|
||||
const {
|
||||
SymbolDispose,
|
||||
JSONParse,
|
||||
JSONStringify,
|
||||
SafeMap,
|
||||
} = primordials;
|
||||
|
||||
/**
 * An inspector session, mirroring Node's `inspector.Session`.
 *
 * A session connects to the V8 inspector back-end, sends protocol commands
 * via `post()`, and emits protocol notifications as events.
 *
 * Fixes vs. previous revision:
 * - `#onMessage` now parses the incoming JSON *inside* the try block, so a
 *   malformed message is reported via `process.emitWarning` instead of
 *   throwing into the native inspector callback.
 * - `connectToMainThread` relies on `isMainThread` being imported from
 *   "node:worker_threads" (previously referenced but never imported).
 */
class Session extends EventEmitter {
  #connection = null;
  #nextId = 1;
  #messageCallbacks = new SafeMap();

  /** Connects the session to the inspector back-end of this thread. */
  connect() {
    if (this.#connection) {
      throw new ERR_INSPECTOR_ALREADY_CONNECTED("The inspector session");
    }
    this.#connection = op_inspector_connect(false, (m) => this.#onMessage(m));
  }

  /**
   * Connects the session to the main thread's inspector back-end.
   * Only valid from a worker thread.
   */
  connectToMainThread() {
    if (isMainThread) {
      throw new ERR_INSPECTOR_NOT_WORKER();
    }
    if (this.#connection) {
      throw new ERR_INSPECTOR_ALREADY_CONNECTED("The inspector session");
    }
    this.#connection = op_inspector_connect(true, (m) => this.#onMessage(m));
  }

  // Handles a raw protocol message from the inspector: responses (with an
  // `id`) resolve the matching `post()` callback; notifications (no `id`)
  // are re-emitted as events. Any error — including a JSON parse failure —
  // is surfaced as a process warning rather than thrown into native code.
  #onMessage(message) {
    try {
      const parsed = JSONParse(message);
      if (parsed.id) {
        const callback = this.#messageCallbacks.get(parsed.id);
        this.#messageCallbacks.delete(parsed.id);
        if (callback) {
          if (parsed.error) {
            return callback(
              new ERR_INSPECTOR_COMMAND(
                parsed.error.code,
                parsed.error.message,
              ),
            );
          }

          callback(null, parsed.result);
        }
      } else {
        this.emit(parsed.method, parsed);
        this.emit("inspectorNotification", parsed);
      }
    } catch (error) {
      process.emitWarning(error);
    }
  }

  /**
   * Posts a protocol command to the inspector back-end.
   *
   * @param {string} method protocol method, e.g. "Runtime.evaluate"
   * @param {object} [params] optional command parameters
   * @param {Function} [callback] invoked with (error, result); may be passed
   *   as the second argument when `params` is omitted
   */
  post(method, params, callback) {
    validateString(method, "method");
    // Support post(method, callback) without params.
    if (!callback && typeof params === "function") {
      callback = params;
      params = null;
    }
    if (params) {
      validateObject(params, "params");
    }
    if (callback) {
      validateFunction(callback, "callback");
    }

    if (!this.#connection) {
      throw new ERR_INSPECTOR_NOT_CONNECTED();
    }
    const id = this.#nextId++;
    const message = { id, method };
    if (params) {
      message.params = params;
    }
    if (callback) {
      this.#messageCallbacks.set(id, callback);
    }
    op_inspector_dispatch(this.#connection, JSONStringify(message));
  }

  /**
   * Immediately closes the session. All pending message callbacks are
   * invoked (on the next tick) with ERR_INSPECTOR_CLOSED.
   */
  disconnect() {
    if (!this.#connection) {
      return;
    }
    op_inspector_disconnect(this.#connection);
    this.#connection = null;
    // deno-lint-ignore prefer-primordials
    for (const callback of this.#messageCallbacks.values()) {
      process.nextTick(callback, new ERR_INSPECTOR_CLOSED());
    }
    this.#messageCallbacks.clear();
    this.#nextId = 1;
  }
}
|
||||
|
||||
/**
 * Activates the inspector server on the given host and port, mirroring
 * Node's `inspector.open()`.
 *
 * @param {number} [port] port to listen on; must fit a 16-bit unsigned
 *   integer, other numeric values are ignored (treated as undefined)
 * @param {string} [host] host to bind to; non-strings are ignored
 * @param {boolean} [wait] when truthy, block until a client has connected
 * @returns {object} a disposable whose `Symbol.dispose` deactivates the
 *   inspector
 * @throws {ERR_INSPECTOR_ALREADY_ACTIVATED} if the inspector is already on
 */
function open(port, host, wait) {
  if (op_inspector_enabled()) {
    throw new ERR_INSPECTOR_ALREADY_ACTIVATED();
  }
  // inspectorOpen() currently does not typecheck its arguments and adding
  // such checks would be a potentially breaking change. However, the native
  // open() function requires the port to fit into a 16-bit unsigned integer,
  // causing an integer overflow otherwise, so we at least need to prevent that.
  if (isUint32(port)) {
    validateInt32(port, "port", 0, 65535);
  } else {
    // equiv of handling args[0]->IsUint32()
    port = undefined;
  }
  if (typeof host !== "string") {
    // equiv of handling args[1]->IsString()
    host = undefined;
  }
  op_inspector_open(port, host);
  if (wait) {
    op_inspector_wait();
  }

  return {
    __proto__: null,
    [SymbolDispose]() {
      // Fix: the previous revision called `_debugEnd()`, a Node-internal
      // binding that does not exist in this module, so disposing threw a
      // ReferenceError. Closing the inspector server is the intended effect.
      op_inspector_close();
    },
  };
}
|
||||
|
||||
/**
 * Deactivates the inspector server.
 * See https://nodejs.org/api/inspector.html#inspectorclose
 */
function close() {
  op_inspector_close();
}
|
||||
|
||||
/**
 * Returns the URL of the active inspector, or `undefined` if there is none.
 * See https://nodejs.org/api/inspector.html#inspectorurl
 */
function url() {
  const inspectorUrl = op_inspector_url();
  return inspectorUrl;
}
|
||||
|
||||
/**
 * Blocks until a client (existing or connected later) has sent the
 * Runtime.runIfWaitingForDebugger command.
 *
 * @throws {ERR_INSPECTOR_NOT_ACTIVE} when the inspector is not active
 */
function waitForDebugger() {
  const debuggerAttached = op_inspector_wait();
  if (debuggerAttached) {
    return;
  }
  throw new ERR_INSPECTOR_NOT_ACTIVE();
}
|
||||
|
||||
/**
 * Emits a protocol notification to every attached inspector frontend.
 *
 * @param {string} eventName fully-qualified protocol event name
 * @param {object} [params] optional event parameters (defaults to `{}`)
 */
function broadcastToFrontend(eventName, params) {
  validateString(eventName, "eventName");
  if (params) {
    validateObject(params, "params");
  }
  const payload = JSONStringify(params ?? {});
  op_inspector_emit_protocol_event(eventName, payload);
}
|
||||
|
||||
// Helpers that broadcast the Network.* protocol events an embedder uses to
// report request lifecycle information to attached inspector frontends.
const Network = {
  requestWillBeSent(params) {
    broadcastToFrontend("Network.requestWillBeSent", params);
  },
  responseReceived(params) {
    broadcastToFrontend("Network.responseReceived", params);
  },
  loadingFinished(params) {
    broadcastToFrontend("Network.loadingFinished", params);
  },
  loadingFailed(params) {
    broadcastToFrontend("Network.loadingFailed", params);
  },
};
|
||||
|
||||
// The console exposed by the V8 extras binding object.
// NOTE(review): presumably this is the inspector-aware console (messages
// routed to attached frontends) rather than the regular terminal console —
// confirm against op_get_extras_binding_object's implementation.
const console = op_get_extras_binding_object().console;

export { close, console, Network, open, Session, url, waitForDebugger };

export default {
  open,
  close,
  url,
  waitForDebugger,
  console,
  Session,
  Network,
};
|
|
@ -1,82 +0,0 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
|
||||
|
||||
import { EventEmitter } from "node:events";
|
||||
import { notImplemented } from "ext:deno_node/_utils.ts";
|
||||
import { primordials } from "ext:core/mod.js";
|
||||
|
||||
const {
|
||||
SafeMap,
|
||||
} = primordials;
|
||||
|
||||
class Session extends EventEmitter {
|
||||
#connection = null;
|
||||
#nextId = 1;
|
||||
#messageCallbacks = new SafeMap();
|
||||
|
||||
/** Connects the session to the inspector back-end. */
|
||||
connect() {
|
||||
notImplemented("inspector.Session.prototype.connect");
|
||||
}
|
||||
|
||||
/** Connects the session to the main thread
|
||||
* inspector back-end. */
|
||||
connectToMainThread() {
|
||||
notImplemented("inspector.Session.prototype.connectToMainThread");
|
||||
}
|
||||
|
||||
/** Posts a message to the inspector back-end. */
|
||||
post(
|
||||
_method: string,
|
||||
_params?: Record<string, unknown>,
|
||||
_callback?: (...args: unknown[]) => void,
|
||||
) {
|
||||
notImplemented("inspector.Session.prototype.post");
|
||||
}
|
||||
|
||||
/** Immediately closes the session, all pending
|
||||
* message callbacks will be called with an
|
||||
* error.
|
||||
*/
|
||||
disconnect() {
|
||||
notImplemented("inspector.Session.prototype.disconnect");
|
||||
}
|
||||
}
|
||||
|
||||
/** Activates inspector on host and port.
|
||||
* See https://nodejs.org/api/inspector.html#inspectoropenport-host-wait */
|
||||
function open(_port?: number, _host?: string, _wait?: boolean) {
|
||||
notImplemented("inspector.Session.prototype.open");
|
||||
}
|
||||
|
||||
/** Deactivate the inspector. Blocks until there are no active connections.
|
||||
* See https://nodejs.org/api/inspector.html#inspectorclose */
|
||||
function close() {
|
||||
notImplemented("inspector.Session.prototype.close");
|
||||
}
|
||||
|
||||
/** Return the URL of the active inspector, or undefined if there is none.
|
||||
* See https://nodejs.org/api/inspector.html#inspectorurl */
|
||||
function url() {
|
||||
// TODO(kt3k): returns undefined for now, which means the inspector is not activated.
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/** Blocks until a client (existing or connected later) has sent Runtime.runIfWaitingForDebugger command.
|
||||
* See https://nodejs.org/api/inspector.html#inspectorwaitfordebugger */
|
||||
function waitForDebugger() {
|
||||
notImplemented("inspector.wairForDebugger");
|
||||
}
|
||||
|
||||
const console = globalThis.console;
|
||||
|
||||
export { close, console, open, Session, url, waitForDebugger };
|
||||
|
||||
export default {
|
||||
close,
|
||||
console,
|
||||
open,
|
||||
Session,
|
||||
url,
|
||||
waitForDebugger,
|
||||
};
|
20
ext/node/polyfills/inspector/promises.js
Normal file
20
ext/node/polyfills/inspector/promises.js
Normal file
|
@ -0,0 +1,20 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
|
||||
|
||||
import inspector from "node:inspector";
|
||||
import { promisify } from "ext:deno_node/internal/util.mjs";
|
||||
|
||||
/**
 * Promise-returning variant of `inspector.Session`: identical to the base
 * class except that `post()` is promisified, resolving with the command
 * result instead of taking a callback.
 */
class Session extends inspector.Session {}

Session.prototype.post = promisify(inspector.Session.prototype.post);
|
||||
|
||||
export * from "node:inspector";
|
||||
export { Session };
|
||||
|
||||
export default {
|
||||
...inspector,
|
||||
Session,
|
||||
};
|
|
@ -29,6 +29,7 @@ import {
|
|||
} from "ext:deno_node/internal/validators.mjs";
|
||||
import { Buffer } from "node:buffer";
|
||||
import { KeyFormat, KeyType } from "ext:deno_node/internal/crypto/types.ts";
|
||||
import process from "node:process";
|
||||
|
||||
import {
|
||||
op_node_generate_dh_group_key,
|
||||
|
|
|
@ -38,6 +38,7 @@ import {
|
|||
ERR_INVALID_ARG_TYPE,
|
||||
ERR_OUT_OF_RANGE,
|
||||
} from "ext:deno_node/internal/errors.ts";
|
||||
import { Buffer } from "node:buffer";
|
||||
|
||||
export { default as randomBytes } from "ext:deno_node/internal/crypto/_randomBytes.ts";
|
||||
export {
|
||||
|
|
|
@ -126,6 +126,7 @@ ObjectSetPrototypeOf(HTTPParser.prototype, AsyncWrap.prototype);
|
|||
function defineProps(obj: object, props: Record<string, unknown>) {
|
||||
for (const entry of new SafeArrayIterator(ObjectEntries(props))) {
|
||||
ObjectDefineProperty(obj, entry[0], {
|
||||
__proto__: null,
|
||||
value: entry[1],
|
||||
enumerable: true,
|
||||
writable: true,
|
||||
|
|
|
@ -182,6 +182,7 @@ function getContextOptions(options) {
|
|||
|
||||
let defaultContextNameIndex = 1;
|
||||
export function createContext(
|
||||
// deno-lint-ignore prefer-primordials
|
||||
contextObject = {},
|
||||
options = { __proto__: null },
|
||||
) {
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue