Mirror of https://github.com/denoland/deno.git, synced 2025-01-05 13:59:01 -05:00

Merge branch 'main' into support_create_connection

commit 862e6b37a6

174 changed files with 3356 additions and 1865 deletions

.github/workflows/ci.generate.ts (vendored, 2 changes)

@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 21;
+const cacheVersion = 22;
 
 const ubuntuX86Runner = "ubuntu-22.04";
 const ubuntuX86XlRunner = "ubuntu-22.04-xl";

.github/workflows/ci.yml (vendored, 8 changes)

@@ -361,8 +361,8 @@ jobs:
           path: |-
             ~/.cargo/registry/index
             ~/.cargo/registry/cache
-          key: '21-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-          restore-keys: '21-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
+          key: '22-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+          restore-keys: '22-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
         if: '!(matrix.skip)'
       - name: 'Restore cache build output (PR)'
         uses: actions/cache/restore@v4
@@ -375,7 +375,7 @@ jobs:
             !./target/*/*.zip
            !./target/*/*.tar.gz
           key: never_saved
-          restore-keys: '21-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+          restore-keys: '22-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
       - name: Apply and update mtime cache
         if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
         uses: ./.github/mtime_cache
@@ -685,7 +685,7 @@ jobs:
            !./target/*/*.zip
            !./target/*/*.sha256sum
            !./target/*/*.tar.gz
-          key: '21-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+          key: '22-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
   publish-canary:
     name: publish canary
     runs-on: ubuntu-22.04

Cargo.lock (generated, 109 changes)

@@ -1154,7 +1154,7 @@ dependencies = [
 
 [[package]]
 name = "deno"
-version = "2.0.2"
+version = "2.0.3"
 dependencies = [
 "anstream",
 "async-trait",
@@ -1196,7 +1196,6 @@ dependencies = [
 "dprint-plugin-markdown",
 "dprint-plugin-typescript",
 "env_logger",
-"eszip",
 "fancy-regex",
 "faster-hex",
 "flate2",
@@ -1214,7 +1213,6 @@ dependencies = [
 "lazy-regex",
 "libc",
 "libsui",
-"libuv-sys-lite",
 "libz-sys",
 "log",
 "lsp-types",
@@ -1222,7 +1220,6 @@ dependencies = [
 "markup_fmt",
 "memmem",
 "monch",
-"napi_sym",
 "nix",
 "node_resolver",
 "notify",
@@ -1263,7 +1260,6 @@ dependencies = [
 "walkdir",
 "which 4.4.2",
 "winapi",
-"windows-sys 0.52.0",
 "winres",
 "zeromq",
 "zip",
@@ -1327,7 +1323,7 @@ dependencies = [
 
 [[package]]
 name = "deno_bench_util"
-version = "0.167.0"
+version = "0.168.0"
 dependencies = [
 "bencher",
 "deno_core",
@@ -1336,7 +1332,7 @@ dependencies = [
 
 [[package]]
 name = "deno_broadcast_channel"
-version = "0.167.0"
+version = "0.168.0"
 dependencies = [
 "async-trait",
 "deno_core",
@@ -1347,7 +1343,7 @@ dependencies = [
 
 [[package]]
 name = "deno_cache"
-version = "0.105.0"
+version = "0.106.0"
 dependencies = [
 "async-trait",
 "deno_core",
@@ -1380,7 +1376,7 @@ dependencies = [
 
 [[package]]
 name = "deno_canvas"
-version = "0.42.0"
+version = "0.43.0"
 dependencies = [
 "deno_core",
 "deno_webgpu",
@@ -1415,7 +1411,7 @@ dependencies = [
 
 [[package]]
 name = "deno_console"
-version = "0.173.0"
+version = "0.174.0"
 dependencies = [
 "deno_core",
 ]
@@ -1460,7 +1456,7 @@ checksum = "a13951ea98c0a4c372f162d669193b4c9d991512de9f2381dd161027f34b26b1"
 
 [[package]]
 name = "deno_cron"
-version = "0.53.0"
+version = "0.54.0"
 dependencies = [
 "anyhow",
 "async-trait",
@@ -1473,7 +1469,7 @@ dependencies = [
 
 [[package]]
 name = "deno_crypto"
-version = "0.187.0"
+version = "0.188.0"
 dependencies = [
 "aes",
 "aes-gcm",
@@ -1535,7 +1531,7 @@ dependencies = [
 
 [[package]]
 name = "deno_fetch"
-version = "0.197.0"
+version = "0.198.0"
 dependencies = [
 "base64 0.21.7",
 "bytes",
@@ -1568,7 +1564,7 @@ dependencies = [
 
 [[package]]
 name = "deno_ffi"
-version = "0.160.0"
+version = "0.161.0"
 dependencies = [
 "deno_core",
 "deno_permissions",
@@ -1588,7 +1584,7 @@ dependencies = [
 
 [[package]]
 name = "deno_fs"
-version = "0.83.0"
+version = "0.84.0"
 dependencies = [
 "async-trait",
 "base32",
@@ -1610,9 +1606,9 @@ dependencies = [
 
 [[package]]
 name = "deno_graph"
-version = "0.83.3"
+version = "0.83.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77163c46755676d8f793fc19e365537ba660a8db173cd1e02d21eb010c0b3cef"
+checksum = "5bd20bc0780071989c622cbfd5d4fb2e4fd05a247ccd7f791f13c8d2c3792228"
 dependencies = [
 "anyhow",
 "async-trait",
@@ -1639,7 +1635,7 @@ dependencies = [
 
 [[package]]
 name = "deno_http"
-version = "0.171.0"
+version = "0.172.0"
 dependencies = [
 "async-compression",
 "async-trait",
@@ -1678,7 +1674,7 @@ dependencies = [
 
 [[package]]
 name = "deno_io"
-version = "0.83.0"
+version = "0.84.0"
 dependencies = [
 "async-trait",
 "deno_core",
@@ -1699,7 +1695,7 @@ dependencies = [
 
 [[package]]
 name = "deno_kv"
-version = "0.81.0"
+version = "0.82.0"
 dependencies = [
 "anyhow",
 "async-trait",
@@ -1771,12 +1767,17 @@ dependencies = [
 
 [[package]]
 name = "deno_napi"
-version = "0.104.0"
+version = "0.105.0"
 dependencies = [
 "deno_core",
 "deno_permissions",
 "libc",
 "libloading 0.7.4",
+"libuv-sys-lite",
+"log",
+"napi_sym",
+"thiserror",
+"windows-sys 0.52.0",
 ]
 
 [[package]]
@@ -1794,7 +1795,7 @@ dependencies = [
 
 [[package]]
 name = "deno_net"
-version = "0.165.0"
+version = "0.166.0"
 dependencies = [
 "deno_core",
 "deno_permissions",
@@ -1811,7 +1812,7 @@ dependencies = [
 
 [[package]]
 name = "deno_node"
-version = "0.110.0"
+version = "0.111.0"
 dependencies = [
 "aead-gcm-stream",
 "aes",
@@ -1960,7 +1961,7 @@ dependencies = [
 
 [[package]]
 name = "deno_permissions"
-version = "0.33.0"
+version = "0.34.0"
 dependencies = [
 "deno_core",
 "deno_path_util",
@@ -1977,7 +1978,7 @@ dependencies = [
 
 [[package]]
 name = "deno_resolver"
-version = "0.5.0"
+version = "0.6.0"
 dependencies = [
 "anyhow",
 "base32",
@@ -1993,7 +1994,7 @@ dependencies = [
 
 [[package]]
 name = "deno_runtime"
-version = "0.182.0"
+version = "0.183.0"
 dependencies = [
 "color-print",
 "deno_ast",
@@ -2111,7 +2112,7 @@ dependencies = [
 
 [[package]]
 name = "deno_tls"
-version = "0.160.0"
+version = "0.161.0"
 dependencies = [
 "deno_core",
 "deno_native_certs",
@@ -2160,7 +2161,7 @@ dependencies = [
 
 [[package]]
 name = "deno_url"
-version = "0.173.0"
+version = "0.174.0"
 dependencies = [
 "deno_bench_util",
 "deno_console",
@@ -2172,7 +2173,7 @@ dependencies = [
 
 [[package]]
 name = "deno_web"
-version = "0.204.0"
+version = "0.205.0"
 dependencies = [
 "async-trait",
 "base64-simd 0.8.0",
@@ -2194,7 +2195,7 @@ dependencies = [
 
 [[package]]
 name = "deno_webgpu"
-version = "0.140.0"
+version = "0.141.0"
 dependencies = [
 "deno_core",
 "raw-window-handle",
@@ -2207,7 +2208,7 @@ dependencies = [
 
 [[package]]
 name = "deno_webidl"
-version = "0.173.0"
+version = "0.174.0"
 dependencies = [
 "deno_bench_util",
 "deno_core",
@@ -2215,7 +2216,7 @@ dependencies = [
 
 [[package]]
 name = "deno_websocket"
-version = "0.178.0"
+version = "0.179.0"
 dependencies = [
 "bytes",
 "deno_core",
@@ -2237,7 +2238,7 @@ dependencies = [
 
 [[package]]
 name = "deno_webstorage"
-version = "0.168.0"
+version = "0.169.0"
 dependencies = [
 "deno_core",
 "deno_web",
@@ -2893,29 +2894,6 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "31ae425815400e5ed474178a7a22e275a9687086a12ca63ec793ff292d8fdae8"
 
-[[package]]
-name = "eszip"
-version = "0.79.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8eb55c89bdde75a3826a79d49c9d847623ae7fbdb2695b542982982da990d33e"
-dependencies = [
-"anyhow",
-"async-trait",
-"base64 0.21.7",
-"deno_ast",
-"deno_graph",
-"deno_npm",
-"deno_semver",
-"futures",
-"hashlink 0.8.4",
-"indexmap",
-"serde",
-"serde_json",
-"sha2",
-"thiserror",
-"url",
-]
-
 [[package]]
 name = "fallible-iterator"
 version = "0.3.0"
@@ -3527,15 +3505,6 @@ dependencies = [
 "allocator-api2",
 ]
 
-[[package]]
-name = "hashlink"
-version = "0.8.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
-dependencies = [
-"hashbrown",
-]
-
 [[package]]
 name = "hashlink"
 version = "0.9.1"
@@ -4036,9 +4005,9 @@ dependencies = [
 
 [[package]]
 name = "jsonc-parser"
-version = "0.26.1"
+version = "0.26.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57c78ad024523b61a2f20b1cad47413dd24db744a15d3d1b7276e69d1bee106c"
+checksum = "b558af6b49fd918e970471374e7a798b2c9bbcda624a210ffa3901ee5614bc8e"
 dependencies = [
 "serde_json",
 ]
@@ -4514,7 +4483,7 @@ dependencies = [
 
 [[package]]
 name = "napi_sym"
-version = "0.103.0"
+version = "0.104.0"
 dependencies = [
 "quote",
 "serde",
@@ -4569,7 +4538,7 @@ dependencies = [
 
 [[package]]
 name = "node_resolver"
-version = "0.12.0"
+version = "0.13.0"
 dependencies = [
 "anyhow",
 "async-trait",
@@ -5813,7 +5782,7 @@ dependencies = [
 "bitflags 2.6.0",
 "fallible-iterator",
 "fallible-streaming-iterator",
-"hashlink 0.9.1",
+"hashlink",
 "libsqlite3-sys",
 "smallvec",
 ]

Cargo.toml (60 changes)

@@ -5,7 +5,6 @@ resolver = "2"
 members = [
   "bench_util",
   "cli",
-  "cli/napi/sym",
   "ext/broadcast_channel",
   "ext/cache",
   "ext/canvas",
@@ -19,6 +18,7 @@ members = [
   "ext/io",
   "ext/kv",
   "ext/napi",
+  "ext/napi/sym",
   "ext/net",
   "ext/node",
   "ext/url",
@@ -48,16 +48,16 @@ repository = "https://github.com/denoland/deno"
 deno_ast = { version = "=0.42.2", features = ["transpiling"] }
 deno_core = { version = "0.314.2" }
 
-deno_bench_util = { version = "0.167.0", path = "./bench_util" }
+deno_bench_util = { version = "0.168.0", path = "./bench_util" }
 deno_lockfile = "=0.23.1"
 deno_media_type = { version = "0.1.4", features = ["module_specifier"] }
 deno_npm = "=0.25.4"
 deno_path_util = "=0.2.1"
-deno_permissions = { version = "0.33.0", path = "./runtime/permissions" }
-deno_runtime = { version = "0.182.0", path = "./runtime" }
+deno_permissions = { version = "0.34.0", path = "./runtime/permissions" }
+deno_runtime = { version = "0.183.0", path = "./runtime" }
 deno_semver = "=0.5.16"
 deno_terminal = "0.2.0"
-napi_sym = { version = "0.103.0", path = "./cli/napi/sym" }
+napi_sym = { version = "0.104.0", path = "./ext/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }
 
 denokv_proto = "0.8.1"
@@ -66,32 +66,32 @@ denokv_remote = "0.8.1"
 denokv_sqlite = { default-features = false, version = "0.8.2" }
 
 # exts
-deno_broadcast_channel = { version = "0.167.0", path = "./ext/broadcast_channel" }
-deno_cache = { version = "0.105.0", path = "./ext/cache" }
-deno_canvas = { version = "0.42.0", path = "./ext/canvas" }
-deno_console = { version = "0.173.0", path = "./ext/console" }
-deno_cron = { version = "0.53.0", path = "./ext/cron" }
-deno_crypto = { version = "0.187.0", path = "./ext/crypto" }
-deno_fetch = { version = "0.197.0", path = "./ext/fetch" }
-deno_ffi = { version = "0.160.0", path = "./ext/ffi" }
-deno_fs = { version = "0.83.0", path = "./ext/fs" }
-deno_http = { version = "0.171.0", path = "./ext/http" }
-deno_io = { version = "0.83.0", path = "./ext/io" }
-deno_kv = { version = "0.81.0", path = "./ext/kv" }
-deno_napi = { version = "0.104.0", path = "./ext/napi" }
-deno_net = { version = "0.165.0", path = "./ext/net" }
-deno_node = { version = "0.110.0", path = "./ext/node" }
-deno_tls = { version = "0.160.0", path = "./ext/tls" }
-deno_url = { version = "0.173.0", path = "./ext/url" }
-deno_web = { version = "0.204.0", path = "./ext/web" }
-deno_webgpu = { version = "0.140.0", path = "./ext/webgpu" }
-deno_webidl = { version = "0.173.0", path = "./ext/webidl" }
-deno_websocket = { version = "0.178.0", path = "./ext/websocket" }
-deno_webstorage = { version = "0.168.0", path = "./ext/webstorage" }
+deno_broadcast_channel = { version = "0.168.0", path = "./ext/broadcast_channel" }
+deno_cache = { version = "0.106.0", path = "./ext/cache" }
+deno_canvas = { version = "0.43.0", path = "./ext/canvas" }
+deno_console = { version = "0.174.0", path = "./ext/console" }
+deno_cron = { version = "0.54.0", path = "./ext/cron" }
+deno_crypto = { version = "0.188.0", path = "./ext/crypto" }
+deno_fetch = { version = "0.198.0", path = "./ext/fetch" }
+deno_ffi = { version = "0.161.0", path = "./ext/ffi" }
+deno_fs = { version = "0.84.0", path = "./ext/fs" }
+deno_http = { version = "0.172.0", path = "./ext/http" }
+deno_io = { version = "0.84.0", path = "./ext/io" }
+deno_kv = { version = "0.82.0", path = "./ext/kv" }
+deno_napi = { version = "0.105.0", path = "./ext/napi" }
+deno_net = { version = "0.166.0", path = "./ext/net" }
+deno_node = { version = "0.111.0", path = "./ext/node" }
+deno_tls = { version = "0.161.0", path = "./ext/tls" }
+deno_url = { version = "0.174.0", path = "./ext/url" }
+deno_web = { version = "0.205.0", path = "./ext/web" }
+deno_webgpu = { version = "0.141.0", path = "./ext/webgpu" }
+deno_webidl = { version = "0.174.0", path = "./ext/webidl" }
+deno_websocket = { version = "0.179.0", path = "./ext/websocket" }
+deno_webstorage = { version = "0.169.0", path = "./ext/webstorage" }
 
 # resolvers
-deno_resolver = { version = "0.5.0", path = "./resolvers/deno" }
-node_resolver = { version = "0.12.0", path = "./resolvers/node" }
+deno_resolver = { version = "0.6.0", path = "./resolvers/deno" }
+node_resolver = { version = "0.13.0", path = "./resolvers/node" }
 
 aes = "=0.8.3"
 anyhow = "1.0.57"
@@ -137,7 +137,7 @@ hyper-util = { version = "=0.1.7", features = ["tokio", "client", "client-legacy"] }
 hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
 indexmap = { version = "2", features = ["serde"] }
 ipnet = "2.3"
-jsonc-parser = { version = "=0.26.1", features = ["serde"] }
+jsonc-parser = { version = "=0.26.2", features = ["serde"] }
 lazy-regex = "3"
 libc = "0.2.126"
 libz-sys = { version = "1.1.20", default-features = false }

@@ -46,6 +46,12 @@ brew install deno
 choco install deno
 ```
 
+[WinGet](https://winstall.app/apps/DenoLand.Deno) (Windows):
+
+```powershell
+winget install --id=DenoLand.Deno
+```
+
 ### Build and install from source
 
 Complete instructions for building Deno from source can be found in the manual

Releases.md (38 changes)

@@ -6,6 +6,44 @@ https://github.com/denoland/deno/releases
 We also have one-line install commands at:
 https://github.com/denoland/deno_install
 
+### 2.0.3 / 2024.10.25
+
+- feat(lsp): interactive inlay hints (#26382)
+- fix: support node-api in denort (#26389)
+- fix(check): support `--frozen` on deno check (#26479)
+- fix(cli): increase size of blocking task threadpool on windows (#26465)
+- fix(config): schemas for lint rule and tag autocompletion (#26515)
+- fix(ext/console): ignore casing for named colors in css parsing (#26466)
+- fix(ext/ffi): return u64/i64 as bigints from nonblocking ffi calls (#26486)
+- fix(ext/node): cancel pending ipc writes on channel close (#26504)
+- fix(ext/node): map `ERROR_INVALID_NAME` to `ENOENT` on windows (#26475)
+- fix(ext/node): only set our end of child process pipe to nonblocking mode
+  (#26495)
+- fix(ext/node): properly map reparse point error in readlink (#26375)
+- fix(ext/node): refactor http.ServerResponse into function class (#26210)
+- fix(ext/node): stub HTTPParser internal binding (#26401)
+- fix(ext/node): use primordials in `ext/node/polyfills/https.ts` (#26323)
+- fix(fmt): --ext flag requires to pass files (#26525)
+- fix(fmt): upgrade formatters (#26469)
+- fix(help): missing package specifier (#26380)
+- fix(info): resolve workspace member mappings (#26350)
+- fix(install): better json editing (#26450)
+- fix(install): cache all exports of JSR packages listed in `deno.json` (#26501)
+- fix(install): cache type only module deps in `deno install` (#26497)
+- fix(install): don't cache json exports of JSR packages (for now) (#26530)
+- fix(install): update lockfile when using package.json (#26458)
+- fix(lsp): import-map-remap quickfix for type imports (#26454)
+- fix(node/util): support array formats in `styleText` (#26507)
+- fix(node:tls): set TLSSocket.alpnProtocol for client connections (#26476)
+- fix(npm): ensure scoped package name is encoded in URLs (#26390)
+- fix(npm): support version ranges with && or comma (#26453)
+- fix: `.npmrc` settings not being passed to install/add command (#26473)
+- fix: add 'fmt-component' to unstable features in schema file (#26526)
+- fix: share inotify fd across watchers (#26200)
+- fix: unpin tokio version (#26457)
+- perf(compile): pass module source data from binary directly to v8 (#26494)
+- perf: avoid multiple calls to runMicrotask (#26378)
+
 ### 2.0.2 / 2024.10.17
 
 - fix(cli): set napi object property properly (#26344)

@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_bench_util"
-version = "0.167.0"
+version = "0.168.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true

@@ -2,7 +2,7 @@
 
 [package]
 name = "deno"
-version = "2.0.2"
+version = "2.0.3"
 authors.workspace = true
 default-run = "deno"
 edition.workspace = true
@@ -73,7 +73,7 @@ deno_cache_dir = { workspace = true }
 deno_config = { version = "=0.37.2", features = ["workspace", "sync"] }
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
 deno_doc = { version = "0.154.0", default-features = false, features = ["rust", "html", "syntect"] }
-deno_graph = { version = "=0.83.3" }
+deno_graph = { version = "=0.83.4" }
 deno_lint = { version = "=0.67.0", features = ["docs"] }
 deno_lockfile.workspace = true
 deno_npm.workspace = true
@@ -84,9 +84,7 @@ deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] }
 deno_semver.workspace = true
 deno_task_shell = "=0.18.1"
 deno_terminal.workspace = true
-eszip = "=0.79.1"
 libsui = "0.4.0"
-napi_sym.workspace = true
 node_resolver.workspace = true
 
 anstream = "0.6.14"
@@ -175,14 +173,12 @@ zstd.workspace = true
 [target.'cfg(windows)'.dependencies]
 junction.workspace = true
 winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] }
-windows-sys.workspace = true
 
 [target.'cfg(unix)'.dependencies]
 nix.workspace = true
 
 [dev-dependencies]
 deno_bench_util.workspace = true
-libuv-sys-lite = "=1.48.2"
 pretty_assertions.workspace = true
 test_util.workspace = true

@@ -2274,7 +2274,7 @@ Ignore formatting a file by adding an ignore comment at the top of the file:
         "sass", "less", "html", "svelte", "vue", "astro", "yml", "yaml",
         "ipynb",
       ])
-      .help_heading(FMT_HEADING),
+      .help_heading(FMT_HEADING).requires("files"),
   )
   .arg(
     Arg::new("ignore")
@@ -6802,6 +6802,32 @@ mod tests {
         ..Flags::default()
       }
     );
+
+    let r = flags_from_vec(svec!["deno", "fmt", "--ext", "html"]);
+    assert!(r.is_err());
+    let r = flags_from_vec(svec!["deno", "fmt", "--ext", "html", "./**"]);
+    assert_eq!(
+      r.unwrap(),
+      Flags {
+        subcommand: DenoSubcommand::Fmt(FmtFlags {
+          check: false,
+          files: FileFlags {
+            include: vec!["./**".to_string()],
+            ignore: vec![],
+          },
+          use_tabs: None,
+          line_width: None,
+          indent_width: None,
+          single_quote: None,
+          prose_wrap: None,
+          no_semicolons: None,
+          unstable_component: false,
+          watch: Default::default(),
+        }),
+        ext: Some("html".to_string()),
+        ..Flags::default()
+      }
+    );
   }
 
   #[test]
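
The `--ext` fix above hangs off clap's `requires` relationship: declaring that `--ext` requires the `files` argument makes clap reject `deno fmt --ext html` with no files at parse time, which is exactly what the new test asserts. A minimal standalone sketch of the mechanism (clap 4.x assumed; the command and flag names mirror the diff, but this is not Deno's actual flag builder):

```rust
// Sketch of clap's `requires` relationship, as used in the fix above.
// `ext` can only be given when at least one `files` value is also present.
use clap::{Arg, ArgAction, Command};

fn cmd() -> Command {
    Command::new("fmt")
        .arg(
            Arg::new("ext")
                .long("ext")
                .value_parser(["ts", "js", "html"])
                // The relationship from the diff: --ext is rejected
                // unless the `files` argument is supplied.
                .requires("files"),
        )
        .arg(Arg::new("files").action(ArgAction::Append))
}

fn main() {
    // Fails: --ext without files.
    assert!(cmd().try_get_matches_from(["fmt", "--ext", "html"]).is_err());
    // Succeeds: --ext with an explicit file pattern.
    assert!(cmd()
        .try_get_matches_from(["fmt", "--ext", "html", "./**"])
        .is_ok());
}
```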

@@ -578,6 +578,7 @@ fn discover_npmrc(
   let resolved = npmrc
     .as_resolved(npm_registry_url())
     .context("Failed to resolve .npmrc options")?;
+  log::debug!(".npmrc found at: '{}'", path.display());
   Ok(Arc::new(resolved))
 }
 

Deleted file (the old wrk-based HTTP benchmark harness):

@@ -1,167 +0,0 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-
-use std::collections::HashMap;
-use std::net::TcpStream;
-use std::path::Path;
-use std::process::Command;
-use std::sync::atomic::AtomicU16;
-use std::sync::atomic::Ordering;
-use std::time::Duration;
-use std::time::Instant;
-
-use super::Result;
-
-pub use test_util::parse_wrk_output;
-pub use test_util::WrkOutput as HttpBenchmarkResult;
-// Some of the benchmarks in this file have been renamed. In case the history
-// somehow gets messed up:
-//   "node_http" was once called "node"
-//   "deno_tcp" was once called "deno"
-//   "deno_http" was once called "deno_net_http"
-
-const DURATION: &str = "10s";
-
-pub fn benchmark(
-  target_path: &Path,
-) -> Result<HashMap<String, HttpBenchmarkResult>> {
-  let deno_exe = test_util::deno_exe_path();
-  let deno_exe = deno_exe.to_string();
-
-  let hyper_hello_exe = target_path.join("test_server");
-  let hyper_hello_exe = hyper_hello_exe.to_str().unwrap();
-
-  let mut res = HashMap::new();
-  let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
-  let http_dir = manifest_dir.join("bench").join("http");
-  for entry in std::fs::read_dir(&http_dir)? {
-    let entry = entry?;
-    let pathbuf = entry.path();
-    let path = pathbuf.to_str().unwrap();
-    if path.ends_with(".lua") {
-      continue;
-    }
-    let file_stem = pathbuf.file_stem().unwrap().to_str().unwrap();
-
-    let lua_script = http_dir.join(format!("{file_stem}.lua"));
-    let mut maybe_lua = None;
-    if lua_script.exists() {
-      maybe_lua = Some(lua_script.to_str().unwrap());
-    }
-
-    let port = get_port();
-    // deno run -A --unstable-net <path> <addr>
-    res.insert(
-      file_stem.to_string(),
-      run(
-        &[
-          deno_exe.as_str(),
-          "run",
-          "--allow-all",
-          "--unstable-net",
-          "--enable-testing-features-do-not-use",
-          path,
-          &server_addr(port),
-        ],
-        port,
-        None,
-        None,
-        maybe_lua,
-      )?,
-    );
-  }
-
-  res.insert("hyper".to_string(), hyper_http(hyper_hello_exe)?);
-
-  Ok(res)
-}
-
-fn run(
-  server_cmd: &[&str],
-  port: u16,
-  env: Option<Vec<(String, String)>>,
-  origin_cmd: Option<&[&str]>,
-  lua_script: Option<&str>,
-) -> Result<HttpBenchmarkResult> {
-  // Wait for port 4544 to become available.
-  // TODO Need to use SO_REUSEPORT with tokio::net::TcpListener.
-  std::thread::sleep(Duration::from_secs(5));
-
-  let mut origin = None;
-  if let Some(cmd) = origin_cmd {
-    let mut com = Command::new(cmd[0]);
-    com.args(&cmd[1..]);
-    if let Some(env) = env.clone() {
-      com.envs(env);
-    }
-    origin = Some(com.spawn()?);
-  };
-
-  println!("{}", server_cmd.join(" "));
-  let mut server = {
-    let mut com = Command::new(server_cmd[0]);
-    com.args(&server_cmd[1..]);
-    if let Some(env) = env {
-      com.envs(env);
-    }
-    com.spawn()?
-  };
-
-  // Wait for server to wake up.
-  let now = Instant::now();
-  let addr = format!("127.0.0.1:{port}");
-  while now.elapsed().as_secs() < 30 {
-    if TcpStream::connect(&addr).is_ok() {
-      break;
-    }
-    std::thread::sleep(Duration::from_millis(10));
-  }
-  TcpStream::connect(&addr).expect("Failed to connect to server in time");
-  println!("Server took {} ms to start", now.elapsed().as_millis());
-
-  let wrk = test_util::prebuilt_tool_path("wrk");
-  assert!(wrk.is_file());
-
-  let addr = format!("http://{addr}/");
-  let wrk = wrk.to_string();
-  let mut wrk_cmd = vec![wrk.as_str(), "-d", DURATION, "--latency", &addr];
-
-  if let Some(lua_script) = lua_script {
-    wrk_cmd.push("-s");
-    wrk_cmd.push(lua_script);
-  }
-
-  println!("{}", wrk_cmd.join(" "));
-  let output = test_util::run_collect(&wrk_cmd, None, None, None, true).0;
-
-  std::thread::sleep(Duration::from_secs(1)); // wait to capture failure. TODO racy.
-
-  println!("{output}");
-  assert!(
-    server.try_wait()?.map(|s| s.success()).unwrap_or(true),
-    "server ended with error"
-  );
-
-  server.kill()?;
-  if let Some(mut origin) = origin {
-    origin.kill()?;
-  }
-
-  Ok(parse_wrk_output(&output))
-}
-
-static NEXT_PORT: AtomicU16 = AtomicU16::new(4544);
-pub(crate) fn get_port() -> u16 {
-  let p = NEXT_PORT.load(Ordering::SeqCst);
-  NEXT_PORT.store(p.wrapping_add(1), Ordering::SeqCst);
-  p
-}
-
-fn server_addr(port: u16) -> String {
-  format!("0.0.0.0:{port}")
-}
-
-fn hyper_http(exe: &str) -> Result<HttpBenchmarkResult> {
-  let port = get_port();
-  println!("http_benchmark testing RUST hyper");
-  run(&[exe, &port.to_string()], port, None, None, None)
-}

Deleted file:

@@ -1,10 +0,0 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-import { Hono } from "https://deno.land/x/hono@v2.0.9/mod.ts";
-
-const addr = Deno.args[0] || "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-
-const app = new Hono();
-app.get("/", (c) => c.text("Hello, World!"));
-
-Deno.serve({ port: Number(port), hostname }, app.fetch);

Deleted file:

@@ -1,14 +0,0 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-
-const addr = Deno.args[0] || "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-const { serve } = Deno;
-
-const path = new URL("../testdata/128k.bin", import.meta.url).pathname;
-
-function handler() {
-  const file = Deno.openSync(path);
-  return new Response(file.readable);
-}
-
-serve({ hostname, port: Number(port) }, handler);

Deleted file:

@@ -1,5 +0,0 @@
-wrk.headers["foo"] = "bar"
-wrk.headers["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36"
-wrk.headers["Viewport-Width"] = "1920"
-wrk.headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"
-wrk.headers["Accept-Language"] = "en,la;q=0.9"

Deleted file:

@@ -1,11 +0,0 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-
-const addr = Deno.args[0] ?? "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-const { serve } = Deno;
-
-function handler() {
-  return new Response("Hello World");
-}
-
-serve({ hostname, port: Number(port), reusePort: true }, handler);

Deleted file:

@@ -1,5 +0,0 @@
-wrk.method = "POST"
-wrk.headers["Content-Type"] = "application/octet-stream"
-
-file = io.open("./cli/bench/testdata/128k.bin", "rb")
-wrk.body = file:read("*a")

Deleted file:

@@ -1,3 +0,0 @@
-wrk.method = "POST"
-wrk.headers["Content-Type"] = "application/json"
-wrk.body = '{"hello":"deno"}'

Deleted file:

@@ -1,25 +0,0 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-
-import { renderToReadableStream } from "https://esm.run/react-dom/server";
-import * as React from "https://esm.run/react";
-const { serve } = Deno;
-const addr = Deno.args[0] || "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-
-const App = () => (
-  <html>
-    <body>
-      <h1>Hello World</h1>
-    </body>
-  </html>
-);
-
-const headers = {
-  headers: {
-    "Content-Type": "text/html",
-  },
-};
-
-serve({ hostname, port: Number(port) }, async () => {
-  return new Response(await renderToReadableStream(<App />), headers);
-});

Deleted file:

@@ -1,34 +0,0 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-// Used for benchmarking Deno's networking.
-// TODO(bartlomieju): Replace this with a real HTTP server once
-// https://github.com/denoland/deno/issues/726 is completed.
-// Note: this is a keep-alive server.
-// deno-lint-ignore-file no-console
-const addr = Deno.args[0] || "127.0.0.1:4500";
-const [hostname, port] = addr.split(":");
-const listener = Deno.listen({ hostname, port: Number(port) });
-const response = new TextEncoder().encode(
-  "HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World\n",
-);
-async function handle(conn: Deno.Conn): Promise<void> {
-  const buffer = new Uint8Array(1024);
-  try {
-    while (true) {
-      await conn.read(buffer);
-      await conn.write(response);
-    }
-  } catch (e) {
-    if (
-      !(e instanceof Deno.errors.BrokenPipe) &&
-      !(e instanceof Deno.errors.ConnectionReset)
-    ) {
-      throw e;
-    }
-  }
-  conn.close();
-}
-
-console.log("Listening on", addr);
-for await (const conn of listener) {
-  handle(conn);
-}

@@ -17,7 +17,6 @@ use std::process::Stdio;
 use std::time::SystemTime;
 use test_util::PathRef;
 
-mod http;
 mod lsp;
 
 fn read_json(filename: &Path) -> Result<Value> {
@@ -345,9 +344,11 @@ struct BenchResult {
   binary_size: HashMap<String, i64>,
   bundle_size: HashMap<String, i64>,
   cargo_deps: usize,
+  // TODO(bartlomieju): remove
   max_latency: HashMap<String, f64>,
   max_memory: HashMap<String, i64>,
   lsp_exec_time: HashMap<String, i64>,
+  // TODO(bartlomieju): remove
   req_per_sec: HashMap<String, i64>,
   syscall_count: HashMap<String, i64>,
   thread_count: HashMap<String, i64>,
@@ -362,7 +363,6 @@ async fn main() -> Result<()> {
     "binary_size",
     "cargo_deps",
     "lsp",
-    "http",
    "strace",
    "mem_usage",
  ];
@@ -427,21 +427,6 @@ async fn main() -> Result<()> {
     new_data.lsp_exec_time = lsp_exec_times;
   }
 
-  if benchmarks.contains(&"http") && cfg!(not(target_os = "windows")) {
-    let stats = http::benchmark(target_dir.as_path())?;
-    let req_per_sec = stats
-      .iter()
-      .map(|(name, result)| (name.clone(), result.requests as i64))
-      .collect();
-    new_data.req_per_sec = req_per_sec;
-    let max_latency = stats
-      .iter()
-      .map(|(name, result)| (name.clone(), result.latency))
-      .collect();
-
-    new_data.max_latency = max_latency;
-  }
-
   if cfg!(target_os = "linux") && benchmarks.contains(&"strace") {
     use std::io::Read;
 

cli/build.rs (55 changes)

@@ -365,6 +365,9 @@ fn main() {
     return;
   }
 
+  deno_napi::print_linker_flags("deno");
+  deno_napi::print_linker_flags("denort");
+
   // Host snapshots won't work when cross compiling.
   let target = env::var("TARGET").unwrap();
   let host = env::var("HOST").unwrap();
@@ -374,58 +377,6 @@ fn main() {
     panic!("Cross compiling with snapshot is not supported.");
   }
 
-  let symbols_file_name = match env::consts::OS {
-    "android" | "freebsd" | "openbsd" => {
-      "generated_symbol_exports_list_linux.def".to_string()
-    }
-    os => format!("generated_symbol_exports_list_{}.def", os),
-  };
-  let symbols_path = std::path::Path::new("napi")
-    .join(symbols_file_name)
-    .canonicalize()
-    .expect(
-      "Missing symbols list! Generate using tools/napi/generate_symbols_lists.js",
-    );
-
-  println!("cargo:rustc-rerun-if-changed={}", symbols_path.display());
-
-  #[cfg(target_os = "windows")]
-  println!(
-    "cargo:rustc-link-arg-bin=deno=/DEF:{}",
-    symbols_path.display()
-  );
-
-  #[cfg(target_os = "macos")]
-  println!(
-    "cargo:rustc-link-arg-bin=deno=-Wl,-exported_symbols_list,{}",
-    symbols_path.display()
-  );
-
-  #[cfg(target_os = "linux")]
-  {
-    // If a custom compiler is set, the glibc version is not reliable.
-    // Here, we assume that if a custom compiler is used, that it will be modern enough to support a dynamic symbol list.
-    if env::var("CC").is_err()
-      && glibc_version::get_version()
-        .map(|ver| ver.major <= 2 && ver.minor < 35)
-        .unwrap_or(false)
-    {
-      println!("cargo:warning=Compiling with all symbols exported, this will result in a larger binary. Please use glibc 2.35 or later for an optimised build.");
-      println!("cargo:rustc-link-arg-bin=deno=-rdynamic");
-    } else {
-      println!(
-        "cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}",
-        symbols_path.display()
-      );
-    }
-  }
-
-  #[cfg(target_os = "android")]
-  println!(
-    "cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}",
-    symbols_path.display()
-  );
-
   // To debug snapshot issues uncomment:
   // op_fetch_asset::trace_serializer();
 
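
The refactor above replaces build.rs's inlined, `deno`-only symbol-export logic with two calls into `deno_napi::print_linker_flags`, parameterized by binary name, so the `denort` runtime binary gets the same N-API export list. A hedged sketch of the shape of such a helper, built from the removed code above; the real function lives in the moved `ext/napi` crate and its body may differ:

```rust
// Illustrative only: per-binary linker-flag emission along the lines of
// deno_napi::print_linker_flags. Flag syntax follows the removed build.rs
// code above; the symbol-list path resolution is elided.
fn print_linker_flags(name: &str) {
    #[allow(unused_variables)]
    let symbols_path = "generated_symbol_exports_list_linux.def"; // resolved per-OS in the real code

    #[cfg(target_os = "windows")]
    println!("cargo:rustc-link-arg-bin={name}=/DEF:{symbols_path}");

    #[cfg(target_os = "macos")]
    println!("cargo:rustc-link-arg-bin={name}=-Wl,-exported_symbols_list,{symbols_path}");

    #[cfg(target_os = "linux")]
    println!("cargo:rustc-link-arg-bin={name}=-Wl,--export-dynamic-symbol-list={symbols_path}");
}

fn main() {
    // The key change: the flags are emitted once per binary instead of
    // being hardcoded for `deno` only.
    print_linker_flags("deno");
    print_linker_flags("denort");
}
```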

cli/cache/emit.rs (vendored, 25 changes)

@@ -39,7 +39,7 @@ impl EmitCache {
     &self,
     specifier: &ModuleSpecifier,
     expected_source_hash: u64,
-  ) -> Option<Vec<u8>> {
+  ) -> Option<String> {
     let emit_filename = self.get_emit_filename(specifier)?;
     let bytes = self.disk_cache.get(&emit_filename).ok()?;
     self
@@ -100,7 +100,7 @@ impl EmitFileSerializer {
     &self,
     mut bytes: Vec<u8>,
     expected_source_hash: u64,
-  ) -> Option<Vec<u8>> {
+  ) -> Option<String> {
     let last_newline_index = bytes.iter().rposition(|&b| b == b'\n')?;
     let (content, last_line) = bytes.split_at(last_newline_index);
     let hashes = last_line.strip_prefix(LAST_LINE_PREFIX.as_bytes())?;
@@ -120,7 +120,7 @@ impl EmitFileSerializer {
 
     // everything looks good, truncate and return it
     bytes.truncate(content.len());
-    Some(bytes)
+    String::from_utf8(bytes).ok()
   }
 
   pub fn serialize(&self, code: &[u8], source_hash: u64) -> Vec<u8> {
@@ -170,8 +170,6 @@ mod test {
       },
       emit_failed_flag: Default::default(),
    };
-    let to_string =
-      |bytes: Vec<u8>| -> String { String::from_utf8(bytes).unwrap() };
 
    let specifier1 =
      ModuleSpecifier::from_file_path(temp_dir.path().join("file1.ts"))
@@ -188,13 +186,10 @@ mod test {
    assert_eq!(cache.get_emit_code(&specifier1, 5), None);
    // providing the correct source hash
    assert_eq!(
-      cache.get_emit_code(&specifier1, 10).map(to_string),
+      cache.get_emit_code(&specifier1, 10),
      Some(emit_code1.clone()),
    );
-    assert_eq!(
-      cache.get_emit_code(&specifier2, 2).map(to_string),
-      Some(emit_code2)
-    );
+    assert_eq!(cache.get_emit_code(&specifier2, 2), Some(emit_code2));
 
    // try changing the cli version (should not load previous ones)
    let cache = EmitCache {
@@ -215,18 +210,12 @@ mod test {
      },
      emit_failed_flag: Default::default(),
    };
-    assert_eq!(
-      cache.get_emit_code(&specifier1, 5).map(to_string),
-      Some(emit_code1)
-    );
+    assert_eq!(cache.get_emit_code(&specifier1, 5), Some(emit_code1));
 
    // adding when already exists should not cause issue
    let emit_code3 = "asdf".to_string();
    cache.set_emit_code(&specifier1, 20, emit_code3.as_bytes());
    assert_eq!(cache.get_emit_code(&specifier1, 5), None);
-    assert_eq!(
-      cache.get_emit_code(&specifier1, 20).map(to_string),
-      Some(emit_code3)
-    );
+    assert_eq!(cache.get_emit_code(&specifier1, 20), Some(emit_code3));
  }
 }
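
The serializer above stores emitted code with a trailing metadata line (`LAST_LINE_PREFIX` plus hashes) and, on load, verifies the hash and truncates the trailer before returning the code, now as a `String` rather than raw bytes. A self-contained sketch of that validate-and-truncate shape; the prefix string and hash encoding here are stand-ins, not Deno's actual on-disk format:

```rust
// Sketch: emitted code is stored as `<code>\n<PREFIX><source_hash>` and the
// loader only returns the code when the recorded hash matches. The prefix
// and hash encoding are illustrative stand-ins.
const LAST_LINE_PREFIX: &str = "// hash: ";

fn serialize(code: &str, source_hash: u64) -> Vec<u8> {
    format!("{code}\n{LAST_LINE_PREFIX}{source_hash}").into_bytes()
}

fn deserialize(mut bytes: Vec<u8>, expected_source_hash: u64) -> Option<String> {
    // Find the final line and make sure it is our metadata trailer.
    let last_newline_index = bytes.iter().rposition(|&b| b == b'\n')?;
    let (content, last_line) = bytes.split_at(last_newline_index);
    let content_len = content.len();
    let hash_bytes = last_line.get(1..)?.strip_prefix(LAST_LINE_PREFIX.as_bytes())?;
    if std::str::from_utf8(hash_bytes).ok()?.parse::<u64>().ok()? != expected_source_hash {
        return None; // stale entry: the source changed since this was cached
    }
    // Drop the trailer and hand the code back as text, mirroring the
    // `Option<Vec<u8>>` -> `Option<String>` change in the diff.
    bytes.truncate(content_len);
    String::from_utf8(bytes).ok()
}

fn main() {
    let bytes = serialize("const x = 1;", 10);
    assert_eq!(deserialize(bytes.clone(), 10).as_deref(), Some("const x = 1;"));
    assert_eq!(deserialize(bytes, 11), None);
}
```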

cli/cache/mod.rs (vendored, 1 change)

@@ -378,6 +378,7 @@ impl Loader for FetchCacher {
         } else {
           FetchPermissionsOptionRef::DynamicContainer(&permissions)
         },
+        maybe_auth: None,
         maybe_accept: None,
         maybe_cache_setting: maybe_cache_setting.as_ref(),
       },

cli/emit.rs (56 changes)

@@ -13,7 +13,6 @@ use deno_core::error::AnyError;
 use deno_core::futures::stream::FuturesUnordered;
 use deno_core::futures::FutureExt;
 use deno_core::futures::StreamExt;
-use deno_core::ModuleCodeBytes;
 use deno_core::ModuleSpecifier;
 use deno_graph::MediaType;
 use deno_graph::Module;
@@ -60,6 +59,7 @@ impl Emitter {
         continue;
       };
 
+      // todo(https://github.com/denoland/deno_media_type/pull/12): use is_emittable()
       let is_emittable = matches!(
         module.media_type,
         MediaType::TypeScript
@@ -93,7 +93,7 @@ impl Emitter {
     &self,
     specifier: &ModuleSpecifier,
     source: &str,
-  ) -> Option<Vec<u8>> {
+  ) -> Option<String> {
     let source_hash = self.get_source_hash(source);
     self.emit_cache.get_emit_code(specifier, source_hash)
   }
@@ -103,7 +103,7 @@ impl Emitter {
     specifier: &ModuleSpecifier,
     media_type: MediaType,
     source: &Arc<str>,
-  ) -> Result<ModuleCodeBytes, AnyError> {
+  ) -> Result<String, AnyError> {
     // Note: keep this in sync with the sync version below
     let helper = EmitParsedSourceHelper(self);
     match helper.pre_emit_parsed_source(specifier, source) {
@@ -112,7 +112,7 @@ impl Emitter {
         let parsed_source_cache = self.parsed_source_cache.clone();
         let transpile_and_emit_options =
           self.transpile_and_emit_options.clone();
-        let transpile_result = deno_core::unsync::spawn_blocking({
+        let transpiled_source = deno_core::unsync::spawn_blocking({
           let specifier = specifier.clone();
           let source = source.clone();
           move || -> Result<_, AnyError> {
@@ -128,11 +128,12 @@ impl Emitter {
         })
         .await
         .unwrap()?;
-        Ok(helper.post_emit_parsed_source(
+        helper.post_emit_parsed_source(
           specifier,
-          transpile_result,
+          &transpiled_source,
           source_hash,
-        ))
+        );
+        Ok(transpiled_source)
       }
     }
   }
@@ -142,13 +143,13 @@ impl Emitter {
     specifier: &ModuleSpecifier,
     media_type: MediaType,
     source: &Arc<str>,
-  ) -> Result<ModuleCodeBytes, AnyError> {
+  ) -> Result<String, AnyError> {
     // Note: keep this in sync with the async version above
     let helper = EmitParsedSourceHelper(self);
     match helper.pre_emit_parsed_source(specifier, source) {
       PreEmitResult::Cached(emitted_text) => Ok(emitted_text),
       PreEmitResult::NotCached { source_hash } => {
-        let transpile_result = EmitParsedSourceHelper::transpile(
+        let transpiled_source = EmitParsedSourceHelper::transpile(
          &self.parsed_source_cache,
          specifier,
          source.clone(),
@@ -156,11 +157,12 @@ impl Emitter {
          &self.transpile_and_emit_options.0,
          &self.transpile_and_emit_options.1,
        )?;
-        Ok(helper.post_emit_parsed_source(
+        helper.post_emit_parsed_source(
          specifier,
-          transpile_result,
+          &transpiled_source,
          source_hash,
-        ))
+        );
+        Ok(transpiled_source)
      }
    }
  }
@@ -226,7 +228,7 @@ impl Emitter {
 }
 
 enum PreEmitResult {
-  Cached(ModuleCodeBytes),
+  Cached(String),
   NotCached { source_hash: u64 },
 }
 
@@ -244,7 +246,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
     if let Some(emit_code) =
       self.0.emit_cache.get_emit_code(specifier, source_hash)
     {
-      PreEmitResult::Cached(emit_code.into_boxed_slice().into())
+      PreEmitResult::Cached(emit_code)
     } else {
       PreEmitResult::NotCached { source_hash }
     }
@@ -257,21 +259,14 @@ impl<'a> EmitParsedSourceHelper<'a> {
     media_type: MediaType,
     transpile_options: &deno_ast::TranspileOptions,
     emit_options: &deno_ast::EmitOptions,
-  ) -> Result<TranspileResult, AnyError> {
+  ) -> Result<String, AnyError> {
     // nothing else needs the parsed source at this point, so remove from
     // the cache in order to not transpile owned
     let parsed_source = parsed_source_cache
       .remove_or_parse_module(specifier, source, media_type)?;
     ensure_no_import_assertion(&parsed_source)?;
-    Ok(parsed_source.transpile(transpile_options, emit_options)?)
-  }
-
-  pub fn post_emit_parsed_source(
-    &self,
-    specifier: &ModuleSpecifier,
-    transpile_result: TranspileResult,
-    source_hash: u64,
-  ) -> ModuleCodeBytes {
+    let transpile_result =
+      parsed_source.transpile(transpile_options, emit_options)?;
     let transpiled_source = match transpile_result {
       TranspileResult::Owned(source) => source,
       TranspileResult::Cloned(source) => {
@@ -280,12 +275,21 @@ impl<'a> EmitParsedSourceHelper<'a> {
       }
     };
     debug_assert!(transpiled_source.source_map.is_none());
-    self.0.emit_cache.set_emit_code(
-      specifier,
-      source_hash,
-      &transpiled_source.source,
-    );
-    transpiled_source.source.into_boxed_slice().into()
+    let text = String::from_utf8(transpiled_source.source)?;
+    Ok(text)
+  }
+
+  pub fn post_emit_parsed_source(
+    &self,
+    specifier: &ModuleSpecifier,
+    transpiled_source: &str,
+    source_hash: u64,
+  ) {
+    self.0.emit_cache.set_emit_code(
+      specifier,
+      source_hash,
+      transpiled_source.as_bytes(),
+    );
   }
 }
 
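
Net effect of the emit.rs changes: the emitter now caches and returns the transpiled module as UTF-8 text (`String`) instead of `ModuleCodeBytes`, and the cache write is a side effect rather than the producer of the return value. A reduced sketch of that control flow; the types here are simplified stand-ins for `Emitter`/`EmitCache`, not Deno's:

```rust
// Reduced model of the new emit flow: a cache hit returns the cached text;
// a cache miss transpiles, records the result, and returns the same String.
use std::collections::HashMap;

struct Cache(HashMap<(String, u64), String>);

fn transpile(source: &str) -> String {
    // Stand-in for deno_ast transpilation.
    source.replace(": number", "")
}

fn emit(cache: &mut Cache, specifier: &str, source: &str, source_hash: u64) -> String {
    let key = (specifier.to_string(), source_hash);
    if let Some(cached) = cache.0.get(&key) {
        return cached.clone(); // PreEmitResult::Cached
    }
    let transpiled_source = transpile(source);
    // post_emit_parsed_source is now purely a cache write; the caller keeps
    // the String it already owns.
    cache.0.insert(key, transpiled_source.clone());
    transpiled_source
}

fn main() {
    let mut cache = Cache(HashMap::new());
    let out = emit(&mut cache, "file:///a.ts", "let n: number = 1;", 10);
    assert_eq!(out, "let n = 1;");
    assert_eq!(emit(&mut cache, "file:///a.ts", "let n: number = 1;", 10), out);
}
```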

@@ -762,6 +762,7 @@ impl CliFactory {
     let cli_options = self.cli_options()?;
     Ok(DenoCompileBinaryWriter::new(
       self.deno_dir()?,
+      self.emitter()?,
       self.file_fetcher()?,
       self.http_client_provider(),
       self.npm_resolver().await?.as_ref(),

@@ -24,6 +24,7 @@ use deno_graph::source::LoaderChecksum;
 use deno_path_util::url_to_file_path;
 use deno_runtime::deno_permissions::PermissionsContainer;
 use deno_runtime::deno_web::BlobStore;
+use http::header;
 use log::debug;
 use std::borrow::Cow;
 use std::collections::HashMap;
@@ -181,6 +182,7 @@ pub enum FetchPermissionsOptionRef<'a> {
 pub struct FetchOptions<'a> {
   pub specifier: &'a ModuleSpecifier,
   pub permissions: FetchPermissionsOptionRef<'a>,
+  pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
   pub maybe_accept: Option<&'a str>,
   pub maybe_cache_setting: Option<&'a CacheSetting>,
 }
@@ -350,6 +352,7 @@ impl FileFetcher {
     maybe_accept: Option<&str>,
     cache_setting: &CacheSetting,
     maybe_checksum: Option<&LoaderChecksum>,
+    maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
   ) -> Result<FileOrRedirect, AnyError> {
     debug!(
       "FileFetcher::fetch_remote_no_follow - specifier: {}",
@@ -442,6 +445,7 @@ impl FileFetcher {
           .as_ref()
          .map(|(_, etag)| etag.clone()),
        maybe_auth_token: maybe_auth_token.clone(),
+        maybe_auth: maybe_auth.clone(),
        maybe_progress_guard: maybe_progress_guard.as_ref(),
      })
      .await?
@@ -538,7 +542,18 @@ impl FileFetcher {
     specifier: &ModuleSpecifier,
   ) -> Result<File, AnyError> {
     self
-      .fetch_inner(specifier, FetchPermissionsOptionRef::AllowAll)
+      .fetch_inner(specifier, None, FetchPermissionsOptionRef::AllowAll)
       .await
   }
 
+  #[inline(always)]
+  pub async fn fetch_bypass_permissions_with_maybe_auth(
+    &self,
+    specifier: &ModuleSpecifier,
+    maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
+  ) -> Result<File, AnyError> {
+    self
+      .fetch_inner(specifier, maybe_auth, FetchPermissionsOptionRef::AllowAll)
+      .await
+  }
+
@@ -552,6 +567,7 @@ impl FileFetcher {
     self
       .fetch_inner(
         specifier,
+        None,
         FetchPermissionsOptionRef::StaticContainer(permissions),
       )
       .await
@@ -560,12 +576,14 @@ impl FileFetcher {
   async fn fetch_inner(
     &self,
     specifier: &ModuleSpecifier,
+    maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
     permissions: FetchPermissionsOptionRef<'_>,
   ) -> Result<File, AnyError> {
     self
       .fetch_with_options(FetchOptions {
         specifier,
         permissions,
+        maybe_auth,
         maybe_accept: None,
         maybe_cache_setting: None,
       })
@@ -585,12 +603,14 @@ impl FileFetcher {
     max_redirect: usize,
   ) -> Result<File, AnyError> {
     let mut specifier = Cow::Borrowed(options.specifier);
+    let mut maybe_auth = options.maybe_auth.clone();
     for _ in 0..=max_redirect {
       match self
         .fetch_no_follow_with_options(FetchNoFollowOptions {
           fetch_options: FetchOptions {
             specifier: &specifier,
             permissions: options.permissions,
+            maybe_auth: maybe_auth.clone(),
             maybe_accept: options.maybe_accept,
             maybe_cache_setting: options.maybe_cache_setting,
           },
@@ -602,6 +622,10 @@ impl FileFetcher {
           return Ok(file);
         }
         FileOrRedirect::Redirect(redirect_specifier) => {
+          // If we were redirected to another origin, don't send the auth header anymore.
+          if redirect_specifier.origin() != specifier.origin() {
+            maybe_auth = None;
+          }
          specifier = Cow::Owned(redirect_specifier);
        }
      }
@@ -666,6 +690,7 @@ impl FileFetcher {
         options.maybe_accept,
         options.maybe_cache_setting.unwrap_or(&self.cache_setting),
         maybe_checksum,
+        options.maybe_auth,
       )
       .await
   }
@@ -756,6 +781,7 @@ mod tests {
       FetchOptions {
         specifier,
         permissions: FetchPermissionsOptionRef::AllowAll,
+        maybe_auth: None,
        maybe_accept: None,
        maybe_cache_setting: Some(&file_fetcher.cache_setting),
      },
@@ -1255,6 +1281,7 @@ mod tests {
      FetchOptions {
        specifier: &specifier,
        permissions: FetchPermissionsOptionRef::AllowAll,
+        maybe_auth: None,
        maybe_accept: None,
        maybe_cache_setting: Some(&file_fetcher.cache_setting),
      },
@@ -1268,6 +1295,7 @@ mod tests {
      FetchOptions {
        specifier: &specifier,
        permissions: FetchPermissionsOptionRef::AllowAll,
+        maybe_auth: None,
        maybe_accept: None,
        maybe_cache_setting: Some(&file_fetcher.cache_setting),
      },
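
The redirect handling above strips the caller-supplied auth header as soon as a redirect crosses origins, so credentials meant for one host are never replayed to another. A standalone sketch of that policy, using the `url` crate directly; `follow` is an illustrative stand-in, not Deno's API:

```rust
// Sketch of the cross-origin redirect rule from the diff: keep the auth
// header only while the redirect chain stays on the same origin.
use url::Url;

fn follow(mut current: Url, redirects: &[Url], mut auth: Option<String>) -> (Url, Option<String>) {
    for redirect in redirects {
        // If we were redirected to another origin, don't send the auth header anymore.
        if redirect.origin() != current.origin() {
            auth = None;
        }
        current = redirect.clone();
    }
    (current, auth)
}

fn main() {
    let start = Url::parse("https://a.example/mod.ts").unwrap();
    let same = Url::parse("https://a.example/v2/mod.ts").unwrap();
    let other = Url::parse("https://b.example/mod.ts").unwrap();

    // A same-origin redirect keeps the credentials...
    let (_, auth) = follow(start.clone(), &[same], Some("Bearer t".into()));
    assert!(auth.is_some());
    // ...a cross-origin hop drops them.
    let (_, auth) = follow(start, &[other], Some("Bearer t".into()));
    assert!(auth.is_none());
}
```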

@@ -1009,7 +1009,11 @@ impl deno_graph::source::Reporter for FileWatcherReporter {
   ) {
     let mut file_paths = self.file_paths.lock();
     if specifier.scheme() == "file" {
-      file_paths.push(specifier.to_file_path().unwrap());
+      // Don't trust that the path is a valid path at this point:
+      // https://github.com/denoland/deno/issues/26209.
+      if let Ok(file_path) = specifier.to_file_path() {
+        file_paths.push(file_path);
+      }
     }
 
     if modules_done == modules_total {
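
`Url::to_file_path` can fail even when the scheme is `file` (for example, a `file://` URL carrying a remote host), which is why the watcher now treats the conversion as fallible instead of unwrapping. A small demonstration with the `url` crate:

```rust
// Demonstrates that a file-scheme URL is not always convertible to a local
// path, which is what made the unwrap() above panic-prone (issue #26209).
use url::Url;

fn main() {
    let ok = Url::parse("file:///tmp/mod.ts").unwrap();
    assert!(ok.to_file_path().is_ok());

    // A file URL with a remote host: the scheme is still "file", but on
    // most platforms there is no local path for it.
    let odd = Url::parse("file://remote-host/share/mod.ts").unwrap();
    assert_eq!(odd.scheme(), "file");
    if let Err(()) = odd.to_file_path() {
        println!("not representable as a local path");
    }
}
```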

@@ -19,6 +19,7 @@ use deno_runtime::deno_fetch;
 use deno_runtime::deno_fetch::create_http_client;
 use deno_runtime::deno_fetch::CreateHttpClientOptions;
 use deno_runtime::deno_tls::RootCertStoreProvider;
+use http::header;
 use http::header::HeaderName;
 use http::header::HeaderValue;
 use http::header::ACCEPT;
@@ -204,6 +205,7 @@ pub struct FetchOnceArgs<'a> {
   pub maybe_accept: Option<String>,
   pub maybe_etag: Option<String>,
   pub maybe_auth_token: Option<AuthToken>,
+  pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
   pub maybe_progress_guard: Option<&'a UpdateGuard>,
 }
 
@@ -382,6 +384,8 @@ impl HttpClient {
       request
         .headers_mut()
         .insert(AUTHORIZATION, authorization_val);
+    } else if let Some((header, value)) = args.maybe_auth {
+      request.headers_mut().insert(header, value);
     }
     if let Some(accept) = args.maybe_accept {
       let accepts_val = HeaderValue::from_str(&accept)?;
@@ -792,6 +796,7 @@ mod test {
       maybe_etag: None,
       maybe_auth_token: None,
       maybe_progress_guard: None,
+      maybe_auth: None,
     })
     .await;
     if let Ok(FetchOnceResult::Code(body, headers)) = result {
@@ -818,6 +823,7 @@ mod test {
      maybe_etag: None,
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
    if let Ok(FetchOnceResult::Code(body, headers)) = result {
@@ -845,6 +851,7 @@ mod test {
      maybe_etag: None,
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
    if let Ok(FetchOnceResult::Code(body, headers)) = result {
@@ -866,6 +873,7 @@ mod test {
      maybe_etag: Some("33a64df551425fcc55e".to_string()),
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
    assert_eq!(res.unwrap(), FetchOnceResult::NotModified);
@@ -885,6 +893,7 @@ mod test {
      maybe_etag: None,
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
    if let Ok(FetchOnceResult::Code(body, headers)) = result {
@@ -914,6 +923,7 @@ mod test {
      maybe_etag: None,
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
    if let Ok(FetchOnceResult::Code(body, _)) = result {
@@ -939,6 +949,7 @@ mod test {
      maybe_etag: None,
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
    if let Ok(FetchOnceResult::Redirect(url, _)) = result {
@@ -974,6 +985,7 @@ mod test {
      maybe_etag: None,
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
    if let Ok(FetchOnceResult::Code(body, headers)) = result {
@@ -1021,6 +1033,7 @@ mod test {
      maybe_etag: None,
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
 
@@ -1083,6 +1096,7 @@ mod test {
      maybe_etag: None,
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
 
@@ -1136,6 +1150,7 @@ mod test {
      maybe_etag: None,
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
    if let Ok(FetchOnceResult::Code(body, headers)) = result {
@@ -1177,6 +1192,7 @@ mod test {
      maybe_etag: None,
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
    if let Ok(FetchOnceResult::Code(body, headers)) = result {
@@ -1199,6 +1215,7 @@ mod test {
      maybe_etag: Some("33a64df551425fcc55e".to_string()),
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
    assert_eq!(res.unwrap(), FetchOnceResult::NotModified);
@@ -1233,6 +1250,7 @@ mod test {
      maybe_etag: None,
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
    if let Ok(FetchOnceResult::Code(body, headers)) = result {
@@ -1262,6 +1280,7 @@ mod test {
      maybe_etag: None,
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
    assert!(result.is_err());
@@ -1283,6 +1302,7 @@ mod test {
      maybe_etag: None,
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
 
@@ -1306,6 +1326,7 @@ mod test {
      maybe_etag: None,
      maybe_auth_token: None,
      maybe_progress_guard: None,
+      maybe_auth: None,
    })
    .await;
 
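
In the header logic above, a token from Deno's own auth-token store still wins: the explicit `maybe_auth` pair is only applied in the `else` branch. A compact sketch of that precedence rule using plain `http` crate types; `apply_auth` is illustrative and the surrounding client is omitted:

```rust
// Precedence sketch for the logic above: an auth token from Deno's own
// token store beats a caller-supplied (name, value) header pair.
use http::header::{HeaderMap, HeaderName, HeaderValue, AUTHORIZATION};

fn apply_auth(
    headers: &mut HeaderMap,
    maybe_auth_token: Option<&str>,
    maybe_auth: Option<(HeaderName, HeaderValue)>,
) {
    if let Some(token) = maybe_auth_token {
        let value = HeaderValue::from_str(&format!("Bearer {token}")).unwrap();
        headers.insert(AUTHORIZATION, value);
    } else if let Some((name, value)) = maybe_auth {
        // Only used when no auth token is configured for the host.
        headers.insert(name, value);
    }
}

fn main() {
    let mut headers = HeaderMap::new();
    let pair = (AUTHORIZATION, HeaderValue::from_static("Basic Zm9vOmJhcg=="));

    apply_auth(&mut headers, Some("tok"), Some(pair.clone()));
    assert_eq!(headers[AUTHORIZATION], "Bearer tok");

    headers.clear();
    apply_auth(&mut headers, None, Some(pair));
    assert_eq!(headers[AUTHORIZATION], "Basic Zm9vOmJhcg==");
}
```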
|
|
|
@@ -18,7 +18,6 @@ use deno_lint::diagnostic::LintDiagnosticRange;
use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned;
use deno_ast::SourceTextInfo;
use deno_core::anyhow::anyhow;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::serde::Deserialize;

@@ -40,6 +39,7 @@ use import_map::ImportMap;
use node_resolver::NpmResolver;
use once_cell::sync::Lazy;
use regex::Regex;
use std::borrow::Cow;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::collections::HashSet;

@@ -598,68 +598,62 @@ pub fn fix_ts_import_changes(

/// Fix tsc import code actions so that the module specifier is correct for
/// resolution by Deno (includes the extension).
fn fix_ts_import_action(
fn fix_ts_import_action<'a>(
referrer: &ModuleSpecifier,
action: &tsc::CodeFixAction,
action: &'a tsc::CodeFixAction,
import_mapper: &TsResponseImportMapper,
) -> Result<Option<tsc::CodeFixAction>, AnyError> {
if matches!(
) -> Option<Cow<'a, tsc::CodeFixAction>> {
if !matches!(
action.fix_name.as_str(),
"import" | "fixMissingFunctionDeclaration"
) {
let change = action
return Some(Cow::Borrowed(action));
}
let specifier = (|| {
let text_change = action.changes.first()?.text_changes.first()?;
let captures = IMPORT_SPECIFIER_RE.captures(&text_change.new_text)?;
Some(captures.get(1)?.as_str())
})();
let Some(specifier) = specifier else {
return Some(Cow::Borrowed(action));
};
if let Some(new_specifier) =
import_mapper.check_unresolved_specifier(specifier, referrer)
{
let description = action.description.replace(specifier, &new_specifier);
let changes = action
.changes
.first()
.ok_or_else(|| anyhow!("Unexpected action changes."))?;
let text_change = change
.text_changes
.first()
.ok_or_else(|| anyhow!("Missing text change."))?;
if let Some(captures) = IMPORT_SPECIFIER_RE.captures(&text_change.new_text)
{
let specifier = captures
.get(1)
.ok_or_else(|| anyhow!("Missing capture."))?
.as_str();
if let Some(new_specifier) =
import_mapper.check_unresolved_specifier(specifier, referrer)
{
let description = action.description.replace(specifier, &new_specifier);
let changes = action
.changes
.iter()
.map(|c| {
let text_changes = c
.text_changes
.iter()
.map(|c| {
let text_changes = c
.text_changes
.iter()
.map(|tc| tsc::TextChange {
span: tc.span.clone(),
new_text: tc.new_text.replace(specifier, &new_specifier),
})
.collect();
tsc::FileTextChanges {
file_name: c.file_name.clone(),
text_changes,
is_new_file: c.is_new_file,
}
.map(|tc| tsc::TextChange {
span: tc.span.clone(),
new_text: tc.new_text.replace(specifier, &new_specifier),
})
.collect();
tsc::FileTextChanges {
file_name: c.file_name.clone(),
text_changes,
is_new_file: c.is_new_file,
}
})
.collect();

return Ok(Some(tsc::CodeFixAction {
description,
changes,
commands: None,
fix_name: action.fix_name.clone(),
fix_id: None,
fix_all_description: None,
}));
} else if !import_mapper.is_valid_import(specifier, referrer) {
return Ok(None);
}
}
Some(Cow::Owned(tsc::CodeFixAction {
description,
changes,
commands: None,
fix_name: action.fix_name.clone(),
fix_id: None,
fix_all_description: None,
}))
} else if !import_mapper.is_valid_import(specifier, referrer) {
None
} else {
Some(Cow::Borrowed(action))
}

Ok(Some(action.clone()))
}

/// Determines if two TypeScript diagnostic codes are effectively equivalent.

@@ -1004,8 +998,7 @@ impl CodeActionCollection {
specifier,
action,
&language_server.get_ts_response_import_mapper(specifier),
)?
else {
) else {
return Ok(());
};
let edit = ts_changes_to_edit(&action.changes, language_server)?;

@@ -1027,7 +1020,7 @@ impl CodeActionCollection {
});
self
.actions
.push(CodeActionKind::Tsc(code_action, action.clone()));
.push(CodeActionKind::Tsc(code_action, action.as_ref().clone()));

if let Some(fix_id) = &action.fix_id {
if let Some(CodeActionKind::Tsc(existing_fix_all, existing_action)) =
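The rewritten `fix_ts_import_action` now returns `Option<Cow<'a, tsc::CodeFixAction>>` instead of cloning on every call. A minimal self-contained sketch of that `Cow` pattern (names here are illustrative, not the actual LSP types):

```rust
use std::borrow::Cow;

// Return the input untouched (borrowed) on the fast path, and allocate an
// owned, rewritten value only when a specifier actually changes.
fn fix_specifier<'a>(action: &'a str, from: &str, to: &str) -> Option<Cow<'a, str>> {
    if !action.contains(from) {
        // Nothing to rewrite: hand back a borrow, no allocation.
        return Some(Cow::Borrowed(action));
    }
    // Only now pay for an owned copy with the replacement applied.
    Some(Cow::Owned(action.replace(from, to)))
}

fn main() {
    // Untouched input stays borrowed.
    assert!(matches!(fix_specifier("import './a.ts'", "x", "y"), Some(Cow::Borrowed(_))));
    // A real rewrite produces an owned value.
    assert!(matches!(fix_specifier("import 'x'", "x", "./x.ts"), Some(Cow::Owned(_))));
}
```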
@@ -439,6 +439,8 @@ pub struct LanguagePreferences {
pub use_aliases_for_renames: bool,
#[serde(default)]
pub quote_style: QuoteStyle,
#[serde(default)]
pub prefer_type_only_auto_imports: bool,
}

impl Default for LanguagePreferences {

@@ -449,6 +451,7 @@ impl Default for LanguagePreferences {
auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true,
quote_style: Default::default(),
prefer_type_only_auto_imports: false,
}
}
}

@@ -2251,6 +2254,7 @@ mod tests {
auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true,
quote_style: QuoteStyle::Auto,
prefer_type_only_auto_imports: false,
},
suggest: CompletionSettings {
complete_function_calls: false,

@@ -2296,6 +2300,7 @@ mod tests {
auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true,
quote_style: QuoteStyle::Auto,
prefer_type_only_auto_imports: false,
},
suggest: CompletionSettings {
complete_function_calls: false,
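The new field carries `#[serde(default)]` so that client configurations written before this change still deserialize. A sketch of why that matters, assuming the `serde` and `serde_json` crates:

```rust
use serde::Deserialize;

// Older configs that omit the key still parse; the field falls back to
// `bool::default()` (false) rather than failing deserialization.
#[derive(Debug, Deserialize)]
struct LanguagePreferencesSketch {
    #[serde(default)]
    quote_style: String,
    #[serde(default)]
    prefer_type_only_auto_imports: bool,
}

fn main() {
    // An empty settings object is a valid, fully-defaulted configuration.
    let prefs: LanguagePreferencesSketch = serde_json::from_str("{}").unwrap();
    assert!(!prefs.prefer_type_only_auto_imports);
    println!("{prefs:?}");
}
```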
@@ -4,6 +4,7 @@ use dashmap::DashMap;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_npm::npm_rc::NpmRc;
use deno_semver::package::PackageNv;
use deno_semver::Version;
use serde::Deserialize;

@@ -25,7 +26,10 @@ pub struct CliNpmSearchApi {

impl CliNpmSearchApi {
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
let resolver = NpmFetchResolver::new(file_fetcher.clone());
let resolver = NpmFetchResolver::new(
file_fetcher.clone(),
Arc::new(NpmRc::default().as_resolved(npm_registry_url()).unwrap()),
);
Self {
file_fetcher,
resolver,
@@ -482,6 +482,7 @@ impl ModuleRegistry {
.fetch_with_options(FetchOptions {
specifier: &specifier,
permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"),
maybe_cache_setting: None,
})
@@ -4952,6 +4952,8 @@ pub struct UserPreferences {
pub auto_import_file_exclude_patterns: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub interactive_inlay_hints: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub prefer_type_only_auto_imports: Option<bool>,
}

impl UserPreferences {

@@ -5074,6 +5076,9 @@ impl UserPreferences {
} else {
Some(language_settings.preferences.quote_style)
},
prefer_type_only_auto_imports: Some(
language_settings.preferences.prefer_type_only_auto_imports,
),
..base_preferences
}
}

@@ -6215,7 +6220,7 @@ mod tests {
let change = changes.text_changes.first().unwrap();
assert_eq!(
change.new_text,
"import type { someLongVariable } from './b.ts'\n"
"import { someLongVariable } from './b.ts'\n"
);
}
@@ -15,7 +15,6 @@ mod js;
mod jsr;
mod lsp;
mod module_loader;
mod napi;
mod node;
mod npm;
mod ops;

@@ -169,10 +168,10 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
if std::io::stderr().is_terminal() {
log::warn!(
"{} command is intended to be run by text editors and IDEs and shouldn't be run manually.

Visit https://docs.deno.com/runtime/getting_started/setup_your_environment/ for instruction
how to setup your favorite text editor.

Press Ctrl+C to exit.
", colors::cyan("deno lsp"));
}
@@ -88,11 +88,10 @@ fn main() {
let standalone = standalone::extract_standalone(Cow::Owned(args));
let future = async move {
match standalone {
Ok(Some(future)) => {
let (metadata, eszip) = future.await?;
util::logger::init(metadata.log_level);
load_env_vars(&metadata.env_vars_from_env_file);
let exit_code = standalone::run(eszip, metadata).await?;
Ok(Some(data)) => {
util::logger::init(data.metadata.log_level);
load_env_vars(&data.metadata.env_vars_from_env_file);
let exit_code = standalone::run(data).await?;
std::process::exit(exit_code);
}
Ok(None) => Ok(()),
@@ -541,7 +541,8 @@ impl<TGraphContainer: ModuleGraphContainer>
self.parsed_source_cache.free(specifier);

Ok(Some(ModuleCodeStringSource {
code: ModuleSourceCode::Bytes(transpile_result),
// note: it's faster to provide a string if we know it's a string
code: ModuleSourceCode::String(transpile_result.into()),
found_url: specifier.clone(),
media_type,
}))

@@ -571,7 +572,8 @@ impl<TGraphContainer: ModuleGraphContainer>
self.parsed_source_cache.free(specifier);

Ok(Some(ModuleCodeStringSource {
code: ModuleSourceCode::Bytes(transpile_result),
// note: it's faster to provide a string if we know it's a string
code: ModuleSourceCode::String(transpile_result.into()),
found_url: specifier.clone(),
media_type,
}))
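Both hunks swap a byte-buffer source for a string source, on the rationale stated in the added comment: a `String` is already known-valid UTF-8, so the consumer can skip a validation scan. A hedged sketch with stand-in types (not deno_core's actual `ModuleSourceCode`):

```rust
// Stand-in for the two source representations: a byte buffer must still be
// UTF-8 checked downstream, a String has already proven its validity.
enum ModuleSourceCodeSketch {
    String(String),
    Bytes(Vec<u8>),
}

fn into_source(transpiled: String) -> ModuleSourceCodeSketch {
    // Previously: ModuleSourceCodeSketch::Bytes(transpiled.into_bytes())
    ModuleSourceCodeSketch::String(transpiled)
}

fn main() {
    match into_source("export const a = 1;".to_string()) {
        ModuleSourceCodeSketch::String(s) => println!("string source, {} bytes", s.len()),
        ModuleSourceCodeSketch::Bytes(b) => println!("byte source, {} bytes", b.len()),
    }
}
```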
@@ -1,114 +0,0 @@
# napi

This directory contains source for Deno's Node-API implementation. It depends on
`napi_sym` and `deno_napi`.

Files are generally organized the same as in Node.js's implementation to ease in
ensuring compatibility.

## Adding a new function

Add the symbol name to
[`cli/napi_sym/symbol_exports.json`](../napi_sym/symbol_exports.json).

```diff
{
  "symbols": [
    ...
    "napi_get_undefined",
-   "napi_get_null"
+   "napi_get_null",
+   "napi_get_boolean"
  ]
}
```

Determine where to place the implementation. `napi_get_boolean` is related to JS
values so we will place it in `js_native_api.rs`. If something is not clear,
just create a new file module.

See [`napi_sym`](../napi_sym/) for writing the implementation:

```rust
#[napi_sym::napi_sym]
pub fn napi_get_boolean(
  env: *mut Env,
  value: bool,
  result: *mut napi_value,
) -> Result {
  // ...
  Ok(())
}
```

Update the generated symbol lists using the script:

```
deno run --allow-write tools/napi/generate_symbols_lists.js
```

Add a test in [`/tests/napi`](../../tests/napi/). You can also refer to Node.js
test suite for Node-API.

```js
// tests/napi/boolean_test.js
import { assertEquals, loadTestLibrary } from "./common.js";
const lib = loadTestLibrary();
Deno.test("napi get boolean", function () {
  assertEquals(lib.test_get_boolean(true), true);
  assertEquals(lib.test_get_boolean(false), false);
});
```

```rust
// tests/napi/src/boolean.rs

use napi_sys::Status::napi_ok;
use napi_sys::ValueType::napi_boolean;
use napi_sys::*;

extern "C" fn test_boolean(
  env: napi_env,
  info: napi_callback_info,
) -> napi_value {
  let (args, argc, _) = crate::get_callback_info!(env, info, 1);
  assert_eq!(argc, 1);

  let mut ty = -1;
  assert!(unsafe { napi_typeof(env, args[0], &mut ty) } == napi_ok);
  assert_eq!(ty, napi_boolean);

  // Use napi_get_boolean here...

  value
}

pub fn init(env: napi_env, exports: napi_value) {
  let properties = &[crate::new_property!(env, "test_boolean\0", test_boolean)];

  unsafe {
    napi_define_properties(env, exports, properties.len(), properties.as_ptr())
  };
}
```

```diff
// tests/napi/src/lib.rs

+ mod boolean;

...

#[no_mangle]
unsafe extern "C" fn napi_register_module_v1(
  env: napi_env,
  exports: napi_value,
) -> napi_value {
  ...
+ boolean::init(env, exports);

  exports
}
```

Run the test using `cargo test -p tests/napi`.
@@ -1,21 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

#![allow(unused_mut)]
#![allow(non_camel_case_types)]
#![allow(clippy::undocumented_unsafe_blocks)]

//! Symbols to be exported are now defined in this JSON file.
//! The `#[napi_sym]` macro checks for missing entries and panics.
//!
//! `./tools/napi/generate_symbols_list.js` is used to generate the LINK `cli/exports.def` on Windows,
//! which is also checked into git.
//!
//! To add a new napi function:
//! 1. Place `#[napi_sym]` on top of your implementation.
//! 2. Add the function's identifier to this JSON list.
//! 3. Finally, run `tools/napi/generate_symbols_list.js` to update `cli/napi/generated_symbol_exports_list_*.def`.

pub mod js_native_api;
pub mod node_api;
pub mod util;
pub mod uv;
2 cli/npm/managed/cache/mod.rs vendored

@@ -26,7 +26,7 @@ use crate::cache::CACHE_PERM;
use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::hard_link_dir_recursive;

mod registry_info;
pub mod registry_info;
mod tarball;
mod tarball_extract;
68 cli/npm/managed/cache/registry_info.rs vendored

@@ -84,7 +84,7 @@ impl RegistryInfoDownloader {
self.load_package_info_inner(name).await.with_context(|| {
format!(
"Error getting response at {} for package \"{}\"",
self.get_package_url(name),
get_package_url(&self.npmrc, name),
name
)
})

@@ -190,7 +190,7 @@ impl RegistryInfoDownloader {

fn create_load_future(self: &Arc<Self>, name: &str) -> LoadFuture {
let downloader = self.clone();
let package_url = self.get_package_url(name);
let package_url = get_package_url(&self.npmrc, name);
let registry_config = self.npmrc.get_registry_config(name);
let maybe_auth_header =
match maybe_auth_header_for_npm_registry(registry_config) {

@@ -239,36 +239,36 @@ impl RegistryInfoDownloader {
.map(|r| r.map_err(Arc::new))
.boxed_local()
}

fn get_package_url(&self, name: &str) -> Url {
let registry_url = self.npmrc.get_registry_url(name);
// The '/' character in scoped package names "@scope/name" must be
// encoded for older third party registries. Newer registries and
// npm itself support both ways
// - encoded: https://registry.npmjs.org/@rollup%2fplugin-json
// - non-ecoded: https://registry.npmjs.org/@rollup/plugin-json
// To support as many third party registries as possible we'll
// always encode the '/' character.

// list of all characters used in npm packages:
// !, ', (, ), *, -, ., /, [0-9], @, [A-Za-z], _, ~
const ASCII_SET: percent_encoding::AsciiSet =
percent_encoding::NON_ALPHANUMERIC
.remove(b'!')
.remove(b'\'')
.remove(b'(')
.remove(b')')
.remove(b'*')
.remove(b'-')
.remove(b'.')
.remove(b'@')
.remove(b'_')
.remove(b'~');
let name = percent_encoding::utf8_percent_encode(name, &ASCII_SET);
registry_url
// Ensure that scoped package name percent encoding is lower cased
// to match npm.
.join(&name.to_string().replace("%2F", "%2f"))
.unwrap()
}
}

pub fn get_package_url(npmrc: &ResolvedNpmRc, name: &str) -> Url {
let registry_url = npmrc.get_registry_url(name);
// The '/' character in scoped package names "@scope/name" must be
// encoded for older third party registries. Newer registries and
// npm itself support both ways
// - encoded: https://registry.npmjs.org/@rollup%2fplugin-json
// - non-ecoded: https://registry.npmjs.org/@rollup/plugin-json
// To support as many third party registries as possible we'll
// always encode the '/' character.

// list of all characters used in npm packages:
// !, ', (, ), *, -, ., /, [0-9], @, [A-Za-z], _, ~
const ASCII_SET: percent_encoding::AsciiSet =
percent_encoding::NON_ALPHANUMERIC
.remove(b'!')
.remove(b'\'')
.remove(b'(')
.remove(b')')
.remove(b'*')
.remove(b'-')
.remove(b'.')
.remove(b'@')
.remove(b'_')
.remove(b'~');
let name = percent_encoding::utf8_percent_encode(name, &ASCII_SET);
registry_url
// Ensure that scoped package name percent encoding is lower cased
// to match npm.
.join(&name.to_string().replace("%2F", "%2f"))
.unwrap()
}
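The hunk above only turns the method into a free function so other modules can call it; the encoding logic is unchanged. A standalone sketch of that logic, assuming the `percent-encoding` and `url` crates as dependencies:

```rust
use percent_encoding::{utf8_percent_encode, NON_ALPHANUMERIC};

fn main() {
    // Same allow-list as get_package_url: everything npm permits in a package
    // name except '/' stays literal, so only the scope separator is escaped.
    const ASCII_SET: percent_encoding::AsciiSet = NON_ALPHANUMERIC
        .remove(b'!')
        .remove(b'\'')
        .remove(b'(')
        .remove(b')')
        .remove(b'*')
        .remove(b'-')
        .remove(b'.')
        .remove(b'@')
        .remove(b'_')
        .remove(b'~');
    let name = utf8_percent_encode("@rollup/plugin-json", &ASCII_SET).to_string();
    // Lower-case the escape to match npm's own canonical form.
    let encoded = name.replace("%2F", "%2f");
    let url = url::Url::parse("https://registry.npmjs.org/")
        .unwrap()
        .join(&encoded)
        .unwrap();
    assert_eq!(url.as_str(), "https://registry.npmjs.org/@rollup%2fplugin-json");
}
```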
@@ -55,7 +55,7 @@ use super::CliNpmResolver;
use super::InnerCliNpmResolverRef;
use super::ResolvePkgFolderFromDenoReqError;

mod cache;
pub mod cache;
mod registry;
mod resolution;
mod resolvers;
@@ -8,10 +8,12 @@ use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;

use common::maybe_auth_header_for_npm_registry;
use dashmap::DashMap;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError;

@@ -19,10 +21,10 @@ use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use managed::cache::registry_info::get_package_url;
use node_resolver::NpmResolver;
use thiserror::Error;

use crate::args::npm_registry_url;
use crate::file_fetcher::FileFetcher;

pub use self::byonm::CliByonmNpmResolver;

@@ -115,14 +117,19 @@ pub struct NpmFetchResolver {
nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
info_by_name: DashMap<String, Option<Arc<NpmPackageInfo>>>,
file_fetcher: Arc<FileFetcher>,
npmrc: Arc<ResolvedNpmRc>,
}

impl NpmFetchResolver {
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
pub fn new(
file_fetcher: Arc<FileFetcher>,
npmrc: Arc<ResolvedNpmRc>,
) -> Self {
Self {
nv_by_req: Default::default(),
info_by_name: Default::default(),
file_fetcher,
npmrc,
}
}

@@ -157,11 +164,21 @@ impl NpmFetchResolver {
return info.value().clone();
}
let fetch_package_info = || async {
let info_url = npm_registry_url().join(name).ok()?;
let info_url = get_package_url(&self.npmrc, name);
let file_fetcher = self.file_fetcher.clone();
let registry_config = self.npmrc.get_registry_config(name);
// TODO(bartlomieju): this should error out, not use `.ok()`.
let maybe_auth_header =
maybe_auth_header_for_npm_registry(registry_config).ok()?;
// spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move {
file_fetcher.fetch_bypass_permissions(&info_url).await.ok()
file_fetcher
.fetch_bypass_permissions_with_maybe_auth(
&info_url,
maybe_auth_header,
)
.await
.ok()
})
.await
.ok()??;
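`NpmFetchResolver` memoizes lookups in `DashMap`s keyed by request and by package name, including negative results. A reduced sketch of that caching shape, assuming the `dashmap` crate:

```rust
use dashmap::DashMap;
use std::sync::Arc;

// Concurrent memoization keyed by package name; `None` is cached too, so a
// failed fetch is not retried on the next lookup.
fn package_info(
    cache: &DashMap<String, Option<Arc<String>>>,
    name: &str,
) -> Option<Arc<String>> {
    if let Some(hit) = cache.get(name) {
        return hit.value().clone();
    }
    // Stand-in for the registry fetch; imagine this may fail and yield None.
    let fetched = Some(Arc::new(format!("info for {name}")));
    cache.insert(name.to_string(), fetched.clone());
    fetched
}

fn main() {
    let cache = DashMap::new();
    let a = package_info(&cache, "chalk");
    let b = package_info(&cache, "chalk"); // served from the cache
    assert_eq!(a, b);
}
```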
@@ -291,7 +291,7 @@
"type": "array",
"description": "List of tag names that will be run. Empty list disables all tags and will only use rules from `include`.",
"items": {
"type": "string"
"$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/tags.v1.json"
},
"minItems": 0,
"uniqueItems": true

@@ -300,7 +300,7 @@
"type": "array",
"description": "List of rule names that will be excluded from configured tag sets. If the same rule is in `include` it will be run.",
"items": {
"type": "string"
"$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json"
},
"minItems": 0,
"uniqueItems": true

@@ -309,7 +309,7 @@
"type": "array",
"description": "List of rule names that will be run. Even if the same rule is in `exclude` it will be run.",
"items": {
"type": "string"
"$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json"
},
"minItems": 0,
"uniqueItems": true

@@ -531,6 +531,7 @@
"detect-cjs",
"ffi",
"fs",
"fmt-component",
"http",
"kv",
"net",
@@ -9,14 +9,18 @@ use std::ffi::OsString;
use std::fs;
use std::fs::File;
use std::future::Future;
use std::io::ErrorKind;
use std::io::Read;
use std::io::Seek;
use std::io::SeekFrom;
use std::io::Write;
use std::ops::Range;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use std::sync::Arc;

use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::ResolverWorkspaceJsrPackage;

@@ -30,13 +34,22 @@ use deno_core::futures::AsyncReadExt;
use deno_core::futures::AsyncSeekExt;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_graph::source::RealFileSystem;
use deno_graph::ModuleGraph;
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_io::fs::FsError;
use deno_runtime::deno_node::PackageJson;
use deno_semver::npm::NpmVersionReqParseError;
use deno_semver::package::PackageReq;
use deno_semver::Version;
use deno_semver::VersionReqSpecifierParseError;
use eszip::EszipRelativeFileBaseUrl;
use indexmap::IndexMap;
use log::Level;
use serde::Deserialize;

@@ -49,6 +62,7 @@ use crate::args::NpmInstallDepsProvider;
use crate::args::PermissionFlags;
use crate::args::UnstableConfig;
use crate::cache::DenoDir;
use crate::emit::Emitter;
use crate::file_fetcher::FileFetcher;
use crate::http_util::HttpClientProvider;
use crate::npm::CliNpmResolver;

@@ -60,12 +74,63 @@ use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;

use super::file_system::DenoCompileFileSystem;
use super::serialization::deserialize_binary_data_section;
use super::serialization::serialize_binary_data_section;
use super::serialization::DenoCompileModuleData;
use super::serialization::DeserializedDataSection;
use super::serialization::RemoteModulesStore;
use super::serialization::RemoteModulesStoreBuilder;
use super::virtual_fs::FileBackedVfs;
use super::virtual_fs::VfsBuilder;
use super::virtual_fs::VfsRoot;
use super::virtual_fs::VirtualDirectory;

const MAGIC_TRAILER: &[u8; 8] = b"d3n0l4nd";
/// A URL that can be designated as the base for relative URLs.
///
/// After creation, this URL may be used to get the key for a
/// module in the binary.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StandaloneRelativeFileBaseUrl<'a>(&'a Url);

impl<'a> From<&'a Url> for StandaloneRelativeFileBaseUrl<'a> {
fn from(url: &'a Url) -> Self {
Self(url)
}
}

impl<'a> StandaloneRelativeFileBaseUrl<'a> {
pub fn new(url: &'a Url) -> Self {
debug_assert_eq!(url.scheme(), "file");
Self(url)
}

/// Gets the module map key of the provided specifier.
///
/// * Descendant file specifiers will be made relative to the base.
/// * Non-descendant file specifiers will stay as-is (absolute).
/// * Non-file specifiers will stay as-is.
pub fn specifier_key<'b>(&self, target: &'b Url) -> Cow<'b, str> {
if target.scheme() != "file" {
return Cow::Borrowed(target.as_str());
}

match self.0.make_relative(target) {
Some(relative) => {
if relative.starts_with("../") {
Cow::Borrowed(target.as_str())
} else {
Cow::Owned(relative)
}
}
None => Cow::Borrowed(target.as_str()),
}
}

pub fn inner(&self) -> &Url {
self.0
}
}

#[derive(Deserialize, Serialize)]
pub enum NodeModules {
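A self-contained sketch of the `specifier_key` logic above, using the `url` crate's `Url::make_relative`: descendants of the base become relative keys, everything else keeps its absolute form.

```rust
fn specifier_key(base: &url::Url, target: &url::Url) -> String {
    // Non-file specifiers stay as-is.
    if target.scheme() != "file" {
        return target.to_string();
    }
    match base.make_relative(target) {
        // Descendants become relative keys; "../" escapes stay absolute.
        Some(rel) if !rel.starts_with("../") => rel,
        _ => target.to_string(),
    }
}

fn main() {
    let base = url::Url::parse("file:///project/").unwrap();
    let inside = url::Url::parse("file:///project/src/main.ts").unwrap();
    let outside = url::Url::parse("file:///etc/hosts").unwrap();
    assert_eq!(specifier_key(&base, &inside), "src/main.ts");
    assert_eq!(specifier_key(&base, &outside), "file:///etc/hosts");
}
```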
@@ -120,78 +185,23 @@ pub struct Metadata {
pub unstable_config: UnstableConfig,
}

pub fn load_npm_vfs(root_dir_path: PathBuf) -> Result<FileBackedVfs, AnyError> {
let data = libsui::find_section("d3n0l4nd").unwrap();

// We do the first part sync so it can complete quickly
let trailer: [u8; TRAILER_SIZE] = data[0..TRAILER_SIZE].try_into().unwrap();
let trailer = match Trailer::parse(&trailer)? {
None => panic!("Could not find trailer"),
Some(trailer) => trailer,
};
let data = &data[TRAILER_SIZE..];

let vfs_data =
&data[trailer.npm_vfs_pos as usize..trailer.npm_files_pos as usize];
let mut dir: VirtualDirectory = serde_json::from_slice(vfs_data)?;

// align the name of the directory with the root dir
dir.name = root_dir_path
.file_name()
.unwrap()
.to_string_lossy()
.to_string();

let fs_root = VfsRoot {
dir,
root_path: root_dir_path,
start_file_offset: trailer.npm_files_pos,
};
Ok(FileBackedVfs::new(data.to_vec(), fs_root))
}

fn write_binary_bytes(
mut file_writer: File,
original_bin: Vec<u8>,
metadata: &Metadata,
eszip: eszip::EszipV2,
npm_vfs: Option<&VirtualDirectory>,
npm_files: &Vec<Vec<u8>>,
npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
remote_modules: &RemoteModulesStoreBuilder,
vfs: VfsBuilder,
compile_flags: &CompileFlags,
) -> Result<(), AnyError> {
let metadata = serde_json::to_string(metadata)?.as_bytes().to_vec();
let npm_vfs = serde_json::to_string(&npm_vfs)?.as_bytes().to_vec();
let eszip_archive = eszip.into_bytes();

let mut writer = Vec::new();

// write the trailer, which includes the positions
// of the data blocks in the file
writer.write_all(&{
let metadata_pos = eszip_archive.len() as u64;
let npm_vfs_pos = metadata_pos + (metadata.len() as u64);
let npm_files_pos = npm_vfs_pos + (npm_vfs.len() as u64);
Trailer {
eszip_pos: 0,
metadata_pos,
npm_vfs_pos,
npm_files_pos,
}
.as_bytes()
})?;

writer.write_all(&eszip_archive)?;
writer.write_all(&metadata)?;
writer.write_all(&npm_vfs)?;
for file in npm_files {
writer.write_all(file)?;
}
let data_section_bytes =
serialize_binary_data_section(metadata, npm_snapshot, remote_modules, vfs)?;

let target = compile_flags.resolve_target();
if target.contains("linux") {
libsui::Elf::new(&original_bin).append(
"d3n0l4nd",
&writer,
&data_section_bytes,
&mut file_writer,
)?;
} else if target.contains("windows") {

@@ -201,11 +211,11 @@ fn write_binary_bytes(
pe = pe.set_icon(&icon)?;
}

pe.write_resource("d3n0l4nd", writer)?
pe.write_resource("d3n0l4nd", data_section_bytes)?
.build(&mut file_writer)?;
} else if target.contains("darwin") {
libsui::Macho::from(original_bin)?
.write_section("d3n0l4nd", writer)?
.write_section("d3n0l4nd", data_section_bytes)?
.build_and_sign(&mut file_writer)?;
}
Ok(())

@@ -221,6 +231,63 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool {
|| libsui::utils::is_macho(&data)
}

pub struct StandaloneData {
pub fs: Arc<dyn deno_fs::FileSystem>,
pub metadata: Metadata,
pub modules: StandaloneModules,
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
pub root_path: PathBuf,
pub vfs: Arc<FileBackedVfs>,
}

pub struct StandaloneModules {
remote_modules: RemoteModulesStore,
vfs: Arc<FileBackedVfs>,
}

impl StandaloneModules {
pub fn resolve_specifier<'a>(
&'a self,
specifier: &'a ModuleSpecifier,
) -> Result<Option<&'a ModuleSpecifier>, AnyError> {
if specifier.scheme() == "file" {
Ok(Some(specifier))
} else {
self.remote_modules.resolve_specifier(specifier)
}
}

pub fn read<'a>(
&'a self,
specifier: &'a ModuleSpecifier,
) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
if specifier.scheme() == "file" {
let path = deno_path_util::url_to_file_path(specifier)?;
let bytes = match self.vfs.file_entry(&path) {
Ok(entry) => self.vfs.read_file_all(entry)?,
Err(err) if err.kind() == ErrorKind::NotFound => {
let bytes = match RealFs.read_file_sync(&path, None) {
Ok(bytes) => bytes,
Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => {
return Ok(None)
}
Err(err) => return Err(err.into()),
};
Cow::Owned(bytes)
}
Err(err) => return Err(err.into()),
};
Ok(Some(DenoCompileModuleData {
media_type: MediaType::from_specifier(specifier),
specifier,
data: bytes,
}))
} else {
self.remote_modules.read(specifier)
}
}
}

/// This function will try to run this binary as a standalone binary
/// produced by `deno compile`. It determines if this is a standalone
/// binary by skipping over the trailer width at the end of the file,
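`StandaloneModules::read` above tries the embedded VFS first and only falls back to the real filesystem when the entry is `NotFound`. A reduced sketch of that lookup order (plain `HashMap` standing in for the VFS):

```rust
use std::io::ErrorKind;

// Try the in-memory snapshot first; fall back to disk only on NotFound, so
// files outside the snapshot can still be served. Other I/O errors propagate.
fn read_with_fallback(
    vfs: &std::collections::HashMap<&str, Vec<u8>>,
    path: &str,
) -> std::io::Result<Option<Vec<u8>>> {
    if let Some(bytes) = vfs.get(path) {
        return Ok(Some(bytes.clone()));
    }
    match std::fs::read(path) {
        Ok(bytes) => Ok(Some(bytes)),
        Err(err) if err.kind() == ErrorKind::NotFound => Ok(None), // absent everywhere
        Err(err) => Err(err),
    }
}

fn main() {
    let mut vfs = std::collections::HashMap::new();
    vfs.insert("/embedded/mod.ts", b"export {};".to_vec());
    assert!(read_with_fallback(&vfs, "/embedded/mod.ts").unwrap().is_some());
    assert!(read_with_fallback(&vfs, "/definitely/missing").unwrap().is_none());
}
```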
@@ -228,110 +295,66 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool {
/// the bundle is executed. If not, this function exits with `Ok(None)`.
pub fn extract_standalone(
cli_args: Cow<Vec<OsString>>,
) -> Result<
Option<impl Future<Output = Result<(Metadata, eszip::EszipV2), AnyError>>>,
AnyError,
> {
) -> Result<Option<StandaloneData>, AnyError> {
let Some(data) = libsui::find_section("d3n0l4nd") else {
return Ok(None);
};

// We do the first part sync so it can complete quickly
let trailer = match Trailer::parse(&data[0..TRAILER_SIZE])? {
let DeserializedDataSection {
mut metadata,
npm_snapshot,
remote_modules,
mut vfs_dir,
vfs_files_data,
} = match deserialize_binary_data_section(data)? {
Some(data_section) => data_section,
None => return Ok(None),
Some(trailer) => trailer,
};

let root_path = {
let maybe_current_exe = std::env::current_exe().ok();
let current_exe_name = maybe_current_exe
.as_ref()
.and_then(|p| p.file_name())
.map(|p| p.to_string_lossy())
// should never happen
.unwrap_or_else(|| Cow::Borrowed("binary"));
std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name))
};
let cli_args = cli_args.into_owned();
// If we have an eszip, read it out
Ok(Some(async move {
let bufreader =
deno_core::futures::io::BufReader::new(&data[TRAILER_SIZE..]);
metadata.argv.reserve(cli_args.len() - 1);
for arg in cli_args.into_iter().skip(1) {
metadata.argv.push(arg.into_string().unwrap());
}
let vfs = {
// align the name of the directory with the root dir
vfs_dir.name = root_path.file_name().unwrap().to_string_lossy().to_string();

let (eszip, loader) = eszip::EszipV2::parse(bufreader)
.await
.context("Failed to parse eszip header")?;

let bufreader = loader.await.context("Failed to parse eszip archive")?;

let mut metadata = String::new();

bufreader
.take(trailer.metadata_len())
.read_to_string(&mut metadata)
.await
.context("Failed to read metadata from the current executable")?;

let mut metadata: Metadata = serde_json::from_str(&metadata).unwrap();
metadata.argv.reserve(cli_args.len() - 1);
for arg in cli_args.into_iter().skip(1) {
metadata.argv.push(arg.into_string().unwrap());
}

Ok((metadata, eszip))
let fs_root = VfsRoot {
dir: vfs_dir,
root_path: root_path.clone(),
start_file_offset: 0,
};
Arc::new(FileBackedVfs::new(Cow::Borrowed(vfs_files_data), fs_root))
};
let fs: Arc<dyn deno_fs::FileSystem> =
Arc::new(DenoCompileFileSystem::new(vfs.clone()));
Ok(Some(StandaloneData {
fs,
metadata,
modules: StandaloneModules {
remote_modules,
vfs: vfs.clone(),
},
npm_snapshot,
root_path,
vfs,
}))
}

const TRAILER_SIZE: usize = std::mem::size_of::<Trailer>() + 8; // 8 bytes for the magic trailer string

struct Trailer {
eszip_pos: u64,
metadata_pos: u64,
npm_vfs_pos: u64,
npm_files_pos: u64,
}

impl Trailer {
pub fn parse(trailer: &[u8]) -> Result<Option<Trailer>, AnyError> {
let (magic_trailer, rest) = trailer.split_at(8);
if magic_trailer != MAGIC_TRAILER {
return Ok(None);
}

let (eszip_archive_pos, rest) = rest.split_at(8);
let (metadata_pos, rest) = rest.split_at(8);
let (npm_vfs_pos, npm_files_pos) = rest.split_at(8);
let eszip_archive_pos = u64_from_bytes(eszip_archive_pos)?;
let metadata_pos = u64_from_bytes(metadata_pos)?;
let npm_vfs_pos = u64_from_bytes(npm_vfs_pos)?;
let npm_files_pos = u64_from_bytes(npm_files_pos)?;
Ok(Some(Trailer {
eszip_pos: eszip_archive_pos,
metadata_pos,
npm_vfs_pos,
npm_files_pos,
}))
}

pub fn metadata_len(&self) -> u64 {
self.npm_vfs_pos - self.metadata_pos
}

pub fn npm_vfs_len(&self) -> u64 {
self.npm_files_pos - self.npm_vfs_pos
}

pub fn as_bytes(&self) -> Vec<u8> {
let mut trailer = MAGIC_TRAILER.to_vec();
trailer.write_all(&self.eszip_pos.to_be_bytes()).unwrap();
trailer.write_all(&self.metadata_pos.to_be_bytes()).unwrap();
trailer.write_all(&self.npm_vfs_pos.to_be_bytes()).unwrap();
trailer
.write_all(&self.npm_files_pos.to_be_bytes())
.unwrap();
trailer
}
}

fn u64_from_bytes(arr: &[u8]) -> Result<u64, AnyError> {
let fixed_arr: &[u8; 8] = arr
.try_into()
.context("Failed to convert the buffer into a fixed-size array")?;
Ok(u64::from_be_bytes(*fixed_arr))
}

pub struct DenoCompileBinaryWriter<'a> {
deno_dir: &'a DenoDir,
emitter: &'a Emitter,
file_fetcher: &'a FileFetcher,
http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a dyn CliNpmResolver,
@@ -343,6 +366,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
#[allow(clippy::too_many_arguments)]
pub fn new(
deno_dir: &'a DenoDir,
emitter: &'a Emitter,
file_fetcher: &'a FileFetcher,
http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a dyn CliNpmResolver,

@@ -351,6 +375,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
) -> Self {
Self {
deno_dir,
emitter,
file_fetcher,
http_client_provider,
npm_resolver,

@@ -362,8 +387,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
pub async fn write_bin(
&self,
writer: File,
eszip: eszip::EszipV2,
root_dir_url: EszipRelativeFileBaseUrl<'_>,
graph: &ModuleGraph,
root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
entrypoint: &ModuleSpecifier,
compile_flags: &CompileFlags,
cli_options: &CliOptions,

@@ -390,15 +415,17 @@ impl<'a> DenoCompileBinaryWriter<'a> {
)
}
}
self.write_standalone_binary(
writer,
original_binary,
eszip,
root_dir_url,
entrypoint,
cli_options,
compile_flags,
)
self
.write_standalone_binary(
writer,
original_binary,
graph,
root_dir_url,
entrypoint,
cli_options,
compile_flags,
)
.await
}

async fn get_base_binary(
@@ -493,12 +520,12 @@ impl<'a> DenoCompileBinaryWriter<'a> {
/// This functions creates a standalone deno binary by appending a bundle
/// and magic trailer to the currently executing binary.
#[allow(clippy::too_many_arguments)]
fn write_standalone_binary(
async fn write_standalone_binary(
&self,
writer: File,
original_bin: Vec<u8>,
mut eszip: eszip::EszipV2,
root_dir_url: EszipRelativeFileBaseUrl<'_>,
graph: &ModuleGraph,
root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
entrypoint: &ModuleSpecifier,
cli_options: &CliOptions,
compile_flags: &CompileFlags,

@@ -512,19 +539,17 @@ impl<'a> DenoCompileBinaryWriter<'a> {
None => None,
};
let root_path = root_dir_url.inner().to_file_path().unwrap();
let (npm_vfs, npm_files, node_modules) = match self.npm_resolver.as_inner()
let (maybe_npm_vfs, node_modules, npm_snapshot) = match self
.npm_resolver
.as_inner()
{
InnerCliNpmResolverRef::Managed(managed) => {
let snapshot =
managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
if !snapshot.as_serialized().packages.is_empty() {
let (root_dir, files) = self
.build_vfs(&root_path, cli_options)?
.into_dir_and_files();
eszip.add_npm_snapshot(snapshot);
let npm_vfs_builder = self.build_npm_vfs(&root_path, cli_options)?;
(
Some(root_dir),
files,
Some(npm_vfs_builder),
Some(NodeModules::Managed {
node_modules_dir: self.npm_resolver.root_node_modules_path().map(
|path| {

@@ -536,18 +561,16 @@ impl<'a> DenoCompileBinaryWriter<'a> {
},
),
}),
Some(snapshot),
)
} else {
(None, Vec::new(), None)
(None, None, None)
}
}
InnerCliNpmResolverRef::Byonm(resolver) => {
let (root_dir, files) = self
.build_vfs(&root_path, cli_options)?
.into_dir_and_files();
let npm_vfs_builder = self.build_npm_vfs(&root_path, cli_options)?;
(
Some(root_dir),
files,
Some(npm_vfs_builder),
Some(NodeModules::Byonm {
root_node_modules_dir: resolver.root_node_modules_path().map(
|node_modules_dir| {

@@ -560,9 +583,67 @@ impl<'a> DenoCompileBinaryWriter<'a> {
},
),
}),
None,
)
}
};
let mut vfs = if let Some(npm_vfs) = maybe_npm_vfs {
npm_vfs
} else {
VfsBuilder::new(root_path.clone())?
};
let mut remote_modules_store = RemoteModulesStoreBuilder::default();
for module in graph.modules() {
if module.specifier().scheme() == "data" {
continue; // don't store data urls as an entry as they're in the code
}
let (maybe_source, media_type) = match module {
deno_graph::Module::Js(m) => {
// todo(https://github.com/denoland/deno_media_type/pull/12): use is_emittable()
let is_emittable = matches!(
m.media_type,
MediaType::TypeScript
| MediaType::Mts
| MediaType::Cts
| MediaType::Jsx
| MediaType::Tsx
);
let source = if is_emittable {
let source = self
.emitter
.emit_parsed_source(&m.specifier, m.media_type, &m.source)
.await?;
source.into_bytes()
} else {
m.source.as_bytes().to_vec()
};
(Some(source), m.media_type)
}
deno_graph::Module::Json(m) => {
(Some(m.source.as_bytes().to_vec()), m.media_type)
}
deno_graph::Module::Npm(_)
| deno_graph::Module::Node(_)
| deno_graph::Module::External(_) => (None, MediaType::Unknown),
};
if module.specifier().scheme() == "file" {
let file_path = deno_path_util::url_to_file_path(module.specifier())?;
vfs
.add_file_with_data(
&file_path,
match maybe_source {
Some(source) => source,
None => RealFs.read_file_sync(&file_path, None)?,
},
)
.with_context(|| {
format!("Failed adding '{}'", file_path.display())
})?;
} else if let Some(source) = maybe_source {
remote_modules_store.add(module.specifier(), media_type, source);
}
}
remote_modules_store.add_redirects(&graph.redirects);

let env_vars_from_env_file = match cli_options.env_file_name() {
Some(env_filename) => {
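The module-graph loop above only transpiles TypeScript-family sources; plain JS and JSON are embedded verbatim. A reduced sketch of the `is_emittable` check (the enum is a stand-in for deno_ast's `MediaType`):

```rust
// TypeScript-family sources go through the emitter before being embedded;
// everything else is stored as-is.
#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq)]
enum MediaTypeSketch { TypeScript, Mts, Cts, Jsx, Tsx, JavaScript, Json }

fn is_emittable(media_type: MediaTypeSketch) -> bool {
    matches!(
        media_type,
        MediaTypeSketch::TypeScript
            | MediaTypeSketch::Mts
            | MediaTypeSketch::Cts
            | MediaTypeSketch::Jsx
            | MediaTypeSketch::Tsx
    )
}

fn main() {
    assert!(is_emittable(MediaTypeSketch::TypeScript));
    assert!(!is_emittable(MediaTypeSketch::JavaScript));
    assert!(!is_emittable(MediaTypeSketch::Json));
}
```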
@@ -636,14 +717,14 @@ impl<'a> DenoCompileBinaryWriter<'a> {
writer,
original_bin,
&metadata,
eszip,
npm_vfs.as_ref(),
&npm_files,
npm_snapshot.map(|s| s.into_serialized()),
&remote_modules_store,
vfs,
compile_flags,
)
}

fn build_vfs(
fn build_npm_vfs(
&self,
root_path: &Path,
cli_options: &CliOptions,

@@ -664,8 +745,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
} else {
// DO NOT include the user's registry url as it may contain credentials,
// but also don't make this dependent on the registry url
let root_path = npm_resolver.global_cache_root_folder();
let mut builder = VfsBuilder::new(root_path)?;
let global_cache_root_path = npm_resolver.global_cache_root_folder();
let mut builder = VfsBuilder::new(global_cache_root_path)?;
let mut packages =
npm_resolver.all_system_packages(&self.npm_system_info);
packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism

@@ -675,12 +756,12 @@ impl<'a> DenoCompileBinaryWriter<'a> {
builder.add_dir_recursive(&folder)?;
}

// Flatten all the registries folders into a single "node_modules/localhost" folder
// Flatten all the registries folders into a single ".deno_compile_node_modules/localhost" folder
// that will be used by denort when loading the npm cache. This avoids us exposing
// the user's private registry information and means we don't have to bother
// serializing all the different registry config into the binary.
builder.with_root_dir(|root_dir| {
root_dir.name = "node_modules".to_string();
root_dir.name = ".deno_compile_node_modules".to_string();
let mut new_entries = Vec::with_capacity(root_dir.entries.len());
let mut localhost_entries = IndexMap::new();
for entry in std::mem::take(&mut root_dir.entries) {

@@ -715,6 +796,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
root_dir.entries = new_entries;
});

builder.set_new_root_path(root_path.to_path_buf())?;

Ok(builder)
}
}
@@ -22,8 +22,8 @@ use super::virtual_fs::FileBackedVfs;
pub struct DenoCompileFileSystem(Arc<FileBackedVfs>);

impl DenoCompileFileSystem {
pub fn new(vfs: FileBackedVfs) -> Self {
Self(Arc::new(vfs))
pub fn new(vfs: Arc<FileBackedVfs>) -> Self {
Self(vfs)
}

fn error_if_in_vfs(&self, path: &Path) -> FsResult<()> {
@@ -5,6 +5,8 @@
#![allow(dead_code)]
#![allow(unused_imports)]

use binary::StandaloneData;
use binary::StandaloneModules;
use deno_ast::MediaType;
use deno_cache_dir::npm::NpmCacheDir;
use deno_config::workspace::MappedResolution;

@@ -38,7 +40,6 @@ use deno_runtime::permissions::RuntimePermissionDescriptorParser;
use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference;
use eszip::EszipRelativeFileBaseUrl;
use import_map::parse_from_json;
use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::NodeResolutionMode;

@@ -54,6 +55,7 @@ use crate::args::CacheSetting;
use crate::args::NpmInstallDepsProvider;
use crate::args::StorageKeyResolver;
use crate::cache::Caches;
use crate::cache::DenoCacheEnvFsAdapter;
use crate::cache::DenoDirProvider;
use crate::cache::NodeAnalysisCache;
use crate::cache::RealDenoCacheEnv;
@@ -78,52 +80,18 @@ use crate::worker::ModuleLoaderFactory;

pub mod binary;
mod file_system;
mod serialization;
mod virtual_fs;

pub use binary::extract_standalone;
pub use binary::is_standalone_binary;
pub use binary::DenoCompileBinaryWriter;

use self::binary::load_npm_vfs;
use self::binary::Metadata;
use self::file_system::DenoCompileFileSystem;

struct WorkspaceEszipModule {
specifier: ModuleSpecifier,
inner: eszip::Module,
}

struct WorkspaceEszip {
eszip: eszip::EszipV2,
root_dir_url: Arc<ModuleSpecifier>,
}

impl WorkspaceEszip {
pub fn get_module(
&self,
specifier: &ModuleSpecifier,
) -> Option<WorkspaceEszipModule> {
if specifier.scheme() == "file" {
let specifier_key = EszipRelativeFileBaseUrl::new(&self.root_dir_url)
.specifier_key(specifier);
let module = self.eszip.get_module(&specifier_key)?;
let specifier = self.root_dir_url.join(&module.specifier).unwrap();
Some(WorkspaceEszipModule {
specifier,
inner: module,
})
} else {
let module = self.eszip.get_module(specifier.as_str())?;
Some(WorkspaceEszipModule {
specifier: ModuleSpecifier::parse(&module.specifier).unwrap(),
inner: module,
})
}
}
}

struct SharedModuleLoaderState {
eszip: WorkspaceEszip,
modules: StandaloneModules,
workspace_resolver: WorkspaceResolver,
node_resolver: Arc<CliNodeResolver>,
npm_module_loader: Arc<NpmModuleLoader>,
@@ -249,8 +217,10 @@ impl ModuleLoader for EmbeddedModuleLoader {
}

if specifier.scheme() == "jsr" {
if let Some(module) = self.shared.eszip.get_module(&specifier) {
return Ok(module.specifier);
if let Some(specifier) =
self.shared.modules.resolve_specifier(&specifier)?
{
return Ok(specifier.clone());
}
}

@@ -345,56 +315,30 @@ impl ModuleLoader for EmbeddedModuleLoader {
);
}

let Some(module) = self.shared.eszip.get_module(original_specifier) else {
return deno_core::ModuleLoadResponse::Sync(Err(type_error(format!(
"{MODULE_NOT_FOUND}: {}",
original_specifier
))));
};
let original_specifier = original_specifier.clone();

deno_core::ModuleLoadResponse::Async(
async move {
let code = module.inner.source().await.ok_or_else(|| {
type_error(format!("Module not found: {}", original_specifier))
})?;
let code = arc_u8_to_arc_str(code)
.map_err(|_| type_error("Module source is not utf-8"))?;
Ok(deno_core::ModuleSource::new_with_redirect(
match module.inner.kind {
eszip::ModuleKind::JavaScript => ModuleType::JavaScript,
eszip::ModuleKind::Json => ModuleType::Json,
eszip::ModuleKind::Jsonc => {
return Err(type_error("jsonc modules not supported"))
}
eszip::ModuleKind::OpaqueData => {
unreachable!();
}
},
ModuleSourceCode::String(code.into()),
&original_specifier,
&module.specifier,
None,
match self.shared.modules.read(original_specifier) {
Ok(Some(module)) => {
let (module_specifier, module_type, module_source) =
module.into_for_v8();
deno_core::ModuleLoadResponse::Sync(Ok(
deno_core::ModuleSource::new_with_redirect(
module_type,
module_source,
original_specifier,
module_specifier,
None,
),
))
}
.boxed_local(),
)
Ok(None) => deno_core::ModuleLoadResponse::Sync(Err(type_error(
format!("{MODULE_NOT_FOUND}: {}", original_specifier),
))),
Err(err) => deno_core::ModuleLoadResponse::Sync(Err(type_error(
format!("{:?}", err),
))),
}
}
}

fn arc_u8_to_arc_str(
arc_u8: Arc<[u8]>,
) -> Result<Arc<str>, std::str::Utf8Error> {
// Check that the string is valid UTF-8.
std::str::from_utf8(&arc_u8)?;
// SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as
// Arc<str>. This is proven by the From<Arc<str>> impl for Arc<[u8]> from the
// standard library.
Ok(unsafe {
std::mem::transmute::<std::sync::Arc<[u8]>, std::sync::Arc<str>>(arc_u8)
})
}

struct StandaloneModuleLoaderFactory {
shared: Arc<SharedModuleLoaderState>,
}

@@ -439,13 +383,15 @@ impl RootCertStoreProvider for StandaloneRootCertStoreProvider {
}
}

pub async fn run(
mut eszip: eszip::EszipV2,
metadata: Metadata,
) -> Result<i32, AnyError> {
let current_exe_path = std::env::current_exe().unwrap();
let current_exe_name =
current_exe_path.file_name().unwrap().to_string_lossy();
pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
let StandaloneData {
fs,
metadata,
modules,
npm_snapshot,
root_path,
vfs,
} = data;
let deno_dir_provider = Arc::new(DenoDirProvider::new(None));
let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider {
ca_stores: metadata.ca_stores,

@@ -459,112 +405,83 @@ pub async fn run(
));
// use a dummy npm registry url
let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap();
let root_path =
std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name));
let root_dir_url =
Arc::new(ModuleSpecifier::from_directory_path(&root_path).unwrap());
let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap();
let root_node_modules_path = root_path.join("node_modules");
let npm_cache_dir = NpmCacheDir::new(
&RealDenoCacheEnv,
root_node_modules_path.clone(),
vec![npm_registry_url.clone()],
);
let npm_global_cache_dir = npm_cache_dir.get_cache_location();
let npm_global_cache_dir = root_path.join(".deno_compile_node_modules");
let cache_setting = CacheSetting::Only;
let (fs, npm_resolver, maybe_vfs_root) = match metadata.node_modules {
let npm_resolver = match metadata.node_modules {
Some(binary::NodeModules::Managed { node_modules_dir }) => {
// this will always have a snapshot
let snapshot = eszip.take_npm_snapshot().unwrap();
let vfs_root_dir_path = if node_modules_dir.is_some() {
root_path.clone()
} else {
npm_cache_dir.root_dir().to_owned()
};
let vfs = load_npm_vfs(vfs_root_dir_path.clone())
.context("Failed to load npm vfs.")?;
let snapshot = npm_snapshot.unwrap();
let maybe_node_modules_path = node_modules_dir
.map(|node_modules_dir| vfs_root_dir_path.join(node_modules_dir));
let fs = Arc::new(DenoCompileFileSystem::new(vfs))
as Arc<dyn deno_fs::FileSystem>;
let npm_resolver =
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
CliNpmResolverManagedCreateOptions {
snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some(
snapshot,
)),
maybe_lockfile: None,
fs: fs.clone(),
http_client_provider: http_client_provider.clone(),
npm_global_cache_dir,
cache_setting,
text_only_progress_bar: progress_bar,
maybe_node_modules_path,
npm_system_info: Default::default(),
npm_install_deps_provider: Arc::new(
// this is only used for installing packages, which isn't necessary with deno compile
NpmInstallDepsProvider::empty(),
),
// create an npmrc that uses the fake npm_registry_url to resolve packages
npmrc: Arc::new(ResolvedNpmRc {
default_config: deno_npm::npm_rc::RegistryConfigWithUrl {
registry_url: npm_registry_url.clone(),
config: Default::default(),
},
scopes: Default::default(),
registry_configs: Default::default(),
}),
lifecycle_scripts: Default::default(),
},
))
.await?;
(fs, npm_resolver, Some(vfs_root_dir_path))
.map(|node_modules_dir| root_path.join(node_modules_dir));
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
CliNpmResolverManagedCreateOptions {
snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some(
snapshot,
)),
maybe_lockfile: None,
fs: fs.clone(),
http_client_provider: http_client_provider.clone(),
npm_global_cache_dir,
cache_setting,
text_only_progress_bar: progress_bar,
maybe_node_modules_path,
npm_system_info: Default::default(),
npm_install_deps_provider: Arc::new(
// this is only used for installing packages, which isn't necessary with deno compile
NpmInstallDepsProvider::empty(),
),
// create an npmrc that uses the fake npm_registry_url to resolve packages
npmrc: Arc::new(ResolvedNpmRc {
default_config: deno_npm::npm_rc::RegistryConfigWithUrl {
registry_url: npm_registry_url.clone(),
config: Default::default(),
},
scopes: Default::default(),
registry_configs: Default::default(),
}),
lifecycle_scripts: Default::default(),
},
))
.await?
}
Some(binary::NodeModules::Byonm {
root_node_modules_dir,
}) => {
let vfs_root_dir_path = root_path.clone();
let vfs = load_npm_vfs(vfs_root_dir_path.clone())
.context("Failed to load vfs.")?;
let root_node_modules_dir =
root_node_modules_dir.map(|p| vfs.root().join(p));
let fs = Arc::new(DenoCompileFileSystem::new(vfs))
as Arc<dyn deno_fs::FileSystem>;
let npm_resolver = create_cli_npm_resolver(
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
create_cli_npm_resolver(CliNpmResolverCreateOptions::Byonm(
CliByonmNpmResolverCreateOptions {
fs: CliDenoResolverFs(fs.clone()),
root_node_modules_dir,
}),
)
.await?;
(fs, npm_resolver, Some(vfs_root_dir_path))
},
))
.await?
}
None => {
let fs = Arc::new(deno_fs::RealFs) as Arc<dyn deno_fs::FileSystem>;
let npm_resolver =
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
CliNpmResolverManagedCreateOptions {
snapshot: CliNpmResolverManagedSnapshotOption::Specified(None),
maybe_lockfile: None,
fs: fs.clone(),
http_client_provider: http_client_provider.clone(),
npm_global_cache_dir,
cache_setting,
text_only_progress_bar: progress_bar,
maybe_node_modules_path: None,
npm_system_info: Default::default(),
npm_install_deps_provider: Arc::new(
// this is only used for installing packages, which isn't necessary with deno compile
NpmInstallDepsProvider::empty(),
),
// Packages from different registries are already inlined in the ESZip,
// so no need to create actual `.npmrc` configuration.
npmrc: create_default_npmrc(),
lifecycle_scripts: Default::default(),
},
))
.await?;
(fs, npm_resolver, None)
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
CliNpmResolverManagedCreateOptions {
snapshot: CliNpmResolverManagedSnapshotOption::Specified(None),
maybe_lockfile: None,
fs: fs.clone(),
http_client_provider: http_client_provider.clone(),
npm_global_cache_dir,
cache_setting,
text_only_progress_bar: progress_bar,
maybe_node_modules_path: None,
npm_system_info: Default::default(),
npm_install_deps_provider: Arc::new(
// this is only used for installing packages, which isn't necessary with deno compile
NpmInstallDepsProvider::empty(),
),
// Packages from different registries are already inlined in the binary,
// so no need to create actual `.npmrc` configuration.
npmrc: create_default_npmrc(),
lifecycle_scripts: Default::default(),
},
))
.await?
}
};
@ -645,10 +562,7 @@ pub async fn run(
|
|||
};
|
||||
let module_loader_factory = StandaloneModuleLoaderFactory {
|
||||
shared: Arc::new(SharedModuleLoaderState {
|
||||
eszip: WorkspaceEszip {
|
||||
eszip,
|
||||
root_dir_url,
|
||||
},
|
||||
modules,
|
||||
workspace_resolver,
|
||||
node_resolver: cli_node_resolver.clone(),
|
||||
npm_module_loader: Arc::new(NpmModuleLoader::new(
|
||||
|
@ -663,19 +577,17 @@ pub async fn run(
|
|||
let permissions = {
|
||||
let mut permissions =
|
||||
metadata.permissions.to_options(/* cli_arg_urls */ &[]);
|
||||
// if running with an npm vfs, grant read access to it
|
||||
if let Some(vfs_root) = maybe_vfs_root {
|
||||
match &mut permissions.allow_read {
|
||||
Some(vec) if vec.is_empty() => {
|
||||
// do nothing, already granted
|
||||
}
|
||||
Some(vec) => {
|
||||
vec.push(vfs_root.to_string_lossy().to_string());
|
||||
}
|
||||
None => {
|
||||
permissions.allow_read =
|
||||
Some(vec![vfs_root.to_string_lossy().to_string()]);
|
||||
}
|
||||
// grant read access to the vfs
|
||||
match &mut permissions.allow_read {
|
||||
Some(vec) if vec.is_empty() => {
|
||||
// do nothing, already granted
|
||||
}
|
||||
Some(vec) => {
|
||||
vec.push(root_path.to_string_lossy().to_string());
|
||||
}
|
||||
None => {
|
||||
permissions.allow_read =
|
||||
Some(vec![root_path.to_string_lossy().to_string()]);
|
||||
}
|
||||
}
|
||||
|
||||
|
642 cli/standalone/serialization.rs (new file)

@@ -0,0 +1,642 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::io::Write;

use deno_ast::MediaType;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_core::FastString;
use deno_core::ModuleSourceCode;
use deno_core::ModuleType;
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_semver::package::PackageReq;

use crate::standalone::virtual_fs::VirtualDirectory;

use super::binary::Metadata;
use super::virtual_fs::VfsBuilder;

const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";

/// Binary format:
/// * d3n0l4nd
/// * <metadata_len><metadata>
/// * <npm_snapshot_len><npm_snapshot>
/// * <remote_modules_len><remote_modules>
/// * <vfs_headers_len><vfs_headers>
/// * <vfs_file_data_len><vfs_file_data>
/// * d3n0l4nd
pub fn serialize_binary_data_section(
metadata: &Metadata,
npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
remote_modules: &RemoteModulesStoreBuilder,
vfs: VfsBuilder,
) -> Result<Vec<u8>, AnyError> {
fn write_bytes_with_len(bytes: &mut Vec<u8>, data: &[u8]) {
bytes.extend_from_slice(&(data.len() as u64).to_le_bytes());
bytes.extend_from_slice(data);
}

let mut bytes = Vec::new();
bytes.extend_from_slice(MAGIC_BYTES);

// 1. Metadata
{
let metadata = serde_json::to_string(metadata)?;
write_bytes_with_len(&mut bytes, metadata.as_bytes());
}
// 2. Npm snapshot
{
let npm_snapshot =
npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
write_bytes_with_len(&mut bytes, &npm_snapshot);
}
// 3. Remote modules
{
let update_index = bytes.len();
bytes.extend_from_slice(&(0_u64).to_le_bytes());
let start_index = bytes.len();
remote_modules.write(&mut bytes)?;
let length = bytes.len() - start_index;
let length_bytes = (length as u64).to_le_bytes();
bytes[update_index..update_index + length_bytes.len()]
.copy_from_slice(&length_bytes);
}
// 4. VFS
{
let (vfs, vfs_files) = vfs.into_dir_and_files();
let vfs = serde_json::to_string(&vfs)?;
write_bytes_with_len(&mut bytes, vfs.as_bytes());
let vfs_bytes_len = vfs_files.iter().map(|f| f.len() as u64).sum::<u64>();
bytes.extend_from_slice(&vfs_bytes_len.to_le_bytes());
for file in &vfs_files {
bytes.extend_from_slice(file);
}
}

// write the magic bytes at the end so we can use it
// to make sure we've deserialized correctly
bytes.extend_from_slice(MAGIC_BYTES);

Ok(bytes)
}

pub struct DeserializedDataSection {
pub metadata: Metadata,
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
pub remote_modules: RemoteModulesStore,
pub vfs_dir: VirtualDirectory,
pub vfs_files_data: &'static [u8],
}

pub fn deserialize_binary_data_section(
data: &'static [u8],
) -> Result<Option<DeserializedDataSection>, AnyError> {
fn read_bytes_with_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
let (input, len) = read_u64(input)?;
let (input, data) = read_bytes(input, len as usize)?;
Ok((input, data))
}

fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> {
if input.len() < MAGIC_BYTES.len() {
bail!("Unexpected end of data. Could not find magic bytes.");
}
let (magic_bytes, input) = input.split_at(MAGIC_BYTES.len());
if magic_bytes != MAGIC_BYTES {
return Ok((input, false));
}
Ok((input, true))
}

let (input, found) = read_magic_bytes(data)?;
if !found {
return Ok(None);
}

// 1. Metadata
let (input, data) = read_bytes_with_len(input).context("reading metadata")?;
let metadata: Metadata =
serde_json::from_slice(data).context("deserializing metadata")?;
// 2. Npm snapshot
let (input, data) =
read_bytes_with_len(input).context("reading npm snapshot")?;
let npm_snapshot = if data.is_empty() {
None
} else {
Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?)
};
// 3. Remote modules
let (input, data) =
read_bytes_with_len(input).context("reading remote modules data")?;
let remote_modules =
RemoteModulesStore::build(data).context("deserializing remote modules")?;
// 4. VFS
let (input, data) = read_bytes_with_len(input).context("vfs")?;
let vfs_dir: VirtualDirectory =
serde_json::from_slice(data).context("deserializing vfs data")?;
let (input, vfs_files_data) =
read_bytes_with_len(input).context("reading vfs files data")?;

// finally ensure we read the magic bytes at the end
let (_input, found) = read_magic_bytes(input)?;
if !found {
bail!("Could not find magic bytes at the end of the data.");
}

Ok(Some(DeserializedDataSection {
metadata,
npm_snapshot,
remote_modules,
vfs_dir,
vfs_files_data,
}))
}
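Each section above is framed with a little-endian `u64` length prefix. A self-contained sketch of that framing primitive (the helper names mirror the ones in this file; `split_at_checked` needs Rust 1.80+; the example is illustrative and not part of the commit):

```rust
fn write_bytes_with_len(bytes: &mut Vec<u8>, data: &[u8]) {
    // length prefix (little-endian u64), then the raw bytes
    bytes.extend_from_slice(&(data.len() as u64).to_le_bytes());
    bytes.extend_from_slice(data);
}

fn read_bytes_with_len(input: &[u8]) -> Option<(&[u8], &[u8])> {
    // read the u64 prefix, then split off exactly that many bytes
    let (len_bytes, rest) = input.split_at_checked(8)?;
    let len = u64::from_le_bytes(len_bytes.try_into().ok()?) as usize;
    let (data, rest) = rest.split_at_checked(len)?;
    Some((rest, data))
}

fn main() {
    let mut buf = Vec::new();
    write_bytes_with_len(&mut buf, b"metadata");
    write_bytes_with_len(&mut buf, b"snapshot");
    let (rest, first) = read_bytes_with_len(&buf).unwrap();
    let (rest, second) = read_bytes_with_len(rest).unwrap();
    assert_eq!((first, second), (&b"metadata"[..], &b"snapshot"[..]));
    assert!(rest.is_empty());
}
```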
#[derive(Default)]
pub struct RemoteModulesStoreBuilder {
specifiers: Vec<(String, u64)>,
data: Vec<(MediaType, Vec<u8>)>,
data_byte_len: u64,
redirects: Vec<(String, String)>,
redirects_len: u64,
}

impl RemoteModulesStoreBuilder {
pub fn add(&mut self, specifier: &Url, media_type: MediaType, data: Vec<u8>) {
let specifier = specifier.to_string();
self.specifiers.push((specifier, self.data_byte_len));
self.data_byte_len += 1 + 8 + data.len() as u64; // media type (1 byte), data length (8 bytes), data
self.data.push((media_type, data));
}

pub fn add_redirects(&mut self, redirects: &BTreeMap<Url, Url>) {
self.redirects.reserve(redirects.len());
for (from, to) in redirects {
let from = from.to_string();
let to = to.to_string();
self.redirects_len += (4 + from.len() + 4 + to.len()) as u64;
self.redirects.push((from, to));
}
}

fn write(&self, writer: &mut dyn Write) -> Result<(), AnyError> {
writer.write_all(&(self.specifiers.len() as u32).to_le_bytes())?;
writer.write_all(&(self.redirects.len() as u32).to_le_bytes())?;
for (specifier, offset) in &self.specifiers {
writer.write_all(&(specifier.len() as u32).to_le_bytes())?;
writer.write_all(specifier.as_bytes())?;
writer.write_all(&offset.to_le_bytes())?;
}
for (from, to) in &self.redirects {
writer.write_all(&(from.len() as u32).to_le_bytes())?;
writer.write_all(from.as_bytes())?;
writer.write_all(&(to.len() as u32).to_le_bytes())?;
writer.write_all(to.as_bytes())?;
}
for (media_type, data) in &self.data {
writer.write_all(&[serialize_media_type(*media_type)])?;
writer.write_all(&(data.len() as u64).to_le_bytes())?;
writer.write_all(data)?;
}
Ok(())
}
}

pub struct DenoCompileModuleData<'a> {
pub specifier: &'a Url,
pub media_type: MediaType,
pub data: Cow<'static, [u8]>,
}

impl<'a> DenoCompileModuleData<'a> {
pub fn into_for_v8(self) -> (&'a Url, ModuleType, ModuleSourceCode) {
fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode {
ModuleSourceCode::Bytes(match data {
Cow::Borrowed(d) => d.into(),
Cow::Owned(d) => d.into_boxed_slice().into(),
})
}

fn into_string_unsafe(data: Cow<'static, [u8]>) -> ModuleSourceCode {
// todo(https://github.com/denoland/deno_core/pull/943): store whether
// the string is ascii or not ahead of time so we can avoid the is_ascii()
// check in FastString::from_static
match data {
Cow::Borrowed(d) => ModuleSourceCode::String(
// SAFETY: we know this is a valid utf8 string
unsafe { FastString::from_static(std::str::from_utf8_unchecked(d)) },
),
Cow::Owned(d) => ModuleSourceCode::Bytes(d.into_boxed_slice().into()),
}
}

let (media_type, source) = match self.media_type {
MediaType::JavaScript
| MediaType::Jsx
| MediaType::Mjs
| MediaType::Cjs
| MediaType::TypeScript
| MediaType::Mts
| MediaType::Cts
| MediaType::Dts
| MediaType::Dmts
| MediaType::Dcts
| MediaType::Tsx => {
(ModuleType::JavaScript, into_string_unsafe(self.data))
}
MediaType::Json => (ModuleType::Json, into_string_unsafe(self.data)),
MediaType::Wasm => (ModuleType::Wasm, into_bytes(self.data)),
// just assume javascript if we made it here
MediaType::TsBuildInfo | MediaType::SourceMap | MediaType::Unknown => {
(ModuleType::JavaScript, into_bytes(self.data))
}
};
(self.specifier, media_type, source)
}
}

enum RemoteModulesStoreSpecifierValue {
Data(usize),
Redirect(Url),
}

pub struct RemoteModulesStore {
specifiers: HashMap<Url, RemoteModulesStoreSpecifierValue>,
files_data: &'static [u8],
}

impl RemoteModulesStore {
fn build(data: &'static [u8]) -> Result<Self, AnyError> {
fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> {
let (input, specifier) = read_string_lossy(input)?;
let specifier = Url::parse(&specifier)?;
let (input, offset) = read_u64(input)?;
Ok((input, (specifier, offset)))
}

fn read_redirect(input: &[u8]) -> Result<(&[u8], (Url, Url)), AnyError> {
let (input, from) = read_string_lossy(input)?;
let from = Url::parse(&from)?;
let (input, to) = read_string_lossy(input)?;
let to = Url::parse(&to)?;
Ok((input, (from, to)))
}

fn read_headers(
input: &[u8],
) -> Result<(&[u8], HashMap<Url, RemoteModulesStoreSpecifierValue>), AnyError>
{
let (input, specifiers_len) = read_u32_as_usize(input)?;
let (mut input, redirects_len) = read_u32_as_usize(input)?;
let mut specifiers =
HashMap::with_capacity(specifiers_len + redirects_len);
for _ in 0..specifiers_len {
let (current_input, (specifier, offset)) =
read_specifier(input).context("reading specifier")?;
input = current_input;
specifiers.insert(
specifier,
RemoteModulesStoreSpecifierValue::Data(offset as usize),
);
}

for _ in 0..redirects_len {
let (current_input, (from, to)) = read_redirect(input)?;
input = current_input;
specifiers.insert(from, RemoteModulesStoreSpecifierValue::Redirect(to));
}

Ok((input, specifiers))
}

let (files_data, specifiers) = read_headers(data)?;

Ok(Self {
specifiers,
files_data,
})
}

pub fn resolve_specifier<'a>(
&'a self,
specifier: &'a Url,
) -> Result<Option<&'a Url>, AnyError> {
let mut count = 0;
let mut current = specifier;
loop {
if count > 10 {
bail!("Too many redirects resolving '{}'", specifier);
}
match self.specifiers.get(current) {
Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
current = to;
count += 1;
}
Some(RemoteModulesStoreSpecifierValue::Data(_)) => {
return Ok(Some(current));
}
None => {
return Ok(None);
}
}
}
}

pub fn read<'a>(
&'a self,
specifier: &'a Url,
) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
let mut count = 0;
let mut current = specifier;
loop {
if count > 10 {
bail!("Too many redirects resolving '{}'", specifier);
}
match self.specifiers.get(current) {
Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
current = to;
count += 1;
}
Some(RemoteModulesStoreSpecifierValue::Data(offset)) => {
let input = &self.files_data[*offset..];
let (input, media_type_byte) = read_bytes(input, 1)?;
let media_type = deserialize_media_type(media_type_byte[0])?;
let (input, len) = read_u64(input)?;
let (_input, data) = read_bytes(input, len as usize)?;
return Ok(Some(DenoCompileModuleData {
specifier,
media_type,
data: Cow::Borrowed(data),
}));
}
None => {
return Ok(None);
}
}
}
}
}
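Both `resolve_specifier` and `read` above walk a redirect chain capped at 10 hops before bailing. The same pattern in isolation (plain `&str` keys instead of `Url`; illustrative only, and here a key with no further redirect simply counts as resolved):

```rust
use std::collections::HashMap;

fn resolve<'a>(redirects: &'a HashMap<&'a str, &'a str>, start: &'a str) -> Option<&'a str> {
    let mut current = start;
    // cap the walk so a redirect cycle cannot loop forever
    for _ in 0..10 {
        match redirects.get(current) {
            Some(&next) => current = next,
            None => return Some(current), // no further redirect: resolved
        }
    }
    None // treated as "too many redirects" in the real code
}

fn main() {
    let redirects = HashMap::from([("a", "b"), ("b", "c"), ("x", "x")]);
    assert_eq!(resolve(&redirects, "a"), Some("c"));
    assert_eq!(resolve(&redirects, "x"), None); // cycle detected by the cap
}
```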
fn serialize_npm_snapshot(
mut snapshot: SerializedNpmResolutionSnapshot,
) -> Vec<u8> {
fn append_string(bytes: &mut Vec<u8>, string: &str) {
let len = string.len() as u32;
bytes.extend_from_slice(&len.to_le_bytes());
bytes.extend_from_slice(string.as_bytes());
}

snapshot.packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
let ids_to_stored_ids = snapshot
.packages
.iter()
.enumerate()
.map(|(i, pkg)| (&pkg.id, i as u32))
.collect::<HashMap<_, _>>();

let mut root_packages: Vec<_> = snapshot.root_packages.iter().collect();
root_packages.sort();
let mut bytes = Vec::new();

bytes.extend_from_slice(&(snapshot.packages.len() as u32).to_le_bytes());
for pkg in &snapshot.packages {
append_string(&mut bytes, &pkg.id.as_serialized());
}

bytes.extend_from_slice(&(root_packages.len() as u32).to_le_bytes());
for (req, id) in root_packages {
append_string(&mut bytes, &req.to_string());
let id = ids_to_stored_ids.get(&id).unwrap();
bytes.extend_from_slice(&id.to_le_bytes());
}

for pkg in &snapshot.packages {
let deps_len = pkg.dependencies.len() as u32;
bytes.extend_from_slice(&deps_len.to_le_bytes());
let mut deps: Vec<_> = pkg.dependencies.iter().collect();
deps.sort();
for (req, id) in deps {
append_string(&mut bytes, req);
let id = ids_to_stored_ids.get(&id).unwrap();
bytes.extend_from_slice(&id.to_le_bytes());
}
}

bytes
}

fn deserialize_npm_snapshot(
input: &[u8],
) -> Result<ValidSerializedNpmResolutionSnapshot, AnyError> {
fn parse_id(input: &[u8]) -> Result<(&[u8], NpmPackageId), AnyError> {
let (input, id) = read_string_lossy(input)?;
let id = NpmPackageId::from_serialized(&id)?;
Ok((input, id))
}

#[allow(clippy::needless_lifetimes)] // clippy bug
fn parse_root_package<'a>(
id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
) -> impl Fn(&[u8]) -> Result<(&[u8], (PackageReq, NpmPackageId)), AnyError> + 'a
{
|input| {
let (input, req) = read_string_lossy(input)?;
let req = PackageReq::from_str(&req)?;
let (input, id) = read_u32_as_usize(input)?;
Ok((input, (req, id_to_npm_id(id)?)))
}
}

#[allow(clippy::needless_lifetimes)] // clippy bug
fn parse_package_dep<'a>(
id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
) -> impl Fn(&[u8]) -> Result<(&[u8], (String, NpmPackageId)), AnyError> + 'a
{
|input| {
let (input, req) = read_string_lossy(input)?;
let (input, id) = read_u32_as_usize(input)?;
Ok((input, (req.into_owned(), id_to_npm_id(id)?)))
}
}

fn parse_package<'a>(
input: &'a [u8],
id: NpmPackageId,
id_to_npm_id: &impl Fn(usize) -> Result<NpmPackageId, AnyError>,
) -> Result<(&'a [u8], SerializedNpmResolutionSnapshotPackage), AnyError> {
let (input, deps_len) = read_u32_as_usize(input)?;
let (input, dependencies) =
parse_hashmap_n_times(input, deps_len, parse_package_dep(id_to_npm_id))?;
Ok((
input,
SerializedNpmResolutionSnapshotPackage {
id,
system: Default::default(),
dist: Default::default(),
dependencies,
optional_dependencies: Default::default(),
bin: None,
scripts: Default::default(),
deprecated: Default::default(),
},
))
}

let (input, packages_len) = read_u32_as_usize(input)?;

// get a hashmap of all the npm package ids to their serialized ids
let (input, data_ids_to_npm_ids) =
parse_vec_n_times(input, packages_len, parse_id)
.context("deserializing id")?;
let data_id_to_npm_id = |id: usize| {
data_ids_to_npm_ids
.get(id)
.cloned()
.ok_or_else(|| deno_core::anyhow::anyhow!("Invalid npm package id"))
};

let (input, root_packages_len) = read_u32_as_usize(input)?;
let (input, root_packages) = parse_hashmap_n_times(
input,
root_packages_len,
parse_root_package(&data_id_to_npm_id),
)
.context("deserializing root package")?;
let (input, packages) =
parse_vec_n_times_with_index(input, packages_len, |input, index| {
parse_package(input, data_id_to_npm_id(index)?, &data_id_to_npm_id)
})
.context("deserializing package")?;

if !input.is_empty() {
bail!("Unexpected data left over");
}

Ok(
SerializedNpmResolutionSnapshot {
packages,
root_packages,
}
// this is ok because we have already verified that all the
// identifiers found in the snapshot are valid via the
// npm package id -> npm package id mapping
.into_valid_unsafe(),
)
}

fn serialize_media_type(media_type: MediaType) -> u8 {
match media_type {
MediaType::JavaScript => 0,
MediaType::Jsx => 1,
MediaType::Mjs => 2,
MediaType::Cjs => 3,
MediaType::TypeScript => 4,
MediaType::Mts => 5,
MediaType::Cts => 6,
MediaType::Dts => 7,
MediaType::Dmts => 8,
MediaType::Dcts => 9,
MediaType::Tsx => 10,
MediaType::Json => 11,
MediaType::Wasm => 12,
MediaType::TsBuildInfo => 13,
MediaType::SourceMap => 14,
MediaType::Unknown => 15,
}
}

fn deserialize_media_type(value: u8) -> Result<MediaType, AnyError> {
match value {
0 => Ok(MediaType::JavaScript),
1 => Ok(MediaType::Jsx),
2 => Ok(MediaType::Mjs),
3 => Ok(MediaType::Cjs),
4 => Ok(MediaType::TypeScript),
5 => Ok(MediaType::Mts),
6 => Ok(MediaType::Cts),
7 => Ok(MediaType::Dts),
8 => Ok(MediaType::Dmts),
9 => Ok(MediaType::Dcts),
10 => Ok(MediaType::Tsx),
11 => Ok(MediaType::Json),
12 => Ok(MediaType::Wasm),
13 => Ok(MediaType::TsBuildInfo),
14 => Ok(MediaType::SourceMap),
15 => Ok(MediaType::Unknown),
_ => bail!("Unknown media type value: {}", value),
}
}
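`serialize_media_type` and `deserialize_media_type` must stay exact inverses of each other. A tiny stand-in enum (not the real `deno_ast::MediaType`) shows the round-trip property they are expected to satisfy:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Media { Js, Ts, Json }

fn ser(m: Media) -> u8 {
    match m { Media::Js => 0, Media::Ts => 4, Media::Json => 11 }
}

fn de(v: u8) -> Option<Media> {
    match v { 0 => Some(Media::Js), 4 => Some(Media::Ts), 11 => Some(Media::Json), _ => None }
}

fn main() {
    // every serialized tag must deserialize back to the same variant
    for m in [Media::Js, Media::Ts, Media::Json] {
        assert_eq!(de(ser(m)), Some(m));
    }
    // unknown tags are rejected rather than silently mapped
    assert_eq!(de(200), None);
}
```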
fn parse_hashmap_n_times<TKey: std::cmp::Eq + std::hash::Hash, TValue>(
mut input: &[u8],
times: usize,
parse: impl Fn(&[u8]) -> Result<(&[u8], (TKey, TValue)), AnyError>,
) -> Result<(&[u8], HashMap<TKey, TValue>), AnyError> {
let mut results = HashMap::with_capacity(times);
for _ in 0..times {
let result = parse(input);
let (new_input, (key, value)) = result?;
results.insert(key, value);
input = new_input;
}
Ok((input, results))
}

fn parse_vec_n_times<TResult>(
input: &[u8],
times: usize,
parse: impl Fn(&[u8]) -> Result<(&[u8], TResult), AnyError>,
) -> Result<(&[u8], Vec<TResult>), AnyError> {
parse_vec_n_times_with_index(input, times, |input, _index| parse(input))
}

fn parse_vec_n_times_with_index<TResult>(
mut input: &[u8],
times: usize,
parse: impl Fn(&[u8], usize) -> Result<(&[u8], TResult), AnyError>,
) -> Result<(&[u8], Vec<TResult>), AnyError> {
let mut results = Vec::with_capacity(times);
for i in 0..times {
let result = parse(input, i);
let (new_input, result) = result?;
results.push(result);
input = new_input;
}
Ok((input, results))
}

fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> {
if input.len() < len {
bail!("Unexpected end of data.",);
}
let (len_bytes, input) = input.split_at(len);
Ok((input, len_bytes))
}

fn read_string_lossy(input: &[u8]) -> Result<(&[u8], Cow<str>), AnyError> {
let (input, str_len) = read_u32_as_usize(input)?;
let (input, data_bytes) = read_bytes(input, str_len)?;
Ok((input, String::from_utf8_lossy(data_bytes)))
}

fn read_u32_as_usize(input: &[u8]) -> Result<(&[u8], usize), AnyError> {
let (input, len_bytes) = read_bytes(input, 4)?;
let len = u32::from_le_bytes(len_bytes.try_into()?);
Ok((input, len as usize))
}

fn read_u64(input: &[u8]) -> Result<(&[u8], u64), AnyError> {
let (input, len_bytes) = read_bytes(input, 8)?;
let len = u64::from_le_bytes(len_bytes.try_into()?);
Ok((input, len))
}
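The helpers above thread the remaining input slice through every call. A self-contained sketch of that style, mirroring `parse_vec_n_times` (illustrative only; `split_at_checked` needs Rust 1.80+):

```rust
fn parse_n_times<T>(
    mut input: &[u8],
    times: usize,
    parse: impl Fn(&[u8]) -> Option<(&[u8], T)>,
) -> Option<(&[u8], Vec<T>)> {
    let mut results = Vec::with_capacity(times);
    for _ in 0..times {
        let (rest, value) = parse(input)?;
        results.push(value);
        input = rest; // thread the remaining input forward
    }
    Some((input, results))
}

// each item: one u8 length, then that many bytes of UTF-8
fn parse_item(input: &[u8]) -> Option<(&[u8], String)> {
    let (&len, rest) = input.split_first()?;
    let (bytes, rest) = rest.split_at_checked(len as usize)?;
    Some((rest, String::from_utf8_lossy(bytes).into_owned()))
}

fn main() {
    let data = [2, b'h', b'i', 4, b'd', b'e', b'n', b'o'];
    let (rest, items) = parse_n_times(&data, 2, parse_item).unwrap();
    assert_eq!(items, ["hi", "deno"]);
    assert!(rest.is_empty());
}
```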
@@ -7,6 +7,7 @@ use std::fs::File;
use std::io::Read;
use std::io::Seek;
use std::io::SeekFrom;
use std::ops::Range;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;

@@ -67,6 +68,26 @@ impl VfsBuilder {
})
}

pub fn set_new_root_path(
&mut self,
root_path: PathBuf,
) -> Result<(), AnyError> {
let root_path = canonicalize_path(&root_path)?;
self.root_path = root_path;
self.root_dir = VirtualDirectory {
name: self
.root_path
.file_stem()
.map(|s| s.to_string_lossy().into_owned())
.unwrap_or("root".to_string()),
entries: vec![VfsEntry::Dir(VirtualDirectory {
name: std::mem::take(&mut self.root_dir.name),
entries: std::mem::take(&mut self.root_dir.entries),
})],
};
Ok(())
}
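A minimal sketch of the re-rooting move in `set_new_root_path`: the old root is moved out with `std::mem::take` (avoiding a clone) and becomes the single child of the fresh root. `Dir` here is a stand-in for `VirtualDirectory`:

```rust
struct Dir {
    name: String,
    entries: Vec<Dir>,
}

fn set_new_root(root: &mut Dir, new_name: &str) {
    // move the old root's fields out, leaving empty defaults behind
    let old = Dir {
        name: std::mem::take(&mut root.name),
        entries: std::mem::take(&mut root.entries),
    };
    root.name = new_name.to_string();
    root.entries = vec![old]; // old root now lives one level deeper
}

fn main() {
    let mut root = Dir { name: "node_modules".into(), entries: vec![] };
    set_new_root(&mut root, "root");
    assert_eq!(root.name, "root");
    assert_eq!(root.entries[0].name, "node_modules");
}
```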
pub fn with_root_dir<R>(
&mut self,
with_root: impl FnOnce(&mut VirtualDirectory) -> R,

@@ -119,7 +140,7 @@ impl VfsBuilder {
// inline the symlink and make the target file
let file_bytes = std::fs::read(&target)
.with_context(|| format!("Reading {}", path.display()))?;
self.add_file(&path, file_bytes)?;
self.add_file_with_data_inner(&path, file_bytes)?;
} else {
log::warn!(
"{} Symlink target is outside '{}'. Excluding symlink at '{}' with target '{}'.",

@@ -191,16 +212,32 @@ impl VfsBuilder {
self.add_file_at_path_not_symlink(&target_path)
}

pub fn add_file_at_path_not_symlink(
fn add_file_at_path_not_symlink(
&mut self,
path: &Path,
) -> Result<(), AnyError> {
let file_bytes = std::fs::read(path)
.with_context(|| format!("Reading {}", path.display()))?;
self.add_file(path, file_bytes)
self.add_file_with_data_inner(path, file_bytes)
}

fn add_file(&mut self, path: &Path, data: Vec<u8>) -> Result<(), AnyError> {
pub fn add_file_with_data(
&mut self,
path: &Path,
data: Vec<u8>,
) -> Result<(), AnyError> {
let target_path = canonicalize_path(path)?;
if target_path != path {
self.add_symlink(path, &target_path)?;
}
self.add_file_with_data_inner(&target_path, data)
}

fn add_file_with_data_inner(
&mut self,
path: &Path,
data: Vec<u8>,
) -> Result<(), AnyError> {
log::debug!("Adding file '{}'", path.display());
let checksum = util::checksum::gen(&[&data]);
let offset = if let Some(offset) = self.file_offsets.get(&checksum) {

@@ -249,8 +286,15 @@ impl VfsBuilder {
path.display(),
target.display()
);
let dest = self.path_relative_root(target)?;
if dest == self.path_relative_root(path)? {
let relative_target = self.path_relative_root(target)?;
let relative_path = match self.path_relative_root(path) {
Ok(path) => path,
Err(StripRootError { .. }) => {
// ignore if the original path is outside the root directory
return Ok(());
}
};
if relative_target == relative_path {
// it's the same, ignore
return Ok(());
}

@@ -263,7 +307,7 @@ impl VfsBuilder {
insert_index,
VfsEntry::Symlink(VirtualSymlink {
name: name.to_string(),
dest_parts: dest
dest_parts: relative_target
.components()
.map(|c| c.as_os_str().to_string_lossy().to_string())
.collect::<Vec<_>>(),

@@ -751,14 +795,14 @@ impl deno_io::fs::File for FileBackedVfsFile {

#[derive(Debug)]
pub struct FileBackedVfs {
file: Mutex<Vec<u8>>,
vfs_data: Cow<'static, [u8]>,
fs_root: VfsRoot,
}

impl FileBackedVfs {
pub fn new(file: Vec<u8>, fs_root: VfsRoot) -> Self {
pub fn new(data: Cow<'static, [u8]>, fs_root: VfsRoot) -> Self {
Self {
file: Mutex::new(file),
vfs_data: data,
fs_root,
}
}

@@ -827,10 +871,15 @@ impl FileBackedVfs {
Ok(path)
}

pub fn read_file_all(&self, file: &VirtualFile) -> std::io::Result<Vec<u8>> {
let mut buf = vec![0; file.len as usize];
self.read_file(file, 0, &mut buf)?;
Ok(buf)
pub fn read_file_all(
&self,
file: &VirtualFile,
) -> std::io::Result<Cow<'static, [u8]>> {
let read_range = self.get_read_range(file, 0, file.len)?;
match &self.vfs_data {
Cow::Borrowed(data) => Ok(Cow::Borrowed(&data[read_range])),
Cow::Owned(data) => Ok(Cow::Owned(data[read_range].to_vec())),
}
}

pub fn read_file(

@@ -839,18 +888,27 @@ impl FileBackedVfs {
pos: u64,
buf: &mut [u8],
) -> std::io::Result<usize> {
let data = self.file.lock();
let read_range = self.get_read_range(file, pos, buf.len() as u64)?;
buf.copy_from_slice(&self.vfs_data[read_range]);
Ok(buf.len())
}

fn get_read_range(
&self,
file: &VirtualFile,
pos: u64,
len: u64,
) -> std::io::Result<Range<usize>> {
let data = &self.vfs_data;
let start = self.fs_root.start_file_offset + file.offset + pos;
let end = start + buf.len() as u64;
let end = start + len;
if end > data.len() as u64 {
return Err(std::io::Error::new(
std::io::ErrorKind::UnexpectedEof,
"unexpected EOF",
));
}

buf.copy_from_slice(&data[start as usize..end as usize]);
Ok(buf.len())
Ok(start as usize..end as usize)
}

pub fn dir_entry(&self, path: &Path) -> std::io::Result<&VirtualDirectory> {

@@ -888,7 +946,7 @@ mod test {
#[track_caller]
fn read_file(vfs: &FileBackedVfs, path: &Path) -> String {
let file = vfs.file_entry(path).unwrap();
String::from_utf8(vfs.read_file_all(file).unwrap()).unwrap()
String::from_utf8(vfs.read_file_all(file).unwrap().into_owned()).unwrap()
}

#[test]

@@ -901,20 +959,23 @@ mod test {
let src_path = src_path.to_path_buf();
let mut builder = VfsBuilder::new(src_path.clone()).unwrap();
builder
.add_file(&src_path.join("a.txt"), "data".into())
.add_file_with_data_inner(&src_path.join("a.txt"), "data".into())
.unwrap();
builder
.add_file(&src_path.join("b.txt"), "data".into())
.add_file_with_data_inner(&src_path.join("b.txt"), "data".into())
.unwrap();
assert_eq!(builder.files.len(), 1); // because duplicate data
builder
.add_file(&src_path.join("c.txt"), "c".into())
.add_file_with_data_inner(&src_path.join("c.txt"), "c".into())
.unwrap();
builder
.add_file(&src_path.join("sub_dir").join("d.txt"), "d".into())
.add_file_with_data_inner(
&src_path.join("sub_dir").join("d.txt"),
"d".into(),
)
.unwrap();
builder
.add_file(&src_path.join("e.txt"), "e".into())
.add_file_with_data_inner(&src_path.join("e.txt"), "e".into())
.unwrap();
builder
.add_symlink(

@@ -1031,7 +1092,7 @@ mod test {
(
dest_path.to_path_buf(),
FileBackedVfs::new(
data,
Cow::Owned(data),
VfsRoot {
dir: root_dir,
root_path: dest_path.to_path_buf(),

@@ -1082,7 +1143,7 @@ mod test {
let temp_path = temp_dir.path().canonicalize();
let mut builder = VfsBuilder::new(temp_path.to_path_buf()).unwrap();
builder
.add_file(
.add_file_with_data_inner(
temp_path.join("a.txt").as_path(),
"0123456789".to_string().into_bytes(),
)
@@ -5,6 +5,7 @@ use crate::args::CompileFlags;
use crate::args::Flags;
use crate::factory::CliFactory;
use crate::http_util::HttpClientProvider;
use crate::standalone::binary::StandaloneRelativeFileBaseUrl;
use crate::standalone::is_standalone_binary;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;

@@ -14,7 +15,6 @@ use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_graph::GraphKind;
use deno_terminal::colors;
use eszip::EszipRelativeFileBaseUrl;
use rand::Rng;
use std::path::Path;
use std::path::PathBuf;

@@ -29,7 +29,6 @@ pub async fn compile(
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let module_graph_creator = factory.module_graph_creator().await?;
let parsed_source_cache = factory.parsed_source_cache();
let binary_writer = factory.create_compile_binary_writer().await?;
let http_client = factory.http_client_provider();
let module_specifier = cli_options.resolve_main_module()?;

@@ -80,7 +79,7 @@ pub async fn compile(
let graph = if cli_options.type_check_mode().is_true() {
// In this case, the previous graph creation did type checking, which will
// create a module graph with types information in it. We don't want to
// store that in the eszip so create a code only module graph from scratch.
// store that in the binary so create a code only module graph from scratch.
module_graph_creator
.create_graph(GraphKind::CodeOnly, module_roots)
.await?

@@ -91,11 +90,6 @@ pub async fn compile(
let ts_config_for_emit = cli_options
.resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?;
check_warn_tsconfig(&ts_config_for_emit);
let (transpile_options, emit_options) =
crate::args::ts_config_to_transpile_and_emit_options(
ts_config_for_emit.ts_config,
)?;
let parser = parsed_source_cache.as_capturing_parser();
let root_dir_url = resolve_root_dir_from_specifiers(
cli_options.workspace().root_dir(),
graph.specifiers().map(|(s, _)| s).chain(

@@ -106,17 +100,6 @@ pub async fn compile(
),
);
log::debug!("Binary root dir: {}", root_dir_url);
let root_dir_url = EszipRelativeFileBaseUrl::new(&root_dir_url);
let eszip = eszip::EszipV2::from_graph(eszip::FromGraphOptions {
graph,
parser,
transpile_options,
emit_options,
// make all the modules relative to the root folder
relative_file_base: Some(root_dir_url),
npm_packages: None,
})?;

log::info!(
"{} {} to {}",
colors::green("Compile"),

@@ -143,15 +126,18 @@ pub async fn compile(
let write_result = binary_writer
.write_bin(
file,
eszip,
root_dir_url,
&graph,
StandaloneRelativeFileBaseUrl::from(&root_dir_url),
module_specifier,
&compile_flags,
cli_options,
)
.await
.with_context(|| {
format!("Writing temporary file '{}'", temp_path.display())
format!(
"Writing deno compile executable to temporary file '{}'",
temp_path.display()
)
});

// set it as executable
@@ -571,7 +571,7 @@ pub async fn cover_files(
| MediaType::Cjs
| MediaType::Mjs
| MediaType::Json => None,
MediaType::Dts | MediaType::Dmts | MediaType::Dcts => Some(Vec::new()),
MediaType::Dts | MediaType::Dmts | MediaType::Dcts => Some(String::new()),
MediaType::TypeScript
| MediaType::Jsx
| MediaType::Mts

@@ -593,8 +593,7 @@ pub async fn cover_files(
}
};
let runtime_code: String = match transpiled_code {
Some(code) => String::from_utf8(code)
.with_context(|| format!("Failed decoding {}", file.specifier))?,
Some(code) => code,
None => original_source.to_string(),
};
@@ -367,10 +367,14 @@ pub async fn add(
Default::default(),
None,
);

let npmrc = cli_factory.cli_options().unwrap().npmrc();

deps_file_fetcher.set_download_log_level(log::Level::Trace);
let deps_file_fetcher = Arc::new(deps_file_fetcher);
let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone()));
let npm_resolver = Arc::new(NpmFetchResolver::new(deps_file_fetcher));
let npm_resolver =
Arc::new(NpmFetchResolver::new(deps_file_fetcher, npmrc.clone()));

let mut selected_packages = Vec::with_capacity(add_flags.packages.len());
let mut package_reqs = Vec::with_capacity(add_flags.packages.len());
@@ -579,6 +579,10 @@ pub async fn upgrade(

let output_exe_path =
full_path_output_flag.as_ref().unwrap_or(&current_exe_path);

#[cfg(windows)]
kill_running_deno_lsp_processes();

let output_result = if *output_exe_path == current_exe_path {
replace_exe(&new_exe_path, output_exe_path)
} else {

@@ -966,6 +970,34 @@ fn check_windows_access_denied_error(
})
}

#[cfg(windows)]
fn kill_running_deno_lsp_processes() {
// limit this to `deno lsp` invocations to avoid killing important programs someone might be running
let is_debug = log::log_enabled!(log::Level::Debug);
let get_pipe = || {
if is_debug {
std::process::Stdio::inherit()
} else {
std::process::Stdio::null()
}
};
let _ = Command::new("powershell.exe")
.args([
"-Command",
r#"Get-WmiObject Win32_Process | Where-Object {
$_.Name -eq 'deno.exe' -and
$_.CommandLine -match '^(?:\"[^\"]+\"|\S+)\s+lsp\b'
} | ForEach-Object {
if ($_.Terminate()) {
Write-Host 'Terminated:' $_.ProcessId
}
}"#,
])
.stdout(get_pipe())
.stderr(get_pipe())
.output();
}

fn set_exe_permissions(
current_exe_path: &Path,
output_exe_path: &Path,
@@ -516,7 +516,6 @@ delete Object.prototype.__proto__;
/** @typedef {{
 * ls: ts.LanguageService & { [k:string]: any },
 * compilerOptions: ts.CompilerOptions,
 * forceEnabledVerbatimModuleSyntax: boolean,
 * }} LanguageServiceEntry */
/** @type {{ unscoped: LanguageServiceEntry, byScope: Map<string, LanguageServiceEntry> }} */
const languageServiceEntries = {

@@ -1026,7 +1025,7 @@ delete Object.prototype.__proto__;
: ts.sortAndDeduplicateDiagnostics(
checkFiles.map((s) => program.getSemanticDiagnostics(s)).flat(),
)),
].filter(filterMapDiagnostic.bind(null, false));
].filter(filterMapDiagnostic);

// emit the tsbuildinfo file
// @ts-ignore: emitBuildInfo is not exposed (https://github.com/microsoft/TypeScript/issues/49871)

@@ -1041,28 +1040,11 @@ delete Object.prototype.__proto__;
debug("<<< exec stop");
}

/**
 * @param {boolean} isLsp
 * @param {ts.Diagnostic} diagnostic
 */
function filterMapDiagnostic(isLsp, diagnostic) {
/** @param {ts.Diagnostic} diagnostic */
function filterMapDiagnostic(diagnostic) {
if (IGNORED_DIAGNOSTICS.includes(diagnostic.code)) {
return false;
}
if (isLsp) {
// TS1484: `...` is a type and must be imported using a type-only import when 'verbatimModuleSyntax' is enabled.
// We force-enable `verbatimModuleSyntax` in the LSP so the `type`
// modifier is used when auto-importing types. But we don't want this
// diagnostic unless it was explicitly enabled by the user.
if (diagnostic.code == 1484) {
const entry = (lastRequestScope
? languageServiceEntries.byScope.get(lastRequestScope)
: null) ?? languageServiceEntries.unscoped;
if (entry.forceEnabledVerbatimModuleSyntax) {
return false;
}
}
}
// make the diagnostic for using an `export =` in an es module a warning
if (diagnostic.code === 1203) {
diagnostic.category = ts.DiagnosticCategory.Warning;

@@ -1159,12 +1141,10 @@ delete Object.prototype.__proto__;
"strict": true,
"target": "esnext",
"useDefineForClassFields": true,
"verbatimModuleSyntax": true,
"jsx": "react",
"jsxFactory": "React.createElement",
"jsxFragmentFactory": "React.Fragment",
}),
forceEnabledVerbatimModuleSyntax: true,
};
setLogDebug(enableDebugLogging, "TSLS");
debug("serverInit()");

@@ -1230,17 +1210,8 @@ delete Object.prototype.__proto__;
const ls = oldEntry
? oldEntry.ls
: ts.createLanguageService(host, documentRegistry);
let forceEnabledVerbatimModuleSyntax = false;
if (!config["verbatimModuleSyntax"]) {
config["verbatimModuleSyntax"] = true;
forceEnabledVerbatimModuleSyntax = true;
}
const compilerOptions = lspTsConfigToCompilerOptions(config);
newByScope.set(scope, {
ls,
compilerOptions,
forceEnabledVerbatimModuleSyntax,
});
newByScope.set(scope, { ls, compilerOptions });
languageServiceEntries.byScope.delete(scope);
}
for (const oldEntry of languageServiceEntries.byScope.values()) {

@@ -1305,7 +1276,7 @@ delete Object.prototype.__proto__;
...ls.getSemanticDiagnostics(specifier),
...ls.getSuggestionDiagnostics(specifier),
...ls.getSyntacticDiagnostics(specifier),
].filter(filterMapDiagnostic.bind(null, true)));
].filter(filterMapDiagnostic));
}
return respond(id, diagnosticMap);
} catch (e) {
@@ -5,6 +5,7 @@ use crate::args::TypeCheckMode;
use crate::cache::FastInsecureHasher;
use crate::node;
use crate::npm::CliNpmResolver;
use crate::npm::ResolvePkgFolderFromDenoReqError;
use crate::util::checksum;
use crate::util::path::mapped_specifier_for_tsc;

@@ -35,6 +36,7 @@ use deno_runtime::deno_node::NodeResolver;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::errors::NodeJsErrorCode;
use node_resolver::errors::NodeJsErrorCoded;
use node_resolver::errors::ResolvePkgSubpathFromDenoModuleError;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolution;
use node_resolver::NodeResolutionMode;

@@ -45,6 +47,7 @@ use std::fmt;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use thiserror::Error;

mod diagnostics;

@@ -688,12 +691,30 @@ fn op_resolve_inner(
Some(ResolutionResolved { specifier, .. }) => {
resolve_graph_specifier_types(specifier, &referrer, state)?
}
_ => resolve_non_graph_specifier_types(
&specifier,
&referrer,
referrer_kind,
state,
)?,
_ => {
match resolve_non_graph_specifier_types(
&specifier,
&referrer,
referrer_kind,
state,
) {
Ok(maybe_result) => maybe_result,
Err(
err @ ResolveNonGraphSpecifierTypesError::ResolvePkgFolderFromDenoReq(
ResolvePkgFolderFromDenoReqError::Managed(_),
),
) => {
// it's most likely requesting the jsxImportSource, which isn't loaded
// into the graph when not using jsx, so just ignore this error
if specifier.ends_with("/jsx-runtime") {
None
} else {
return Err(err.into());
}
}
Err(err) => return Err(err.into()),
}
}
};
let result = match maybe_result {
Some((specifier, media_type)) => {

@@ -818,12 +839,23 @@ fn resolve_graph_specifier_types(
}
}

#[derive(Debug, Error)]
enum ResolveNonGraphSpecifierTypesError {
#[error(transparent)]
ResolvePkgFolderFromDenoReq(#[from] ResolvePkgFolderFromDenoReqError),
#[error(transparent)]
ResolvePkgSubpathFromDenoModule(#[from] ResolvePkgSubpathFromDenoModuleError),
}

fn resolve_non_graph_specifier_types(
raw_specifier: &str,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
state: &State,
) -> Result<Option<(ModuleSpecifier, MediaType)>, AnyError> {
) -> Result<
Option<(ModuleSpecifier, MediaType)>,
ResolveNonGraphSpecifierTypesError,
> {
let npm = match state.maybe_npm.as_ref() {
Some(npm) => npm,
None => return Ok(None), // we only support non-graph types for npm packages
@@ -30,7 +30,7 @@ use tokio::sync::mpsc;
use tokio::sync::mpsc::UnboundedReceiver;
use tokio::time::sleep;

const CLEAR_SCREEN: &str = "\x1B[2J\x1B[1;1H";
const CLEAR_SCREEN: &str = "\x1B[H\x1B[2J\x1B[3J";
const DEBOUNCE_INTERVAL: Duration = Duration::from_millis(200);

struct DebouncedReceiver {
@@ -103,6 +103,21 @@ pub fn arc_str_to_bytes(arc_str: Arc<str>) -> Arc<[u8]> {
unsafe { Arc::from_raw(raw as *const [u8]) }
}

/// Converts an `Arc<[u8]>` to an `Arc<str>` if able.
#[allow(dead_code)]
pub fn arc_u8_to_arc_str(
arc_u8: Arc<[u8]>,
) -> Result<Arc<str>, std::str::Utf8Error> {
// Check that the string is valid UTF-8.
std::str::from_utf8(&arc_u8)?;
// SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as
// Arc<str>. This is proven by the From<Arc<str>> impl for Arc<[u8]> from the
// standard library.
Ok(unsafe {
std::mem::transmute::<std::sync::Arc<[u8]>, std::sync::Arc<str>>(arc_u8)
})
}
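A quick usage sketch for the helper above (assumes `arc_u8_to_arc_str` from this hunk is in scope):

```rust
use std::sync::Arc;

fn main() {
    let valid: Arc<[u8]> = Arc::from(&b"hello"[..]);
    let s: Arc<str> = arc_u8_to_arc_str(valid).expect("valid UTF-8");
    assert_eq!(&*s, "hello");

    // invalid UTF-8 is rejected instead of transmuted
    let invalid: Arc<[u8]> = Arc::from(&[0xff, 0xfe][..]);
    assert!(arc_u8_to_arc_str(invalid).is_err());
}
```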

#[cfg(test)]
mod tests {
use std::sync::Arc;
@@ -2,7 +2,7 @@

[package]
name = "deno_broadcast_channel"
version = "0.167.0"
version = "0.168.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

2 ext/cache/Cargo.toml (vendored)

@@ -2,7 +2,7 @@

[package]
name = "deno_cache"
version = "0.105.0"
version = "0.106.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_canvas"
version = "0.42.0"
version = "0.43.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_console"
version = "0.173.0"
version = "0.174.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_cron"
version = "0.53.0"
version = "0.54.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_crypto"
version = "0.187.0"
version = "0.188.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_fetch"
version = "0.197.0"
version = "0.198.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_ffi"
version = "0.160.0"
version = "0.161.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_fs"
version = "0.83.0"
version = "0.84.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -31,7 +31,7 @@ serde.workspace = true
thiserror.workspace = true

[target.'cfg(unix)'.dependencies]
nix = { workspace = true, features = ["user"] }
nix = { workspace = true, features = ["fs", "user"] }

[target.'cfg(windows)'.dependencies]
winapi = { workspace = true, features = ["winbase"] }

@@ -2,7 +2,7 @@

[package]
name = "deno_http"
version = "0.171.0"
version = "0.172.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_io"
version = "0.83.0"
version = "0.84.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

30 ext/io/fs.rs
@@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use std::borrow::Cow;
use std::fmt::Formatter;
use std::io;
use std::rc::Rc;
use std::time::SystemTime;

@@ -21,6 +22,21 @@ pub enum FsError {
NotCapable(&'static str),
}

impl std::fmt::Display for FsError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
FsError::Io(err) => std::fmt::Display::fmt(err, f),
FsError::FileBusy => f.write_str("file busy"),
FsError::NotSupported => f.write_str("not supported"),
FsError::NotCapable(err) => {
f.write_str(&format!("requires {err} access"))
}
}
}
}

impl std::error::Error for FsError {}
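For illustration, a stand-in enum showing the message shapes the `Display` impl above produces (the real `FsError` also wraps an `io::Error`, omitted here so the example compiles on its own):

```rust
use std::fmt::{self, Formatter};

enum FsError {
    FileBusy,
    NotCapable(&'static str),
}

impl fmt::Display for FsError {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self {
            FsError::FileBusy => f.write_str("file busy"),
            FsError::NotCapable(what) => write!(f, "requires {what} access"),
        }
    }
}

fn main() {
    assert_eq!(FsError::NotCapable("read").to_string(), "requires read access");
    assert_eq!(FsError::FileBusy.to_string(), "file busy");
}
```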
impl FsError {
pub fn kind(&self) -> io::ErrorKind {
match self {

@@ -55,20 +71,6 @@ impl From<io::ErrorKind> for FsError {
}
}

impl From<FsError> for deno_core::error::AnyError {
fn from(err: FsError) -> Self {
match err {
FsError::Io(err) => err.into(),
FsError::FileBusy => deno_core::error::resource_unavailable(),
FsError::NotSupported => deno_core::error::not_supported(),
FsError::NotCapable(err) => deno_core::error::custom_error(
"NotCapable",
format!("permission denied: {err}"),
),
}
}
}

impl From<JoinError> for FsError {
fn from(err: JoinError) -> Self {
if err.is_cancelled() {
@@ -2,7 +2,7 @@

[package]
name = "deno_kv"
version = "0.81.0"
version = "0.82.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -2,7 +2,7 @@

[package]
name = "deno_napi"
version = "0.104.0"
version = "0.105.0"
authors.workspace = true
edition.workspace = true
license.workspace = true

@@ -16,5 +16,14 @@ path = "lib.rs"
[dependencies]
deno_core.workspace = true
deno_permissions.workspace = true
libc.workspace = true
libloading = { version = "0.7" }
log.workspace = true
napi_sym.workspace = true
thiserror.workspace = true

[target.'cfg(windows)'.dependencies]
windows-sys.workspace = true

[dev-dependencies]
libuv-sys-lite = "=1.48.2"
@ -0,0 +1,114 @@
# napi

This directory contains source for Deno's Node-API implementation. It depends
on `napi_sym` and `deno_napi`.

Files are generally organized the same as in Node.js's implementation to ease
ensuring compatibility.

## Adding a new function

Add the symbol name to
[`cli/napi_sym/symbol_exports.json`](../napi_sym/symbol_exports.json).

```diff
{
  "symbols": [
    ...
    "napi_get_undefined",
-   "napi_get_null"
+   "napi_get_null",
+   "napi_get_boolean"
  ]
}
```

Determine where to place the implementation. `napi_get_boolean` is related to
JS values, so we will place it in `js_native_api.rs`. If something is not
clear, just create a new file module.

See [`napi_sym`](../napi_sym/) for writing the implementation:

```rust
#[napi_sym::napi_sym]
fn napi_get_boolean(
  env: *mut Env,
  value: bool,
  result: *mut napi_value,
) -> Result {
  // ...
  Ok(())
}
```

Update the generated symbol lists using the script:

```
deno run --allow-write tools/napi/generate_symbols_lists.js
```

Add a test in [`/tests/napi`](../../tests/napi/). You can also refer to the
Node.js test suite for Node-API.

```js
// tests/napi/boolean_test.js
import { assertEquals, loadTestLibrary } from "./common.js";
const lib = loadTestLibrary();
Deno.test("napi get boolean", function () {
  assertEquals(lib.test_get_boolean(true), true);
  assertEquals(lib.test_get_boolean(false), false);
});
```

```rust
// tests/napi/src/boolean.rs

use napi_sys::Status::napi_ok;
use napi_sys::ValueType::napi_boolean;
use napi_sys::*;

extern "C" fn test_boolean(
  env: napi_env,
  info: napi_callback_info,
) -> napi_value {
  let (args, argc, _) = crate::get_callback_info!(env, info, 1);
  assert_eq!(argc, 1);

  let mut ty = -1;
  assert!(unsafe { napi_typeof(env, args[0], &mut ty) } == napi_ok);
  assert_eq!(ty, napi_boolean);

  // Declare the result slot so the skeleton compiles.
  let mut value: napi_value = std::ptr::null_mut();

  // Use napi_get_boolean here...

  value
}

pub fn init(env: napi_env, exports: napi_value) {
  let properties = &[crate::new_property!(env, "test_boolean\0", test_boolean)];

  unsafe {
    napi_define_properties(env, exports, properties.len(), properties.as_ptr())
  };
}
```

```diff
// tests/napi/src/lib.rs

+ mod boolean;

...

#[no_mangle]
unsafe extern "C" fn napi_register_module_v1(
  env: napi_env,
  exports: napi_value,
) -> napi_value {
  ...
+ boolean::init(env, exports);

  exports
}
```

Run the test using `cargo test -p tests/napi`.
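
For reference, here is a minimal sketch of what the `// ...` body of
`napi_get_boolean` could look like. This is illustrative, not the actual
implementation from this commit: the `Env::scope()` accessor and the
`Local`-to-`napi_value` conversion are assumed from the surrounding
`deno_napi` sources in this diff.

```rust
#[napi_sym::napi_sym]
fn napi_get_boolean(
  env: *mut Env,
  value: bool,
  result: *mut napi_value,
) -> Result {
  // SAFETY: the caller passes a live Env pointer (hypothetical sketch).
  let env = unsafe { &mut *env };
  let boolean = v8::Boolean::new(&mut env.scope(), value);
  // SAFETY: `result` is assumed non-null and writable.
  unsafe { *result = boolean.into() };
  Ok(())
}
```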

22
ext/napi/build.rs
Normal file

@ -0,0 +1,22 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

fn main() {
  let symbols_file_name = match std::env::consts::OS {
    "android" | "freebsd" | "openbsd" => {
      "generated_symbol_exports_list_linux.def".to_string()
    }
    os => format!("generated_symbol_exports_list_{}.def", os),
  };
  let symbols_path = std::path::Path::new(".")
    .join(symbols_file_name)
    .canonicalize()
    .expect(
      "Missing symbols list! Generate using tools/napi/generate_symbols_lists.js",
    );

  println!("cargo:rustc-rerun-if-changed={}", symbols_path.display());

  let path = std::path::PathBuf::from(std::env::var("OUT_DIR").unwrap())
    .join("napi_symbol_path.txt");
  std::fs::write(path, symbols_path.as_os_str().as_encoded_bytes()).unwrap();
}
@ -5,7 +5,7 @@
const NAPI_VERSION: u32 = 9;

use deno_runtime::deno_napi::*;
use crate::*;
use libc::INT_MAX;

use super::util::check_new_from_utf8;

@ -17,9 +17,9 @@ use super::util::napi_set_last_error;
use super::util::v8_name_from_property_descriptor;
use crate::check_arg;
use crate::check_env;
use deno_runtime::deno_napi::function::create_function;
use deno_runtime::deno_napi::function::create_function_template;
use deno_runtime::deno_napi::function::CallbackInfo;
use crate::function::create_function;
use crate::function::create_function_template;
use crate::function::CallbackInfo;
use napi_sym::napi_sym;
use std::ptr::NonNull;

@ -1083,7 +1083,7 @@ fn napi_create_string_latin1(
}

#[napi_sym]
fn napi_create_string_utf8(
pub(crate) fn napi_create_string_utf8(
  env_ptr: *mut Env,
  string: *const c_char,
  length: usize,

@ -1647,7 +1647,7 @@ fn napi_get_cb_info(
  check_arg!(env, argc);
  let argc = unsafe { *argc as usize };
  for i in 0..argc {
    let mut arg = args.get(i as _);
    let arg = args.get(i as _);
    unsafe {
      *argv.add(i) = arg.into();
    }
@ -5,6 +5,22 @@
#![allow(clippy::undocumented_unsafe_blocks)]
#![deny(clippy::missing_safety_doc)]

//! Symbols to be exported are now defined in this JSON file.
//! The `#[napi_sym]` macro checks for missing entries and panics.
//!
//! `./tools/napi/generate_symbols_list.js` is used to generate the LINK `cli/exports.def` on Windows,
//! which is also checked into git.
//!
//! To add a new napi function:
//! 1. Place `#[napi_sym]` on top of your implementation.
//! 2. Add the function's identifier to this JSON list.
//! 3. Finally, run `tools/napi/generate_symbols_list.js` to update `ext/napi/generated_symbol_exports_list_*.def`.

pub mod js_native_api;
pub mod node_api;
pub mod util;
pub mod uv;

use core::ptr::NonNull;
use deno_core::op2;
use deno_core::parking_lot::RwLock;
@ -631,3 +647,30 @@ where
  Ok(exports)
}

#[allow(clippy::print_stdout)]
pub fn print_linker_flags(name: &str) {
  let symbols_path =
    include_str!(concat!(env!("OUT_DIR"), "/napi_symbol_path.txt"));

  #[cfg(target_os = "windows")]
  println!("cargo:rustc-link-arg-bin={name}=/DEF:{}", symbols_path);

  #[cfg(target_os = "macos")]
  println!(
    "cargo:rustc-link-arg-bin={name}=-Wl,-exported_symbols_list,{}",
    symbols_path,
  );

  #[cfg(target_os = "linux")]
  println!(
    "cargo:rustc-link-arg-bin={name}=-Wl,--export-dynamic-symbol-list={}",
    symbols_path,
  );

  #[cfg(target_os = "android")]
  println!(
    "cargo:rustc-link-arg-bin={name}=-Wl,--export-dynamic-symbol-list={}",
    symbols_path,
  );
}
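
These link arguments are emitted in build-script form, so a dependent binary is
expected to call `print_linker_flags` from its own `build.rs`. A hedged sketch
of that wiring (the binary name `deno` is an assumption for illustration):

```rust
// build.rs of a binary crate that embeds the N-API symbols.
// Assumes `deno_napi` is available as a build-dependency.
fn main() {
  deno_napi::print_linker_flags("deno");
}
```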
@ -9,10 +9,10 @@ use super::util::napi_set_last_error;
use super::util::SendPtr;
use crate::check_arg;
use crate::check_env;
use crate::*;
use deno_core::parking_lot::Condvar;
use deno_core::parking_lot::Mutex;
use deno_core::V8CrossThreadTaskSpawner;
use deno_runtime::deno_napi::*;
use napi_sym::napi_sym;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::AtomicU8;

@ -488,7 +488,7 @@ impl AsyncWork {
}

#[napi_sym]
fn napi_create_async_work(
pub(crate) fn napi_create_async_work(
  env: *mut Env,
  async_resource: napi_value,
  async_resource_name: napi_value,

@ -537,7 +537,10 @@ fn napi_create_async_work(
}

#[napi_sym]
fn napi_delete_async_work(env: *mut Env, work: napi_async_work) -> napi_status {
pub(crate) fn napi_delete_async_work(
  env: *mut Env,
  work: napi_async_work,
) -> napi_status {
  let env = check_env!(env);
  check_arg!(env, work);

@ -560,7 +563,10 @@ fn napi_get_uv_event_loop(
}

#[napi_sym]
fn napi_queue_async_work(env: *mut Env, work: napi_async_work) -> napi_status {
pub(crate) fn napi_queue_async_work(
  env: *mut Env,
  work: napi_async_work,
) -> napi_status {
  let env = check_env!(env);
  check_arg!(env, work);

@ -897,7 +903,7 @@ fn napi_create_threadsafe_function(
  };
  let resource_name = resource_name.to_rust_string_lossy(&mut env.scope());

  let mut tsfn = Box::new(TsFn {
  let tsfn = Box::new(TsFn {
    env,
    func,
    max_queue_size,
@ -2,7 +2,7 @@
[package]
name = "napi_sym"
version = "0.103.0"
version = "0.104.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@ -2,7 +2,8 @@
A proc_macro for Deno's Node-API implementation. It does the following things:

- Marks the symbol as `#[no_mangle]` and rewrites it as `pub extern "C" $name`.
- Marks the symbol as `#[no_mangle]` and rewrites it as
  `unsafe extern "C" $name`.
- Asserts that the function symbol is present in
  [`symbol_exports.json`](./symbol_exports.json).
- Maps `deno_napi::Result` to raw `napi_result`.
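
Roughly, for a function annotated with `#[napi_sym]`, the rewrite described
above looks like the following hand-written sketch; `napi_get_boolean_inner`
stands in for the original function body, and the macro's actual output may
differ in detail.

```rust
// Illustrative expansion only; not the macro's real output.
#[no_mangle]
pub unsafe extern "C" fn napi_get_boolean(
  env: *mut Env,
  value: bool,
  result: *mut napi_value,
) -> napi_status {
  // `deno_napi::Result` from the annotated body is mapped to a raw
  // status code at the ABI boundary.
  match napi_get_boolean_inner(env, value, result) {
    Ok(()) => napi_ok,
    Err(status) => status,
  }
}
```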
@ -1,9 +1,9 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_runtime::deno_napi::*;
use crate::*;
use libc::INT_MAX;

#[repr(transparent)]
pub struct SendPtr<T>(pub *const T);
pub(crate) struct SendPtr<T>(pub *const T);

impl<T> SendPtr<T> {
  // silly function to get around `clippy::redundant_locals`
@ -37,7 +37,7 @@ impl Drop for BufferFinalizer {
  }
}

pub extern "C" fn backing_store_deleter_callback(
pub(crate) extern "C" fn backing_store_deleter_callback(
  data: *mut c_void,
  _byte_length: usize,
  deleter_data: *mut c_void,

@ -50,7 +50,7 @@ pub extern "C" fn backing_store_deleter_callback(
  drop(finalizer);
}

pub fn make_external_backing_store(
pub(crate) fn make_external_backing_store(
  env: *mut Env,
  data: *mut c_void,
  byte_length: usize,
@ -90,9 +90,7 @@ macro_rules! check_env {
macro_rules! return_error_status_if_false {
  ($env: expr, $condition: expr, $status: ident) => {
    if !$condition {
      return Err(
        $crate::napi::util::napi_set_last_error($env, $status).into(),
      );
      return Err($crate::util::napi_set_last_error($env, $status).into());
    }
  };
}

@ -101,7 +99,7 @@ macro_rules! return_error_status_if_false {
macro_rules! return_status_if_false {
  ($env: expr, $condition: expr, $status: ident) => {
    if !$condition {
      return $crate::napi::util::napi_set_last_error($env, $status);
      return $crate::util::napi_set_last_error($env, $status);
    }
  };
}
@ -222,7 +220,7 @@ macro_rules! check_arg {
  ($env: expr, $ptr: expr) => {
    $crate::return_status_if_false!(
      $env,
      !$crate::napi::util::Nullable::is_null(&$ptr),
      !$crate::util::Nullable::is_null(&$ptr),
      napi_invalid_arg
    );
  };

@ -230,17 +228,17 @@ macro_rules! check_arg {

#[macro_export]
macro_rules! napi_wrap {
  ( $( # $attr:tt )* fn $name:ident $( < $( $x:lifetime ),* > )? ( $env:ident : & $( $lt:lifetime )? mut Env $( , $ident:ident : $ty:ty )* $(,)? ) -> napi_status $body:block ) => {
    $( # $attr )*
  ( $( # [ $attr:meta ] )* $vis:vis fn $name:ident $( < $( $x:lifetime ),* > )? ( $env:ident : & $( $lt:lifetime )? mut Env $( , $ident:ident : $ty:ty )* $(,)? ) -> napi_status $body:block ) => {
    $( # [ $attr ] )*
    #[no_mangle]
    pub unsafe extern "C" fn $name $( < $( $x ),* > )? ( env_ptr : *mut Env , $( $ident : $ty ),* ) -> napi_status {
    $vis unsafe extern "C" fn $name $( < $( $x ),* > )? ( env_ptr : *mut Env , $( $ident : $ty ),* ) -> napi_status {
      let env: & $( $lt )? mut Env = $crate::check_env!(env_ptr);

      if env.last_exception.is_some() {
        return napi_pending_exception;
      }

      $crate::napi::util::napi_clear_last_error(env);
      $crate::util::napi_clear_last_error(env);

      let scope_env = unsafe { &mut *env_ptr };
      let scope = &mut scope_env.scope();

@ -259,21 +257,21 @@ macro_rules! napi_wrap {
      let env = unsafe { &mut *env_ptr };
      let global = v8::Global::new(env.isolate(), exception);
      env.last_exception = Some(global);
      return $crate::napi::util::napi_set_last_error(env_ptr, napi_pending_exception);
      return $crate::util::napi_set_last_error(env_ptr, napi_pending_exception);
    }

    if result != napi_ok {
      return $crate::napi::util::napi_set_last_error(env_ptr, result);
      return $crate::util::napi_set_last_error(env_ptr, result);
    }

    return result;
    }
  };

  ( $( # $attr:tt )* fn $name:ident $( < $( $x:lifetime ),* > )? ( $( $ident:ident : $ty:ty ),* $(,)? ) -> napi_status $body:block ) => {
    $( # $attr )*
  ( $( # [ $attr:meta ] )* $vis:vis fn $name:ident $( < $( $x:lifetime ),* > )? ( $( $ident:ident : $ty:ty ),* $(,)? ) -> napi_status $body:block ) => {
    $( # [ $attr ] )*
    #[no_mangle]
    pub unsafe extern "C" fn $name $( < $( $x ),* > )? ( $( $ident : $ty ),* ) -> napi_status {
    $vis unsafe extern "C" fn $name $( < $( $x ),* > )? ( $( $ident : $ty ),* ) -> napi_status {
      #[inline(always)]
      fn inner $( < $( $x ),* > )? ( $( $ident : $ty ),* ) -> napi_status $body
@ -1,7 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use crate::*;
use deno_core::parking_lot::Mutex;
use deno_runtime::deno_napi::*;
use std::mem::MaybeUninit;
use std::ptr::addr_of_mut;

@ -16,10 +16,10 @@ fn assert_ok(res: c_int) -> c_int {
  res
}

use crate::napi::js_native_api::napi_create_string_utf8;
use crate::napi::node_api::napi_create_async_work;
use crate::napi::node_api::napi_delete_async_work;
use crate::napi::node_api::napi_queue_async_work;
use js_native_api::napi_create_string_utf8;
use node_api::napi_create_async_work;
use node_api::napi_delete_async_work;
use node_api::napi_queue_async_work;
use std::ffi::c_int;

const UV_MUTEX_SIZE: usize = {
@ -2,7 +2,7 @@
[package]
name = "deno_net"
version = "0.165.0"
version = "0.166.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@ -2,7 +2,7 @@
[package]
name = "deno_node"
version = "0.110.0"
version = "0.111.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
@ -24,7 +24,7 @@ use once_cell::sync::Lazy;
extern crate libz_sys as zlib;

mod global;
mod ops;
pub mod ops;
mod polyfill;

pub use deno_package_json::PackageJson;
@ -7,9 +7,6 @@ use std::net::Ipv4Addr;
use std::net::Ipv6Addr;
use std::net::SocketAddr;

use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::OpState;

@ -27,13 +24,25 @@ impl deno_core::GarbageCollected for BlockListResource {}
#[derive(Serialize)]
struct SocketAddressSerialization(String, String);

#[derive(Debug, thiserror::Error)]
pub enum BlocklistError {
  #[error("{0}")]
  AddrParse(#[from] std::net::AddrParseError),
  #[error("{0}")]
  IpNetwork(#[from] ipnetwork::IpNetworkError),
  #[error("Invalid address")]
  InvalidAddress,
  #[error("IP version mismatch between start and end addresses")]
  IpVersionMismatch,
}

#[op2(fast)]
pub fn op_socket_address_parse(
  state: &mut OpState,
  #[string] addr: &str,
  #[smi] port: u16,
  #[string] family: &str,
) -> Result<bool, AnyError> {
) -> Result<bool, BlocklistError> {
  let ip = addr.parse::<IpAddr>()?;
  let parsed: SocketAddr = SocketAddr::new(ip, port);
  let parsed_ip_str = parsed.ip().to_string();

@ -52,7 +61,7 @@ pub fn op_socket_address_parse(
      Ok(false)
    }
  } else {
    Err(anyhow!("Invalid address"))
    Err(BlocklistError::InvalidAddress)
  }
}

@ -60,8 +69,8 @@ pub fn op_socket_address_parse(
#[serde]
pub fn op_socket_address_get_serialization(
  state: &mut OpState,
) -> Result<SocketAddressSerialization, AnyError> {
  Ok(state.take::<SocketAddressSerialization>())
) -> SocketAddressSerialization {
  state.take::<SocketAddressSerialization>()
}

#[op2]

@ -77,7 +86,7 @@ pub fn op_blocklist_new() -> BlockListResource {
pub fn op_blocklist_add_address(
  #[cppgc] wrap: &BlockListResource,
  #[string] addr: &str,
) -> Result<(), AnyError> {
) -> Result<(), BlocklistError> {
  wrap.blocklist.borrow_mut().add_address(addr)
}

@ -86,7 +95,7 @@ pub fn op_blocklist_add_range(
  #[cppgc] wrap: &BlockListResource,
  #[string] start: &str,
  #[string] end: &str,
) -> Result<bool, AnyError> {
) -> Result<bool, BlocklistError> {
  wrap.blocklist.borrow_mut().add_range(start, end)
}

@ -95,7 +104,7 @@ pub fn op_blocklist_add_subnet(
  #[cppgc] wrap: &BlockListResource,
  #[string] addr: &str,
  #[smi] prefix: u8,
) -> Result<(), AnyError> {
) -> Result<(), BlocklistError> {
  wrap.blocklist.borrow_mut().add_subnet(addr, prefix)
}

@ -104,7 +113,7 @@ pub fn op_blocklist_check(
  #[cppgc] wrap: &BlockListResource,
  #[string] addr: &str,
  #[string] r#type: &str,
) -> Result<bool, AnyError> {
) -> Result<bool, BlocklistError> {
  wrap.blocklist.borrow().check(addr, r#type)
}

@ -123,7 +132,7 @@ impl BlockList {
  &mut self,
  addr: IpAddr,
  prefix: Option<u8>,
) -> Result<(), AnyError> {
) -> Result<(), BlocklistError> {
  match addr {
    IpAddr::V4(addr) => {
      let ipv4_prefix = prefix.unwrap_or(32);

@ -154,7 +163,7 @@ impl BlockList {
    Ok(())
  }

  pub fn add_address(&mut self, address: &str) -> Result<(), AnyError> {
  pub fn add_address(&mut self, address: &str) -> Result<(), BlocklistError> {
    let ip: IpAddr = address.parse()?;
    self.map_addr_add_network(ip, None)?;
    Ok(())

@ -164,7 +173,7 @@ impl BlockList {
    &mut self,
    start: &str,
    end: &str,
  ) -> Result<bool, AnyError> {
  ) -> Result<bool, BlocklistError> {
    let start_ip: IpAddr = start.parse()?;
    let end_ip: IpAddr = end.parse()?;

@ -193,25 +202,33 @@ impl BlockList {
          self.map_addr_add_network(IpAddr::V6(addr), None)?;
        }
      }
      _ => bail!("IP version mismatch between start and end addresses"),
      _ => return Err(BlocklistError::IpVersionMismatch),
    }
    Ok(true)
  }

  pub fn add_subnet(&mut self, addr: &str, prefix: u8) -> Result<(), AnyError> {
  pub fn add_subnet(
    &mut self,
    addr: &str,
    prefix: u8,
  ) -> Result<(), BlocklistError> {
    let ip: IpAddr = addr.parse()?;
    self.map_addr_add_network(ip, Some(prefix))?;
    Ok(())
  }

  pub fn check(&self, addr: &str, r#type: &str) -> Result<bool, AnyError> {
  pub fn check(
    &self,
    addr: &str,
    r#type: &str,
  ) -> Result<bool, BlocklistError> {
    let addr: IpAddr = addr.parse()?;
    let family = r#type.to_lowercase();
    if family == "ipv4" && addr.is_ipv4() || family == "ipv6" && addr.is_ipv6()
    {
      Ok(self.rules.iter().any(|net| net.contains(addr)))
    } else {
      Err(anyhow!("Invalid address"))
      Err(BlocklistError::InvalidAddress)
    }
  }
}
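
The same refactor recurs throughout the remaining hunks: a dedicated
`thiserror` enum replaces `AnyError`, so `?` keeps working via `#[from]`
conversions while callers can match on concrete failure modes. A minimal
standalone sketch of the pattern (names are illustrative, not from this
commit):

```rust
use std::net::IpAddr;

#[derive(Debug, thiserror::Error)]
pub enum ParseError {
  // `#[from]` lets `?` convert std's AddrParseError automatically.
  #[error("{0}")]
  AddrParse(#[from] std::net::AddrParseError),
  #[error("Invalid address")]
  InvalidAddress,
}

fn parse_ipv4(addr: &str) -> Result<IpAddr, ParseError> {
  let ip: IpAddr = addr.parse()?; // AddrParseError -> ParseError
  if ip.is_ipv4() {
    Ok(ip)
  } else {
    Err(ParseError::InvalidAddress)
  }
}
```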
@ -3,7 +3,6 @@
use std::cell::RefCell;
use std::rc::Rc;

use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::OpState;
use deno_fs::FileSystemRc;

@ -11,11 +10,27 @@ use serde::Serialize;

use crate::NodePermissions;

#[derive(Debug, thiserror::Error)]
pub enum FsError {
  #[error(transparent)]
  Permission(deno_core::error::AnyError),
  #[error("{0}")]
  Io(#[from] std::io::Error),
  #[cfg(windows)]
  #[error("Path has no root.")]
  PathHasNoRoot,
  #[cfg(not(any(unix, windows)))]
  #[error("Unsupported platform.")]
  UnsupportedPlatform,
  #[error(transparent)]
  Fs(#[from] deno_io::fs::FsError),
}

#[op2(fast)]
pub fn op_node_fs_exists_sync<P>(
  state: &mut OpState,
  #[string] path: String,
) -> Result<bool, AnyError>
) -> Result<bool, deno_core::error::AnyError>
where
  P: NodePermissions + 'static,
{

@ -30,7 +45,7 @@ where
pub async fn op_node_fs_exists<P>(
  state: Rc<RefCell<OpState>>,
  #[string] path: String,
) -> Result<bool, AnyError>
) -> Result<bool, FsError>
where
  P: NodePermissions + 'static,
{

@ -38,7 +53,8 @@ where
  let mut state = state.borrow_mut();
  let path = state
    .borrow_mut::<P>()
    .check_read_with_api_name(&path, Some("node:fs.exists()"))?;
    .check_read_with_api_name(&path, Some("node:fs.exists()"))
    .map_err(FsError::Permission)?;
  (state.borrow::<FileSystemRc>().clone(), path)
};

@ -50,16 +66,18 @@ pub fn op_node_cp_sync<P>(
  state: &mut OpState,
  #[string] path: &str,
  #[string] new_path: &str,
) -> Result<(), AnyError>
) -> Result<(), FsError>
where
  P: NodePermissions + 'static,
{
  let path = state
    .borrow_mut::<P>()
    .check_read_with_api_name(path, Some("node:fs.cpSync"))?;
    .check_read_with_api_name(path, Some("node:fs.cpSync"))
    .map_err(FsError::Permission)?;
  let new_path = state
    .borrow_mut::<P>()
    .check_write_with_api_name(new_path, Some("node:fs.cpSync"))?;
    .check_write_with_api_name(new_path, Some("node:fs.cpSync"))
    .map_err(FsError::Permission)?;

  let fs = state.borrow::<FileSystemRc>();
  fs.cp_sync(&path, &new_path)?;

@ -71,7 +89,7 @@ pub async fn op_node_cp<P>(
  state: Rc<RefCell<OpState>>,
  #[string] path: String,
  #[string] new_path: String,
) -> Result<(), AnyError>
) -> Result<(), FsError>
where
  P: NodePermissions + 'static,
{

@ -79,10 +97,12 @@ where
  let mut state = state.borrow_mut();
  let path = state
    .borrow_mut::<P>()
    .check_read_with_api_name(&path, Some("node:fs.cpSync"))?;
    .check_read_with_api_name(&path, Some("node:fs.cpSync"))
    .map_err(FsError::Permission)?;
  let new_path = state
    .borrow_mut::<P>()
    .check_write_with_api_name(&new_path, Some("node:fs.cpSync"))?;
    .check_write_with_api_name(&new_path, Some("node:fs.cpSync"))
    .map_err(FsError::Permission)?;
  (state.borrow::<FileSystemRc>().clone(), path, new_path)
};

@ -108,7 +128,7 @@ pub fn op_node_statfs<P>(
  state: Rc<RefCell<OpState>>,
  #[string] path: String,
  bigint: bool,
) -> Result<StatFs, AnyError>
) -> Result<StatFs, FsError>
where
  P: NodePermissions + 'static,
{

@ -116,10 +136,12 @@ where
  let mut state = state.borrow_mut();
  let path = state
    .borrow_mut::<P>()
    .check_read_with_api_name(&path, Some("node:fs.statfs"))?;
    .check_read_with_api_name(&path, Some("node:fs.statfs"))
    .map_err(FsError::Permission)?;
  state
    .borrow_mut::<P>()
    .check_sys("statfs", "node:fs.statfs")?;
    .check_sys("statfs", "node:fs.statfs")
    .map_err(FsError::Permission)?;
  path
};
#[cfg(unix)]

@ -176,7 +198,6 @@ where
}
#[cfg(windows)]
{
  use deno_core::anyhow::anyhow;
  use std::ffi::OsStr;
  use std::os::windows::ffi::OsStrExt;
  use windows_sys::Win32::Storage::FileSystem::GetDiskFreeSpaceW;

@ -186,10 +207,7 @@ where
  // call below.
  #[allow(clippy::disallowed_methods)]
  let path = path.canonicalize()?;
  let root = path
    .ancestors()
    .last()
    .ok_or(anyhow!("Path has no root."))?;
  let root = path.ancestors().last().ok_or(FsError::PathHasNoRoot)?;
  let mut root = OsStr::new(root).encode_wide().collect::<Vec<_>>();
  root.push(0);
  let mut sectors_per_cluster = 0;

@ -229,7 +247,7 @@ where
{
  let _ = path;
  let _ = bigint;
  Err(anyhow!("Unsupported platform."))
  Err(FsError::UnsupportedPlatform)
}
}

@ -241,13 +259,14 @@ pub fn op_node_lutimes_sync<P>(
  #[smi] atime_nanos: u32,
  #[number] mtime_secs: i64,
  #[smi] mtime_nanos: u32,
) -> Result<(), AnyError>
) -> Result<(), FsError>
where
  P: NodePermissions + 'static,
{
  let path = state
    .borrow_mut::<P>()
    .check_write_with_api_name(path, Some("node:fs.lutimes"))?;
    .check_write_with_api_name(path, Some("node:fs.lutimes"))
    .map_err(FsError::Permission)?;

  let fs = state.borrow::<FileSystemRc>();
  fs.lutime_sync(&path, atime_secs, atime_nanos, mtime_secs, mtime_nanos)?;

@ -262,7 +281,7 @@ pub async fn op_node_lutimes<P>(
  #[smi] atime_nanos: u32,
  #[number] mtime_secs: i64,
  #[smi] mtime_nanos: u32,
) -> Result<(), AnyError>
) -> Result<(), FsError>
where
  P: NodePermissions + 'static,
{

@ -270,7 +289,8 @@ where
  let mut state = state.borrow_mut();
  let path = state
    .borrow_mut::<P>()
    .check_write_with_api_name(&path, Some("node:fs.lutimesSync"))?;
    .check_write_with_api_name(&path, Some("node:fs.lutimesSync"))
    .map_err(FsError::Permission)?;
  (state.borrow::<FileSystemRc>().clone(), path)
};

@ -286,13 +306,14 @@ pub fn op_node_lchown_sync<P>(
  #[string] path: String,
  uid: Option<u32>,
  gid: Option<u32>,
) -> Result<(), AnyError>
) -> Result<(), FsError>
where
  P: NodePermissions + 'static,
{
  let path = state
    .borrow_mut::<P>()
    .check_write_with_api_name(&path, Some("node:fs.lchownSync"))?;
    .check_write_with_api_name(&path, Some("node:fs.lchownSync"))
    .map_err(FsError::Permission)?;
  let fs = state.borrow::<FileSystemRc>();
  fs.lchown_sync(&path, uid, gid)?;
  Ok(())

@ -304,7 +325,7 @@ pub async fn op_node_lchown<P>(
  #[string] path: String,
  uid: Option<u32>,
  gid: Option<u32>,
) -> Result<(), AnyError>
) -> Result<(), FsError>
where
  P: NodePermissions + 'static,
{

@ -312,7 +333,8 @@ where
  let mut state = state.borrow_mut();
  let path = state
    .borrow_mut::<P>()
    .check_write_with_api_name(&path, Some("node:fs.lchown"))?;
    .check_write_with_api_name(&path, Some("node:fs.lchown"))
    .map_err(FsError::Permission)?;
  (state.borrow::<FileSystemRc>().clone(), path)
};
fs.lchown_async(path, uid, gid).await?;
@ -7,7 +7,6 @@ use std::rc::Rc;
use std::task::Poll;

use bytes::Bytes;
use deno_core::error::AnyError;
use deno_core::futures::future::poll_fn;
use deno_core::op2;
use deno_core::serde::Serialize;

@ -110,17 +109,28 @@ impl Resource for Http2ServerSendResponse {
  }
}

#[derive(Debug, thiserror::Error)]
pub enum Http2Error {
  #[error(transparent)]
  Resource(deno_core::error::AnyError),
  #[error(transparent)]
  UrlParse(#[from] url::ParseError),
  #[error(transparent)]
  H2(#[from] h2::Error),
}

#[op2(async)]
#[serde]
pub async fn op_http2_connect(
  state: Rc<RefCell<OpState>>,
  #[smi] rid: ResourceId,
  #[string] url: String,
) -> Result<(ResourceId, ResourceId), AnyError> {
) -> Result<(ResourceId, ResourceId), Http2Error> {
  // No permission check necessary because we're using an existing connection
  let network_stream = {
    let mut state = state.borrow_mut();
    take_network_stream_resource(&mut state.resource_table, rid)?
    take_network_stream_resource(&mut state.resource_table, rid)
      .map_err(Http2Error::Resource)?
  };

  let url = Url::parse(&url)?;

@ -144,9 +154,10 @@ pub async fn op_http2_connect(
pub async fn op_http2_listen(
  state: Rc<RefCell<OpState>>,
  #[smi] rid: ResourceId,
) -> Result<ResourceId, AnyError> {
) -> Result<ResourceId, Http2Error> {
  let stream =
    take_network_stream_resource(&mut state.borrow_mut().resource_table, rid)?;
    take_network_stream_resource(&mut state.borrow_mut().resource_table, rid)
      .map_err(Http2Error::Resource)?;

  let conn = h2::server::Builder::new().handshake(stream).await?;
  Ok(

@ -166,12 +177,13 @@ pub async fn op_http2_accept(
  #[smi] rid: ResourceId,
) -> Result<
  Option<(Vec<(ByteString, ByteString)>, ResourceId, ResourceId)>,
  AnyError,
  Http2Error,
> {
  let resource = state
    .borrow()
    .resource_table
    .get::<Http2ServerConnection>(rid)?;
    .get::<Http2ServerConnection>(rid)
    .map_err(Http2Error::Resource)?;
  let mut conn = RcRef::map(&resource, |r| &r.conn).borrow_mut().await;
  if let Some(res) = conn.accept().await {
    let (req, resp) = res?;

@ -233,11 +245,12 @@ pub async fn op_http2_send_response(
  #[smi] rid: ResourceId,
  #[smi] status: u16,
  #[serde] headers: Vec<(ByteString, ByteString)>,
) -> Result<(ResourceId, u32), AnyError> {
) -> Result<(ResourceId, u32), Http2Error> {
  let resource = state
    .borrow()
    .resource_table
    .get::<Http2ServerSendResponse>(rid)?;
    .get::<Http2ServerSendResponse>(rid)
    .map_err(Http2Error::Resource)?;
  let mut send_response = RcRef::map(resource, |r| &r.send_response)
    .borrow_mut()
    .await;

@ -262,8 +275,12 @@ pub async fn op_http2_send_response(
pub async fn op_http2_poll_client_connection(
  state: Rc<RefCell<OpState>>,
  #[smi] rid: ResourceId,
) -> Result<(), AnyError> {
  let resource = state.borrow().resource_table.get::<Http2ClientConn>(rid)?;
) -> Result<(), Http2Error> {
  let resource = state
    .borrow()
    .resource_table
    .get::<Http2ClientConn>(rid)
    .map_err(Http2Error::Resource)?;

  let cancel_handle = RcRef::map(resource.clone(), |this| &this.cancel_handle);
  let mut conn = RcRef::map(resource, |this| &this.conn).borrow_mut().await;

@ -289,11 +306,12 @@ pub async fn op_http2_client_request(
  // 4 strings of keys?
  #[serde] mut pseudo_headers: HashMap<String, String>,
  #[serde] headers: Vec<(ByteString, ByteString)>,
) -> Result<(ResourceId, u32), AnyError> {
) -> Result<(ResourceId, u32), Http2Error> {
  let resource = state
    .borrow()
    .resource_table
    .get::<Http2Client>(client_rid)?;
    .get::<Http2Client>(client_rid)
    .map_err(Http2Error::Resource)?;

  let url = resource.url.clone();

@ -326,7 +344,10 @@ pub async fn op_http2_client_request(

  let resource = {
    let state = state.borrow();
    state.resource_table.get::<Http2Client>(client_rid)?
    state
      .resource_table
      .get::<Http2Client>(client_rid)
      .map_err(Http2Error::Resource)?
  };
  let mut client = RcRef::map(&resource, |r| &r.client).borrow_mut().await;
  poll_fn(|cx| client.poll_ready(cx)).await?;

@ -345,11 +366,12 @@ pub async fn op_http2_client_send_data(
  #[smi] stream_rid: ResourceId,
  #[buffer] data: JsBuffer,
  end_of_stream: bool,
) -> Result<(), AnyError> {
) -> Result<(), Http2Error> {
  let resource = state
    .borrow()
    .resource_table
    .get::<Http2ClientStream>(stream_rid)?;
    .get::<Http2ClientStream>(stream_rid)
    .map_err(Http2Error::Resource)?;
  let mut stream = RcRef::map(&resource, |r| &r.stream).borrow_mut().await;

  stream.send_data(data.to_vec().into(), end_of_stream)?;

@ -361,7 +383,7 @@ pub async fn op_http2_client_reset_stream(
  state: Rc<RefCell<OpState>>,
  #[smi] stream_rid: ResourceId,
  #[smi] code: u32,
) -> Result<(), AnyError> {
) -> Result<(), deno_core::error::AnyError> {
  let resource = state
    .borrow()
    .resource_table

@ -376,11 +398,12 @@ pub async fn op_http2_client_send_trailers(
  state: Rc<RefCell<OpState>>,
  #[smi] stream_rid: ResourceId,
  #[serde] trailers: Vec<(ByteString, ByteString)>,
) -> Result<(), AnyError> {
) -> Result<(), Http2Error> {
  let resource = state
    .borrow()
    .resource_table
    .get::<Http2ClientStream>(stream_rid)?;
    .get::<Http2ClientStream>(stream_rid)
    .map_err(Http2Error::Resource)?;
  let mut stream = RcRef::map(&resource, |r| &r.stream).borrow_mut().await;

  let mut trailers_map = http::HeaderMap::new();

@ -408,11 +431,12 @@ pub struct Http2ClientResponse {
pub async fn op_http2_client_get_response(
  state: Rc<RefCell<OpState>>,
  #[smi] stream_rid: ResourceId,
) -> Result<(Http2ClientResponse, bool), AnyError> {
) -> Result<(Http2ClientResponse, bool), Http2Error> {
  let resource = state
    .borrow()
    .resource_table
    .get::<Http2ClientStream>(stream_rid)?;
    .get::<Http2ClientStream>(stream_rid)
    .map_err(Http2Error::Resource)?;
  let mut response_future =
    RcRef::map(&resource, |r| &r.response).borrow_mut().await;

@ -478,11 +502,12 @@ fn poll_data_or_trailers(
pub async fn op_http2_client_get_response_body_chunk(
  state: Rc<RefCell<OpState>>,
  #[smi] body_rid: ResourceId,
) -> Result<(Option<Vec<u8>>, bool, bool), AnyError> {
) -> Result<(Option<Vec<u8>>, bool, bool), Http2Error> {
  let resource = state
    .borrow()
    .resource_table
    .get::<Http2ClientResponseBody>(body_rid)?;
    .get::<Http2ClientResponseBody>(body_rid)
    .map_err(Http2Error::Resource)?;
  let mut body = RcRef::map(&resource, |r| &r.body).borrow_mut().await;

  loop {

@ -525,7 +550,7 @@ pub async fn op_http2_client_get_response_body_chunk(
pub async fn op_http2_client_get_response_trailers(
  state: Rc<RefCell<OpState>>,
  #[smi] body_rid: ResourceId,
) -> Result<Option<Vec<(ByteString, ByteString)>>, AnyError> {
) -> Result<Option<Vec<(ByteString, ByteString)>>, deno_core::error::AnyError> {
  let resource = state
    .borrow()
    .resource_table
@ -1,7 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::anyhow::Error;
use deno_core::error::range_error;
use deno_core::op2;

use std::borrow::Cow;

@ -11,19 +9,21 @@ use std::borrow::Cow;

const PUNY_PREFIX: &str = "xn--";

fn invalid_input_err() -> Error {
  range_error("Invalid input")
}

fn not_basic_err() -> Error {
  range_error("Illegal input >= 0x80 (not a basic code point)")
#[derive(Debug, thiserror::Error)]
pub enum IdnaError {
  #[error("Invalid input")]
  InvalidInput,
  #[error("Input would take more than 63 characters to encode")]
  InputTooLong,
  #[error("Illegal input >= 0x80 (not a basic code point)")]
  IllegalInput,
}

/// map a domain by mapping each label with the given function
fn map_domain<E>(
fn map_domain(
  domain: &str,
  f: impl Fn(&str) -> Result<Cow<'_, str>, E>,
) -> Result<String, E> {
  f: impl Fn(&str) -> Result<Cow<'_, str>, IdnaError>,
) -> Result<String, IdnaError> {
  let mut result = String::with_capacity(domain.len());
  let mut domain = domain;

@ -48,7 +48,7 @@ fn map_domain<E>(
/// Maps a unicode domain to ascii by punycode encoding each label
///
/// Note this is not IDNA2003 or IDNA2008 compliant, rather it matches node.js's punycode implementation
fn to_ascii(input: &str) -> Result<String, Error> {
fn to_ascii(input: &str) -> Result<String, IdnaError> {
  if input.is_ascii() {
    return Ok(input.into());
  }

@ -61,9 +61,7 @@ fn to_ascii(input: &str) -> Result<String, Error> {
    } else {
      idna::punycode::encode_str(label)
        .map(|encoded| [PUNY_PREFIX, &encoded].join("").into()) // add the prefix
        .ok_or_else(|| {
          Error::msg("Input would take more than 63 characters to encode") // only error possible per the docs
        })
        .ok_or(IdnaError::InputTooLong) // only error possible per the docs
    }
  })?;

@ -74,13 +72,13 @@ fn to_ascii(input: &str) -> Result<String, Error> {
/// Maps an ascii domain to unicode by punycode decoding each label
///
/// Note this is not IDNA2003 or IDNA2008 compliant, rather it matches node.js's punycode implementation
fn to_unicode(input: &str) -> Result<String, Error> {
fn to_unicode(input: &str) -> Result<String, IdnaError> {
  map_domain(input, |s| {
    if let Some(puny) = s.strip_prefix(PUNY_PREFIX) {
      // it's a punycode encoded label
      Ok(
        idna::punycode::decode_to_string(&puny.to_lowercase())
          .ok_or_else(invalid_input_err)?
          .ok_or(IdnaError::InvalidInput)?
          .into(),
      )
    } else {

@ -95,7 +93,7 @@ fn to_unicode(input: &str) -> Result<String, Error> {
#[string]
pub fn op_node_idna_punycode_to_ascii(
  #[string] domain: String,
) -> Result<String, Error> {
) -> Result<String, IdnaError> {
  to_ascii(&domain)
}

@ -105,7 +103,7 @@ pub fn op_node_idna_punycode_to_ascii(
#[string]
pub fn op_node_idna_punycode_to_unicode(
  #[string] domain: String,
) -> Result<String, Error> {
) -> Result<String, IdnaError> {
  to_unicode(&domain)
}

@ -115,8 +113,8 @@ pub fn op_node_idna_punycode_to_unicode(
#[string]
pub fn op_node_idna_domain_to_ascii(
  #[string] domain: String,
) -> Result<String, Error> {
  idna::domain_to_ascii(&domain).map_err(|e| e.into())
) -> Result<String, idna::Errors> {
  idna::domain_to_ascii(&domain)
}

/// Converts a domain to Unicode as per the IDNA spec

@ -131,7 +129,7 @@ pub fn op_node_idna_domain_to_unicode(#[string] domain: String) -> String {
#[string]
pub fn op_node_idna_punycode_decode(
  #[string] domain: String,
) -> Result<String, Error> {
) -> Result<String, IdnaError> {
  if domain.is_empty() {
    return Ok(domain);
  }

@ -147,11 +145,10 @@ pub fn op_node_idna_punycode_decode(
  .unwrap_or(domain.len() - 1);

  if !domain[..last_dash].is_ascii() {
    return Err(not_basic_err());
    return Err(IdnaError::IllegalInput);
  }

  idna::punycode::decode_to_string(&domain)
    .ok_or_else(|| deno_core::error::range_error("Invalid input"))
  idna::punycode::decode_to_string(&domain).ok_or(IdnaError::InvalidInput)
}

#[op2]
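
As a sanity check on the semantics above, the well-known punycode round trip
should hold for these helpers; the expectation below is an assumption for
illustration, not a test vector from this commit.

```rust
// Assumed expectations for to_ascii/to_unicode above.
fn punycode_round_trip() -> Result<(), IdnaError> {
  assert_eq!(to_ascii("mañana.com")?, "xn--maana-pta.com");
  assert_eq!(to_unicode("xn--maana-pta.com")?, "mañana.com");
  Ok(())
}
```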
@ -17,8 +17,6 @@ mod impl_ {
  use std::task::Context;
  use std::task::Poll;

  use deno_core::error::bad_resource_id;
  use deno_core::error::AnyError;
  use deno_core::op2;
  use deno_core::serde;
  use deno_core::serde::Serializer;

@ -167,7 +165,7 @@ mod impl_ {
  #[smi]
  pub fn op_node_child_ipc_pipe(
    state: &mut OpState,
  ) -> Result<Option<ResourceId>, AnyError> {
  ) -> Result<Option<ResourceId>, io::Error> {
    let fd = match state.try_borrow_mut::<crate::ChildPipeFd>() {
      Some(child_pipe_fd) => child_pipe_fd.0,
      None => return Ok(None),

@ -180,6 +178,18 @@ mod impl_ {
    ))
  }

  #[derive(Debug, thiserror::Error)]
  pub enum IpcError {
    #[error(transparent)]
    Resource(deno_core::error::AnyError),
    #[error(transparent)]
    IpcJsonStream(#[from] IpcJsonStreamError),
    #[error(transparent)]
    Canceled(#[from] deno_core::Canceled),
    #[error("failed to serialize json value: {0}")]
    SerdeJson(serde_json::Error),
  }

  #[op2(async)]
  pub fn op_node_ipc_write<'a>(
    scope: &mut v8::HandleScope<'a>,

@ -192,27 +202,23 @@ mod impl_ {
    // ideally we would just return `Result<(impl Future, bool), ..>`, but that's not
    // supported by `op2` currently.
    queue_ok: v8::Local<'a, v8::Array>,
  ) -> Result<impl Future<Output = Result<(), AnyError>>, AnyError> {
  ) -> Result<impl Future<Output = Result<(), io::Error>>, IpcError> {
    let mut serialized = Vec::with_capacity(64);
    let mut ser = serde_json::Serializer::new(&mut serialized);
    serialize_v8_value(scope, value, &mut ser).map_err(|e| {
      deno_core::error::type_error(format!(
        "failed to serialize json value: {e}"
      ))
    })?;
    serialize_v8_value(scope, value, &mut ser).map_err(IpcError::SerdeJson)?;
    serialized.push(b'\n');

    let stream = state
      .borrow()
      .resource_table
      .get::<IpcJsonStreamResource>(rid)
      .map_err(|_| bad_resource_id())?;
      .map_err(IpcError::Resource)?;
    let old = stream
      .queued_bytes
      .fetch_add(serialized.len(), std::sync::atomic::Ordering::Relaxed);
    if old + serialized.len() > 2 * INITIAL_CAPACITY {
      // sending messages too fast
      let v = false.to_v8(scope)?;
      let v = false.to_v8(scope).unwrap(); // Infallible
      queue_ok.set_index(scope, 0, v);
    }
    Ok(async move {

@ -246,12 +252,12 @@ mod impl_ {
  pub async fn op_node_ipc_read(
    state: Rc<RefCell<OpState>>,
    #[smi] rid: ResourceId,
  ) -> Result<serde_json::Value, AnyError> {
  ) -> Result<serde_json::Value, IpcError> {
    let stream = state
      .borrow()
      .resource_table
      .get::<IpcJsonStreamResource>(rid)
      .map_err(|_| bad_resource_id())?;
      .map_err(IpcError::Resource)?;

    let cancel = stream.cancel.clone();
    let mut stream = RcRef::map(stream, |r| &r.read_half).borrow_mut().await;

@ -407,7 +413,7 @@ mod impl_ {
  async fn write_msg_bytes(
    self: Rc<Self>,
    msg: &[u8],
  ) -> Result<(), AnyError> {
  ) -> Result<(), io::Error> {
    let mut write_half =
      RcRef::map(self, |r| &r.write_half).borrow_mut().await;
    write_half.write_all(msg).await?;

@ -462,6 +468,14 @@ mod impl_ {
    }
  }

  #[derive(Debug, thiserror::Error)]
  pub enum IpcJsonStreamError {
    #[error("{0}")]
    Io(#[source] std::io::Error),
    #[error("{0}")]
    SimdJson(#[source] simd_json::Error),
  }

  // JSON serialization stream over IPC pipe.
  //
  // `\n` is used as a delimiter between messages.

@ -482,7 +496,7 @@ mod impl_ {

  async fn read_msg(
    &mut self,
  ) -> Result<Option<serde_json::Value>, AnyError> {
  ) -> Result<Option<serde_json::Value>, IpcJsonStreamError> {
    let mut json = None;
    let nread = read_msg_inner(
      &mut self.pipe,

@ -490,7 +504,8 @@ mod impl_ {
      &mut json,
      &mut self.read_buffer,
    )
    .await?;
    .await
    .map_err(IpcJsonStreamError::Io)?;
    if nread == 0 {
      // EOF.
      return Ok(None);

@ -500,7 +515,8 @@ mod impl_ {
      Some(v) => v,
      None => {
        // Took more than a single read and some buffering.
        simd_json::from_slice(&mut self.buffer[..nread])?
        simd_json::from_slice(&mut self.buffer[..nread])
          .map_err(IpcJsonStreamError::SimdJson)?
      }
    };
@ -1,28 +1,38 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use crate::NodePermissions;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::OpState;

mod cpus;
mod priority;
pub mod priority;

#[derive(Debug, thiserror::Error)]
pub enum OsError {
  #[error(transparent)]
  Priority(priority::PriorityError),
  #[error(transparent)]
  Permission(deno_core::error::AnyError),
  #[error("Failed to get cpu info")]
  FailedToGetCpuInfo,
}

#[op2(fast)]
pub fn op_node_os_get_priority<P>(
  state: &mut OpState,
  pid: u32,
) -> Result<i32, AnyError>
) -> Result<i32, OsError>
where
  P: NodePermissions + 'static,
{
  {
    let permissions = state.borrow_mut::<P>();
    permissions.check_sys("getPriority", "node:os.getPriority()")?;
    permissions
      .check_sys("getPriority", "node:os.getPriority()")
      .map_err(OsError::Permission)?;
  }

  priority::get_priority(pid)
  priority::get_priority(pid).map_err(OsError::Priority)
}

#[op2(fast)]

@ -30,21 +40,25 @@ pub fn op_node_os_set_priority<P>(
  state: &mut OpState,
  pid: u32,
  priority: i32,
) -> Result<(), AnyError>
) -> Result<(), OsError>
where
  P: NodePermissions + 'static,
{
  {
    let permissions = state.borrow_mut::<P>();
    permissions.check_sys("setPriority", "node:os.setPriority()")?;
    permissions
      .check_sys("setPriority", "node:os.setPriority()")
      .map_err(OsError::Permission)?;
  }

  priority::set_priority(pid, priority)
  priority::set_priority(pid, priority).map_err(OsError::Priority)
}

#[op2]
#[string]
pub fn op_node_os_username<P>(state: &mut OpState) -> Result<String, AnyError>
pub fn op_node_os_username<P>(
  state: &mut OpState,
) -> Result<String, deno_core::error::AnyError>
where
  P: NodePermissions + 'static,
{

@ -57,7 +71,9 @@ where
}

#[op2(fast)]
pub fn op_geteuid<P>(state: &mut OpState) -> Result<u32, AnyError>
pub fn op_geteuid<P>(
  state: &mut OpState,
) -> Result<u32, deno_core::error::AnyError>
where
  P: NodePermissions + 'static,
{

@ -76,7 +92,9 @@ where
}

#[op2(fast)]
pub fn op_getegid<P>(state: &mut OpState) -> Result<u32, AnyError>
pub fn op_getegid<P>(
  state: &mut OpState,
) -> Result<u32, deno_core::error::AnyError>
where
  P: NodePermissions + 'static,
{

@ -96,21 +114,25 @@ where

#[op2]
#[serde]
pub fn op_cpus<P>(state: &mut OpState) -> Result<Vec<cpus::CpuInfo>, AnyError>
pub fn op_cpus<P>(state: &mut OpState) -> Result<Vec<cpus::CpuInfo>, OsError>
where
  P: NodePermissions + 'static,
{
  {
    let permissions = state.borrow_mut::<P>();
    permissions.check_sys("cpus", "node:os.cpus()")?;
    permissions
      .check_sys("cpus", "node:os.cpus()")
      .map_err(OsError::Permission)?;
  }

  cpus::cpu_info().ok_or_else(|| type_error("Failed to get cpu info"))
  cpus::cpu_info().ok_or(OsError::FailedToGetCpuInfo)
}

#[op2]
#[string]
pub fn op_homedir<P>(state: &mut OpState) -> Result<Option<String>, AnyError>
pub fn op_homedir<P>(
  state: &mut OpState,
) -> Result<Option<String>, deno_core::error::AnyError>
where
  P: NodePermissions + 'static,
{
@ -1,12 +1,18 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::error::AnyError;

pub use impl_::*;

#[derive(Debug, thiserror::Error)]
pub enum PriorityError {
  #[error("{0}")]
  Io(#[from] std::io::Error),
  #[cfg(windows)]
  #[error("Invalid priority")]
  InvalidPriority,
}

#[cfg(unix)]
mod impl_ {
  use super::*;
  use errno::errno;
  use errno::set_errno;
  use errno::Errno;

@ -16,7 +22,7 @@ mod impl_ {
  const PRIORITY_HIGH: i32 = -14;

  // Ref: https://github.com/libuv/libuv/blob/55376b044b74db40772e8a6e24d67a8673998e02/src/unix/core.c#L1533-L1547
  pub fn get_priority(pid: u32) -> Result<i32, AnyError> {
  pub fn get_priority(pid: u32) -> Result<i32, super::PriorityError> {
    set_errno(Errno(0));
    match (
      // SAFETY: libc::getpriority is unsafe

@ -29,7 +35,10 @@ mod impl_ {
    }
  }

  pub fn set_priority(pid: u32, priority: i32) -> Result<(), AnyError> {
  pub fn set_priority(
    pid: u32,
    priority: i32,
  ) -> Result<(), super::PriorityError> {
    // SAFETY: libc::setpriority is unsafe
    match unsafe { libc::setpriority(PRIO_PROCESS, pid as id_t, priority) } {
      -1 => Err(std::io::Error::last_os_error().into()),

@ -40,8 +49,6 @@ mod impl_ {

#[cfg(windows)]
mod impl_ {
  use super::*;
  use deno_core::error::type_error;
  use winapi::shared::minwindef::DWORD;
  use winapi::shared::minwindef::FALSE;
  use winapi::shared::ntdef::NULL;

@ -67,7 +74,7 @@ mod impl_ {
  const PRIORITY_HIGHEST: i32 = -20;

  // Ported from: https://github.com/libuv/libuv/blob/a877ca2435134ef86315326ef4ef0c16bdbabf17/src/win/util.c#L1649-L1685
  pub fn get_priority(pid: u32) -> Result<i32, AnyError> {
  pub fn get_priority(pid: u32) -> Result<i32, super::PriorityError> {
    // SAFETY: Windows API calls
    unsafe {
      let handle = if pid == 0 {

@ -95,7 +102,10 @@ mod impl_ {
  }

  // Ported from: https://github.com/libuv/libuv/blob/a877ca2435134ef86315326ef4ef0c16bdbabf17/src/win/util.c#L1688-L1719
  pub fn set_priority(pid: u32, priority: i32) -> Result<(), AnyError> {
  pub fn set_priority(
    pid: u32,
    priority: i32,
  ) -> Result<(), super::PriorityError> {
    // SAFETY: Windows API calls
    unsafe {
      let handle = if pid == 0 {

@ -109,7 +119,7 @@ mod impl_ {
      #[allow(clippy::manual_range_contains)]
      let priority_class =
        if priority < PRIORITY_HIGHEST || priority > PRIORITY_LOW {
          return Err(type_error("Invalid priority"));
          return Err(super::PriorityError::InvalidPriority);
        } else if priority < PRIORITY_HIGH {
          REALTIME_PRIORITY_CLASS
        } else if priority < PRIORITY_ABOVE_NORMAL {
@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::OpState;
use deno_permissions::PermissionsContainer;

@ -51,7 +50,7 @@ pub fn op_node_process_kill(
  state: &mut OpState,
  #[smi] pid: i32,
  #[smi] sig: i32,
) -> Result<i32, AnyError> {
) -> Result<i32, deno_core::error::AnyError> {
  state
    .borrow_mut::<PermissionsContainer>()
    .check_run_all("process.kill")?;
@ -1,8 +1,5 @@
|
|||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
use deno_core::anyhow::Context;
|
||||
use deno_core::error::generic_error;
|
||||
use deno_core::error::AnyError;
|
||||
use deno_core::op2;
|
||||
use deno_core::url::Url;
|
||||
use deno_core::v8;
|
||||
|
@ -30,7 +27,7 @@ use crate::NpmResolverRc;
|
|||
fn ensure_read_permission<'a, P>(
|
||||
state: &mut OpState,
|
||||
file_path: &'a Path,
|
||||
) -> Result<Cow<'a, Path>, AnyError>
|
||||
) -> Result<Cow<'a, Path>, deno_core::error::AnyError>
|
||||
where
|
||||
P: NodePermissions + 'static,
|
||||
{
|
||||
|
@ -39,6 +36,32 @@ where
|
|||
resolver.ensure_read_permission(permissions, file_path)
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum RequireError {
|
||||
#[error(transparent)]
|
||||
UrlParse(#[from] url::ParseError),
|
||||
#[error(transparent)]
|
||||
Permission(deno_core::error::AnyError),
|
||||
#[error(transparent)]
|
||||
PackageExportsResolve(
|
||||
#[from] node_resolver::errors::PackageExportsResolveError,
|
||||
),
|
||||
#[error(transparent)]
|
||||
PackageJsonLoad(#[from] node_resolver::errors::PackageJsonLoadError),
|
||||
#[error(transparent)]
|
||||
ClosestPkgJson(#[from] node_resolver::errors::ClosestPkgJsonError),
|
||||
#[error(transparent)]
|
||||
PackageImportsResolve(
|
||||
#[from] node_resolver::errors::PackageImportsResolveError,
|
||||
),
|
||||
#[error("failed to convert '{0}' to file path")]
|
||||
FilePathConversion(Url),
|
||||
#[error(transparent)]
|
||||
Fs(#[from] deno_io::fs::FsError),
|
||||
#[error("Unable to get CWD: {0}")]
|
||||
UnableToGetCwd(deno_io::fs::FsError),
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[serde]
|
||||
pub fn op_require_init_paths() -> Vec<String> {
|
||||
|
@ -95,7 +118,7 @@ pub fn op_require_init_paths() -> Vec<String> {
|
|||
pub fn op_require_node_module_paths<P>(
|
||||
state: &mut OpState,
|
||||
#[string] from: String,
|
||||
) -> Result<Vec<String>, AnyError>
|
||||
) -> Result<Vec<String>, RequireError>
|
||||
where
|
||||
P: NodePermissions + 'static,
|
||||
{
|
||||
|
@ -104,12 +127,12 @@ where
|
|||
let from = if from.starts_with("file:///") {
|
||||
url_to_file_path(&Url::parse(&from)?)?
|
||||
} else {
|
||||
let current_dir =
|
||||
&(fs.cwd().map_err(AnyError::from)).context("Unable to get CWD")?;
|
||||
deno_path_util::normalize_path(current_dir.join(from))
|
||||
let current_dir = &fs.cwd().map_err(RequireError::UnableToGetCwd)?;
|
||||
normalize_path(current_dir.join(from))
|
   };
 
-  let from = ensure_read_permission::<P>(state, &from)?;
+  let from = ensure_read_permission::<P>(state, &from)
+    .map_err(RequireError::Permission)?;
 
   if cfg!(windows) {
     // return root node_modules when path is 'D:\\'.
@@ -264,7 +287,7 @@ pub fn op_require_path_is_absolute(#[string] p: String) -> bool {
 pub fn op_require_stat<P>(
   state: &mut OpState,
   #[string] path: String,
-) -> Result<i32, AnyError>
+) -> Result<i32, deno_core::error::AnyError>
 where
   P: NodePermissions + 'static,
 {
@@ -287,12 +310,13 @@ where
 pub fn op_require_real_path<P>(
   state: &mut OpState,
   #[string] request: String,
-) -> Result<String, AnyError>
+) -> Result<String, RequireError>
 where
   P: NodePermissions + 'static,
 {
   let path = PathBuf::from(request);
-  let path = ensure_read_permission::<P>(state, &path)?;
+  let path = ensure_read_permission::<P>(state, &path)
+    .map_err(RequireError::Permission)?;
   let fs = state.borrow::<FileSystemRc>();
   let canonicalized_path =
     deno_path_util::strip_unc_prefix(fs.realpath_sync(&path)?);
@@ -319,12 +343,14 @@ pub fn op_require_path_resolve(#[serde] parts: Vec<String>) -> String {
 #[string]
 pub fn op_require_path_dirname(
   #[string] request: String,
-) -> Result<String, AnyError> {
+) -> Result<String, deno_core::error::AnyError> {
   let p = PathBuf::from(request);
   if let Some(parent) = p.parent() {
     Ok(parent.to_string_lossy().into_owned())
   } else {
-    Err(generic_error("Path doesn't have a parent"))
+    Err(deno_core::error::generic_error(
+      "Path doesn't have a parent",
+    ))
   }
 }
 
@@ -332,12 +358,14 @@ pub fn op_require_path_dirname(
 #[string]
 pub fn op_require_path_basename(
   #[string] request: String,
-) -> Result<String, AnyError> {
+) -> Result<String, deno_core::error::AnyError> {
   let p = PathBuf::from(request);
   if let Some(path) = p.file_name() {
     Ok(path.to_string_lossy().into_owned())
   } else {
-    Err(generic_error("Path doesn't have a file name"))
+    Err(deno_core::error::generic_error(
+      "Path doesn't have a file name",
+    ))
   }
 }
 
@@ -348,7 +376,7 @@ pub fn op_require_try_self_parent_path<P>(
   has_parent: bool,
   #[string] maybe_parent_filename: Option<String>,
   #[string] maybe_parent_id: Option<String>,
-) -> Result<Option<String>, AnyError>
+) -> Result<Option<String>, deno_core::error::AnyError>
 where
   P: NodePermissions + 'static,
 {
@@ -378,7 +406,7 @@ pub fn op_require_try_self<P>(
   state: &mut OpState,
   #[string] parent_path: Option<String>,
   #[string] request: String,
-) -> Result<Option<String>, AnyError>
+) -> Result<Option<String>, RequireError>
 where
   P: NodePermissions + 'static,
 {
@@ -440,12 +468,13 @@ where
 pub fn op_require_read_file<P>(
   state: &mut OpState,
   #[string] file_path: String,
-) -> Result<String, AnyError>
+) -> Result<String, RequireError>
 where
   P: NodePermissions + 'static,
 {
   let file_path = PathBuf::from(file_path);
-  let file_path = ensure_read_permission::<P>(state, &file_path)?;
+  let file_path = ensure_read_permission::<P>(state, &file_path)
+    .map_err(RequireError::Permission)?;
   let fs = state.borrow::<FileSystemRc>();
   Ok(fs.read_text_file_lossy_sync(&file_path, None)?)
 }
@@ -472,7 +501,7 @@ pub fn op_require_resolve_exports<P>(
   #[string] name: String,
   #[string] expansion: String,
   #[string] parent_path: String,
-) -> Result<Option<String>, AnyError>
+) -> Result<Option<String>, RequireError>
 where
   P: NodePermissions + 'static,
 {
@@ -525,16 +554,14 @@ where
 pub fn op_require_read_closest_package_json<P>(
   state: &mut OpState,
   #[string] filename: String,
-) -> Result<Option<PackageJsonRc>, AnyError>
+) -> Result<Option<PackageJsonRc>, node_resolver::errors::ClosestPkgJsonError>
 where
   P: NodePermissions + 'static,
 {
   let filename = PathBuf::from(filename);
   // permissions: allow reading the closest package.json files
   let node_resolver = state.borrow::<NodeResolverRc>().clone();
-  node_resolver
-    .get_closest_package_json_from_path(&filename)
-    .map_err(AnyError::from)
+  node_resolver.get_closest_package_json_from_path(&filename)
 }
 
 #[op2]
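The hunk above also drops a now-redundant conversion: once `op_require_read_closest_package_json` declares `ClosestPkgJsonError` as its error type, the trailing `.map_err(AnyError::from)` has nothing left to do. A minimal, std-only sketch of the same idea (names here are illustrative, not from the diff):

// When a function's declared error type matches the callee's error type,
// the call can be returned as-is; the old map_err existed only to box the
// error into a catch-all type.
use std::num::ParseIntError;

// Before: Result<i32, Box<dyn std::error::Error>> needed a conversion:
//   input.parse::<i32>().map_err(Into::into)
// After: naming the concrete error removes the conversion entirely.
fn parse_count(input: &str) -> Result<i32, ParseIntError> {
  input.parse::<i32>()
}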
@@ -564,12 +591,13 @@ pub fn op_require_package_imports_resolve<P>(
   state: &mut OpState,
   #[string] referrer_filename: String,
   #[string] request: String,
-) -> Result<Option<String>, AnyError>
+) -> Result<Option<String>, RequireError>
 where
   P: NodePermissions + 'static,
 {
   let referrer_path = PathBuf::from(&referrer_filename);
-  let referrer_path = ensure_read_permission::<P>(state, &referrer_path)?;
+  let referrer_path = ensure_read_permission::<P>(state, &referrer_path)
+    .map_err(RequireError::Permission)?;
   let node_resolver = state.borrow::<NodeResolverRc>();
   let Some(pkg) =
     node_resolver.get_closest_package_json_from_path(&referrer_path)?
@@ -578,8 +606,7 @@ where
   };
 
   if pkg.imports.is_some() {
-    let referrer_url =
-      deno_core::url::Url::from_file_path(&referrer_filename).unwrap();
+    let referrer_url = Url::from_file_path(&referrer_filename).unwrap();
     let url = node_resolver.package_imports_resolve(
       &request,
       Some(&referrer_url),
@@ -604,17 +631,15 @@ pub fn op_require_break_on_next_statement(state: Rc<RefCell<OpState>>) {
   inspector.wait_for_session_and_break_on_next_statement()
 }
 
-fn url_to_file_path_string(url: &Url) -> Result<String, AnyError> {
+fn url_to_file_path_string(url: &Url) -> Result<String, RequireError> {
   let file_path = url_to_file_path(url)?;
   Ok(file_path.to_string_lossy().into_owned())
 }
 
-fn url_to_file_path(url: &Url) -> Result<PathBuf, AnyError> {
+fn url_to_file_path(url: &Url) -> Result<PathBuf, RequireError> {
   match url.to_file_path() {
     Ok(file_path) => Ok(file_path),
-    Err(()) => {
-      deno_core::anyhow::bail!("failed to convert '{}' to file path", url)
-    }
+    Err(()) => Err(RequireError::FilePathConversion(url.clone())),
   }
 }
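Taken together, the require hunks above replace the catch-all `AnyError` with a dedicated `RequireError` whose definition sits in an earlier part of this diff; the visible variants are `Permission` and `FilePathConversion`. A self-contained sketch of the pattern, with stand-in types where the real ones (`deno_core::error::AnyError`, the permission checker) are assumed:

use std::path::{Path, PathBuf};

// Stand-in enum; the real RequireError in this diff has more variants.
#[derive(Debug, thiserror::Error)]
enum RequireErrorSketch {
  #[error(transparent)]
  Permission(std::io::Error), // stands in for deno_core::error::AnyError
  #[error("failed to convert '{0}' to file path")]
  FilePathConversion(String),
}

// Stand-in for the real permission check, which yields AnyError on denial.
fn ensure_read_permission(path: &Path) -> Result<&Path, std::io::Error> {
  Ok(path)
}

fn real_path(request: String) -> Result<PathBuf, RequireErrorSketch> {
  let path = PathBuf::from(request);
  // The recurring pattern in the hunks above: tag the failure source with
  // map_err instead of letting `?` erase it into a boxed AnyError.
  let path = ensure_read_permission(&path)
    .map_err(RequireErrorSketch::Permission)?;
  Ok(path.to_path_buf())
}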
@@ -1,6 +1,5 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
-use deno_core::error::AnyError;
 use deno_core::op2;
 use deno_core::OpState;
 use deno_core::ResourceHandle;
@@ -22,7 +21,7 @@ enum HandleType {
 pub fn op_node_guess_handle_type(
   state: &mut OpState,
   rid: u32,
-) -> Result<u32, AnyError> {
+) -> Result<u32, deno_core::error::AnyError> {
   let handle = state.resource_table.get_handle(rid)?;
 
   let handle_type = match handle {
@@ -1,7 +1,5 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::generic_error;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
 
 use deno_core::op2;
 use deno_core::v8;
 use deno_core::FastString;
@@ -206,10 +204,9 @@ pub fn op_v8_write_value(
   scope: &mut v8::HandleScope,
   #[cppgc] ser: &Serializer,
   value: v8::Local<v8::Value>,
-) -> Result<(), AnyError> {
+) {
   let context = scope.get_current_context();
   ser.inner.write_value(context, value);
-  Ok(())
 }
 
 struct DeserBuffer {
@@ -271,11 +268,13 @@ pub fn op_v8_new_deserializer(
   scope: &mut v8::HandleScope,
   obj: v8::Local<v8::Object>,
   buffer: v8::Local<v8::ArrayBufferView>,
-) -> Result<Deserializer<'static>, AnyError> {
+) -> Result<Deserializer<'static>, deno_core::error::AnyError> {
   let offset = buffer.byte_offset();
   let len = buffer.byte_length();
   let backing_store = buffer.get_backing_store().ok_or_else(|| {
-    generic_error("deserialization buffer has no backing store")
+    deno_core::error::generic_error(
+      "deserialization buffer has no backing store",
+    )
   })?;
   let (buf_slice, buf_ptr) = if let Some(data) = backing_store.data() {
     // SAFETY: the offset is valid for the underlying buffer because we're getting it directly from v8
@@ -317,10 +316,10 @@ pub fn op_v8_transfer_array_buffer_de(
 #[op2(fast)]
 pub fn op_v8_read_double(
   #[cppgc] deser: &Deserializer,
-) -> Result<f64, AnyError> {
+) -> Result<f64, deno_core::error::AnyError> {
   let mut double = 0f64;
   if !deser.inner.read_double(&mut double) {
-    return Err(type_error("ReadDouble() failed"));
+    return Err(deno_core::error::type_error("ReadDouble() failed"));
   }
   Ok(double)
 }
@@ -355,10 +354,10 @@ pub fn op_v8_read_raw_bytes(
 #[op2(fast)]
 pub fn op_v8_read_uint32(
   #[cppgc] deser: &Deserializer,
-) -> Result<u32, AnyError> {
+) -> Result<u32, deno_core::error::AnyError> {
   let mut value = 0;
   if !deser.inner.read_uint32(&mut value) {
-    return Err(type_error("ReadUint32() failed"));
+    return Err(deno_core::error::type_error("ReadUint32() failed"));
   }
 
   Ok(value)
@@ -368,10 +367,10 @@ pub fn op_v8_read_uint32(
 #[serde]
 pub fn op_v8_read_uint64(
   #[cppgc] deser: &Deserializer,
-) -> Result<(u32, u32), AnyError> {
+) -> Result<(u32, u32), deno_core::error::AnyError> {
   let mut val = 0;
   if !deser.inner.read_uint64(&mut val) {
-    return Err(type_error("ReadUint64() failed"));
+    return Err(deno_core::error::type_error("ReadUint64() failed"));
   }
 
   Ok(((val >> 32) as u32, val as u32))
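Note that `op_v8_read_uint64` above returns the value as a `(u32, u32)` pair, presumably because a raw u64 cannot round-trip losslessly through a JS number, so the op hands the two halves to JS separately. A runnable sketch of the split and its inverse (the `join_u64` helper is ours, added for illustration, not from the diff):

// Split a u64 into (high, low) u32 halves, as the op's return value does,
// and recombine them. Both directions are lossless.
fn split_u64(val: u64) -> (u32, u32) {
  ((val >> 32) as u32, val as u32)
}

fn join_u64(hi: u32, lo: u32) -> u64 {
  ((hi as u64) << 32) | lo as u64
}

fn main() {
  let v = 0x0123_4567_89ab_cdef_u64;
  let (hi, lo) = split_u64(v);
  assert_eq!(join_u64(hi, lo), v);
  println!("{v:#018x} -> ({hi:#010x}, {lo:#010x})");
}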
@@ -1,7 +1,5 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
-use deno_core::error::generic_error;
-use deno_core::error::AnyError;
 use deno_core::op2;
 use deno_core::url::Url;
 use deno_core::OpState;
@@ -19,7 +17,7 @@ use crate::NodeResolverRc;
 fn ensure_read_permission<'a, P>(
   state: &mut OpState,
   file_path: &'a Path,
-) -> Result<Cow<'a, Path>, AnyError>
+) -> Result<Cow<'a, Path>, deno_core::error::AnyError>
 where
   P: NodePermissions + 'static,
 {
@@ -28,12 +26,36 @@ where
   resolver.ensure_read_permission(permissions, file_path)
 }
 
+#[derive(Debug, thiserror::Error)]
+pub enum WorkerThreadsFilenameError {
+  #[error(transparent)]
+  Permission(deno_core::error::AnyError),
+  #[error("{0}")]
+  UrlParse(#[from] url::ParseError),
+  #[error("Relative path entries must start with '.' or '..'")]
+  InvalidRelativeUrl,
+  #[error("URL from Path-String")]
+  UrlFromPathString,
+  #[error("URL to Path-String")]
+  UrlToPathString,
+  #[error("URL to Path")]
+  UrlToPath,
+  #[error("File not found [{0:?}]")]
+  FileNotFound(PathBuf),
+  #[error("Neither ESM nor CJS")]
+  NeitherEsmNorCjs,
+  #[error("{0}")]
+  UrlToNodeResolution(node_resolver::errors::UrlToNodeResolutionError),
+  #[error(transparent)]
+  Fs(#[from] deno_io::fs::FsError),
+}
+
 #[op2]
 #[string]
 pub fn op_worker_threads_filename<P>(
   state: &mut OpState,
   #[string] specifier: String,
-) -> Result<String, AnyError>
+) -> Result<String, WorkerThreadsFilenameError>
 where
   P: NodePermissions + 'static,
 {
@@ -45,40 +67,47 @@ where
   } else {
     let path = PathBuf::from(&specifier);
     if path.is_relative() && !specifier.starts_with('.') {
-      return Err(generic_error(
-        "Relative path entries must start with '.' or '..'",
-      ));
+      return Err(WorkerThreadsFilenameError::InvalidRelativeUrl);
     }
-    let path = ensure_read_permission::<P>(state, &path)?;
+    let path = ensure_read_permission::<P>(state, &path)
+      .map_err(WorkerThreadsFilenameError::Permission)?;
     let fs = state.borrow::<FileSystemRc>();
     let canonicalized_path =
       deno_path_util::strip_unc_prefix(fs.realpath_sync(&path)?);
     Url::from_file_path(canonicalized_path)
-      .map_err(|e| generic_error(format!("URL from Path-String: {:#?}", e)))?
+      .map_err(|_| WorkerThreadsFilenameError::UrlFromPathString)?
   };
   let url_path = url
     .to_file_path()
-    .map_err(|e| generic_error(format!("URL to Path-String: {:#?}", e)))?;
-  let url_path = ensure_read_permission::<P>(state, &url_path)?;
+    .map_err(|_| WorkerThreadsFilenameError::UrlToPathString)?;
+  let url_path = ensure_read_permission::<P>(state, &url_path)
+    .map_err(WorkerThreadsFilenameError::Permission)?;
   let fs = state.borrow::<FileSystemRc>();
   if !fs.exists_sync(&url_path) {
-    return Err(generic_error(format!("File not found [{:?}]", url_path)));
+    return Err(WorkerThreadsFilenameError::FileNotFound(
+      url_path.to_path_buf(),
+    ));
   }
   let node_resolver = state.borrow::<NodeResolverRc>();
-  match node_resolver.url_to_node_resolution(url)? {
+  match node_resolver
+    .url_to_node_resolution(url)
+    .map_err(WorkerThreadsFilenameError::UrlToNodeResolution)?
+  {
     NodeResolution::Esm(u) => Ok(u.to_string()),
     NodeResolution::CommonJs(u) => wrap_cjs(u),
-    NodeResolution::BuiltIn(_) => Err(generic_error("Neither ESM nor CJS")),
+    NodeResolution::BuiltIn(_) => {
+      Err(WorkerThreadsFilenameError::NeitherEsmNorCjs)
+    }
   }
 }
 
 ///
 /// Wrap a CJS file-URL and the required setup in a stringified `data:`-URL
 ///
-fn wrap_cjs(url: Url) -> Result<String, AnyError> {
+fn wrap_cjs(url: Url) -> Result<String, WorkerThreadsFilenameError> {
   let path = url
     .to_file_path()
-    .map_err(|e| generic_error(format!("URL to Path: {:#?}", e)))?;
+    .map_err(|_| WorkerThreadsFilenameError::UrlToPath)?;
   let filename = path.file_name().unwrap().to_string_lossy();
   Ok(format!(
     "data:text/javascript,import {{ createRequire }} from \"node:module\";\
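A detail worth noting in `WorkerThreadsFilenameError` above: variants like `UrlParse` and `Fs` take `#[from]`, so `?` converts automatically, while `Permission(AnyError)` does not and must be tagged via `map_err`. That split is forced: `#[from]` only works when a source type maps to exactly one variant, and `AnyError` here can arise from several failure sites. A compiling sketch with stand-in types (`std::io::Error` standing in for `AnyError`):

// `#[from]` generates From<url::ParseError>, so `?` converts on its own;
// the io::Error stand-in is converted by hand at each call site instead.
#[derive(Debug, thiserror::Error)]
enum FilenameErrorSketch {
  #[error("{0}")]
  UrlParse(#[from] url::ParseError),
  #[error(transparent)]
  Permission(std::io::Error),
}

fn to_url(specifier: &str) -> Result<url::Url, FilenameErrorSketch> {
  Ok(url::Url::parse(specifier)?) // auto-converted by #[from]
}

fn check(path: &std::path::Path) -> Result<(), FilenameErrorSketch> {
  std::fs::metadata(path)
    .map(|_| ())
    .map_err(FilenameErrorSketch::Permission) // manual tagging
}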
@@ -9,8 +9,6 @@ use brotli::BrotliDecompressStream;
 use brotli::BrotliResult;
 use brotli::BrotliState;
 use brotli::Decompressor;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
 use deno_core::op2;
 use deno_core::JsBuffer;
 use deno_core::OpState;
@@ -19,7 +17,23 @@ use deno_core::ToJsBuffer;
 use std::cell::RefCell;
 use std::io::Read;
 
-fn encoder_mode(mode: u32) -> Result<BrotliEncoderMode, AnyError> {
+#[derive(Debug, thiserror::Error)]
+pub enum BrotliError {
+  #[error("Invalid encoder mode")]
+  InvalidEncoderMode,
+  #[error("Failed to compress")]
+  CompressFailed,
+  #[error("Failed to decompress")]
+  DecompressFailed,
+  #[error(transparent)]
+  Join(#[from] tokio::task::JoinError),
+  #[error(transparent)]
+  Resource(deno_core::error::AnyError),
+  #[error("{0}")]
+  Io(std::io::Error),
+}
+
+fn encoder_mode(mode: u32) -> Result<BrotliEncoderMode, BrotliError> {
   Ok(match mode {
     0 => BrotliEncoderMode::BROTLI_MODE_GENERIC,
     1 => BrotliEncoderMode::BROTLI_MODE_TEXT,
@@ -28,7 +42,7 @@ fn encoder_mode(mode: u32) -> Result<BrotliEncoderMode, AnyError> {
     4 => BrotliEncoderMode::BROTLI_FORCE_MSB_PRIOR,
     5 => BrotliEncoderMode::BROTLI_FORCE_UTF8_PRIOR,
     6 => BrotliEncoderMode::BROTLI_FORCE_SIGNED_PRIOR,
-    _ => return Err(type_error("Invalid encoder mode")),
+    _ => return Err(BrotliError::InvalidEncoderMode),
   })
 }
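The `Join(#[from] tokio::task::JoinError)` variant in `BrotliError` above exists for the async ops later in this file: `spawn_blocking` returns `Result<T, JoinError>`, so `.await?` needs a `From<JoinError>` impl, which `#[from]` supplies; errors from inside the closure are mapped explicitly instead. A minimal sketch of that shape (the closure body is a placeholder, not real brotli work):

#[derive(Debug, thiserror::Error)]
enum AsyncErrSketch {
  #[error(transparent)]
  Join(#[from] tokio::task::JoinError),
  #[error("{0}")]
  Io(std::io::Error),
}

async fn decompress_async(buf: Vec<u8>) -> Result<Vec<u8>, AsyncErrSketch> {
  tokio::task::spawn_blocking(move || {
    // Placeholder for the blocking decompression; errors are tagged inside
    // the closure so the JoinHandle already carries the final error type.
    Ok::<_, std::io::Error>(buf).map_err(AsyncErrSketch::Io)
  })
  .await? // JoinError -> AsyncErrSketch via #[from]; inner Result returned
}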
@@ -40,7 +54,7 @@ pub fn op_brotli_compress(
   #[smi] quality: i32,
   #[smi] lgwin: i32,
   #[smi] mode: u32,
-) -> Result<usize, AnyError> {
+) -> Result<usize, BrotliError> {
   let mode = encoder_mode(mode)?;
   let mut out_size = out.len();
 
@@ -57,7 +71,7 @@ pub fn op_brotli_compress(
     &mut |_, _, _, _| (),
   );
   if result != 1 {
-    return Err(type_error("Failed to compress"));
+    return Err(BrotliError::CompressFailed);
   }
 
   Ok(out_size)
@@ -87,7 +101,7 @@ pub async fn op_brotli_compress_async(
   #[smi] quality: i32,
   #[smi] lgwin: i32,
   #[smi] mode: u32,
-) -> Result<ToJsBuffer, AnyError> {
+) -> Result<ToJsBuffer, BrotliError> {
   let mode = encoder_mode(mode)?;
   tokio::task::spawn_blocking(move || {
     let input = &*input;
@@ -107,7 +121,7 @@ pub async fn op_brotli_compress_async(
       &mut |_, _, _, _| (),
     );
     if result != 1 {
-      return Err(type_error("Failed to compress"));
+      return Err(BrotliError::CompressFailed);
     }
 
     out.truncate(out_size);
@@ -151,8 +165,11 @@ pub fn op_brotli_compress_stream(
   #[smi] rid: u32,
   #[buffer] input: &[u8],
   #[buffer] output: &mut [u8],
-) -> Result<usize, AnyError> {
-  let ctx = state.resource_table.get::<BrotliCompressCtx>(rid)?;
+) -> Result<usize, BrotliError> {
+  let ctx = state
+    .resource_table
+    .get::<BrotliCompressCtx>(rid)
+    .map_err(BrotliError::Resource)?;
   let mut inst = ctx.inst.borrow_mut();
   let mut output_offset = 0;
 
@@ -168,7 +185,7 @@ pub fn op_brotli_compress_stream(
     &mut |_, _, _, _| (),
   );
   if !result {
-    return Err(type_error("Failed to compress"));
+    return Err(BrotliError::CompressFailed);
   }
 
   Ok(output_offset)
@@ -180,8 +197,11 @@ pub fn op_brotli_compress_stream_end(
   state: &mut OpState,
   #[smi] rid: u32,
   #[buffer] output: &mut [u8],
-) -> Result<usize, AnyError> {
-  let ctx = state.resource_table.get::<BrotliCompressCtx>(rid)?;
+) -> Result<usize, BrotliError> {
+  let ctx = state
+    .resource_table
+    .get::<BrotliCompressCtx>(rid)
+    .map_err(BrotliError::Resource)?;
   let mut inst = ctx.inst.borrow_mut();
   let mut output_offset = 0;
 
@@ -197,13 +217,13 @@ pub fn op_brotli_compress_stream_end(
     &mut |_, _, _, _| (),
   );
   if !result {
-    return Err(type_error("Failed to compress"));
+    return Err(BrotliError::CompressFailed);
   }
 
   Ok(output_offset)
 }
 
-fn brotli_decompress(buffer: &[u8]) -> Result<ToJsBuffer, AnyError> {
+fn brotli_decompress(buffer: &[u8]) -> Result<ToJsBuffer, std::io::Error> {
   let mut output = Vec::with_capacity(4096);
   let mut decompressor = Decompressor::new(buffer, buffer.len());
   decompressor.read_to_end(&mut output)?;
@@ -214,7 +234,7 @@ fn brotli_decompress(buffer: &[u8]) -> Result<ToJsBuffer, AnyError> {
 #[serde]
 pub fn op_brotli_decompress(
   #[buffer] buffer: &[u8],
-) -> Result<ToJsBuffer, AnyError> {
+) -> Result<ToJsBuffer, std::io::Error> {
   brotli_decompress(buffer)
 }
 
@@ -222,8 +242,11 @@ pub fn op_brotli_decompress(
 #[serde]
 pub async fn op_brotli_decompress_async(
   #[buffer] buffer: JsBuffer,
-) -> Result<ToJsBuffer, AnyError> {
-  tokio::task::spawn_blocking(move || brotli_decompress(&buffer)).await?
+) -> Result<ToJsBuffer, BrotliError> {
+  tokio::task::spawn_blocking(move || {
+    brotli_decompress(&buffer).map_err(BrotliError::Io)
+  })
+  .await?
 }
 
 struct BrotliDecompressCtx {
@@ -252,8 +275,11 @@ pub fn op_brotli_decompress_stream(
   #[smi] rid: u32,
   #[buffer] input: &[u8],
   #[buffer] output: &mut [u8],
-) -> Result<usize, AnyError> {
-  let ctx = state.resource_table.get::<BrotliDecompressCtx>(rid)?;
+) -> Result<usize, BrotliError> {
+  let ctx = state
+    .resource_table
+    .get::<BrotliDecompressCtx>(rid)
+    .map_err(BrotliError::Resource)?;
   let mut inst = ctx.inst.borrow_mut();
   let mut output_offset = 0;
 
@@ -268,7 +294,7 @@ pub fn op_brotli_decompress_stream(
     &mut inst,
   );
   if matches!(result, BrotliResult::ResultFailure) {
-    return Err(type_error("Failed to decompress"));
+    return Err(BrotliError::DecompressFailed);
   }
 
   Ok(output_offset)
@@ -280,8 +306,11 @@ pub fn op_brotli_decompress_stream_end(
   state: &mut OpState,
   #[smi] rid: u32,
   #[buffer] output: &mut [u8],
-) -> Result<usize, AnyError> {
-  let ctx = state.resource_table.get::<BrotliDecompressCtx>(rid)?;
+) -> Result<usize, BrotliError> {
+  let ctx = state
+    .resource_table
+    .get::<BrotliDecompressCtx>(rid)
+    .map_err(BrotliError::Resource)?;
   let mut inst = ctx.inst.borrow_mut();
   let mut output_offset = 0;
 
@@ -296,7 +325,7 @@ pub fn op_brotli_decompress_stream_end(
     &mut inst,
   );
   if matches!(result, BrotliResult::ResultFailure) {
-    return Err(type_error("Failed to decompress"));
+    return Err(BrotliError::DecompressFailed);
   }
 
   Ok(output_offset)
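For orientation, a self-contained round-trip through the same `brotli` crate these ops build on: `CompressorWriter` to compress (the buffer size, quality, and window values below are arbitrary choices, not taken from this diff), then `Decompressor` plus `read_to_end`, mirroring `brotli_decompress` above:

use std::io::{Read, Write};

fn main() -> std::io::Result<()> {
  let input: &[u8] = b"hello hello hello hello";

  // Compress with a 4096-byte internal buffer, quality 5, lg_window 22.
  let mut compressed = Vec::new();
  {
    let mut w = brotli::CompressorWriter::new(&mut compressed, 4096, 5, 22);
    w.write_all(input)?;
  } // dropping the writer flushes the encoder

  // Decompress the same way brotli_decompress does above.
  let mut output = Vec::with_capacity(4096);
  let mut r =
    brotli::Decompressor::new(compressed.as_slice(), compressed.len());
  r.read_to_end(&mut output)?;

  assert_eq!(output, input);
  Ok(())
}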
Some files were not shown because too many files have changed in this diff.