1
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2024-12-02 17:01:14 -05:00

Merge branch 'main' into Fix-UNC-Path-Permissions-Issue-on-Windows

This commit is contained in:
Yazan AbdAl-Rahman 2024-10-30 08:54:58 +02:00 committed by GitHub
commit dc619d09d8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
282 changed files with 5410 additions and 2836 deletions

View file

@ -69,7 +69,7 @@
], ],
"plugins": [ "plugins": [
"https://plugins.dprint.dev/typescript-0.93.0.wasm", "https://plugins.dprint.dev/typescript-0.93.0.wasm",
"https://plugins.dprint.dev/json-0.19.3.wasm", "https://plugins.dprint.dev/json-0.19.4.wasm",
"https://plugins.dprint.dev/markdown-0.17.8.wasm", "https://plugins.dprint.dev/markdown-0.17.8.wasm",
"https://plugins.dprint.dev/toml-0.6.3.wasm", "https://plugins.dprint.dev/toml-0.6.3.wasm",
"https://plugins.dprint.dev/exec-0.5.0.json@8d9972eee71fa1590e04873540421f3eda7674d0f1aae3d7c788615e7b7413d0", "https://plugins.dprint.dev/exec-0.5.0.json@8d9972eee71fa1590e04873540421f3eda7674d0f1aae3d7c788615e7b7413d0",

View file

@ -33,7 +33,7 @@ jobs:
- uses: dsherret/rust-toolchain-file@v1 - uses: dsherret/rust-toolchain-file@v1
- name: Install deno - name: Install deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: v1.x deno-version: v1.x

View file

@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
// Bump this number when you want to purge the cache. // Bump this number when you want to purge the cache.
// Note: the tools/release/01_bump_crate_versions.ts script will update this version // Note: the tools/release/01_bump_crate_versions.ts script will update this version
// automatically via regex, so ensure that this line maintains this format. // automatically via regex, so ensure that this line maintains this format.
const cacheVersion = 21; const cacheVersion = 22;
const ubuntuX86Runner = "ubuntu-22.04"; const ubuntuX86Runner = "ubuntu-22.04";
const ubuntuX86XlRunner = "ubuntu-22.04-xl"; const ubuntuX86XlRunner = "ubuntu-22.04-xl";
@ -193,7 +193,7 @@ const installNodeStep = {
}; };
const installDenoStep = { const installDenoStep = {
name: "Install Deno", name: "Install Deno",
uses: "denoland/setup-deno@v1", uses: "denoland/setup-deno@v2",
with: { "deno-version": "v1.x" }, with: { "deno-version": "v1.x" },
}; };

View file

@ -178,7 +178,7 @@ jobs:
if: '!(matrix.skip)' if: '!(matrix.skip)'
- if: '!(matrix.skip) && (matrix.job == ''lint'' || matrix.job == ''test'' || matrix.job == ''bench'')' - if: '!(matrix.skip) && (matrix.job == ''lint'' || matrix.job == ''test'' || matrix.job == ''bench'')'
name: Install Deno name: Install Deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: v1.x deno-version: v1.x
- name: Install Python - name: Install Python
@ -361,8 +361,8 @@ jobs:
path: |- path: |-
~/.cargo/registry/index ~/.cargo/registry/index
~/.cargo/registry/cache ~/.cargo/registry/cache
key: '21-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}' key: '22-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '21-cargo-home-${{ matrix.os }}-${{ matrix.arch }}' restore-keys: '22-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
if: '!(matrix.skip)' if: '!(matrix.skip)'
- name: Restore cache build output (PR) - name: Restore cache build output (PR)
uses: actions/cache/restore@v4 uses: actions/cache/restore@v4
@ -375,7 +375,7 @@ jobs:
!./target/*/*.zip !./target/*/*.zip
!./target/*/*.tar.gz !./target/*/*.tar.gz
key: never_saved key: never_saved
restore-keys: '21-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-' restore-keys: '22-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
- name: Apply and update mtime cache - name: Apply and update mtime cache
if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))' if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
uses: ./.github/mtime_cache uses: ./.github/mtime_cache
@ -685,7 +685,7 @@ jobs:
!./target/*/*.zip !./target/*/*.zip
!./target/*/*.sha256sum !./target/*/*.sha256sum
!./target/*/*.tar.gz !./target/*/*.tar.gz
key: '21-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' key: '22-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
publish-canary: publish-canary:
name: publish canary name: publish canary
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04

View file

@ -40,7 +40,7 @@ jobs:
project_id: denoland project_id: denoland
- name: Install deno - name: Install deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: v1.x deno-version: v1.x

View file

@ -34,7 +34,7 @@ jobs:
uses: actions/checkout@v4 uses: actions/checkout@v4
- name: Install deno - name: Install deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: v1.x deno-version: v1.x

View file

@ -39,7 +39,7 @@ jobs:
- uses: dsherret/rust-toolchain-file@v1 - uses: dsherret/rust-toolchain-file@v1
- name: Install deno - name: Install deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: v1.x deno-version: v1.x

View file

@ -30,7 +30,7 @@ jobs:
persist-credentials: false persist-credentials: false
- name: Setup Deno - name: Setup Deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: ${{ matrix.deno-version }} deno-version: ${{ matrix.deno-version }}

141
Cargo.lock generated
View file

@ -1154,7 +1154,7 @@ dependencies = [
[[package]] [[package]]
name = "deno" name = "deno"
version = "2.0.2" version = "2.0.3"
dependencies = [ dependencies = [
"anstream", "anstream",
"async-trait", "async-trait",
@ -1196,7 +1196,6 @@ dependencies = [
"dprint-plugin-markdown", "dprint-plugin-markdown",
"dprint-plugin-typescript", "dprint-plugin-typescript",
"env_logger", "env_logger",
"eszip",
"fancy-regex", "fancy-regex",
"faster-hex", "faster-hex",
"flate2", "flate2",
@ -1214,7 +1213,6 @@ dependencies = [
"lazy-regex", "lazy-regex",
"libc", "libc",
"libsui", "libsui",
"libuv-sys-lite",
"libz-sys", "libz-sys",
"log", "log",
"lsp-types", "lsp-types",
@ -1222,7 +1220,6 @@ dependencies = [
"markup_fmt", "markup_fmt",
"memmem", "memmem",
"monch", "monch",
"napi_sym",
"nix", "nix",
"node_resolver", "node_resolver",
"notify", "notify",
@ -1263,9 +1260,7 @@ dependencies = [
"walkdir", "walkdir",
"which 4.4.2", "which 4.4.2",
"winapi", "winapi",
"windows-sys 0.52.0",
"winres", "winres",
"yoke",
"zeromq", "zeromq",
"zip", "zip",
"zstd", "zstd",
@ -1328,7 +1323,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_bench_util" name = "deno_bench_util"
version = "0.167.0" version = "0.168.0"
dependencies = [ dependencies = [
"bencher", "bencher",
"deno_core", "deno_core",
@ -1337,7 +1332,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_broadcast_channel" name = "deno_broadcast_channel"
version = "0.167.0" version = "0.168.0"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"deno_core", "deno_core",
@ -1348,7 +1343,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_cache" name = "deno_cache"
version = "0.105.0" version = "0.106.0"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"deno_core", "deno_core",
@ -1361,9 +1356,9 @@ dependencies = [
[[package]] [[package]]
name = "deno_cache_dir" name = "deno_cache_dir"
version = "0.13.0" version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "186a102b13b4512841f5f40784cd25822042d22954afe3b5b070d406d15eb4f2" checksum = "693ca429aebf945de5fef30df232044f9f80be4cc5a5e7c8d767226c43880f5a"
dependencies = [ dependencies = [
"base32", "base32",
"deno_media_type", "deno_media_type",
@ -1381,7 +1376,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_canvas" name = "deno_canvas"
version = "0.42.0" version = "0.43.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"deno_webgpu", "deno_webgpu",
@ -1392,9 +1387,9 @@ dependencies = [
[[package]] [[package]]
name = "deno_config" name = "deno_config"
version = "0.37.1" version = "0.37.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3cb7a1723676fba5964f8d7441d8b53748f9e74d6d4241be7de9730da021859a" checksum = "5900bfb37538d83b19ba0b157cdc785770e38422ee4632411e3bd3d90ac0f537"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"deno_package_json", "deno_package_json",
@ -1416,7 +1411,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_console" name = "deno_console"
version = "0.173.0" version = "0.174.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
] ]
@ -1461,7 +1456,7 @@ checksum = "a13951ea98c0a4c372f162d669193b4c9d991512de9f2381dd161027f34b26b1"
[[package]] [[package]]
name = "deno_cron" name = "deno_cron"
version = "0.53.0" version = "0.54.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -1474,7 +1469,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_crypto" name = "deno_crypto"
version = "0.187.0" version = "0.188.0"
dependencies = [ dependencies = [
"aes", "aes",
"aes-gcm", "aes-gcm",
@ -1536,7 +1531,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_fetch" name = "deno_fetch"
version = "0.197.0" version = "0.198.0"
dependencies = [ dependencies = [
"base64 0.21.7", "base64 0.21.7",
"bytes", "bytes",
@ -1569,7 +1564,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_ffi" name = "deno_ffi"
version = "0.160.0" version = "0.161.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"deno_permissions", "deno_permissions",
@ -1578,6 +1573,7 @@ dependencies = [
"libffi", "libffi",
"libffi-sys", "libffi-sys",
"log", "log",
"num-bigint",
"serde", "serde",
"serde-value", "serde-value",
"serde_json", "serde_json",
@ -1588,7 +1584,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_fs" name = "deno_fs"
version = "0.83.0" version = "0.84.0"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"base32", "base32",
@ -1610,9 +1606,9 @@ dependencies = [
[[package]] [[package]]
name = "deno_graph" name = "deno_graph"
version = "0.83.3" version = "0.83.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77163c46755676d8f793fc19e365537ba660a8db173cd1e02d21eb010c0b3cef" checksum = "5bd20bc0780071989c622cbfd5d4fb2e4fd05a247ccd7f791f13c8d2c3792228"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -1639,7 +1635,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_http" name = "deno_http"
version = "0.171.0" version = "0.172.0"
dependencies = [ dependencies = [
"async-compression", "async-compression",
"async-trait", "async-trait",
@ -1678,7 +1674,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_io" name = "deno_io"
version = "0.83.0" version = "0.84.0"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"deno_core", "deno_core",
@ -1699,7 +1695,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_kv" name = "deno_kv"
version = "0.81.0" version = "0.82.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -1771,12 +1767,17 @@ dependencies = [
[[package]] [[package]]
name = "deno_napi" name = "deno_napi"
version = "0.104.0" version = "0.105.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"deno_permissions", "deno_permissions",
"libc",
"libloading 0.7.4", "libloading 0.7.4",
"libuv-sys-lite",
"log",
"napi_sym",
"thiserror", "thiserror",
"windows-sys 0.52.0",
] ]
[[package]] [[package]]
@ -1794,7 +1795,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_net" name = "deno_net"
version = "0.165.0" version = "0.166.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"deno_permissions", "deno_permissions",
@ -1811,7 +1812,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_node" name = "deno_node"
version = "0.110.0" version = "0.111.0"
dependencies = [ dependencies = [
"aead-gcm-stream", "aead-gcm-stream",
"aes", "aes",
@ -1960,7 +1961,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_permissions" name = "deno_permissions"
version = "0.33.0" version = "0.34.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"deno_path_util", "deno_path_util",
@ -1977,7 +1978,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_resolver" name = "deno_resolver"
version = "0.5.0" version = "0.6.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"base32", "base32",
@ -1993,7 +1994,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_runtime" name = "deno_runtime"
version = "0.182.0" version = "0.183.0"
dependencies = [ dependencies = [
"color-print", "color-print",
"deno_ast", "deno_ast",
@ -2043,11 +2044,13 @@ dependencies = [
"percent-encoding", "percent-encoding",
"regex", "regex",
"rustyline", "rustyline",
"same-file",
"serde", "serde",
"signal-hook", "signal-hook",
"signal-hook-registry", "signal-hook-registry",
"tempfile", "tempfile",
"test_server", "test_server",
"thiserror",
"tokio", "tokio",
"tokio-metrics", "tokio-metrics",
"twox-hash", "twox-hash",
@ -2059,9 +2062,9 @@ dependencies = [
[[package]] [[package]]
name = "deno_semver" name = "deno_semver"
version = "0.5.14" version = "0.5.16"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "670fec7ef309384e23c2a90ac5d2d9d91a776d225306c75f5cdd28cf6cc8a59f" checksum = "c957c6a57c38b7dde2315df0da0ec228911e56a74f185b108a488d0401841a67"
dependencies = [ dependencies = [
"monch", "monch",
"once_cell", "once_cell",
@ -2109,7 +2112,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_tls" name = "deno_tls"
version = "0.160.0" version = "0.161.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"deno_native_certs", "deno_native_certs",
@ -2158,7 +2161,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_url" name = "deno_url"
version = "0.173.0" version = "0.174.0"
dependencies = [ dependencies = [
"deno_bench_util", "deno_bench_util",
"deno_console", "deno_console",
@ -2170,7 +2173,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_web" name = "deno_web"
version = "0.204.0" version = "0.205.0"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"base64-simd 0.8.0", "base64-simd 0.8.0",
@ -2192,7 +2195,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_webgpu" name = "deno_webgpu"
version = "0.140.0" version = "0.141.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"raw-window-handle", "raw-window-handle",
@ -2205,7 +2208,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_webidl" name = "deno_webidl"
version = "0.173.0" version = "0.174.0"
dependencies = [ dependencies = [
"deno_bench_util", "deno_bench_util",
"deno_core", "deno_core",
@ -2213,7 +2216,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_websocket" name = "deno_websocket"
version = "0.178.0" version = "0.179.0"
dependencies = [ dependencies = [
"bytes", "bytes",
"deno_core", "deno_core",
@ -2235,7 +2238,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_webstorage" name = "deno_webstorage"
version = "0.168.0" version = "0.169.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"deno_web", "deno_web",
@ -2563,9 +2566,9 @@ dependencies = [
[[package]] [[package]]
name = "dprint-plugin-json" name = "dprint-plugin-json"
version = "0.19.3" version = "0.19.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a19f4a9f2f548b2098b8ec597d7bb40af133b6e9a3187c1d3c4caa101b8c93c3" checksum = "57f91e594559b450b7c5d6a0ba9f3f9fe951c1ea371168f7c95973da3fdbd85a"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"dprint-core", "dprint-core",
@ -2577,9 +2580,9 @@ dependencies = [
[[package]] [[package]]
name = "dprint-plugin-jupyter" name = "dprint-plugin-jupyter"
version = "0.1.3" version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c67b0e54b552a4775c221b44ed33be918c400bd8041d1f044f947fbb01025cc0" checksum = "d0d20684e37b3824e2bc917cfcb14e2cdf88398eef507335d839cbd78172bfee"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"dprint-core", "dprint-core",
@ -2891,29 +2894,6 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31ae425815400e5ed474178a7a22e275a9687086a12ca63ec793ff292d8fdae8" checksum = "31ae425815400e5ed474178a7a22e275a9687086a12ca63ec793ff292d8fdae8"
[[package]]
name = "eszip"
version = "0.79.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8eb55c89bdde75a3826a79d49c9d847623ae7fbdb2695b542982982da990d33e"
dependencies = [
"anyhow",
"async-trait",
"base64 0.21.7",
"deno_ast",
"deno_graph",
"deno_npm",
"deno_semver",
"futures",
"hashlink 0.8.4",
"indexmap",
"serde",
"serde_json",
"sha2",
"thiserror",
"url",
]
[[package]] [[package]]
name = "fallible-iterator" name = "fallible-iterator"
version = "0.3.0" version = "0.3.0"
@ -3525,15 +3505,6 @@ dependencies = [
"allocator-api2", "allocator-api2",
] ]
[[package]]
name = "hashlink"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
dependencies = [
"hashbrown",
]
[[package]] [[package]]
name = "hashlink" name = "hashlink"
version = "0.9.1" version = "0.9.1"
@ -4034,9 +4005,9 @@ dependencies = [
[[package]] [[package]]
name = "jsonc-parser" name = "jsonc-parser"
version = "0.23.0" version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7725c320caac8c21d8228c1d055af27a995d371f78cc763073d3e068323641b5" checksum = "b558af6b49fd918e970471374e7a798b2c9bbcda624a210ffa3901ee5614bc8e"
dependencies = [ dependencies = [
"serde_json", "serde_json",
] ]
@ -4327,9 +4298,9 @@ dependencies = [
[[package]] [[package]]
name = "malva" name = "malva"
version = "0.10.1" version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "484beda6e5d775ed06a8ec0fce79e51d39f49d834ed2a29da3f437079321804f" checksum = "1c67b97ed99f56b86fa3c010843441f1fcdb71884bab96b8551bb3d1e7c6d529"
dependencies = [ dependencies = [
"aho-corasick", "aho-corasick",
"itertools 0.13.0", "itertools 0.13.0",
@ -4340,9 +4311,9 @@ dependencies = [
[[package]] [[package]]
name = "markup_fmt" name = "markup_fmt"
version = "0.13.1" version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9dab5ae899659fbe5c8835b2c8ca8d3e357974a3e454138925b404004973361f" checksum = "3f15d7b24ae4ea9b87279bc0696462a4fb6c2168847f2cc162a2da05fe1a0f61"
dependencies = [ dependencies = [
"aho-corasick", "aho-corasick",
"css_dataset", "css_dataset",
@ -4512,7 +4483,7 @@ dependencies = [
[[package]] [[package]]
name = "napi_sym" name = "napi_sym"
version = "0.103.0" version = "0.104.0"
dependencies = [ dependencies = [
"quote", "quote",
"serde", "serde",
@ -4567,7 +4538,7 @@ dependencies = [
[[package]] [[package]]
name = "node_resolver" name = "node_resolver"
version = "0.12.0" version = "0.13.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -5811,7 +5782,7 @@ dependencies = [
"bitflags 2.6.0", "bitflags 2.6.0",
"fallible-iterator", "fallible-iterator",
"fallible-streaming-iterator", "fallible-streaming-iterator",
"hashlink 0.9.1", "hashlink",
"libsqlite3-sys", "libsqlite3-sys",
"smallvec", "smallvec",
] ]

View file

@ -5,7 +5,6 @@ resolver = "2"
members = [ members = [
"bench_util", "bench_util",
"cli", "cli",
"cli/napi/sym",
"ext/broadcast_channel", "ext/broadcast_channel",
"ext/cache", "ext/cache",
"ext/canvas", "ext/canvas",
@ -19,6 +18,7 @@ members = [
"ext/io", "ext/io",
"ext/kv", "ext/kv",
"ext/napi", "ext/napi",
"ext/napi/sym",
"ext/net", "ext/net",
"ext/node", "ext/node",
"ext/url", "ext/url",
@ -48,16 +48,16 @@ repository = "https://github.com/denoland/deno"
deno_ast = { version = "=0.42.2", features = ["transpiling"] } deno_ast = { version = "=0.42.2", features = ["transpiling"] }
deno_core = { version = "0.314.2" } deno_core = { version = "0.314.2" }
deno_bench_util = { version = "0.167.0", path = "./bench_util" } deno_bench_util = { version = "0.168.0", path = "./bench_util" }
deno_lockfile = "=0.23.1" deno_lockfile = "=0.23.1"
deno_media_type = { version = "0.1.4", features = ["module_specifier"] } deno_media_type = { version = "0.1.4", features = ["module_specifier"] }
deno_npm = "=0.25.4" deno_npm = "=0.25.4"
deno_path_util = "=0.2.1" deno_path_util = "=0.2.1"
deno_permissions = { version = "0.33.0", path = "./runtime/permissions" } deno_permissions = { version = "0.34.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.182.0", path = "./runtime" } deno_runtime = { version = "0.183.0", path = "./runtime" }
deno_semver = "=0.5.14" deno_semver = "=0.5.16"
deno_terminal = "0.2.0" deno_terminal = "0.2.0"
napi_sym = { version = "0.103.0", path = "./cli/napi/sym" } napi_sym = { version = "0.104.0", path = "./ext/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" } test_util = { package = "test_server", path = "./tests/util/server" }
denokv_proto = "0.8.1" denokv_proto = "0.8.1"
@ -66,32 +66,32 @@ denokv_remote = "0.8.1"
denokv_sqlite = { default-features = false, version = "0.8.2" } denokv_sqlite = { default-features = false, version = "0.8.2" }
# exts # exts
deno_broadcast_channel = { version = "0.167.0", path = "./ext/broadcast_channel" } deno_broadcast_channel = { version = "0.168.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.105.0", path = "./ext/cache" } deno_cache = { version = "0.106.0", path = "./ext/cache" }
deno_canvas = { version = "0.42.0", path = "./ext/canvas" } deno_canvas = { version = "0.43.0", path = "./ext/canvas" }
deno_console = { version = "0.173.0", path = "./ext/console" } deno_console = { version = "0.174.0", path = "./ext/console" }
deno_cron = { version = "0.53.0", path = "./ext/cron" } deno_cron = { version = "0.54.0", path = "./ext/cron" }
deno_crypto = { version = "0.187.0", path = "./ext/crypto" } deno_crypto = { version = "0.188.0", path = "./ext/crypto" }
deno_fetch = { version = "0.197.0", path = "./ext/fetch" } deno_fetch = { version = "0.198.0", path = "./ext/fetch" }
deno_ffi = { version = "0.160.0", path = "./ext/ffi" } deno_ffi = { version = "0.161.0", path = "./ext/ffi" }
deno_fs = { version = "0.83.0", path = "./ext/fs" } deno_fs = { version = "0.84.0", path = "./ext/fs" }
deno_http = { version = "0.171.0", path = "./ext/http" } deno_http = { version = "0.172.0", path = "./ext/http" }
deno_io = { version = "0.83.0", path = "./ext/io" } deno_io = { version = "0.84.0", path = "./ext/io" }
deno_kv = { version = "0.81.0", path = "./ext/kv" } deno_kv = { version = "0.82.0", path = "./ext/kv" }
deno_napi = { version = "0.104.0", path = "./ext/napi" } deno_napi = { version = "0.105.0", path = "./ext/napi" }
deno_net = { version = "0.165.0", path = "./ext/net" } deno_net = { version = "0.166.0", path = "./ext/net" }
deno_node = { version = "0.110.0", path = "./ext/node" } deno_node = { version = "0.111.0", path = "./ext/node" }
deno_tls = { version = "0.160.0", path = "./ext/tls" } deno_tls = { version = "0.161.0", path = "./ext/tls" }
deno_url = { version = "0.173.0", path = "./ext/url" } deno_url = { version = "0.174.0", path = "./ext/url" }
deno_web = { version = "0.204.0", path = "./ext/web" } deno_web = { version = "0.205.0", path = "./ext/web" }
deno_webgpu = { version = "0.140.0", path = "./ext/webgpu" } deno_webgpu = { version = "0.141.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.173.0", path = "./ext/webidl" } deno_webidl = { version = "0.174.0", path = "./ext/webidl" }
deno_websocket = { version = "0.178.0", path = "./ext/websocket" } deno_websocket = { version = "0.179.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.168.0", path = "./ext/webstorage" } deno_webstorage = { version = "0.169.0", path = "./ext/webstorage" }
# resolvers # resolvers
deno_resolver = { version = "0.5.0", path = "./resolvers/deno" } deno_resolver = { version = "0.6.0", path = "./resolvers/deno" }
node_resolver = { version = "0.12.0", path = "./resolvers/node" } node_resolver = { version = "0.13.0", path = "./resolvers/node" }
aes = "=0.8.3" aes = "=0.8.3"
anyhow = "1.0.57" anyhow = "1.0.57"
@ -111,7 +111,7 @@ console_static_text = "=0.8.1"
dashmap = "5.5.3" dashmap = "5.5.3"
data-encoding = "2.3.3" data-encoding = "2.3.3"
data-url = "=0.3.0" data-url = "=0.3.0"
deno_cache_dir = "=0.13.0" deno_cache_dir = "=0.13.1"
deno_package_json = { version = "0.1.2", default-features = false } deno_package_json = { version = "0.1.2", default-features = false }
dlopen2 = "0.6.1" dlopen2 = "0.6.1"
ecb = "=0.1.2" ecb = "=0.1.2"
@ -137,7 +137,7 @@ hyper-util = { version = "=0.1.7", features = ["tokio", "client", "client-legacy
hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] } hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
indexmap = { version = "2", features = ["serde"] } indexmap = { version = "2", features = ["serde"] }
ipnet = "2.3" ipnet = "2.3"
jsonc-parser = { version = "=0.23.0", features = ["serde"] } jsonc-parser = { version = "=0.26.2", features = ["serde"] }
lazy-regex = "3" lazy-regex = "3"
libc = "0.2.126" libc = "0.2.126"
libz-sys = { version = "1.1.20", default-features = false } libz-sys = { version = "1.1.20", default-features = false }
@ -187,7 +187,7 @@ tar = "=0.4.40"
tempfile = "3.4.0" tempfile = "3.4.0"
termcolor = "1.1.3" termcolor = "1.1.3"
thiserror = "1.0.61" thiserror = "1.0.61"
tokio = { version = "=1.36.0", features = ["full"] } tokio = { version = "1.36.0", features = ["full"] }
tokio-metrics = { version = "0.3.0", features = ["rt"] } tokio-metrics = { version = "0.3.0", features = ["rt"] }
tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring", "tls12"] } tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring", "tls12"] }
tokio-socks = "0.5.1" tokio-socks = "0.5.1"

View file

@ -46,6 +46,12 @@ brew install deno
choco install deno choco install deno
``` ```
[WinGet](https://winstall.app/apps/DenoLand.Deno) (Windows):
```powershell
winget install --id=DenoLand.Deno
```
### Build and install from source ### Build and install from source
Complete instructions for building Deno from source can be found in the manual Complete instructions for building Deno from source can be found in the manual

View file

@ -6,6 +6,44 @@ https://github.com/denoland/deno/releases
We also have one-line install commands at: We also have one-line install commands at:
https://github.com/denoland/deno_install https://github.com/denoland/deno_install
### 2.0.3 / 2024.10.25
- feat(lsp): interactive inlay hints (#26382)
- fix: support node-api in denort (#26389)
- fix(check): support `--frozen` on deno check (#26479)
- fix(cli): increase size of blocking task threadpool on windows (#26465)
- fix(config): schemas for lint rule and tag autocompletion (#26515)
- fix(ext/console): ignore casing for named colors in css parsing (#26466)
- fix(ext/ffi): return u64/i64 as bigints from nonblocking ffi calls (#26486)
- fix(ext/node): cancel pending ipc writes on channel close (#26504)
- fix(ext/node): map `ERROR_INVALID_NAME` to `ENOENT` on windows (#26475)
- fix(ext/node): only set our end of child process pipe to nonblocking mode
(#26495)
- fix(ext/node): properly map reparse point error in readlink (#26375)
- fix(ext/node): refactor http.ServerResponse into function class (#26210)
- fix(ext/node): stub HTTPParser internal binding (#26401)
- fix(ext/node): use primordials in `ext/node/polyfills/https.ts` (#26323)
- fix(fmt): --ext flag requires to pass files (#26525)
- fix(fmt): upgrade formatters (#26469)
- fix(help): missing package specifier (#26380)
- fix(info): resolve workspace member mappings (#26350)
- fix(install): better json editing (#26450)
- fix(install): cache all exports of JSR packages listed in `deno.json` (#26501)
- fix(install): cache type only module deps in `deno install` (#26497)
- fix(install): don't cache json exports of JSR packages (for now) (#26530)
- fix(install): update lockfile when using package.json (#26458)
- fix(lsp): import-map-remap quickfix for type imports (#26454)
- fix(node/util): support array formats in `styleText` (#26507)
- fix(node:tls): set TLSSocket.alpnProtocol for client connections (#26476)
- fix(npm): ensure scoped package name is encoded in URLs (#26390)
- fix(npm): support version ranges with && or comma (#26453)
- fix: `.npmrc` settings not being passed to install/add command (#26473)
- fix: add 'fmt-component' to unstable features in schema file (#26526)
- fix: share inotify fd across watchers (#26200)
- fix: unpin tokio version (#26457)
- perf(compile): pass module source data from binary directly to v8 (#26494)
- perf: avoid multiple calls to runMicrotask (#26378)
### 2.0.2 / 2024.10.17 ### 2.0.2 / 2024.10.17
- fix(cli): set napi object property properly (#26344) - fix(cli): set napi object property properly (#26344)

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_bench_util" name = "deno_bench_util"
version = "0.167.0" version = "0.168.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno" name = "deno"
version = "2.0.2" version = "2.0.3"
authors.workspace = true authors.workspace = true
default-run = "deno" default-run = "deno"
edition.workspace = true edition.workspace = true
@ -70,10 +70,10 @@ winres.workspace = true
[dependencies] [dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] } deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = { workspace = true } deno_cache_dir = { workspace = true }
deno_config = { version = "=0.37.1", features = ["workspace", "sync"] } deno_config = { version = "=0.37.2", features = ["workspace", "sync"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "0.154.0", default-features = false, features = ["rust", "html", "syntect"] } deno_doc = { version = "0.154.0", default-features = false, features = ["rust", "html", "syntect"] }
deno_graph = { version = "=0.83.3" } deno_graph = { version = "=0.83.4" }
deno_lint = { version = "=0.67.0", features = ["docs"] } deno_lint = { version = "=0.67.0", features = ["docs"] }
deno_lockfile.workspace = true deno_lockfile.workspace = true
deno_npm.workspace = true deno_npm.workspace = true
@ -84,9 +84,7 @@ deno_runtime = { workspace = true, features = ["include_js_files_for_snapshottin
deno_semver.workspace = true deno_semver.workspace = true
deno_task_shell = "=0.18.1" deno_task_shell = "=0.18.1"
deno_terminal.workspace = true deno_terminal.workspace = true
eszip = "=0.79.1"
libsui = "0.4.0" libsui = "0.4.0"
napi_sym.workspace = true
node_resolver.workspace = true node_resolver.workspace = true
anstream = "0.6.14" anstream = "0.6.14"
@ -106,8 +104,8 @@ data-encoding.workspace = true
dhat = { version = "0.3.3", optional = true } dhat = { version = "0.3.3", optional = true }
dissimilar = "=1.0.4" dissimilar = "=1.0.4"
dotenvy = "0.15.7" dotenvy = "0.15.7"
dprint-plugin-json = "=0.19.3" dprint-plugin-json = "=0.19.4"
dprint-plugin-jupyter = "=0.1.3" dprint-plugin-jupyter = "=0.1.5"
dprint-plugin-markdown = "=0.17.8" dprint-plugin-markdown = "=0.17.8"
dprint-plugin-typescript = "=0.93.0" dprint-plugin-typescript = "=0.93.0"
env_logger = "=0.10.0" env_logger = "=0.10.0"
@ -123,15 +121,15 @@ http-body-util.workspace = true
hyper-util.workspace = true hyper-util.workspace = true
import_map = { version = "=0.20.1", features = ["ext"] } import_map = { version = "=0.20.1", features = ["ext"] }
indexmap.workspace = true indexmap.workspace = true
jsonc-parser.workspace = true jsonc-parser = { workspace = true, features = ["cst", "serde"] }
jupyter_runtime = { package = "runtimelib", version = "=0.14.0" } jupyter_runtime = { package = "runtimelib", version = "=0.14.0" }
lazy-regex.workspace = true lazy-regex.workspace = true
libc.workspace = true libc.workspace = true
libz-sys.workspace = true libz-sys.workspace = true
log = { workspace = true, features = ["serde"] } log = { workspace = true, features = ["serde"] }
lsp-types.workspace = true lsp-types.workspace = true
malva = "=0.10.1" malva = "=0.11.0"
markup_fmt = "=0.13.1" markup_fmt = "=0.14.0"
memmem.workspace = true memmem.workspace = true
monch.workspace = true monch.workspace = true
notify.workspace = true notify.workspace = true
@ -168,7 +166,6 @@ typed-arena = "=2.0.2"
uuid = { workspace = true, features = ["serde"] } uuid = { workspace = true, features = ["serde"] }
walkdir = "=2.3.2" walkdir = "=2.3.2"
which.workspace = true which.workspace = true
yoke.workspace = true
zeromq.workspace = true zeromq.workspace = true
zip = { version = "2.1.6", default-features = false, features = ["deflate-flate2"] } zip = { version = "2.1.6", default-features = false, features = ["deflate-flate2"] }
zstd.workspace = true zstd.workspace = true
@ -176,14 +173,12 @@ zstd.workspace = true
[target.'cfg(windows)'.dependencies] [target.'cfg(windows)'.dependencies]
junction.workspace = true junction.workspace = true
winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] } winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] }
windows-sys.workspace = true
[target.'cfg(unix)'.dependencies] [target.'cfg(unix)'.dependencies]
nix.workspace = true nix.workspace = true
[dev-dependencies] [dev-dependencies]
deno_bench_util.workspace = true deno_bench_util.workspace = true
libuv-sys-lite = "=1.48.2"
pretty_assertions.workspace = true pretty_assertions.workspace = true
test_util.workspace = true test_util.workspace = true

View file

@ -1856,6 +1856,7 @@ Unless --reload is specified, this command will not re-download already cached d
.required_unless_present("help") .required_unless_present("help")
.value_hint(ValueHint::FilePath), .value_hint(ValueHint::FilePath),
) )
.arg(frozen_lockfile_arg())
.arg(allow_import_arg()) .arg(allow_import_arg())
} }
) )
@ -2273,7 +2274,7 @@ Ignore formatting a file by adding an ignore comment at the top of the file:
"sass", "less", "html", "svelte", "vue", "astro", "yml", "yaml", "sass", "less", "html", "svelte", "vue", "astro", "yml", "yaml",
"ipynb", "ipynb",
]) ])
.help_heading(FMT_HEADING), .help_heading(FMT_HEADING).requires("files"),
) )
.arg( .arg(
Arg::new("ignore") Arg::new("ignore")
@ -4373,6 +4374,7 @@ fn check_parse(
flags.type_check_mode = TypeCheckMode::Local; flags.type_check_mode = TypeCheckMode::Local;
compile_args_without_check_parse(flags, matches)?; compile_args_without_check_parse(flags, matches)?;
unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime); unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime);
frozen_lockfile_arg_parse(flags, matches);
let files = matches.remove_many::<String>("file").unwrap().collect(); let files = matches.remove_many::<String>("file").unwrap().collect();
if matches.get_flag("all") || matches.get_flag("remote") { if matches.get_flag("all") || matches.get_flag("remote") {
flags.type_check_mode = TypeCheckMode::All; flags.type_check_mode = TypeCheckMode::All;
@ -6800,6 +6802,32 @@ mod tests {
..Flags::default() ..Flags::default()
} }
); );
let r = flags_from_vec(svec!["deno", "fmt", "--ext", "html"]);
assert!(r.is_err());
let r = flags_from_vec(svec!["deno", "fmt", "--ext", "html", "./**"]);
assert_eq!(
r.unwrap(),
Flags {
subcommand: DenoSubcommand::Fmt(FmtFlags {
check: false,
files: FileFlags {
include: vec!["./**".to_string()],
ignore: vec![],
},
use_tabs: None,
line_width: None,
indent_width: None,
single_quote: None,
prose_wrap: None,
no_semicolons: None,
unstable_component: false,
watch: Default::default(),
}),
ext: Some("html".to_string()),
..Flags::default()
}
);
} }
#[test] #[test]

View file

@ -578,6 +578,7 @@ fn discover_npmrc(
let resolved = npmrc let resolved = npmrc
.as_resolved(npm_registry_url()) .as_resolved(npm_registry_url())
.context("Failed to resolve .npmrc options")?; .context("Failed to resolve .npmrc options")?;
log::debug!(".npmrc found at: '{}'", path.display());
Ok(Arc::new(resolved)) Ok(Arc::new(resolved))
} }
@ -963,6 +964,9 @@ impl CliOptions {
match self.sub_command() { match self.sub_command() {
DenoSubcommand::Cache(_) => GraphKind::All, DenoSubcommand::Cache(_) => GraphKind::All,
DenoSubcommand::Check(_) => GraphKind::TypesOnly, DenoSubcommand::Check(_) => GraphKind::TypesOnly,
DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Local(_),
}) => GraphKind::All,
_ => self.type_check_mode().as_graph_kind(), _ => self.type_check_mode().as_graph_kind(),
} }
} }
@ -1668,6 +1672,10 @@ impl CliOptions {
if let DenoSubcommand::Run(RunFlags { if let DenoSubcommand::Run(RunFlags {
watch: Some(WatchFlagsWithPaths { paths, .. }), watch: Some(WatchFlagsWithPaths { paths, .. }),
.. ..
})
| DenoSubcommand::Serve(ServeFlags {
watch: Some(WatchFlagsWithPaths { paths, .. }),
..
}) = &self.flags.subcommand }) = &self.flags.subcommand
{ {
full_paths.extend(paths.iter().map(|path| self.initial_cwd.join(path))); full_paths.extend(paths.iter().map(|path| self.initial_cwd.join(path)));

View file

@ -1,167 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::collections::HashMap;
use std::net::TcpStream;
use std::path::Path;
use std::process::Command;
use std::sync::atomic::AtomicU16;
use std::sync::atomic::Ordering;
use std::time::Duration;
use std::time::Instant;
use super::Result;
pub use test_util::parse_wrk_output;
pub use test_util::WrkOutput as HttpBenchmarkResult;
// Some of the benchmarks in this file have been renamed. In case the history
// somehow gets messed up:
// "node_http" was once called "node"
// "deno_tcp" was once called "deno"
// "deno_http" was once called "deno_net_http"
const DURATION: &str = "10s";
pub fn benchmark(
target_path: &Path,
) -> Result<HashMap<String, HttpBenchmarkResult>> {
let deno_exe = test_util::deno_exe_path();
let deno_exe = deno_exe.to_string();
let hyper_hello_exe = target_path.join("test_server");
let hyper_hello_exe = hyper_hello_exe.to_str().unwrap();
let mut res = HashMap::new();
let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
let http_dir = manifest_dir.join("bench").join("http");
for entry in std::fs::read_dir(&http_dir)? {
let entry = entry?;
let pathbuf = entry.path();
let path = pathbuf.to_str().unwrap();
if path.ends_with(".lua") {
continue;
}
let file_stem = pathbuf.file_stem().unwrap().to_str().unwrap();
let lua_script = http_dir.join(format!("{file_stem}.lua"));
let mut maybe_lua = None;
if lua_script.exists() {
maybe_lua = Some(lua_script.to_str().unwrap());
}
let port = get_port();
// deno run -A --unstable-net <path> <addr>
res.insert(
file_stem.to_string(),
run(
&[
deno_exe.as_str(),
"run",
"--allow-all",
"--unstable-net",
"--enable-testing-features-do-not-use",
path,
&server_addr(port),
],
port,
None,
None,
maybe_lua,
)?,
);
}
res.insert("hyper".to_string(), hyper_http(hyper_hello_exe)?);
Ok(res)
}
fn run(
server_cmd: &[&str],
port: u16,
env: Option<Vec<(String, String)>>,
origin_cmd: Option<&[&str]>,
lua_script: Option<&str>,
) -> Result<HttpBenchmarkResult> {
// Wait for port 4544 to become available.
// TODO Need to use SO_REUSEPORT with tokio::net::TcpListener.
std::thread::sleep(Duration::from_secs(5));
let mut origin = None;
if let Some(cmd) = origin_cmd {
let mut com = Command::new(cmd[0]);
com.args(&cmd[1..]);
if let Some(env) = env.clone() {
com.envs(env);
}
origin = Some(com.spawn()?);
};
println!("{}", server_cmd.join(" "));
let mut server = {
let mut com = Command::new(server_cmd[0]);
com.args(&server_cmd[1..]);
if let Some(env) = env {
com.envs(env);
}
com.spawn()?
};
// Wait for server to wake up.
let now = Instant::now();
let addr = format!("127.0.0.1:{port}");
while now.elapsed().as_secs() < 30 {
if TcpStream::connect(&addr).is_ok() {
break;
}
std::thread::sleep(Duration::from_millis(10));
}
TcpStream::connect(&addr).expect("Failed to connect to server in time");
println!("Server took {} ms to start", now.elapsed().as_millis());
let wrk = test_util::prebuilt_tool_path("wrk");
assert!(wrk.is_file());
let addr = format!("http://{addr}/");
let wrk = wrk.to_string();
let mut wrk_cmd = vec![wrk.as_str(), "-d", DURATION, "--latency", &addr];
if let Some(lua_script) = lua_script {
wrk_cmd.push("-s");
wrk_cmd.push(lua_script);
}
println!("{}", wrk_cmd.join(" "));
let output = test_util::run_collect(&wrk_cmd, None, None, None, true).0;
std::thread::sleep(Duration::from_secs(1)); // wait to capture failure. TODO racy.
println!("{output}");
assert!(
server.try_wait()?.map(|s| s.success()).unwrap_or(true),
"server ended with error"
);
server.kill()?;
if let Some(mut origin) = origin {
origin.kill()?;
}
Ok(parse_wrk_output(&output))
}
static NEXT_PORT: AtomicU16 = AtomicU16::new(4544);
pub(crate) fn get_port() -> u16 {
let p = NEXT_PORT.load(Ordering::SeqCst);
NEXT_PORT.store(p.wrapping_add(1), Ordering::SeqCst);
p
}
fn server_addr(port: u16) -> String {
format!("0.0.0.0:{port}")
}
fn hyper_http(exe: &str) -> Result<HttpBenchmarkResult> {
let port = get_port();
println!("http_benchmark testing RUST hyper");
run(&[exe, &port.to_string()], port, None, None, None)
}

View file

@ -1,10 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
import { Hono } from "https://deno.land/x/hono@v2.0.9/mod.ts";
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const app = new Hono();
app.get("/", (c) => c.text("Hello, World!"));
Deno.serve({ port: Number(port), hostname }, app.fetch);

View file

@ -1,14 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const { serve } = Deno;
const path = new URL("../testdata/128k.bin", import.meta.url).pathname;
function handler() {
const file = Deno.openSync(path);
return new Response(file.readable);
}
serve({ hostname, port: Number(port) }, handler);

View file

@ -1,5 +0,0 @@
wrk.headers["foo"] = "bar"
wrk.headers["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36"
wrk.headers["Viewport-Width"] = "1920"
wrk.headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"
wrk.headers["Accept-Language"] = "en,la;q=0.9"

View file

@ -1,11 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
const addr = Deno.args[0] ?? "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const { serve } = Deno;
function handler() {
return new Response("Hello World");
}
serve({ hostname, port: Number(port), reusePort: true }, handler);

View file

@ -1,5 +0,0 @@
wrk.method = "POST"
wrk.headers["Content-Type"] = "application/octet-stream"
file = io.open("./cli/bench/testdata/128k.bin", "rb")
wrk.body = file:read("*a")

View file

@ -1,3 +0,0 @@
wrk.method = "POST"
wrk.headers["Content-Type"] = "application/json"
wrk.body = '{"hello":"deno"}'

View file

@ -1,25 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
import { renderToReadableStream } from "https://esm.run/react-dom/server";
import * as React from "https://esm.run/react";
const { serve } = Deno;
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const App = () => (
<html>
<body>
<h1>Hello World</h1>
</body>
</html>
);
const headers = {
headers: {
"Content-Type": "text/html",
},
};
serve({ hostname, port: Number(port) }, async () => {
return new Response(await renderToReadableStream(<App />), headers);
});

View file

@ -1,34 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Used for benchmarking Deno's networking.
// TODO(bartlomieju): Replace this with a real HTTP server once
// https://github.com/denoland/deno/issues/726 is completed.
// Note: this is a keep-alive server.
// deno-lint-ignore-file no-console
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const listener = Deno.listen({ hostname, port: Number(port) });
const response = new TextEncoder().encode(
"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World\n",
);
async function handle(conn: Deno.Conn): Promise<void> {
const buffer = new Uint8Array(1024);
try {
while (true) {
await conn.read(buffer);
await conn.write(response);
}
} catch (e) {
if (
!(e instanceof Deno.errors.BrokenPipe) &&
!(e instanceof Deno.errors.ConnectionReset)
) {
throw e;
}
}
conn.close();
}
console.log("Listening on", addr);
for await (const conn of listener) {
handle(conn);
}

View file

@ -17,7 +17,6 @@ use std::process::Stdio;
use std::time::SystemTime; use std::time::SystemTime;
use test_util::PathRef; use test_util::PathRef;
mod http;
mod lsp; mod lsp;
fn read_json(filename: &Path) -> Result<Value> { fn read_json(filename: &Path) -> Result<Value> {
@ -345,9 +344,11 @@ struct BenchResult {
binary_size: HashMap<String, i64>, binary_size: HashMap<String, i64>,
bundle_size: HashMap<String, i64>, bundle_size: HashMap<String, i64>,
cargo_deps: usize, cargo_deps: usize,
// TODO(bartlomieju): remove
max_latency: HashMap<String, f64>, max_latency: HashMap<String, f64>,
max_memory: HashMap<String, i64>, max_memory: HashMap<String, i64>,
lsp_exec_time: HashMap<String, i64>, lsp_exec_time: HashMap<String, i64>,
// TODO(bartlomieju): remove
req_per_sec: HashMap<String, i64>, req_per_sec: HashMap<String, i64>,
syscall_count: HashMap<String, i64>, syscall_count: HashMap<String, i64>,
thread_count: HashMap<String, i64>, thread_count: HashMap<String, i64>,
@ -362,7 +363,6 @@ async fn main() -> Result<()> {
"binary_size", "binary_size",
"cargo_deps", "cargo_deps",
"lsp", "lsp",
"http",
"strace", "strace",
"mem_usage", "mem_usage",
]; ];
@ -427,21 +427,6 @@ async fn main() -> Result<()> {
new_data.lsp_exec_time = lsp_exec_times; new_data.lsp_exec_time = lsp_exec_times;
} }
if benchmarks.contains(&"http") && cfg!(not(target_os = "windows")) {
let stats = http::benchmark(target_dir.as_path())?;
let req_per_sec = stats
.iter()
.map(|(name, result)| (name.clone(), result.requests as i64))
.collect();
new_data.req_per_sec = req_per_sec;
let max_latency = stats
.iter()
.map(|(name, result)| (name.clone(), result.latency))
.collect();
new_data.max_latency = max_latency;
}
if cfg!(target_os = "linux") && benchmarks.contains(&"strace") { if cfg!(target_os = "linux") && benchmarks.contains(&"strace") {
use std::io::Read; use std::io::Read;

View file

@ -365,6 +365,9 @@ fn main() {
return; return;
} }
deno_napi::print_linker_flags("deno");
deno_napi::print_linker_flags("denort");
// Host snapshots won't work when cross compiling. // Host snapshots won't work when cross compiling.
let target = env::var("TARGET").unwrap(); let target = env::var("TARGET").unwrap();
let host = env::var("HOST").unwrap(); let host = env::var("HOST").unwrap();
@ -374,58 +377,6 @@ fn main() {
panic!("Cross compiling with snapshot is not supported."); panic!("Cross compiling with snapshot is not supported.");
} }
let symbols_file_name = match env::consts::OS {
"android" | "freebsd" | "openbsd" => {
"generated_symbol_exports_list_linux.def".to_string()
}
os => format!("generated_symbol_exports_list_{}.def", os),
};
let symbols_path = std::path::Path::new("napi")
.join(symbols_file_name)
.canonicalize()
.expect(
"Missing symbols list! Generate using tools/napi/generate_symbols_lists.js",
);
println!("cargo:rustc-rerun-if-changed={}", symbols_path.display());
#[cfg(target_os = "windows")]
println!(
"cargo:rustc-link-arg-bin=deno=/DEF:{}",
symbols_path.display()
);
#[cfg(target_os = "macos")]
println!(
"cargo:rustc-link-arg-bin=deno=-Wl,-exported_symbols_list,{}",
symbols_path.display()
);
#[cfg(target_os = "linux")]
{
// If a custom compiler is set, the glibc version is not reliable.
// Here, we assume that if a custom compiler is used, that it will be modern enough to support a dynamic symbol list.
if env::var("CC").is_err()
&& glibc_version::get_version()
.map(|ver| ver.major <= 2 && ver.minor < 35)
.unwrap_or(false)
{
println!("cargo:warning=Compiling with all symbols exported, this will result in a larger binary. Please use glibc 2.35 or later for an optimised build.");
println!("cargo:rustc-link-arg-bin=deno=-rdynamic");
} else {
println!(
"cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}",
symbols_path.display()
);
}
}
#[cfg(target_os = "android")]
println!(
"cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}",
symbols_path.display()
);
// To debug snapshot issues uncomment: // To debug snapshot issues uncomment:
// op_fetch_asset::trace_serializer(); // op_fetch_asset::trace_serializer();

25
cli/cache/emit.rs vendored
View file

@ -39,7 +39,7 @@ impl EmitCache {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
expected_source_hash: u64, expected_source_hash: u64,
) -> Option<Vec<u8>> { ) -> Option<String> {
let emit_filename = self.get_emit_filename(specifier)?; let emit_filename = self.get_emit_filename(specifier)?;
let bytes = self.disk_cache.get(&emit_filename).ok()?; let bytes = self.disk_cache.get(&emit_filename).ok()?;
self self
@ -100,7 +100,7 @@ impl EmitFileSerializer {
&self, &self,
mut bytes: Vec<u8>, mut bytes: Vec<u8>,
expected_source_hash: u64, expected_source_hash: u64,
) -> Option<Vec<u8>> { ) -> Option<String> {
let last_newline_index = bytes.iter().rposition(|&b| b == b'\n')?; let last_newline_index = bytes.iter().rposition(|&b| b == b'\n')?;
let (content, last_line) = bytes.split_at(last_newline_index); let (content, last_line) = bytes.split_at(last_newline_index);
let hashes = last_line.strip_prefix(LAST_LINE_PREFIX.as_bytes())?; let hashes = last_line.strip_prefix(LAST_LINE_PREFIX.as_bytes())?;
@ -120,7 +120,7 @@ impl EmitFileSerializer {
// everything looks good, truncate and return it // everything looks good, truncate and return it
bytes.truncate(content.len()); bytes.truncate(content.len());
Some(bytes) String::from_utf8(bytes).ok()
} }
pub fn serialize(&self, code: &[u8], source_hash: u64) -> Vec<u8> { pub fn serialize(&self, code: &[u8], source_hash: u64) -> Vec<u8> {
@ -170,8 +170,6 @@ mod test {
}, },
emit_failed_flag: Default::default(), emit_failed_flag: Default::default(),
}; };
let to_string =
|bytes: Vec<u8>| -> String { String::from_utf8(bytes).unwrap() };
let specifier1 = let specifier1 =
ModuleSpecifier::from_file_path(temp_dir.path().join("file1.ts")) ModuleSpecifier::from_file_path(temp_dir.path().join("file1.ts"))
@ -188,13 +186,10 @@ mod test {
assert_eq!(cache.get_emit_code(&specifier1, 5), None); assert_eq!(cache.get_emit_code(&specifier1, 5), None);
// providing the correct source hash // providing the correct source hash
assert_eq!( assert_eq!(
cache.get_emit_code(&specifier1, 10).map(to_string), cache.get_emit_code(&specifier1, 10),
Some(emit_code1.clone()), Some(emit_code1.clone()),
); );
assert_eq!( assert_eq!(cache.get_emit_code(&specifier2, 2), Some(emit_code2));
cache.get_emit_code(&specifier2, 2).map(to_string),
Some(emit_code2)
);
// try changing the cli version (should not load previous ones) // try changing the cli version (should not load previous ones)
let cache = EmitCache { let cache = EmitCache {
@ -215,18 +210,12 @@ mod test {
}, },
emit_failed_flag: Default::default(), emit_failed_flag: Default::default(),
}; };
assert_eq!( assert_eq!(cache.get_emit_code(&specifier1, 5), Some(emit_code1));
cache.get_emit_code(&specifier1, 5).map(to_string),
Some(emit_code1)
);
// adding when already exists should not cause issue // adding when already exists should not cause issue
let emit_code3 = "asdf".to_string(); let emit_code3 = "asdf".to_string();
cache.set_emit_code(&specifier1, 20, emit_code3.as_bytes()); cache.set_emit_code(&specifier1, 20, emit_code3.as_bytes());
assert_eq!(cache.get_emit_code(&specifier1, 5), None); assert_eq!(cache.get_emit_code(&specifier1, 5), None);
assert_eq!( assert_eq!(cache.get_emit_code(&specifier1, 20), Some(emit_code3));
cache.get_emit_code(&specifier1, 20).map(to_string),
Some(emit_code3)
);
} }
} }

1
cli/cache/mod.rs vendored
View file

@ -378,6 +378,7 @@ impl Loader for FetchCacher {
} else { } else {
FetchPermissionsOptionRef::DynamicContainer(&permissions) FetchPermissionsOptionRef::DynamicContainer(&permissions)
}, },
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: maybe_cache_setting.as_ref(), maybe_cache_setting: maybe_cache_setting.as_ref(),
}, },

View file

@ -13,7 +13,6 @@ use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::FutureExt; use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
use deno_core::ModuleCodeBytes;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::MediaType; use deno_graph::MediaType;
use deno_graph::Module; use deno_graph::Module;
@ -60,6 +59,7 @@ impl Emitter {
continue; continue;
}; };
// todo(https://github.com/denoland/deno_media_type/pull/12): use is_emittable()
let is_emittable = matches!( let is_emittable = matches!(
module.media_type, module.media_type,
MediaType::TypeScript MediaType::TypeScript
@ -93,7 +93,7 @@ impl Emitter {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
source: &str, source: &str,
) -> Option<Vec<u8>> { ) -> Option<String> {
let source_hash = self.get_source_hash(source); let source_hash = self.get_source_hash(source);
self.emit_cache.get_emit_code(specifier, source_hash) self.emit_cache.get_emit_code(specifier, source_hash)
} }
@ -103,7 +103,7 @@ impl Emitter {
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
media_type: MediaType, media_type: MediaType,
source: &Arc<str>, source: &Arc<str>,
) -> Result<ModuleCodeBytes, AnyError> { ) -> Result<String, AnyError> {
// Note: keep this in sync with the sync version below // Note: keep this in sync with the sync version below
let helper = EmitParsedSourceHelper(self); let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, source) { match helper.pre_emit_parsed_source(specifier, source) {
@ -112,7 +112,7 @@ impl Emitter {
let parsed_source_cache = self.parsed_source_cache.clone(); let parsed_source_cache = self.parsed_source_cache.clone();
let transpile_and_emit_options = let transpile_and_emit_options =
self.transpile_and_emit_options.clone(); self.transpile_and_emit_options.clone();
let transpile_result = deno_core::unsync::spawn_blocking({ let transpiled_source = deno_core::unsync::spawn_blocking({
let specifier = specifier.clone(); let specifier = specifier.clone();
let source = source.clone(); let source = source.clone();
move || -> Result<_, AnyError> { move || -> Result<_, AnyError> {
@ -128,11 +128,12 @@ impl Emitter {
}) })
.await .await
.unwrap()?; .unwrap()?;
Ok(helper.post_emit_parsed_source( helper.post_emit_parsed_source(
specifier, specifier,
transpile_result, &transpiled_source,
source_hash, source_hash,
)) );
Ok(transpiled_source)
} }
} }
} }
@ -142,13 +143,13 @@ impl Emitter {
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
media_type: MediaType, media_type: MediaType,
source: &Arc<str>, source: &Arc<str>,
) -> Result<ModuleCodeBytes, AnyError> { ) -> Result<String, AnyError> {
// Note: keep this in sync with the async version above // Note: keep this in sync with the async version above
let helper = EmitParsedSourceHelper(self); let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, source) { match helper.pre_emit_parsed_source(specifier, source) {
PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::Cached(emitted_text) => Ok(emitted_text),
PreEmitResult::NotCached { source_hash } => { PreEmitResult::NotCached { source_hash } => {
let transpile_result = EmitParsedSourceHelper::transpile( let transpiled_source = EmitParsedSourceHelper::transpile(
&self.parsed_source_cache, &self.parsed_source_cache,
specifier, specifier,
source.clone(), source.clone(),
@ -156,11 +157,12 @@ impl Emitter {
&self.transpile_and_emit_options.0, &self.transpile_and_emit_options.0,
&self.transpile_and_emit_options.1, &self.transpile_and_emit_options.1,
)?; )?;
Ok(helper.post_emit_parsed_source( helper.post_emit_parsed_source(
specifier, specifier,
transpile_result, &transpiled_source,
source_hash, source_hash,
)) );
Ok(transpiled_source)
} }
} }
} }
@ -226,7 +228,7 @@ impl Emitter {
} }
enum PreEmitResult { enum PreEmitResult {
Cached(ModuleCodeBytes), Cached(String),
NotCached { source_hash: u64 }, NotCached { source_hash: u64 },
} }
@ -244,7 +246,7 @@ impl<'a> EmitParsedSourceHelper<'a> {
if let Some(emit_code) = if let Some(emit_code) =
self.0.emit_cache.get_emit_code(specifier, source_hash) self.0.emit_cache.get_emit_code(specifier, source_hash)
{ {
PreEmitResult::Cached(emit_code.into_boxed_slice().into()) PreEmitResult::Cached(emit_code)
} else { } else {
PreEmitResult::NotCached { source_hash } PreEmitResult::NotCached { source_hash }
} }
@ -257,21 +259,14 @@ impl<'a> EmitParsedSourceHelper<'a> {
media_type: MediaType, media_type: MediaType,
transpile_options: &deno_ast::TranspileOptions, transpile_options: &deno_ast::TranspileOptions,
emit_options: &deno_ast::EmitOptions, emit_options: &deno_ast::EmitOptions,
) -> Result<TranspileResult, AnyError> { ) -> Result<String, AnyError> {
// nothing else needs the parsed source at this point, so remove from // nothing else needs the parsed source at this point, so remove from
// the cache in order to not transpile owned // the cache in order to not transpile owned
let parsed_source = parsed_source_cache let parsed_source = parsed_source_cache
.remove_or_parse_module(specifier, source, media_type)?; .remove_or_parse_module(specifier, source, media_type)?;
ensure_no_import_assertion(&parsed_source)?; ensure_no_import_assertion(&parsed_source)?;
Ok(parsed_source.transpile(transpile_options, emit_options)?) let transpile_result =
} parsed_source.transpile(transpile_options, emit_options)?;
pub fn post_emit_parsed_source(
&self,
specifier: &ModuleSpecifier,
transpile_result: TranspileResult,
source_hash: u64,
) -> ModuleCodeBytes {
let transpiled_source = match transpile_result { let transpiled_source = match transpile_result {
TranspileResult::Owned(source) => source, TranspileResult::Owned(source) => source,
TranspileResult::Cloned(source) => { TranspileResult::Cloned(source) => {
@ -280,12 +275,21 @@ impl<'a> EmitParsedSourceHelper<'a> {
} }
}; };
debug_assert!(transpiled_source.source_map.is_none()); debug_assert!(transpiled_source.source_map.is_none());
let text = String::from_utf8(transpiled_source.source)?;
Ok(text)
}
pub fn post_emit_parsed_source(
&self,
specifier: &ModuleSpecifier,
transpiled_source: &str,
source_hash: u64,
) {
self.0.emit_cache.set_emit_code( self.0.emit_cache.set_emit_code(
specifier, specifier,
source_hash, source_hash,
&transpiled_source.source, transpiled_source.as_bytes(),
); );
transpiled_source.source.into_boxed_slice().into()
} }
} }

View file

@ -762,6 +762,7 @@ impl CliFactory {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
Ok(DenoCompileBinaryWriter::new( Ok(DenoCompileBinaryWriter::new(
self.deno_dir()?, self.deno_dir()?,
self.emitter()?,
self.file_fetcher()?, self.file_fetcher()?,
self.http_client_provider(), self.http_client_provider(),
self.npm_resolver().await?.as_ref(), self.npm_resolver().await?.as_ref(),

View file

@ -24,6 +24,7 @@ use deno_graph::source::LoaderChecksum;
use deno_path_util::url_to_file_path; use deno_path_util::url_to_file_path;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_web::BlobStore; use deno_runtime::deno_web::BlobStore;
use http::header;
use log::debug; use log::debug;
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::HashMap; use std::collections::HashMap;
@ -181,6 +182,7 @@ pub enum FetchPermissionsOptionRef<'a> {
pub struct FetchOptions<'a> { pub struct FetchOptions<'a> {
pub specifier: &'a ModuleSpecifier, pub specifier: &'a ModuleSpecifier,
pub permissions: FetchPermissionsOptionRef<'a>, pub permissions: FetchPermissionsOptionRef<'a>,
pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
pub maybe_accept: Option<&'a str>, pub maybe_accept: Option<&'a str>,
pub maybe_cache_setting: Option<&'a CacheSetting>, pub maybe_cache_setting: Option<&'a CacheSetting>,
} }
@ -350,6 +352,7 @@ impl FileFetcher {
maybe_accept: Option<&str>, maybe_accept: Option<&str>,
cache_setting: &CacheSetting, cache_setting: &CacheSetting,
maybe_checksum: Option<&LoaderChecksum>, maybe_checksum: Option<&LoaderChecksum>,
maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
) -> Result<FileOrRedirect, AnyError> { ) -> Result<FileOrRedirect, AnyError> {
debug!( debug!(
"FileFetcher::fetch_remote_no_follow - specifier: {}", "FileFetcher::fetch_remote_no_follow - specifier: {}",
@ -442,6 +445,7 @@ impl FileFetcher {
.as_ref() .as_ref()
.map(|(_, etag)| etag.clone()), .map(|(_, etag)| etag.clone()),
maybe_auth_token: maybe_auth_token.clone(), maybe_auth_token: maybe_auth_token.clone(),
maybe_auth: maybe_auth.clone(),
maybe_progress_guard: maybe_progress_guard.as_ref(), maybe_progress_guard: maybe_progress_guard.as_ref(),
}) })
.await? .await?
@ -538,7 +542,18 @@ impl FileFetcher {
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> Result<File, AnyError> { ) -> Result<File, AnyError> {
self self
.fetch_inner(specifier, FetchPermissionsOptionRef::AllowAll) .fetch_inner(specifier, None, FetchPermissionsOptionRef::AllowAll)
.await
}
#[inline(always)]
pub async fn fetch_bypass_permissions_with_maybe_auth(
&self,
specifier: &ModuleSpecifier,
maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
) -> Result<File, AnyError> {
self
.fetch_inner(specifier, maybe_auth, FetchPermissionsOptionRef::AllowAll)
.await .await
} }
@ -552,6 +567,7 @@ impl FileFetcher {
self self
.fetch_inner( .fetch_inner(
specifier, specifier,
None,
FetchPermissionsOptionRef::StaticContainer(permissions), FetchPermissionsOptionRef::StaticContainer(permissions),
) )
.await .await
@ -560,12 +576,14 @@ impl FileFetcher {
async fn fetch_inner( async fn fetch_inner(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
permissions: FetchPermissionsOptionRef<'_>, permissions: FetchPermissionsOptionRef<'_>,
) -> Result<File, AnyError> { ) -> Result<File, AnyError> {
self self
.fetch_with_options(FetchOptions { .fetch_with_options(FetchOptions {
specifier, specifier,
permissions, permissions,
maybe_auth,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: None, maybe_cache_setting: None,
}) })
@ -585,12 +603,14 @@ impl FileFetcher {
max_redirect: usize, max_redirect: usize,
) -> Result<File, AnyError> { ) -> Result<File, AnyError> {
let mut specifier = Cow::Borrowed(options.specifier); let mut specifier = Cow::Borrowed(options.specifier);
let mut maybe_auth = options.maybe_auth.clone();
for _ in 0..=max_redirect { for _ in 0..=max_redirect {
match self match self
.fetch_no_follow_with_options(FetchNoFollowOptions { .fetch_no_follow_with_options(FetchNoFollowOptions {
fetch_options: FetchOptions { fetch_options: FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: options.permissions, permissions: options.permissions,
maybe_auth: maybe_auth.clone(),
maybe_accept: options.maybe_accept, maybe_accept: options.maybe_accept,
maybe_cache_setting: options.maybe_cache_setting, maybe_cache_setting: options.maybe_cache_setting,
}, },
@ -602,6 +622,10 @@ impl FileFetcher {
return Ok(file); return Ok(file);
} }
FileOrRedirect::Redirect(redirect_specifier) => { FileOrRedirect::Redirect(redirect_specifier) => {
// If we were redirected to another origin, don't send the auth header anymore.
if redirect_specifier.origin() != specifier.origin() {
maybe_auth = None;
}
specifier = Cow::Owned(redirect_specifier); specifier = Cow::Owned(redirect_specifier);
} }
} }
@ -666,6 +690,7 @@ impl FileFetcher {
options.maybe_accept, options.maybe_accept,
options.maybe_cache_setting.unwrap_or(&self.cache_setting), options.maybe_cache_setting.unwrap_or(&self.cache_setting),
maybe_checksum, maybe_checksum,
options.maybe_auth,
) )
.await .await
} }
@ -756,6 +781,7 @@ mod tests {
FetchOptions { FetchOptions {
specifier, specifier,
permissions: FetchPermissionsOptionRef::AllowAll, permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: Some(&file_fetcher.cache_setting), maybe_cache_setting: Some(&file_fetcher.cache_setting),
}, },
@ -1255,6 +1281,7 @@ mod tests {
FetchOptions { FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: FetchPermissionsOptionRef::AllowAll, permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: Some(&file_fetcher.cache_setting), maybe_cache_setting: Some(&file_fetcher.cache_setting),
}, },
@ -1268,6 +1295,7 @@ mod tests {
FetchOptions { FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: FetchPermissionsOptionRef::AllowAll, permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: Some(&file_fetcher.cache_setting), maybe_cache_setting: Some(&file_fetcher.cache_setting),
}, },

View file

@ -1009,7 +1009,11 @@ impl deno_graph::source::Reporter for FileWatcherReporter {
) { ) {
let mut file_paths = self.file_paths.lock(); let mut file_paths = self.file_paths.lock();
if specifier.scheme() == "file" { if specifier.scheme() == "file" {
file_paths.push(specifier.to_file_path().unwrap()); // Don't trust that the path is a valid path at this point:
// https://github.com/denoland/deno/issues/26209.
if let Ok(file_path) = specifier.to_file_path() {
file_paths.push(file_path);
}
} }
if modules_done == modules_total { if modules_done == modules_total {

View file

@ -19,6 +19,7 @@ use deno_runtime::deno_fetch;
use deno_runtime::deno_fetch::create_http_client; use deno_runtime::deno_fetch::create_http_client;
use deno_runtime::deno_fetch::CreateHttpClientOptions; use deno_runtime::deno_fetch::CreateHttpClientOptions;
use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_tls::RootCertStoreProvider;
use http::header;
use http::header::HeaderName; use http::header::HeaderName;
use http::header::HeaderValue; use http::header::HeaderValue;
use http::header::ACCEPT; use http::header::ACCEPT;
@ -204,6 +205,7 @@ pub struct FetchOnceArgs<'a> {
pub maybe_accept: Option<String>, pub maybe_accept: Option<String>,
pub maybe_etag: Option<String>, pub maybe_etag: Option<String>,
pub maybe_auth_token: Option<AuthToken>, pub maybe_auth_token: Option<AuthToken>,
pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
pub maybe_progress_guard: Option<&'a UpdateGuard>, pub maybe_progress_guard: Option<&'a UpdateGuard>,
} }
@ -382,6 +384,8 @@ impl HttpClient {
request request
.headers_mut() .headers_mut()
.insert(AUTHORIZATION, authorization_val); .insert(AUTHORIZATION, authorization_val);
} else if let Some((header, value)) = args.maybe_auth {
request.headers_mut().insert(header, value);
} }
if let Some(accept) = args.maybe_accept { if let Some(accept) = args.maybe_accept {
let accepts_val = HeaderValue::from_str(&accept)?; let accepts_val = HeaderValue::from_str(&accept)?;
@ -792,6 +796,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -818,6 +823,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -845,6 +851,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -866,6 +873,7 @@ mod test {
maybe_etag: Some("33a64df551425fcc55e".to_string()), maybe_etag: Some("33a64df551425fcc55e".to_string()),
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
assert_eq!(res.unwrap(), FetchOnceResult::NotModified); assert_eq!(res.unwrap(), FetchOnceResult::NotModified);
@ -885,6 +893,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -914,6 +923,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, _)) = result { if let Ok(FetchOnceResult::Code(body, _)) = result {
@ -939,6 +949,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Redirect(url, _)) = result { if let Ok(FetchOnceResult::Redirect(url, _)) = result {
@ -974,6 +985,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1021,6 +1033,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
@ -1083,6 +1096,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
@ -1136,6 +1150,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1177,6 +1192,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1199,6 +1215,7 @@ mod test {
maybe_etag: Some("33a64df551425fcc55e".to_string()), maybe_etag: Some("33a64df551425fcc55e".to_string()),
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
assert_eq!(res.unwrap(), FetchOnceResult::NotModified); assert_eq!(res.unwrap(), FetchOnceResult::NotModified);
@ -1233,6 +1250,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1262,6 +1280,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
assert!(result.is_err()); assert!(result.is_err());
@ -1283,6 +1302,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
@ -1306,6 +1326,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;

View file

@ -18,7 +18,6 @@ use deno_lint::diagnostic::LintDiagnosticRange;
use deno_ast::SourceRange; use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned; use deno_ast::SourceRangedForSpanned;
use deno_ast::SourceTextInfo; use deno_ast::SourceTextInfo;
use deno_core::anyhow::anyhow;
use deno_core::error::custom_error; use deno_core::error::custom_error;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde::Deserialize; use deno_core::serde::Deserialize;
@ -40,6 +39,7 @@ use import_map::ImportMap;
use node_resolver::NpmResolver; use node_resolver::NpmResolver;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use std::borrow::Cow;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
@ -598,29 +598,25 @@ pub fn fix_ts_import_changes(
/// Fix tsc import code actions so that the module specifier is correct for /// Fix tsc import code actions so that the module specifier is correct for
/// resolution by Deno (includes the extension). /// resolution by Deno (includes the extension).
fn fix_ts_import_action( fn fix_ts_import_action<'a>(
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
action: &tsc::CodeFixAction, action: &'a tsc::CodeFixAction,
import_mapper: &TsResponseImportMapper, import_mapper: &TsResponseImportMapper,
) -> Result<Option<tsc::CodeFixAction>, AnyError> { ) -> Option<Cow<'a, tsc::CodeFixAction>> {
if matches!( if !matches!(
action.fix_name.as_str(), action.fix_name.as_str(),
"import" | "fixMissingFunctionDeclaration" "import" | "fixMissingFunctionDeclaration"
) { ) {
let change = action return Some(Cow::Borrowed(action));
.changes }
.first() let specifier = (|| {
.ok_or_else(|| anyhow!("Unexpected action changes."))?; let text_change = action.changes.first()?.text_changes.first()?;
let text_change = change let captures = IMPORT_SPECIFIER_RE.captures(&text_change.new_text)?;
.text_changes Some(captures.get(1)?.as_str())
.first() })();
.ok_or_else(|| anyhow!("Missing text change."))?; let Some(specifier) = specifier else {
if let Some(captures) = IMPORT_SPECIFIER_RE.captures(&text_change.new_text) return Some(Cow::Borrowed(action));
{ };
let specifier = captures
.get(1)
.ok_or_else(|| anyhow!("Missing capture."))?
.as_str();
if let Some(new_specifier) = if let Some(new_specifier) =
import_mapper.check_unresolved_specifier(specifier, referrer) import_mapper.check_unresolved_specifier(specifier, referrer)
{ {
@ -645,21 +641,19 @@ fn fix_ts_import_action(
}) })
.collect(); .collect();
return Ok(Some(tsc::CodeFixAction { Some(Cow::Owned(tsc::CodeFixAction {
description, description,
changes, changes,
commands: None, commands: None,
fix_name: action.fix_name.clone(), fix_name: action.fix_name.clone(),
fix_id: None, fix_id: None,
fix_all_description: None, fix_all_description: None,
})); }))
} else if !import_mapper.is_valid_import(specifier, referrer) { } else if !import_mapper.is_valid_import(specifier, referrer) {
return Ok(None); None
} else {
Some(Cow::Borrowed(action))
} }
}
}
Ok(Some(action.clone()))
} }
/// Determines if two TypeScript diagnostic codes are effectively equivalent. /// Determines if two TypeScript diagnostic codes are effectively equivalent.
@ -1004,8 +998,7 @@ impl CodeActionCollection {
specifier, specifier,
action, action,
&language_server.get_ts_response_import_mapper(specifier), &language_server.get_ts_response_import_mapper(specifier),
)? ) else {
else {
return Ok(()); return Ok(());
}; };
let edit = ts_changes_to_edit(&action.changes, language_server)?; let edit = ts_changes_to_edit(&action.changes, language_server)?;
@ -1027,7 +1020,7 @@ impl CodeActionCollection {
}); });
self self
.actions .actions
.push(CodeActionKind::Tsc(code_action, action.clone())); .push(CodeActionKind::Tsc(code_action, action.as_ref().clone()));
if let Some(fix_id) = &action.fix_id { if let Some(fix_id) = &action.fix_id {
if let Some(CodeActionKind::Tsc(existing_fix_all, existing_action)) = if let Some(CodeActionKind::Tsc(existing_fix_all, existing_action)) =

View file

@ -439,6 +439,8 @@ pub struct LanguagePreferences {
pub use_aliases_for_renames: bool, pub use_aliases_for_renames: bool,
#[serde(default)] #[serde(default)]
pub quote_style: QuoteStyle, pub quote_style: QuoteStyle,
#[serde(default)]
pub prefer_type_only_auto_imports: bool,
} }
impl Default for LanguagePreferences { impl Default for LanguagePreferences {
@ -449,6 +451,7 @@ impl Default for LanguagePreferences {
auto_import_file_exclude_patterns: vec![], auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true, use_aliases_for_renames: true,
quote_style: Default::default(), quote_style: Default::default(),
prefer_type_only_auto_imports: false,
} }
} }
} }
@ -2251,6 +2254,7 @@ mod tests {
auto_import_file_exclude_patterns: vec![], auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true, use_aliases_for_renames: true,
quote_style: QuoteStyle::Auto, quote_style: QuoteStyle::Auto,
prefer_type_only_auto_imports: false,
}, },
suggest: CompletionSettings { suggest: CompletionSettings {
complete_function_calls: false, complete_function_calls: false,
@ -2296,6 +2300,7 @@ mod tests {
auto_import_file_exclude_patterns: vec![], auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true, use_aliases_for_renames: true,
quote_style: QuoteStyle::Auto, quote_style: QuoteStyle::Auto,
prefer_type_only_auto_imports: false,
}, },
suggest: CompletionSettings { suggest: CompletionSettings {
complete_function_calls: false, complete_function_calls: false,

View file

@ -1499,7 +1499,11 @@ fn diagnose_dependency(
.data_for_specifier(referrer_doc.file_referrer().unwrap_or(referrer)) .data_for_specifier(referrer_doc.file_referrer().unwrap_or(referrer))
.and_then(|d| d.resolver.maybe_import_map()); .and_then(|d| d.resolver.maybe_import_map());
if let Some(import_map) = import_map { if let Some(import_map) = import_map {
if let Resolution::Ok(resolved) = &dependency.maybe_code { let resolved = dependency
.maybe_code
.ok()
.or_else(|| dependency.maybe_type.ok());
if let Some(resolved) = resolved {
if let Some(to) = import_map.lookup(&resolved.specifier, referrer) { if let Some(to) = import_map.lookup(&resolved.specifier, referrer) {
if dependency_key != to { if dependency_key != to {
diagnostics.push( diagnostics.push(

View file

@ -3812,7 +3812,7 @@ impl Inner {
let maybe_inlay_hints = maybe_inlay_hints.map(|hints| { let maybe_inlay_hints = maybe_inlay_hints.map(|hints| {
hints hints
.iter() .iter()
.map(|hint| hint.to_lsp(line_index.clone())) .map(|hint| hint.to_lsp(line_index.clone(), self))
.collect() .collect()
}); });
self.performance.measure(mark); self.performance.measure(mark);

View file

@ -4,6 +4,7 @@ use dashmap::DashMap;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_npm::npm_rc::NpmRc;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::Version; use deno_semver::Version;
use serde::Deserialize; use serde::Deserialize;
@ -25,7 +26,10 @@ pub struct CliNpmSearchApi {
impl CliNpmSearchApi { impl CliNpmSearchApi {
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self { pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
let resolver = NpmFetchResolver::new(file_fetcher.clone()); let resolver = NpmFetchResolver::new(
file_fetcher.clone(),
Arc::new(NpmRc::default().as_resolved(npm_registry_url()).unwrap()),
);
Self { Self {
file_fetcher, file_fetcher,
resolver, resolver,

View file

@ -482,6 +482,7 @@ impl ModuleRegistry {
.fetch_with_options(FetchOptions { .fetch_with_options(FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: FetchPermissionsOptionRef::AllowAll, permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"), maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"),
maybe_cache_setting: None, maybe_cache_setting: None,
}) })

View file

@ -2182,6 +2182,50 @@ impl NavigateToItem {
} }
} }
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct InlayHintDisplayPart {
pub text: String,
pub span: Option<TextSpan>,
pub file: Option<String>,
}
impl InlayHintDisplayPart {
pub fn to_lsp(
&self,
language_server: &language_server::Inner,
) -> lsp::InlayHintLabelPart {
let location = self.file.as_ref().map(|f| {
let specifier =
resolve_url(f).unwrap_or_else(|_| INVALID_SPECIFIER.clone());
let file_referrer =
language_server.documents.get_file_referrer(&specifier);
let uri = language_server
.url_map
.specifier_to_uri(&specifier, file_referrer.as_deref())
.unwrap_or_else(|_| INVALID_URI.clone());
let range = self
.span
.as_ref()
.and_then(|s| {
let asset_or_doc =
language_server.get_asset_or_document(&specifier).ok()?;
Some(s.to_range(asset_or_doc.line_index()))
})
.unwrap_or_else(|| {
lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0))
});
lsp::Location { uri, range }
});
lsp::InlayHintLabelPart {
value: self.text.clone(),
tooltip: None,
location,
command: None,
}
}
}
#[derive(Debug, Clone, Deserialize)] #[derive(Debug, Clone, Deserialize)]
pub enum InlayHintKind { pub enum InlayHintKind {
Type, Type,
@ -2203,6 +2247,7 @@ impl InlayHintKind {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct InlayHint { pub struct InlayHint {
pub text: String, pub text: String,
pub display_parts: Option<Vec<InlayHintDisplayPart>>,
pub position: u32, pub position: u32,
pub kind: InlayHintKind, pub kind: InlayHintKind,
pub whitespace_before: Option<bool>, pub whitespace_before: Option<bool>,
@ -2210,10 +2255,23 @@ pub struct InlayHint {
} }
impl InlayHint { impl InlayHint {
pub fn to_lsp(&self, line_index: Arc<LineIndex>) -> lsp::InlayHint { pub fn to_lsp(
&self,
line_index: Arc<LineIndex>,
language_server: &language_server::Inner,
) -> lsp::InlayHint {
lsp::InlayHint { lsp::InlayHint {
position: line_index.position_tsc(self.position.into()), position: line_index.position_tsc(self.position.into()),
label: lsp::InlayHintLabel::String(self.text.clone()), label: if let Some(display_parts) = &self.display_parts {
lsp::InlayHintLabel::LabelParts(
display_parts
.iter()
.map(|p| p.to_lsp(language_server))
.collect(),
)
} else {
lsp::InlayHintLabel::String(self.text.clone())
},
kind: self.kind.to_lsp(), kind: self.kind.to_lsp(),
padding_left: self.whitespace_before, padding_left: self.whitespace_before,
padding_right: self.whitespace_after, padding_right: self.whitespace_after,
@ -4892,6 +4950,10 @@ pub struct UserPreferences {
pub allow_rename_of_import_path: Option<bool>, pub allow_rename_of_import_path: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub auto_import_file_exclude_patterns: Option<Vec<String>>, pub auto_import_file_exclude_patterns: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub interactive_inlay_hints: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub prefer_type_only_auto_imports: Option<bool>,
} }
impl UserPreferences { impl UserPreferences {
@ -4909,6 +4971,7 @@ impl UserPreferences {
include_completions_with_snippet_text: Some( include_completions_with_snippet_text: Some(
config.snippet_support_capable(), config.snippet_support_capable(),
), ),
interactive_inlay_hints: Some(true),
provide_refactor_not_applicable_reason: Some(true), provide_refactor_not_applicable_reason: Some(true),
quote_preference: Some(fmt_config.into()), quote_preference: Some(fmt_config.into()),
use_label_details_in_completion_entries: Some(true), use_label_details_in_completion_entries: Some(true),
@ -5013,6 +5076,9 @@ impl UserPreferences {
} else { } else {
Some(language_settings.preferences.quote_style) Some(language_settings.preferences.quote_style)
}, },
prefer_type_only_auto_imports: Some(
language_settings.preferences.prefer_type_only_auto_imports,
),
..base_preferences ..base_preferences
} }
} }
@ -6154,7 +6220,7 @@ mod tests {
let change = changes.text_changes.first().unwrap(); let change = changes.text_changes.first().unwrap();
assert_eq!( assert_eq!(
change.new_text, change.new_text,
"import type { someLongVariable } from './b.ts'\n" "import { someLongVariable } from './b.ts'\n"
); );
} }

View file

@ -15,7 +15,6 @@ mod js;
mod jsr; mod jsr;
mod lsp; mod lsp;
mod module_loader; mod module_loader;
mod napi;
mod node; mod node;
mod npm; mod npm;
mod ops; mod ops;

View file

@ -88,11 +88,10 @@ fn main() {
let standalone = standalone::extract_standalone(Cow::Owned(args)); let standalone = standalone::extract_standalone(Cow::Owned(args));
let future = async move { let future = async move {
match standalone { match standalone {
Ok(Some(future)) => { Ok(Some(data)) => {
let (metadata, eszip) = future.await?; util::logger::init(data.metadata.log_level);
util::logger::init(metadata.log_level); load_env_vars(&data.metadata.env_vars_from_env_file);
load_env_vars(&metadata.env_vars_from_env_file); let exit_code = standalone::run(data).await?;
let exit_code = standalone::run(eszip, metadata).await?;
std::process::exit(exit_code); std::process::exit(exit_code);
} }
Ok(None) => Ok(()), Ok(None) => Ok(()),

View file

@ -541,7 +541,8 @@ impl<TGraphContainer: ModuleGraphContainer>
self.parsed_source_cache.free(specifier); self.parsed_source_cache.free(specifier);
Ok(Some(ModuleCodeStringSource { Ok(Some(ModuleCodeStringSource {
code: ModuleSourceCode::Bytes(transpile_result), // note: it's faster to provide a string if we know it's a string
code: ModuleSourceCode::String(transpile_result.into()),
found_url: specifier.clone(), found_url: specifier.clone(),
media_type, media_type,
})) }))
@ -571,7 +572,8 @@ impl<TGraphContainer: ModuleGraphContainer>
self.parsed_source_cache.free(specifier); self.parsed_source_cache.free(specifier);
Ok(Some(ModuleCodeStringSource { Ok(Some(ModuleCodeStringSource {
code: ModuleSourceCode::Bytes(transpile_result), // note: it's faster to provide a string if we know it's a string
code: ModuleSourceCode::String(transpile_result.into()),
found_url: specifier.clone(), found_url: specifier.clone(),
media_type, media_type,
})) }))

View file

@ -1,114 +0,0 @@
# napi
This directory contains source for Deno's Node-API implementation. It depends on
`napi_sym` and `deno_napi`.
Files are generally organized the same as in Node.js's implementation to ease in
ensuring compatibility.
## Adding a new function
Add the symbol name to
[`cli/napi_sym/symbol_exports.json`](../napi_sym/symbol_exports.json).
```diff
{
"symbols": [
...
"napi_get_undefined",
- "napi_get_null"
+ "napi_get_null",
+ "napi_get_boolean"
]
}
```
Determine where to place the implementation. `napi_get_boolean` is related to JS
values so we will place it in `js_native_api.rs`. If something is not clear,
just create a new file module.
See [`napi_sym`](../napi_sym/) for writing the implementation:
```rust
#[napi_sym::napi_sym]
pub fn napi_get_boolean(
env: *mut Env,
value: bool,
result: *mut napi_value,
) -> Result {
// ...
Ok(())
}
```
Update the generated symbol lists using the script:
```
deno run --allow-write tools/napi/generate_symbols_lists.js
```
Add a test in [`/tests/napi`](../../tests/napi/). You can also refer to Node.js
test suite for Node-API.
```js
// tests/napi/boolean_test.js
import { assertEquals, loadTestLibrary } from "./common.js";
const lib = loadTestLibrary();
Deno.test("napi get boolean", function () {
assertEquals(lib.test_get_boolean(true), true);
assertEquals(lib.test_get_boolean(false), false);
});
```
```rust
// tests/napi/src/boolean.rs
use napi_sys::Status::napi_ok;
use napi_sys::ValueType::napi_boolean;
use napi_sys::*;
extern "C" fn test_boolean(
env: napi_env,
info: napi_callback_info,
) -> napi_value {
let (args, argc, _) = crate::get_callback_info!(env, info, 1);
assert_eq!(argc, 1);
let mut ty = -1;
assert!(unsafe { napi_typeof(env, args[0], &mut ty) } == napi_ok);
assert_eq!(ty, napi_boolean);
// Use napi_get_boolean here...
value
}
pub fn init(env: napi_env, exports: napi_value) {
let properties = &[crate::new_property!(env, "test_boolean\0", test_boolean)];
unsafe {
napi_define_properties(env, exports, properties.len(), properties.as_ptr())
};
}
```
```diff
// tests/napi/src/lib.rs
+ mod boolean;
...
#[no_mangle]
unsafe extern "C" fn napi_register_module_v1(
env: napi_env,
exports: napi_value,
) -> napi_value {
...
+ boolean::init(env, exports);
exports
}
```
Run the test using `cargo test -p tests/napi`.

View file

@ -1,21 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
#![allow(unused_mut)]
#![allow(non_camel_case_types)]
#![allow(clippy::undocumented_unsafe_blocks)]
//! Symbols to be exported are now defined in this JSON file.
//! The `#[napi_sym]` macro checks for missing entries and panics.
//!
//! `./tools/napi/generate_symbols_list.js` is used to generate the LINK `cli/exports.def` on Windows,
//! which is also checked into git.
//!
//! To add a new napi function:
//! 1. Place `#[napi_sym]` on top of your implementation.
//! 2. Add the function's identifier to this JSON list.
//! 3. Finally, run `tools/napi/generate_symbols_list.js` to update `cli/napi/generated_symbol_exports_list_*.def`.
pub mod js_native_api;
pub mod node_api;
pub mod util;
pub mod uv;

View file

@ -3,6 +3,7 @@
use base64::prelude::BASE64_STANDARD; use base64::prelude::BASE64_STANDARD;
use base64::Engine; use base64::Engine;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_npm::npm_rc::RegistryConfig; use deno_npm::npm_rc::RegistryConfig;
use http::header; use http::header;
@ -36,17 +37,21 @@ pub fn maybe_auth_header_for_npm_registry(
} }
if username.is_some() && password.is_some() { if username.is_some() && password.is_some() {
return Ok(Some(( // The npm client does some double encoding when generating the
header::AUTHORIZATION, // bearer token value, see
header::HeaderValue::from_str(&format!( // https://github.com/npm/cli/blob/780afc50e3a345feb1871a28e33fa48235bc3bd5/workspaces/config/lib/index.js#L846-L851
"Basic {}", let pw_base64 = BASE64_STANDARD
BASE64_STANDARD.encode(format!( .decode(password.unwrap())
.with_context(|| "The password in npmrc is an invalid base64 string")?;
let bearer = BASE64_STANDARD.encode(format!(
"{}:{}", "{}:{}",
username.unwrap(), username.unwrap(),
password.unwrap() String::from_utf8_lossy(&pw_base64)
)) ));
))
.unwrap(), return Ok(Some((
header::AUTHORIZATION,
header::HeaderValue::from_str(&format!("Basic {}", bearer)).unwrap(),
))); )));
} }

View file

@ -26,7 +26,7 @@ use crate::cache::CACHE_PERM;
use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::hard_link_dir_recursive; use crate::util::fs::hard_link_dir_recursive;
mod registry_info; pub mod registry_info;
mod tarball; mod tarball;
mod tarball_extract; mod tarball_extract;

View file

@ -84,7 +84,7 @@ impl RegistryInfoDownloader {
self.load_package_info_inner(name).await.with_context(|| { self.load_package_info_inner(name).await.with_context(|| {
format!( format!(
"Error getting response at {} for package \"{}\"", "Error getting response at {} for package \"{}\"",
self.get_package_url(name), get_package_url(&self.npmrc, name),
name name
) )
}) })
@ -190,7 +190,7 @@ impl RegistryInfoDownloader {
fn create_load_future(self: &Arc<Self>, name: &str) -> LoadFuture { fn create_load_future(self: &Arc<Self>, name: &str) -> LoadFuture {
let downloader = self.clone(); let downloader = self.clone();
let package_url = self.get_package_url(name); let package_url = get_package_url(&self.npmrc, name);
let registry_config = self.npmrc.get_registry_config(name); let registry_config = self.npmrc.get_registry_config(name);
let maybe_auth_header = let maybe_auth_header =
match maybe_auth_header_for_npm_registry(registry_config) { match maybe_auth_header_for_npm_registry(registry_config) {
@ -239,9 +239,10 @@ impl RegistryInfoDownloader {
.map(|r| r.map_err(Arc::new)) .map(|r| r.map_err(Arc::new))
.boxed_local() .boxed_local()
} }
}
fn get_package_url(&self, name: &str) -> Url { pub fn get_package_url(npmrc: &ResolvedNpmRc, name: &str) -> Url {
let registry_url = self.npmrc.get_registry_url(name); let registry_url = npmrc.get_registry_url(name);
// The '/' character in scoped package names "@scope/name" must be // The '/' character in scoped package names "@scope/name" must be
// encoded for older third party registries. Newer registries and // encoded for older third party registries. Newer registries and
// npm itself support both ways // npm itself support both ways
@ -270,5 +271,4 @@ impl RegistryInfoDownloader {
// to match npm. // to match npm.
.join(&name.to_string().replace("%2F", "%2f")) .join(&name.to_string().replace("%2F", "%2f"))
.unwrap() .unwrap()
}
} }

View file

@ -55,7 +55,7 @@ use super::CliNpmResolver;
use super::InnerCliNpmResolverRef; use super::InnerCliNpmResolverRef;
use super::ResolvePkgFolderFromDenoReqError; use super::ResolvePkgFolderFromDenoReqError;
mod cache; pub mod cache;
mod registry; mod registry;
mod resolution; mod resolution;
mod resolvers; mod resolvers;

View file

@ -8,10 +8,12 @@ use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use common::maybe_auth_header_for_npm_registry;
use dashmap::DashMap; use dashmap::DashMap;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo; use deno_npm::registry::NpmPackageInfo;
use deno_resolver::npm::ByonmNpmResolver; use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError; use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError;
@ -19,10 +21,10 @@ use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::ops::process::NpmProcessStateProvider; use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use managed::cache::registry_info::get_package_url;
use node_resolver::NpmResolver; use node_resolver::NpmResolver;
use thiserror::Error; use thiserror::Error;
use crate::args::npm_registry_url;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
pub use self::byonm::CliByonmNpmResolver; pub use self::byonm::CliByonmNpmResolver;
@ -115,14 +117,19 @@ pub struct NpmFetchResolver {
nv_by_req: DashMap<PackageReq, Option<PackageNv>>, nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
info_by_name: DashMap<String, Option<Arc<NpmPackageInfo>>>, info_by_name: DashMap<String, Option<Arc<NpmPackageInfo>>>,
file_fetcher: Arc<FileFetcher>, file_fetcher: Arc<FileFetcher>,
npmrc: Arc<ResolvedNpmRc>,
} }
impl NpmFetchResolver { impl NpmFetchResolver {
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self { pub fn new(
file_fetcher: Arc<FileFetcher>,
npmrc: Arc<ResolvedNpmRc>,
) -> Self {
Self { Self {
nv_by_req: Default::default(), nv_by_req: Default::default(),
info_by_name: Default::default(), info_by_name: Default::default(),
file_fetcher, file_fetcher,
npmrc,
} }
} }
@ -157,11 +164,21 @@ impl NpmFetchResolver {
return info.value().clone(); return info.value().clone();
} }
let fetch_package_info = || async { let fetch_package_info = || async {
let info_url = npm_registry_url().join(name).ok()?; let info_url = get_package_url(&self.npmrc, name);
let file_fetcher = self.file_fetcher.clone(); let file_fetcher = self.file_fetcher.clone();
let registry_config = self.npmrc.get_registry_config(name);
// TODO(bartlomieju): this should error out, not use `.ok()`.
let maybe_auth_header =
maybe_auth_header_for_npm_registry(registry_config).ok()?;
// spawn due to the lsp's `Send` requirement // spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move { let file = deno_core::unsync::spawn(async move {
file_fetcher.fetch_bypass_permissions(&info_url).await.ok() file_fetcher
.fetch_bypass_permissions_with_maybe_auth(
&info_url,
maybe_auth_header,
)
.await
.ok()
}) })
.await .await
.ok()??; .ok()??;

View file

@ -291,7 +291,7 @@
"type": "array", "type": "array",
"description": "List of tag names that will be run. Empty list disables all tags and will only use rules from `include`.", "description": "List of tag names that will be run. Empty list disables all tags and will only use rules from `include`.",
"items": { "items": {
"type": "string" "$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/tags.v1.json"
}, },
"minItems": 0, "minItems": 0,
"uniqueItems": true "uniqueItems": true
@ -300,7 +300,7 @@
"type": "array", "type": "array",
"description": "List of rule names that will be excluded from configured tag sets. If the same rule is in `include` it will be run.", "description": "List of rule names that will be excluded from configured tag sets. If the same rule is in `include` it will be run.",
"items": { "items": {
"type": "string" "$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json"
}, },
"minItems": 0, "minItems": 0,
"uniqueItems": true "uniqueItems": true
@ -309,7 +309,7 @@
"type": "array", "type": "array",
"description": "List of rule names that will be run. Even if the same rule is in `exclude` it will be run.", "description": "List of rule names that will be run. Even if the same rule is in `exclude` it will be run.",
"items": { "items": {
"type": "string" "$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json"
}, },
"minItems": 0, "minItems": 0,
"uniqueItems": true "uniqueItems": true
@ -531,6 +531,7 @@
"detect-cjs", "detect-cjs",
"ffi", "ffi",
"fs", "fs",
"fmt-component",
"http", "http",
"kv", "kv",
"net", "net",

View file

@ -9,14 +9,18 @@ use std::ffi::OsString;
use std::fs; use std::fs;
use std::fs::File; use std::fs::File;
use std::future::Future; use std::future::Future;
use std::io::ErrorKind;
use std::io::Read; use std::io::Read;
use std::io::Seek; use std::io::Seek;
use std::io::SeekFrom; use std::io::SeekFrom;
use std::io::Write; use std::io::Write;
use std::ops::Range;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::process::Command; use std::process::Command;
use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::ResolverWorkspaceJsrPackage; use deno_config::workspace::ResolverWorkspaceJsrPackage;
@ -30,13 +34,22 @@ use deno_core::futures::AsyncReadExt;
use deno_core::futures::AsyncSeekExt; use deno_core::futures::AsyncSeekExt;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::url::Url; use deno_core::url::Url;
use deno_graph::source::RealFileSystem;
use deno_graph::ModuleGraph;
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmSystemInfo; use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_io::fs::FsError;
use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_node::PackageJson;
use deno_semver::npm::NpmVersionReqParseError; use deno_semver::npm::NpmVersionReqParseError;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use deno_semver::Version; use deno_semver::Version;
use deno_semver::VersionReqSpecifierParseError; use deno_semver::VersionReqSpecifierParseError;
use eszip::EszipRelativeFileBaseUrl;
use indexmap::IndexMap; use indexmap::IndexMap;
use log::Level; use log::Level;
use serde::Deserialize; use serde::Deserialize;
@ -49,6 +62,7 @@ use crate::args::NpmInstallDepsProvider;
use crate::args::PermissionFlags; use crate::args::PermissionFlags;
use crate::args::UnstableConfig; use crate::args::UnstableConfig;
use crate::cache::DenoDir; use crate::cache::DenoDir;
use crate::emit::Emitter;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
@ -60,12 +74,63 @@ use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle; use crate::util::progress_bar::ProgressBarStyle;
use super::file_system::DenoCompileFileSystem;
use super::serialization::deserialize_binary_data_section;
use super::serialization::serialize_binary_data_section;
use super::serialization::DenoCompileModuleData;
use super::serialization::DeserializedDataSection;
use super::serialization::RemoteModulesStore;
use super::serialization::RemoteModulesStoreBuilder;
use super::virtual_fs::FileBackedVfs; use super::virtual_fs::FileBackedVfs;
use super::virtual_fs::VfsBuilder; use super::virtual_fs::VfsBuilder;
use super::virtual_fs::VfsRoot; use super::virtual_fs::VfsRoot;
use super::virtual_fs::VirtualDirectory; use super::virtual_fs::VirtualDirectory;
const MAGIC_TRAILER: &[u8; 8] = b"d3n0l4nd"; /// A URL that can be designated as the base for relative URLs.
///
/// After creation, this URL may be used to get the key for a
/// module in the binary.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StandaloneRelativeFileBaseUrl<'a>(&'a Url);
impl<'a> From<&'a Url> for StandaloneRelativeFileBaseUrl<'a> {
fn from(url: &'a Url) -> Self {
Self(url)
}
}
impl<'a> StandaloneRelativeFileBaseUrl<'a> {
pub fn new(url: &'a Url) -> Self {
debug_assert_eq!(url.scheme(), "file");
Self(url)
}
/// Gets the module map key of the provided specifier.
///
/// * Descendant file specifiers will be made relative to the base.
/// * Non-descendant file specifiers will stay as-is (absolute).
/// * Non-file specifiers will stay as-is.
pub fn specifier_key<'b>(&self, target: &'b Url) -> Cow<'b, str> {
if target.scheme() != "file" {
return Cow::Borrowed(target.as_str());
}
match self.0.make_relative(target) {
Some(relative) => {
if relative.starts_with("../") {
Cow::Borrowed(target.as_str())
} else {
Cow::Owned(relative)
}
}
None => Cow::Borrowed(target.as_str()),
}
}
pub fn inner(&self) -> &Url {
self.0
}
}
#[derive(Deserialize, Serialize)] #[derive(Deserialize, Serialize)]
pub enum NodeModules { pub enum NodeModules {
@ -120,78 +185,23 @@ pub struct Metadata {
pub unstable_config: UnstableConfig, pub unstable_config: UnstableConfig,
} }
pub fn load_npm_vfs(root_dir_path: PathBuf) -> Result<FileBackedVfs, AnyError> {
let data = libsui::find_section("d3n0l4nd").unwrap();
// We do the first part sync so it can complete quickly
let trailer: [u8; TRAILER_SIZE] = data[0..TRAILER_SIZE].try_into().unwrap();
let trailer = match Trailer::parse(&trailer)? {
None => panic!("Could not find trailer"),
Some(trailer) => trailer,
};
let data = &data[TRAILER_SIZE..];
let vfs_data =
&data[trailer.npm_vfs_pos as usize..trailer.npm_files_pos as usize];
let mut dir: VirtualDirectory = serde_json::from_slice(vfs_data)?;
// align the name of the directory with the root dir
dir.name = root_dir_path
.file_name()
.unwrap()
.to_string_lossy()
.to_string();
let fs_root = VfsRoot {
dir,
root_path: root_dir_path,
start_file_offset: trailer.npm_files_pos,
};
Ok(FileBackedVfs::new(data.to_vec(), fs_root))
}
fn write_binary_bytes( fn write_binary_bytes(
mut file_writer: File, mut file_writer: File,
original_bin: Vec<u8>, original_bin: Vec<u8>,
metadata: &Metadata, metadata: &Metadata,
eszip: eszip::EszipV2, npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
npm_vfs: Option<&VirtualDirectory>, remote_modules: &RemoteModulesStoreBuilder,
npm_files: &Vec<Vec<u8>>, vfs: VfsBuilder,
compile_flags: &CompileFlags, compile_flags: &CompileFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let metadata = serde_json::to_string(metadata)?.as_bytes().to_vec(); let data_section_bytes =
let npm_vfs = serde_json::to_string(&npm_vfs)?.as_bytes().to_vec(); serialize_binary_data_section(metadata, npm_snapshot, remote_modules, vfs)?;
let eszip_archive = eszip.into_bytes();
let mut writer = Vec::new();
// write the trailer, which includes the positions
// of the data blocks in the file
writer.write_all(&{
let metadata_pos = eszip_archive.len() as u64;
let npm_vfs_pos = metadata_pos + (metadata.len() as u64);
let npm_files_pos = npm_vfs_pos + (npm_vfs.len() as u64);
Trailer {
eszip_pos: 0,
metadata_pos,
npm_vfs_pos,
npm_files_pos,
}
.as_bytes()
})?;
writer.write_all(&eszip_archive)?;
writer.write_all(&metadata)?;
writer.write_all(&npm_vfs)?;
for file in npm_files {
writer.write_all(file)?;
}
let target = compile_flags.resolve_target(); let target = compile_flags.resolve_target();
if target.contains("linux") { if target.contains("linux") {
libsui::Elf::new(&original_bin).append( libsui::Elf::new(&original_bin).append(
"d3n0l4nd", "d3n0l4nd",
&writer, &data_section_bytes,
&mut file_writer, &mut file_writer,
)?; )?;
} else if target.contains("windows") { } else if target.contains("windows") {
@ -201,11 +211,11 @@ fn write_binary_bytes(
pe = pe.set_icon(&icon)?; pe = pe.set_icon(&icon)?;
} }
pe.write_resource("d3n0l4nd", writer)? pe.write_resource("d3n0l4nd", data_section_bytes)?
.build(&mut file_writer)?; .build(&mut file_writer)?;
} else if target.contains("darwin") { } else if target.contains("darwin") {
libsui::Macho::from(original_bin)? libsui::Macho::from(original_bin)?
.write_section("d3n0l4nd", writer)? .write_section("d3n0l4nd", data_section_bytes)?
.build_and_sign(&mut file_writer)?; .build_and_sign(&mut file_writer)?;
} }
Ok(()) Ok(())
@ -221,6 +231,63 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool {
|| libsui::utils::is_macho(&data) || libsui::utils::is_macho(&data)
} }
pub struct StandaloneData {
pub fs: Arc<dyn deno_fs::FileSystem>,
pub metadata: Metadata,
pub modules: StandaloneModules,
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
pub root_path: PathBuf,
pub vfs: Arc<FileBackedVfs>,
}
pub struct StandaloneModules {
remote_modules: RemoteModulesStore,
vfs: Arc<FileBackedVfs>,
}
impl StandaloneModules {
pub fn resolve_specifier<'a>(
&'a self,
specifier: &'a ModuleSpecifier,
) -> Result<Option<&'a ModuleSpecifier>, AnyError> {
if specifier.scheme() == "file" {
Ok(Some(specifier))
} else {
self.remote_modules.resolve_specifier(specifier)
}
}
pub fn read<'a>(
&'a self,
specifier: &'a ModuleSpecifier,
) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
if specifier.scheme() == "file" {
let path = deno_path_util::url_to_file_path(specifier)?;
let bytes = match self.vfs.file_entry(&path) {
Ok(entry) => self.vfs.read_file_all(entry)?,
Err(err) if err.kind() == ErrorKind::NotFound => {
let bytes = match RealFs.read_file_sync(&path, None) {
Ok(bytes) => bytes,
Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => {
return Ok(None)
}
Err(err) => return Err(err.into()),
};
Cow::Owned(bytes)
}
Err(err) => return Err(err.into()),
};
Ok(Some(DenoCompileModuleData {
media_type: MediaType::from_specifier(specifier),
specifier,
data: bytes,
}))
} else {
self.remote_modules.read(specifier)
}
}
}
/// This function will try to run this binary as a standalone binary /// This function will try to run this binary as a standalone binary
/// produced by `deno compile`. It determines if this is a standalone /// produced by `deno compile`. It determines if this is a standalone
/// binary by skipping over the trailer width at the end of the file, /// binary by skipping over the trailer width at the end of the file,
@ -228,110 +295,66 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool {
/// the bundle is executed. If not, this function exits with `Ok(None)`. /// the bundle is executed. If not, this function exits with `Ok(None)`.
pub fn extract_standalone( pub fn extract_standalone(
cli_args: Cow<Vec<OsString>>, cli_args: Cow<Vec<OsString>>,
) -> Result< ) -> Result<Option<StandaloneData>, AnyError> {
Option<impl Future<Output = Result<(Metadata, eszip::EszipV2), AnyError>>>,
AnyError,
> {
let Some(data) = libsui::find_section("d3n0l4nd") else { let Some(data) = libsui::find_section("d3n0l4nd") else {
return Ok(None); return Ok(None);
}; };
// We do the first part sync so it can complete quickly let DeserializedDataSection {
let trailer = match Trailer::parse(&data[0..TRAILER_SIZE])? { mut metadata,
npm_snapshot,
remote_modules,
mut vfs_dir,
vfs_files_data,
} = match deserialize_binary_data_section(data)? {
Some(data_section) => data_section,
None => return Ok(None), None => return Ok(None),
Some(trailer) => trailer,
}; };
let root_path = {
let maybe_current_exe = std::env::current_exe().ok();
let current_exe_name = maybe_current_exe
.as_ref()
.and_then(|p| p.file_name())
.map(|p| p.to_string_lossy())
// should never happen
.unwrap_or_else(|| Cow::Borrowed("binary"));
std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name))
};
let cli_args = cli_args.into_owned(); let cli_args = cli_args.into_owned();
// If we have an eszip, read it out
Ok(Some(async move {
let bufreader =
deno_core::futures::io::BufReader::new(&data[TRAILER_SIZE..]);
let (eszip, loader) = eszip::EszipV2::parse(bufreader)
.await
.context("Failed to parse eszip header")?;
let bufreader = loader.await.context("Failed to parse eszip archive")?;
let mut metadata = String::new();
bufreader
.take(trailer.metadata_len())
.read_to_string(&mut metadata)
.await
.context("Failed to read metadata from the current executable")?;
let mut metadata: Metadata = serde_json::from_str(&metadata).unwrap();
metadata.argv.reserve(cli_args.len() - 1); metadata.argv.reserve(cli_args.len() - 1);
for arg in cli_args.into_iter().skip(1) { for arg in cli_args.into_iter().skip(1) {
metadata.argv.push(arg.into_string().unwrap()); metadata.argv.push(arg.into_string().unwrap());
} }
let vfs = {
// align the name of the directory with the root dir
vfs_dir.name = root_path.file_name().unwrap().to_string_lossy().to_string();
Ok((metadata, eszip)) let fs_root = VfsRoot {
dir: vfs_dir,
root_path: root_path.clone(),
start_file_offset: 0,
};
Arc::new(FileBackedVfs::new(Cow::Borrowed(vfs_files_data), fs_root))
};
let fs: Arc<dyn deno_fs::FileSystem> =
Arc::new(DenoCompileFileSystem::new(vfs.clone()));
Ok(Some(StandaloneData {
fs,
metadata,
modules: StandaloneModules {
remote_modules,
vfs: vfs.clone(),
},
npm_snapshot,
root_path,
vfs,
})) }))
} }
const TRAILER_SIZE: usize = std::mem::size_of::<Trailer>() + 8; // 8 bytes for the magic trailer string
struct Trailer {
eszip_pos: u64,
metadata_pos: u64,
npm_vfs_pos: u64,
npm_files_pos: u64,
}
impl Trailer {
pub fn parse(trailer: &[u8]) -> Result<Option<Trailer>, AnyError> {
let (magic_trailer, rest) = trailer.split_at(8);
if magic_trailer != MAGIC_TRAILER {
return Ok(None);
}
let (eszip_archive_pos, rest) = rest.split_at(8);
let (metadata_pos, rest) = rest.split_at(8);
let (npm_vfs_pos, npm_files_pos) = rest.split_at(8);
let eszip_archive_pos = u64_from_bytes(eszip_archive_pos)?;
let metadata_pos = u64_from_bytes(metadata_pos)?;
let npm_vfs_pos = u64_from_bytes(npm_vfs_pos)?;
let npm_files_pos = u64_from_bytes(npm_files_pos)?;
Ok(Some(Trailer {
eszip_pos: eszip_archive_pos,
metadata_pos,
npm_vfs_pos,
npm_files_pos,
}))
}
pub fn metadata_len(&self) -> u64 {
self.npm_vfs_pos - self.metadata_pos
}
pub fn npm_vfs_len(&self) -> u64 {
self.npm_files_pos - self.npm_vfs_pos
}
pub fn as_bytes(&self) -> Vec<u8> {
let mut trailer = MAGIC_TRAILER.to_vec();
trailer.write_all(&self.eszip_pos.to_be_bytes()).unwrap();
trailer.write_all(&self.metadata_pos.to_be_bytes()).unwrap();
trailer.write_all(&self.npm_vfs_pos.to_be_bytes()).unwrap();
trailer
.write_all(&self.npm_files_pos.to_be_bytes())
.unwrap();
trailer
}
}
fn u64_from_bytes(arr: &[u8]) -> Result<u64, AnyError> {
let fixed_arr: &[u8; 8] = arr
.try_into()
.context("Failed to convert the buffer into a fixed-size array")?;
Ok(u64::from_be_bytes(*fixed_arr))
}
pub struct DenoCompileBinaryWriter<'a> { pub struct DenoCompileBinaryWriter<'a> {
deno_dir: &'a DenoDir, deno_dir: &'a DenoDir,
emitter: &'a Emitter,
file_fetcher: &'a FileFetcher, file_fetcher: &'a FileFetcher,
http_client_provider: &'a HttpClientProvider, http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a dyn CliNpmResolver, npm_resolver: &'a dyn CliNpmResolver,
@ -343,6 +366,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn new( pub fn new(
deno_dir: &'a DenoDir, deno_dir: &'a DenoDir,
emitter: &'a Emitter,
file_fetcher: &'a FileFetcher, file_fetcher: &'a FileFetcher,
http_client_provider: &'a HttpClientProvider, http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a dyn CliNpmResolver, npm_resolver: &'a dyn CliNpmResolver,
@ -351,6 +375,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
) -> Self { ) -> Self {
Self { Self {
deno_dir, deno_dir,
emitter,
file_fetcher, file_fetcher,
http_client_provider, http_client_provider,
npm_resolver, npm_resolver,
@ -362,8 +387,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
pub async fn write_bin( pub async fn write_bin(
&self, &self,
writer: File, writer: File,
eszip: eszip::EszipV2, graph: &ModuleGraph,
root_dir_url: EszipRelativeFileBaseUrl<'_>, root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
entrypoint: &ModuleSpecifier, entrypoint: &ModuleSpecifier,
compile_flags: &CompileFlags, compile_flags: &CompileFlags,
cli_options: &CliOptions, cli_options: &CliOptions,
@ -390,15 +415,17 @@ impl<'a> DenoCompileBinaryWriter<'a> {
) )
} }
} }
self.write_standalone_binary( self
.write_standalone_binary(
writer, writer,
original_binary, original_binary,
eszip, graph,
root_dir_url, root_dir_url,
entrypoint, entrypoint,
cli_options, cli_options,
compile_flags, compile_flags,
) )
.await
} }
async fn get_base_binary( async fn get_base_binary(
@ -493,12 +520,12 @@ impl<'a> DenoCompileBinaryWriter<'a> {
/// This functions creates a standalone deno binary by appending a bundle /// This functions creates a standalone deno binary by appending a bundle
/// and magic trailer to the currently executing binary. /// and magic trailer to the currently executing binary.
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
fn write_standalone_binary( async fn write_standalone_binary(
&self, &self,
writer: File, writer: File,
original_bin: Vec<u8>, original_bin: Vec<u8>,
mut eszip: eszip::EszipV2, graph: &ModuleGraph,
root_dir_url: EszipRelativeFileBaseUrl<'_>, root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
entrypoint: &ModuleSpecifier, entrypoint: &ModuleSpecifier,
cli_options: &CliOptions, cli_options: &CliOptions,
compile_flags: &CompileFlags, compile_flags: &CompileFlags,
@ -512,19 +539,17 @@ impl<'a> DenoCompileBinaryWriter<'a> {
None => None, None => None,
}; };
let root_path = root_dir_url.inner().to_file_path().unwrap(); let root_path = root_dir_url.inner().to_file_path().unwrap();
let (npm_vfs, npm_files, node_modules) = match self.npm_resolver.as_inner() let (maybe_npm_vfs, node_modules, npm_snapshot) = match self
.npm_resolver
.as_inner()
{ {
InnerCliNpmResolverRef::Managed(managed) => { InnerCliNpmResolverRef::Managed(managed) => {
let snapshot = let snapshot =
managed.serialized_valid_snapshot_for_system(&self.npm_system_info); managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
if !snapshot.as_serialized().packages.is_empty() { if !snapshot.as_serialized().packages.is_empty() {
let (root_dir, files) = self let npm_vfs_builder = self.build_npm_vfs(&root_path, cli_options)?;
.build_vfs(&root_path, cli_options)?
.into_dir_and_files();
eszip.add_npm_snapshot(snapshot);
( (
Some(root_dir), Some(npm_vfs_builder),
files,
Some(NodeModules::Managed { Some(NodeModules::Managed {
node_modules_dir: self.npm_resolver.root_node_modules_path().map( node_modules_dir: self.npm_resolver.root_node_modules_path().map(
|path| { |path| {
@ -536,18 +561,16 @@ impl<'a> DenoCompileBinaryWriter<'a> {
}, },
), ),
}), }),
Some(snapshot),
) )
} else { } else {
(None, Vec::new(), None) (None, None, None)
} }
} }
InnerCliNpmResolverRef::Byonm(resolver) => { InnerCliNpmResolverRef::Byonm(resolver) => {
let (root_dir, files) = self let npm_vfs_builder = self.build_npm_vfs(&root_path, cli_options)?;
.build_vfs(&root_path, cli_options)?
.into_dir_and_files();
( (
Some(root_dir), Some(npm_vfs_builder),
files,
Some(NodeModules::Byonm { Some(NodeModules::Byonm {
root_node_modules_dir: resolver.root_node_modules_path().map( root_node_modules_dir: resolver.root_node_modules_path().map(
|node_modules_dir| { |node_modules_dir| {
@ -560,9 +583,67 @@ impl<'a> DenoCompileBinaryWriter<'a> {
}, },
), ),
}), }),
None,
) )
} }
}; };
let mut vfs = if let Some(npm_vfs) = maybe_npm_vfs {
npm_vfs
} else {
VfsBuilder::new(root_path.clone())?
};
let mut remote_modules_store = RemoteModulesStoreBuilder::default();
for module in graph.modules() {
if module.specifier().scheme() == "data" {
continue; // don't store data urls as an entry as they're in the code
}
let (maybe_source, media_type) = match module {
deno_graph::Module::Js(m) => {
// todo(https://github.com/denoland/deno_media_type/pull/12): use is_emittable()
let is_emittable = matches!(
m.media_type,
MediaType::TypeScript
| MediaType::Mts
| MediaType::Cts
| MediaType::Jsx
| MediaType::Tsx
);
let source = if is_emittable {
let source = self
.emitter
.emit_parsed_source(&m.specifier, m.media_type, &m.source)
.await?;
source.into_bytes()
} else {
m.source.as_bytes().to_vec()
};
(Some(source), m.media_type)
}
deno_graph::Module::Json(m) => {
(Some(m.source.as_bytes().to_vec()), m.media_type)
}
deno_graph::Module::Npm(_)
| deno_graph::Module::Node(_)
| deno_graph::Module::External(_) => (None, MediaType::Unknown),
};
if module.specifier().scheme() == "file" {
let file_path = deno_path_util::url_to_file_path(module.specifier())?;
vfs
.add_file_with_data(
&file_path,
match maybe_source {
Some(source) => source,
None => RealFs.read_file_sync(&file_path, None)?,
},
)
.with_context(|| {
format!("Failed adding '{}'", file_path.display())
})?;
} else if let Some(source) = maybe_source {
remote_modules_store.add(module.specifier(), media_type, source);
}
}
remote_modules_store.add_redirects(&graph.redirects);
let env_vars_from_env_file = match cli_options.env_file_name() { let env_vars_from_env_file = match cli_options.env_file_name() {
Some(env_filename) => { Some(env_filename) => {
@ -636,14 +717,14 @@ impl<'a> DenoCompileBinaryWriter<'a> {
writer, writer,
original_bin, original_bin,
&metadata, &metadata,
eszip, npm_snapshot.map(|s| s.into_serialized()),
npm_vfs.as_ref(), &remote_modules_store,
&npm_files, vfs,
compile_flags, compile_flags,
) )
} }
fn build_vfs( fn build_npm_vfs(
&self, &self,
root_path: &Path, root_path: &Path,
cli_options: &CliOptions, cli_options: &CliOptions,
@ -664,8 +745,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
} else { } else {
// DO NOT include the user's registry url as it may contain credentials, // DO NOT include the user's registry url as it may contain credentials,
// but also don't make this dependent on the registry url // but also don't make this dependent on the registry url
let root_path = npm_resolver.global_cache_root_folder(); let global_cache_root_path = npm_resolver.global_cache_root_folder();
let mut builder = VfsBuilder::new(root_path)?; let mut builder = VfsBuilder::new(global_cache_root_path)?;
let mut packages = let mut packages =
npm_resolver.all_system_packages(&self.npm_system_info); npm_resolver.all_system_packages(&self.npm_system_info);
packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
@ -675,12 +756,12 @@ impl<'a> DenoCompileBinaryWriter<'a> {
builder.add_dir_recursive(&folder)?; builder.add_dir_recursive(&folder)?;
} }
// Flatten all the registries folders into a single "node_modules/localhost" folder // Flatten all the registries folders into a single ".deno_compile_node_modules/localhost" folder
// that will be used by denort when loading the npm cache. This avoids us exposing // that will be used by denort when loading the npm cache. This avoids us exposing
// the user's private registry information and means we don't have to bother // the user's private registry information and means we don't have to bother
// serializing all the different registry config into the binary. // serializing all the different registry config into the binary.
builder.with_root_dir(|root_dir| { builder.with_root_dir(|root_dir| {
root_dir.name = "node_modules".to_string(); root_dir.name = ".deno_compile_node_modules".to_string();
let mut new_entries = Vec::with_capacity(root_dir.entries.len()); let mut new_entries = Vec::with_capacity(root_dir.entries.len());
let mut localhost_entries = IndexMap::new(); let mut localhost_entries = IndexMap::new();
for entry in std::mem::take(&mut root_dir.entries) { for entry in std::mem::take(&mut root_dir.entries) {
@ -715,6 +796,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
root_dir.entries = new_entries; root_dir.entries = new_entries;
}); });
builder.set_new_root_path(root_path.to_path_buf())?;
Ok(builder) Ok(builder)
} }
} }

View file

@ -22,8 +22,8 @@ use super::virtual_fs::FileBackedVfs;
pub struct DenoCompileFileSystem(Arc<FileBackedVfs>); pub struct DenoCompileFileSystem(Arc<FileBackedVfs>);
impl DenoCompileFileSystem { impl DenoCompileFileSystem {
pub fn new(vfs: FileBackedVfs) -> Self { pub fn new(vfs: Arc<FileBackedVfs>) -> Self {
Self(Arc::new(vfs)) Self(vfs)
} }
fn error_if_in_vfs(&self, path: &Path) -> FsResult<()> { fn error_if_in_vfs(&self, path: &Path) -> FsResult<()> {

View file

@ -5,6 +5,8 @@
#![allow(dead_code)] #![allow(dead_code)]
#![allow(unused_imports)] #![allow(unused_imports)]
use binary::StandaloneData;
use binary::StandaloneModules;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_cache_dir::npm::NpmCacheDir; use deno_cache_dir::npm::NpmCacheDir;
use deno_config::workspace::MappedResolution; use deno_config::workspace::MappedResolution;
@ -38,7 +40,6 @@ use deno_runtime::permissions::RuntimePermissionDescriptorParser;
use deno_runtime::WorkerExecutionMode; use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel; use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use eszip::EszipRelativeFileBaseUrl;
use import_map::parse_from_json; use import_map::parse_from_json;
use node_resolver::analyze::NodeCodeTranslator; use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::NodeResolutionMode; use node_resolver::NodeResolutionMode;
@ -54,6 +55,7 @@ use crate::args::CacheSetting;
use crate::args::NpmInstallDepsProvider; use crate::args::NpmInstallDepsProvider;
use crate::args::StorageKeyResolver; use crate::args::StorageKeyResolver;
use crate::cache::Caches; use crate::cache::Caches;
use crate::cache::DenoCacheEnvFsAdapter;
use crate::cache::DenoDirProvider; use crate::cache::DenoDirProvider;
use crate::cache::NodeAnalysisCache; use crate::cache::NodeAnalysisCache;
use crate::cache::RealDenoCacheEnv; use crate::cache::RealDenoCacheEnv;
@ -78,52 +80,18 @@ use crate::worker::ModuleLoaderFactory;
pub mod binary; pub mod binary;
mod file_system; mod file_system;
mod serialization;
mod virtual_fs; mod virtual_fs;
pub use binary::extract_standalone; pub use binary::extract_standalone;
pub use binary::is_standalone_binary; pub use binary::is_standalone_binary;
pub use binary::DenoCompileBinaryWriter; pub use binary::DenoCompileBinaryWriter;
use self::binary::load_npm_vfs;
use self::binary::Metadata; use self::binary::Metadata;
use self::file_system::DenoCompileFileSystem; use self::file_system::DenoCompileFileSystem;
struct WorkspaceEszipModule {
specifier: ModuleSpecifier,
inner: eszip::Module,
}
struct WorkspaceEszip {
eszip: eszip::EszipV2,
root_dir_url: Arc<ModuleSpecifier>,
}
impl WorkspaceEszip {
pub fn get_module(
&self,
specifier: &ModuleSpecifier,
) -> Option<WorkspaceEszipModule> {
if specifier.scheme() == "file" {
let specifier_key = EszipRelativeFileBaseUrl::new(&self.root_dir_url)
.specifier_key(specifier);
let module = self.eszip.get_module(&specifier_key)?;
let specifier = self.root_dir_url.join(&module.specifier).unwrap();
Some(WorkspaceEszipModule {
specifier,
inner: module,
})
} else {
let module = self.eszip.get_module(specifier.as_str())?;
Some(WorkspaceEszipModule {
specifier: ModuleSpecifier::parse(&module.specifier).unwrap(),
inner: module,
})
}
}
}
struct SharedModuleLoaderState { struct SharedModuleLoaderState {
eszip: WorkspaceEszip, modules: StandaloneModules,
workspace_resolver: WorkspaceResolver, workspace_resolver: WorkspaceResolver,
node_resolver: Arc<CliNodeResolver>, node_resolver: Arc<CliNodeResolver>,
npm_module_loader: Arc<NpmModuleLoader>, npm_module_loader: Arc<NpmModuleLoader>,
@ -249,8 +217,10 @@ impl ModuleLoader for EmbeddedModuleLoader {
} }
if specifier.scheme() == "jsr" { if specifier.scheme() == "jsr" {
if let Some(module) = self.shared.eszip.get_module(&specifier) { if let Some(specifier) =
return Ok(module.specifier); self.shared.modules.resolve_specifier(&specifier)?
{
return Ok(specifier.clone());
} }
} }
@ -345,54 +315,28 @@ impl ModuleLoader for EmbeddedModuleLoader {
); );
} }
let Some(module) = self.shared.eszip.get_module(original_specifier) else { match self.shared.modules.read(original_specifier) {
return deno_core::ModuleLoadResponse::Sync(Err(type_error(format!( Ok(Some(module)) => {
"{MODULE_NOT_FOUND}: {}", let (module_specifier, module_type, module_source) =
original_specifier module.into_for_v8();
)))); deno_core::ModuleLoadResponse::Sync(Ok(
}; deno_core::ModuleSource::new_with_redirect(
let original_specifier = original_specifier.clone(); module_type,
module_source,
deno_core::ModuleLoadResponse::Async( original_specifier,
async move { module_specifier,
let code = module.inner.source().await.ok_or_else(|| {
type_error(format!("Module not found: {}", original_specifier))
})?;
let code = arc_u8_to_arc_str(code)
.map_err(|_| type_error("Module source is not utf-8"))?;
Ok(deno_core::ModuleSource::new_with_redirect(
match module.inner.kind {
eszip::ModuleKind::JavaScript => ModuleType::JavaScript,
eszip::ModuleKind::Json => ModuleType::Json,
eszip::ModuleKind::Jsonc => {
return Err(type_error("jsonc modules not supported"))
}
eszip::ModuleKind::OpaqueData => {
unreachable!();
}
},
ModuleSourceCode::String(code.into()),
&original_specifier,
&module.specifier,
None, None,
),
)) ))
} }
.boxed_local(), Ok(None) => deno_core::ModuleLoadResponse::Sync(Err(type_error(
) format!("{MODULE_NOT_FOUND}: {}", original_specifier),
))),
Err(err) => deno_core::ModuleLoadResponse::Sync(Err(type_error(
format!("{:?}", err),
))),
}
} }
}
fn arc_u8_to_arc_str(
arc_u8: Arc<[u8]>,
) -> Result<Arc<str>, std::str::Utf8Error> {
// Check that the string is valid UTF-8.
std::str::from_utf8(&arc_u8)?;
// SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as
// Arc<str>. This is proven by the From<Arc<str>> impl for Arc<[u8]> from the
// standard library.
Ok(unsafe {
std::mem::transmute::<std::sync::Arc<[u8]>, std::sync::Arc<str>>(arc_u8)
})
} }
struct StandaloneModuleLoaderFactory { struct StandaloneModuleLoaderFactory {
@ -439,13 +383,15 @@ impl RootCertStoreProvider for StandaloneRootCertStoreProvider {
} }
} }
pub async fn run( pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
mut eszip: eszip::EszipV2, let StandaloneData {
metadata: Metadata, fs,
) -> Result<i32, AnyError> { metadata,
let current_exe_path = std::env::current_exe().unwrap(); modules,
let current_exe_name = npm_snapshot,
current_exe_path.file_name().unwrap().to_string_lossy(); root_path,
vfs,
} = data;
let deno_dir_provider = Arc::new(DenoDirProvider::new(None)); let deno_dir_provider = Arc::new(DenoDirProvider::new(None));
let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider { let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider {
ca_stores: metadata.ca_stores, ca_stores: metadata.ca_stores,
@ -459,35 +405,16 @@ pub async fn run(
)); ));
// use a dummy npm registry url // use a dummy npm registry url
let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap(); let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap();
let root_path =
std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name));
let root_dir_url = let root_dir_url =
Arc::new(ModuleSpecifier::from_directory_path(&root_path).unwrap()); Arc::new(ModuleSpecifier::from_directory_path(&root_path).unwrap());
let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap(); let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap();
let root_node_modules_path = root_path.join("node_modules"); let npm_global_cache_dir = root_path.join(".deno_compile_node_modules");
let npm_cache_dir = NpmCacheDir::new(
&RealDenoCacheEnv,
root_node_modules_path.clone(),
vec![npm_registry_url.clone()],
);
let npm_global_cache_dir = npm_cache_dir.get_cache_location();
let cache_setting = CacheSetting::Only; let cache_setting = CacheSetting::Only;
let (fs, npm_resolver, maybe_vfs_root) = match metadata.node_modules { let npm_resolver = match metadata.node_modules {
Some(binary::NodeModules::Managed { node_modules_dir }) => { Some(binary::NodeModules::Managed { node_modules_dir }) => {
// this will always have a snapshot let snapshot = npm_snapshot.unwrap();
let snapshot = eszip.take_npm_snapshot().unwrap();
let vfs_root_dir_path = if node_modules_dir.is_some() {
root_path.clone()
} else {
npm_cache_dir.root_dir().to_owned()
};
let vfs = load_npm_vfs(vfs_root_dir_path.clone())
.context("Failed to load npm vfs.")?;
let maybe_node_modules_path = node_modules_dir let maybe_node_modules_path = node_modules_dir
.map(|node_modules_dir| vfs_root_dir_path.join(node_modules_dir)); .map(|node_modules_dir| root_path.join(node_modules_dir));
let fs = Arc::new(DenoCompileFileSystem::new(vfs))
as Arc<dyn deno_fs::FileSystem>;
let npm_resolver =
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
CliNpmResolverManagedCreateOptions { CliNpmResolverManagedCreateOptions {
snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some( snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some(
@ -517,31 +444,22 @@ pub async fn run(
lifecycle_scripts: Default::default(), lifecycle_scripts: Default::default(),
}, },
)) ))
.await?; .await?
(fs, npm_resolver, Some(vfs_root_dir_path))
} }
Some(binary::NodeModules::Byonm { Some(binary::NodeModules::Byonm {
root_node_modules_dir, root_node_modules_dir,
}) => { }) => {
let vfs_root_dir_path = root_path.clone();
let vfs = load_npm_vfs(vfs_root_dir_path.clone())
.context("Failed to load vfs.")?;
let root_node_modules_dir = let root_node_modules_dir =
root_node_modules_dir.map(|p| vfs.root().join(p)); root_node_modules_dir.map(|p| vfs.root().join(p));
let fs = Arc::new(DenoCompileFileSystem::new(vfs)) create_cli_npm_resolver(CliNpmResolverCreateOptions::Byonm(
as Arc<dyn deno_fs::FileSystem>; CliByonmNpmResolverCreateOptions {
let npm_resolver = create_cli_npm_resolver(
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
fs: CliDenoResolverFs(fs.clone()), fs: CliDenoResolverFs(fs.clone()),
root_node_modules_dir, root_node_modules_dir,
}), },
) ))
.await?; .await?
(fs, npm_resolver, Some(vfs_root_dir_path))
} }
None => { None => {
let fs = Arc::new(deno_fs::RealFs) as Arc<dyn deno_fs::FileSystem>;
let npm_resolver =
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
CliNpmResolverManagedCreateOptions { CliNpmResolverManagedCreateOptions {
snapshot: CliNpmResolverManagedSnapshotOption::Specified(None), snapshot: CliNpmResolverManagedSnapshotOption::Specified(None),
@ -557,14 +475,13 @@ pub async fn run(
// this is only used for installing packages, which isn't necessary with deno compile // this is only used for installing packages, which isn't necessary with deno compile
NpmInstallDepsProvider::empty(), NpmInstallDepsProvider::empty(),
), ),
// Packages from different registries are already inlined in the ESZip, // Packages from different registries are already inlined in the binary,
// so no need to create actual `.npmrc` configuration. // so no need to create actual `.npmrc` configuration.
npmrc: create_default_npmrc(), npmrc: create_default_npmrc(),
lifecycle_scripts: Default::default(), lifecycle_scripts: Default::default(),
}, },
)) ))
.await?; .await?
(fs, npm_resolver, None)
} }
}; };
@ -645,10 +562,7 @@ pub async fn run(
}; };
let module_loader_factory = StandaloneModuleLoaderFactory { let module_loader_factory = StandaloneModuleLoaderFactory {
shared: Arc::new(SharedModuleLoaderState { shared: Arc::new(SharedModuleLoaderState {
eszip: WorkspaceEszip { modules,
eszip,
root_dir_url,
},
workspace_resolver, workspace_resolver,
node_resolver: cli_node_resolver.clone(), node_resolver: cli_node_resolver.clone(),
npm_module_loader: Arc::new(NpmModuleLoader::new( npm_module_loader: Arc::new(NpmModuleLoader::new(
@ -663,19 +577,17 @@ pub async fn run(
let permissions = { let permissions = {
let mut permissions = let mut permissions =
metadata.permissions.to_options(/* cli_arg_urls */ &[]); metadata.permissions.to_options(/* cli_arg_urls */ &[]);
// if running with an npm vfs, grant read access to it // grant read access to the vfs
if let Some(vfs_root) = maybe_vfs_root {
match &mut permissions.allow_read { match &mut permissions.allow_read {
Some(vec) if vec.is_empty() => { Some(vec) if vec.is_empty() => {
// do nothing, already granted // do nothing, already granted
} }
Some(vec) => { Some(vec) => {
vec.push(vfs_root.to_string_lossy().to_string()); vec.push(root_path.to_string_lossy().to_string());
} }
None => { None => {
permissions.allow_read = permissions.allow_read =
Some(vec![vfs_root.to_string_lossy().to_string()]); Some(vec![root_path.to_string_lossy().to_string()]);
}
} }
} }

View file

@ -0,0 +1,644 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::io::Write;
use deno_ast::MediaType;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_core::FastString;
use deno_core::ModuleSourceCode;
use deno_core::ModuleType;
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_semver::package::PackageReq;
use crate::standalone::virtual_fs::VirtualDirectory;
use super::binary::Metadata;
use super::virtual_fs::VfsBuilder;
// Sentinel written at both the start and the end of the data section so a
// reader can detect whether a binary actually embeds data and whether the
// section was deserialized completely.
const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
/// Binary format:
/// * d3n0l4nd
/// * <metadata_len><metadata>
/// * <npm_snapshot_len><npm_snapshot>
/// * <remote_modules_len><remote_modules>
/// * <vfs_headers_len><vfs_headers>
/// * <vfs_file_data_len><vfs_file_data>
/// * d3n0l4nd
pub fn serialize_binary_data_section(
  metadata: &Metadata,
  npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
  remote_modules: &RemoteModulesStoreBuilder,
  vfs: VfsBuilder,
) -> Result<Vec<u8>, AnyError> {
  // Appends `data` prefixed by its byte length as a little-endian u64.
  fn write_bytes_with_len(bytes: &mut Vec<u8>, data: &[u8]) {
    bytes.extend_from_slice(&(data.len() as u64).to_le_bytes());
    bytes.extend_from_slice(data);
  }
  let mut bytes = Vec::new();
  bytes.extend_from_slice(MAGIC_BYTES);
  // 1. Metadata
  {
    let metadata = serde_json::to_string(metadata)?;
    write_bytes_with_len(&mut bytes, metadata.as_bytes());
  }
  // 2. Npm snapshot (an absent snapshot is encoded as a zero-length payload)
  {
    let npm_snapshot =
      npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
    write_bytes_with_len(&mut bytes, &npm_snapshot);
  }
  // 3. Remote modules
  {
    // the section length is not known until after writing it, so reserve an
    // 8-byte placeholder and patch it in once the section is serialized
    let update_index = bytes.len();
    bytes.extend_from_slice(&(0_u64).to_le_bytes());
    let start_index = bytes.len();
    remote_modules.write(&mut bytes)?;
    let length = bytes.len() - start_index;
    let length_bytes = (length as u64).to_le_bytes();
    bytes[update_index..update_index + length_bytes.len()]
      .copy_from_slice(&length_bytes);
  }
  // 4. VFS (directory tree as JSON, then the concatenated file contents)
  {
    let (vfs, vfs_files) = vfs.into_dir_and_files();
    let vfs = serde_json::to_string(&vfs)?;
    write_bytes_with_len(&mut bytes, vfs.as_bytes());
    let vfs_bytes_len = vfs_files.iter().map(|f| f.len() as u64).sum::<u64>();
    bytes.extend_from_slice(&vfs_bytes_len.to_le_bytes());
    for file in &vfs_files {
      bytes.extend_from_slice(file);
    }
  }
  // write the magic bytes at the end so we can use it
  // to make sure we've deserialized correctly
  bytes.extend_from_slice(MAGIC_BYTES);
  Ok(bytes)
}
/// The parsed contents of a binary's embedded data section.
pub struct DeserializedDataSection {
  pub metadata: Metadata,
  pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
  pub remote_modules: RemoteModulesStore,
  /// Root directory of the embedded virtual file system.
  pub vfs_dir: VirtualDirectory,
  /// Concatenated file contents referenced by entries in `vfs_dir`.
  pub vfs_files_data: &'static [u8],
}
/// Parses a data section previously produced by
/// `serialize_binary_data_section`. Returns `Ok(None)` when the leading
/// magic bytes are absent (i.e. `data` is not a data section).
pub fn deserialize_binary_data_section(
  data: &'static [u8],
) -> Result<Option<DeserializedDataSection>, AnyError> {
  // Reads a little-endian u64 length prefix followed by that many bytes.
  fn read_bytes_with_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
    let (input, len) = read_u64(input)?;
    let (input, data) = read_bytes(input, len as usize)?;
    Ok((input, data))
  }
  // Consumes the magic byte marker, reporting whether it matched.
  fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> {
    if input.len() < MAGIC_BYTES.len() {
      bail!("Unexpected end of data. Could not find magic bytes.");
    }
    let (magic_bytes, input) = input.split_at(MAGIC_BYTES.len());
    if magic_bytes != MAGIC_BYTES {
      return Ok((input, false));
    }
    Ok((input, true))
  }
  let (input, found) = read_magic_bytes(data)?;
  if !found {
    return Ok(None);
  }
  // sections below must be read in the same order they were written
  // 1. Metadata
  let (input, data) = read_bytes_with_len(input).context("reading metadata")?;
  let metadata: Metadata =
    serde_json::from_slice(data).context("deserializing metadata")?;
  // 2. Npm snapshot (a zero-length payload means no snapshot)
  let (input, data) =
    read_bytes_with_len(input).context("reading npm snapshot")?;
  let npm_snapshot = if data.is_empty() {
    None
  } else {
    Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?)
  };
  // 3. Remote modules
  let (input, data) =
    read_bytes_with_len(input).context("reading remote modules data")?;
  let remote_modules =
    RemoteModulesStore::build(data).context("deserializing remote modules")?;
  // 4. VFS
  let (input, data) = read_bytes_with_len(input).context("vfs")?;
  let vfs_dir: VirtualDirectory =
    serde_json::from_slice(data).context("deserializing vfs data")?;
  let (input, vfs_files_data) =
    read_bytes_with_len(input).context("reading vfs files data")?;
  // finally ensure we read the magic bytes at the end
  let (_input, found) = read_magic_bytes(input)?;
  if !found {
    bail!("Could not find magic bytes at the end of the data.");
  }
  Ok(Some(DeserializedDataSection {
    metadata,
    npm_snapshot,
    remote_modules,
    vfs_dir,
    vfs_files_data,
  }))
}
/// Accumulates remote modules and specifier redirects for serialization
/// into the binary's data section (read back by `RemoteModulesStore`).
#[derive(Default)]
pub struct RemoteModulesStoreBuilder {
  // specifier strings paired with the byte offset of their module entry
  // within the serialized data blob
  specifiers: Vec<(String, u64)>,
  data: Vec<(MediaType, Vec<u8>)>,
  // running byte length of all serialized module entries
  data_byte_len: u64,
  redirects: Vec<(String, String)>,
  // running byte length of the serialized redirect table
  redirects_len: u64,
}
impl RemoteModulesStoreBuilder {
  /// Registers a module's source bytes under `specifier`.
  pub fn add(&mut self, specifier: &Url, media_type: MediaType, data: Vec<u8>) {
    log::debug!("Adding '{}' ({})", specifier, media_type);
    let specifier = specifier.to_string();
    self.specifiers.push((specifier, self.data_byte_len));
    self.data_byte_len += 1 + 8 + data.len() as u64; // media type (1 byte), data length (8 bytes), data
    self.data.push((media_type, data));
  }
  /// Registers specifier redirects to be stored alongside the modules.
  pub fn add_redirects(&mut self, redirects: &BTreeMap<Url, Url>) {
    self.redirects.reserve(redirects.len());
    for (from, to) in redirects {
      log::debug!("Adding redirect '{}' -> '{}'", from, to);
      let from = from.to_string();
      let to = to.to_string();
      // each string is written with a 4-byte length prefix (see `write`)
      self.redirects_len += (4 + from.len() + 4 + to.len()) as u64;
      self.redirects.push((from, to));
    }
  }
  /// Serializes the store: the two table counts, the specifier table, the
  /// redirect table, then each module entry (media type byte, u64 data
  /// length, data bytes). Must stay in sync with `RemoteModulesStore::build`.
  fn write(&self, writer: &mut dyn Write) -> Result<(), AnyError> {
    writer.write_all(&(self.specifiers.len() as u32).to_le_bytes())?;
    writer.write_all(&(self.redirects.len() as u32).to_le_bytes())?;
    for (specifier, offset) in &self.specifiers {
      writer.write_all(&(specifier.len() as u32).to_le_bytes())?;
      writer.write_all(specifier.as_bytes())?;
      writer.write_all(&offset.to_le_bytes())?;
    }
    for (from, to) in &self.redirects {
      writer.write_all(&(from.len() as u32).to_le_bytes())?;
      writer.write_all(from.as_bytes())?;
      writer.write_all(&(to.len() as u32).to_le_bytes())?;
      writer.write_all(to.as_bytes())?;
    }
    for (media_type, data) in &self.data {
      writer.write_all(&[serialize_media_type(*media_type)])?;
      writer.write_all(&(data.len() as u64).to_le_bytes())?;
      writer.write_all(data)?;
    }
    Ok(())
  }
}
/// A single remote module's source as read out of the binary.
pub struct DenoCompileModuleData<'a> {
  pub specifier: &'a Url,
  pub media_type: MediaType,
  pub data: Cow<'static, [u8]>,
}
impl<'a> DenoCompileModuleData<'a> {
  /// Converts this module into the `(specifier, module type, source)`
  /// triple expected by the module loader / V8.
  pub fn into_for_v8(self) -> (&'a Url, ModuleType, ModuleSourceCode) {
    fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode {
      ModuleSourceCode::Bytes(match data {
        Cow::Borrowed(d) => d.into(),
        Cow::Owned(d) => d.into_boxed_slice().into(),
      })
    }
    // Converts to a string source without re-validating UTF-8.
    fn into_string_unsafe(data: Cow<'static, [u8]>) -> ModuleSourceCode {
      // todo(https://github.com/denoland/deno_core/pull/943): store whether
      // the string is ascii or not ahead of time so we can avoid the is_ascii()
      // check in FastString::from_static
      match data {
        Cow::Borrowed(d) => ModuleSourceCode::String(
          // SAFETY: we know this is a valid utf8 string
          unsafe { FastString::from_static(std::str::from_utf8_unchecked(d)) },
        ),
        Cow::Owned(d) => ModuleSourceCode::Bytes(d.into_boxed_slice().into()),
      }
    }
    let (media_type, source) = match self.media_type {
      MediaType::JavaScript
      | MediaType::Jsx
      | MediaType::Mjs
      | MediaType::Cjs
      | MediaType::TypeScript
      | MediaType::Mts
      | MediaType::Cts
      | MediaType::Dts
      | MediaType::Dmts
      | MediaType::Dcts
      | MediaType::Tsx => {
        (ModuleType::JavaScript, into_string_unsafe(self.data))
      }
      MediaType::Json => (ModuleType::Json, into_string_unsafe(self.data)),
      MediaType::Wasm => (ModuleType::Wasm, into_bytes(self.data)),
      // just assume javascript if we made it here
      MediaType::TsBuildInfo | MediaType::SourceMap | MediaType::Unknown => {
        (ModuleType::JavaScript, into_bytes(self.data))
      }
    };
    (self.specifier, media_type, source)
  }
}
enum RemoteModulesStoreSpecifierValue {
  // byte offset of the module's entry within `files_data`
  Data(usize),
  // specifier this entry redirects to
  Redirect(Url),
}
/// Read-only view over the remote modules serialized into the binary.
pub struct RemoteModulesStore {
  specifiers: HashMap<Url, RemoteModulesStoreSpecifierValue>,
  files_data: &'static [u8],
}
impl RemoteModulesStore {
  /// Parses the header tables written by `RemoteModulesStoreBuilder::write`;
  /// everything after the headers is kept as the raw module data blob.
  fn build(data: &'static [u8]) -> Result<Self, AnyError> {
    // u32-length-prefixed specifier string followed by a u64 data offset
    fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> {
      let (input, specifier) = read_string_lossy(input)?;
      let specifier = Url::parse(&specifier)?;
      let (input, offset) = read_u64(input)?;
      Ok((input, (specifier, offset)))
    }
    // two u32-length-prefixed strings: redirect source and target
    fn read_redirect(input: &[u8]) -> Result<(&[u8], (Url, Url)), AnyError> {
      let (input, from) = read_string_lossy(input)?;
      let from = Url::parse(&from)?;
      let (input, to) = read_string_lossy(input)?;
      let to = Url::parse(&to)?;
      Ok((input, (from, to)))
    }
    // reads both counts, then the specifier table, then the redirect table
    fn read_headers(
      input: &[u8],
    ) -> Result<(&[u8], HashMap<Url, RemoteModulesStoreSpecifierValue>), AnyError>
    {
      let (input, specifiers_len) = read_u32_as_usize(input)?;
      let (mut input, redirects_len) = read_u32_as_usize(input)?;
      let mut specifiers =
        HashMap::with_capacity(specifiers_len + redirects_len);
      for _ in 0..specifiers_len {
        let (current_input, (specifier, offset)) =
          read_specifier(input).context("reading specifier")?;
        input = current_input;
        specifiers.insert(
          specifier,
          RemoteModulesStoreSpecifierValue::Data(offset as usize),
        );
      }
      for _ in 0..redirects_len {
        let (current_input, (from, to)) = read_redirect(input)?;
        input = current_input;
        specifiers.insert(from, RemoteModulesStoreSpecifierValue::Redirect(to));
      }
      Ok((input, specifiers))
    }
    let (files_data, specifiers) = read_headers(data)?;
    Ok(Self {
      specifiers,
      files_data,
    })
  }
  /// Follows redirects (bounded at 10 hops) and returns the final specifier
  /// that has module data, or `None` when the specifier is unknown.
  pub fn resolve_specifier<'a>(
    &'a self,
    specifier: &'a Url,
  ) -> Result<Option<&'a Url>, AnyError> {
    let mut count = 0;
    let mut current = specifier;
    loop {
      if count > 10 {
        bail!("Too many redirects resolving '{}'", specifier);
      }
      match self.specifiers.get(current) {
        Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
          current = to;
          count += 1;
        }
        Some(RemoteModulesStoreSpecifierValue::Data(_)) => {
          return Ok(Some(current));
        }
        None => {
          return Ok(None);
        }
      }
    }
  }
  /// Follows redirects (bounded at 10 hops) and reads the module's media
  /// type and source bytes from the data blob, or `None` when unknown.
  pub fn read<'a>(
    &'a self,
    original_specifier: &'a Url,
  ) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
    let mut count = 0;
    let mut specifier = original_specifier;
    loop {
      if count > 10 {
        bail!("Too many redirects resolving '{}'", original_specifier);
      }
      match self.specifiers.get(specifier) {
        Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
          specifier = to;
          count += 1;
        }
        Some(RemoteModulesStoreSpecifierValue::Data(offset)) => {
          // entry layout: 1-byte media type, u64 data length, data bytes
          let input = &self.files_data[*offset..];
          let (input, media_type_byte) = read_bytes(input, 1)?;
          let media_type = deserialize_media_type(media_type_byte[0])?;
          let (input, len) = read_u64(input)?;
          let (_input, data) = read_bytes(input, len as usize)?;
          return Ok(Some(DenoCompileModuleData {
            specifier,
            media_type,
            data: Cow::Borrowed(data),
          }));
        }
        None => {
          return Ok(None);
        }
      }
    }
  }
}
/// Serializes an npm resolution snapshot into a compact binary form.
/// Package ids are interned: packages are written once in sorted order and
/// referenced elsewhere by their u32 index. Inverse of
/// `deserialize_npm_snapshot`.
fn serialize_npm_snapshot(
  mut snapshot: SerializedNpmResolutionSnapshot,
) -> Vec<u8> {
  // Appends a u32-length-prefixed string.
  fn append_string(bytes: &mut Vec<u8>, string: &str) {
    let len = string.len() as u32;
    bytes.extend_from_slice(&len.to_le_bytes());
    bytes.extend_from_slice(string.as_bytes());
  }
  snapshot.packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
  // map each package id to its index in the sorted package list
  let ids_to_stored_ids = snapshot
    .packages
    .iter()
    .enumerate()
    .map(|(i, pkg)| (&pkg.id, i as u32))
    .collect::<HashMap<_, _>>();
  // sorted for deterministic output
  let mut root_packages: Vec<_> = snapshot.root_packages.iter().collect();
  root_packages.sort();
  let mut bytes = Vec::new();
  bytes.extend_from_slice(&(snapshot.packages.len() as u32).to_le_bytes());
  for pkg in &snapshot.packages {
    append_string(&mut bytes, &pkg.id.as_serialized());
  }
  bytes.extend_from_slice(&(root_packages.len() as u32).to_le_bytes());
  for (req, id) in root_packages {
    append_string(&mut bytes, &req.to_string());
    let id = ids_to_stored_ids.get(&id).unwrap();
    bytes.extend_from_slice(&id.to_le_bytes());
  }
  for pkg in &snapshot.packages {
    let deps_len = pkg.dependencies.len() as u32;
    bytes.extend_from_slice(&deps_len.to_le_bytes());
    // dependencies are also sorted for determinism
    let mut deps: Vec<_> = pkg.dependencies.iter().collect();
    deps.sort();
    for (req, id) in deps {
      append_string(&mut bytes, req);
      let id = ids_to_stored_ids.get(&id).unwrap();
      bytes.extend_from_slice(&id.to_le_bytes());
    }
  }
  bytes
}
/// Inverse of `serialize_npm_snapshot`. Fails when any bytes remain after
/// the final package, so truncated or oversized input is detected.
fn deserialize_npm_snapshot(
  input: &[u8],
) -> Result<ValidSerializedNpmResolutionSnapshot, AnyError> {
  // u32-length-prefixed serialized npm package id
  fn parse_id(input: &[u8]) -> Result<(&[u8], NpmPackageId), AnyError> {
    let (input, id) = read_string_lossy(input)?;
    let id = NpmPackageId::from_serialized(&id)?;
    Ok((input, id))
  }
  #[allow(clippy::needless_lifetimes)] // clippy bug
  fn parse_root_package<'a>(
    id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
  ) -> impl Fn(&[u8]) -> Result<(&[u8], (PackageReq, NpmPackageId)), AnyError> + 'a
  {
    |input| {
      let (input, req) = read_string_lossy(input)?;
      let req = PackageReq::from_str(&req)?;
      let (input, id) = read_u32_as_usize(input)?;
      Ok((input, (req, id_to_npm_id(id)?)))
    }
  }
  #[allow(clippy::needless_lifetimes)] // clippy bug
  fn parse_package_dep<'a>(
    id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
  ) -> impl Fn(&[u8]) -> Result<(&[u8], (String, NpmPackageId)), AnyError> + 'a
  {
    |input| {
      let (input, req) = read_string_lossy(input)?;
      let (input, id) = read_u32_as_usize(input)?;
      Ok((input, (req.into_owned(), id_to_npm_id(id)?)))
    }
  }
  // a package is a u32 dependency count followed by that many dependencies;
  // fields not stored in the binary are filled with defaults
  fn parse_package<'a>(
    input: &'a [u8],
    id: NpmPackageId,
    id_to_npm_id: &impl Fn(usize) -> Result<NpmPackageId, AnyError>,
  ) -> Result<(&'a [u8], SerializedNpmResolutionSnapshotPackage), AnyError> {
    let (input, deps_len) = read_u32_as_usize(input)?;
    let (input, dependencies) =
      parse_hashmap_n_times(input, deps_len, parse_package_dep(id_to_npm_id))?;
    Ok((
      input,
      SerializedNpmResolutionSnapshotPackage {
        id,
        system: Default::default(),
        dist: Default::default(),
        dependencies,
        optional_dependencies: Default::default(),
        bin: None,
        scripts: Default::default(),
        deprecated: Default::default(),
      },
    ))
  }
  let (input, packages_len) = read_u32_as_usize(input)?;
  // get a hashmap of all the npm package ids to their serialized ids
  let (input, data_ids_to_npm_ids) =
    parse_vec_n_times(input, packages_len, parse_id)
      .context("deserializing id")?;
  let data_id_to_npm_id = |id: usize| {
    data_ids_to_npm_ids
      .get(id)
      .cloned()
      .ok_or_else(|| deno_core::anyhow::anyhow!("Invalid npm package id"))
  };
  let (input, root_packages_len) = read_u32_as_usize(input)?;
  let (input, root_packages) = parse_hashmap_n_times(
    input,
    root_packages_len,
    parse_root_package(&data_id_to_npm_id),
  )
  .context("deserializing root package")?;
  let (input, packages) =
    parse_vec_n_times_with_index(input, packages_len, |input, index| {
      parse_package(input, data_id_to_npm_id(index)?, &data_id_to_npm_id)
    })
    .context("deserializing package")?;
  if !input.is_empty() {
    bail!("Unexpected data left over");
  }
  Ok(
    SerializedNpmResolutionSnapshot {
      packages,
      root_packages,
    }
    // this is ok because we have already verified that all the
    // identifiers found in the snapshot are valid via the
    // npm package id -> npm package id mapping
    .into_valid_unsafe(),
  )
}
fn serialize_media_type(media_type: MediaType) -> u8 {
match media_type {
MediaType::JavaScript => 0,
MediaType::Jsx => 1,
MediaType::Mjs => 2,
MediaType::Cjs => 3,
MediaType::TypeScript => 4,
MediaType::Mts => 5,
MediaType::Cts => 6,
MediaType::Dts => 7,
MediaType::Dmts => 8,
MediaType::Dcts => 9,
MediaType::Tsx => 10,
MediaType::Json => 11,
MediaType::Wasm => 12,
MediaType::TsBuildInfo => 13,
MediaType::SourceMap => 14,
MediaType::Unknown => 15,
}
}
fn deserialize_media_type(value: u8) -> Result<MediaType, AnyError> {
match value {
0 => Ok(MediaType::JavaScript),
1 => Ok(MediaType::Jsx),
2 => Ok(MediaType::Mjs),
3 => Ok(MediaType::Cjs),
4 => Ok(MediaType::TypeScript),
5 => Ok(MediaType::Mts),
6 => Ok(MediaType::Cts),
7 => Ok(MediaType::Dts),
8 => Ok(MediaType::Dmts),
9 => Ok(MediaType::Dcts),
10 => Ok(MediaType::Tsx),
11 => Ok(MediaType::Json),
12 => Ok(MediaType::Wasm),
13 => Ok(MediaType::TsBuildInfo),
14 => Ok(MediaType::SourceMap),
15 => Ok(MediaType::Unknown),
_ => bail!("Unknown media type value: {}", value),
}
}
/// Runs `parse` exactly `times` times, collecting the key/value pairs into
/// a map while threading the remaining input through each call.
fn parse_hashmap_n_times<TKey: std::cmp::Eq + std::hash::Hash, TValue>(
  mut input: &[u8],
  times: usize,
  parse: impl Fn(&[u8]) -> Result<(&[u8], (TKey, TValue)), AnyError>,
) -> Result<(&[u8], HashMap<TKey, TValue>), AnyError> {
  let mut map = HashMap::with_capacity(times);
  for _ in 0..times {
    let (rest, (key, value)) = parse(input)?;
    map.insert(key, value);
    input = rest;
  }
  Ok((input, map))
}
fn parse_vec_n_times<TResult>(
input: &[u8],
times: usize,
parse: impl Fn(&[u8]) -> Result<(&[u8], TResult), AnyError>,
) -> Result<(&[u8], Vec<TResult>), AnyError> {
parse_vec_n_times_with_index(input, times, |input, _index| parse(input))
}
/// Like `parse_vec_n_times`, but also passes the zero-based iteration
/// index to `parse`.
fn parse_vec_n_times_with_index<TResult>(
  mut input: &[u8],
  times: usize,
  parse: impl Fn(&[u8], usize) -> Result<(&[u8], TResult), AnyError>,
) -> Result<(&[u8], Vec<TResult>), AnyError> {
  let mut items = Vec::with_capacity(times);
  for index in 0..times {
    let (rest, item) = parse(input, index)?;
    items.push(item);
    input = rest;
  }
  Ok((input, items))
}
fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> {
if input.len() < len {
bail!("Unexpected end of data.",);
}
let (len_bytes, input) = input.split_at(len);
Ok((input, len_bytes))
}
/// Reads a u32-length-prefixed string, replacing invalid UTF-8 lossily.
fn read_string_lossy(input: &[u8]) -> Result<(&[u8], Cow<str>), AnyError> {
  let (remaining, str_len) = read_u32_as_usize(input)?;
  let (remaining, raw) = read_bytes(remaining, str_len)?;
  Ok((remaining, String::from_utf8_lossy(raw)))
}
/// Reads a little-endian u32 and widens it to usize.
fn read_u32_as_usize(input: &[u8]) -> Result<(&[u8], usize), AnyError> {
  let (rest, raw) = read_bytes(input, 4)?;
  let value = u32::from_le_bytes(raw.try_into()?);
  Ok((rest, value as usize))
}
/// Reads a little-endian u64.
fn read_u64(input: &[u8]) -> Result<(&[u8], u64), AnyError> {
  let (rest, raw) = read_bytes(input, 8)?;
  let value = u64::from_le_bytes(raw.try_into()?);
  Ok((rest, value))
}

View file

@ -7,6 +7,7 @@ use std::fs::File;
use std::io::Read; use std::io::Read;
use std::io::Seek; use std::io::Seek;
use std::io::SeekFrom; use std::io::SeekFrom;
use std::ops::Range;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::rc::Rc; use std::rc::Rc;
@ -67,6 +68,26 @@ impl VfsBuilder {
}) })
} }
pub fn set_new_root_path(
&mut self,
root_path: PathBuf,
) -> Result<(), AnyError> {
let root_path = canonicalize_path(&root_path)?;
self.root_path = root_path;
self.root_dir = VirtualDirectory {
name: self
.root_path
.file_stem()
.map(|s| s.to_string_lossy().into_owned())
.unwrap_or("root".to_string()),
entries: vec![VfsEntry::Dir(VirtualDirectory {
name: std::mem::take(&mut self.root_dir.name),
entries: std::mem::take(&mut self.root_dir.entries),
})],
};
Ok(())
}
pub fn with_root_dir<R>( pub fn with_root_dir<R>(
&mut self, &mut self,
with_root: impl FnOnce(&mut VirtualDirectory) -> R, with_root: impl FnOnce(&mut VirtualDirectory) -> R,
@ -119,7 +140,7 @@ impl VfsBuilder {
// inline the symlink and make the target file // inline the symlink and make the target file
let file_bytes = std::fs::read(&target) let file_bytes = std::fs::read(&target)
.with_context(|| format!("Reading {}", path.display()))?; .with_context(|| format!("Reading {}", path.display()))?;
self.add_file(&path, file_bytes)?; self.add_file_with_data_inner(&path, file_bytes)?;
} else { } else {
log::warn!( log::warn!(
"{} Symlink target is outside '{}'. Excluding symlink at '{}' with target '{}'.", "{} Symlink target is outside '{}'. Excluding symlink at '{}' with target '{}'.",
@ -191,16 +212,32 @@ impl VfsBuilder {
self.add_file_at_path_not_symlink(&target_path) self.add_file_at_path_not_symlink(&target_path)
} }
pub fn add_file_at_path_not_symlink( fn add_file_at_path_not_symlink(
&mut self, &mut self,
path: &Path, path: &Path,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let file_bytes = std::fs::read(path) let file_bytes = std::fs::read(path)
.with_context(|| format!("Reading {}", path.display()))?; .with_context(|| format!("Reading {}", path.display()))?;
self.add_file(path, file_bytes) self.add_file_with_data_inner(path, file_bytes)
} }
fn add_file(&mut self, path: &Path, data: Vec<u8>) -> Result<(), AnyError> { pub fn add_file_with_data(
&mut self,
path: &Path,
data: Vec<u8>,
) -> Result<(), AnyError> {
let target_path = canonicalize_path(path)?;
if target_path != path {
self.add_symlink(path, &target_path)?;
}
self.add_file_with_data_inner(&target_path, data)
}
fn add_file_with_data_inner(
&mut self,
path: &Path,
data: Vec<u8>,
) -> Result<(), AnyError> {
log::debug!("Adding file '{}'", path.display()); log::debug!("Adding file '{}'", path.display());
let checksum = util::checksum::gen(&[&data]); let checksum = util::checksum::gen(&[&data]);
let offset = if let Some(offset) = self.file_offsets.get(&checksum) { let offset = if let Some(offset) = self.file_offsets.get(&checksum) {
@ -249,8 +286,15 @@ impl VfsBuilder {
path.display(), path.display(),
target.display() target.display()
); );
let dest = self.path_relative_root(target)?; let relative_target = self.path_relative_root(target)?;
if dest == self.path_relative_root(path)? { let relative_path = match self.path_relative_root(path) {
Ok(path) => path,
Err(StripRootError { .. }) => {
// ignore if the original path is outside the root directory
return Ok(());
}
};
if relative_target == relative_path {
// it's the same, ignore // it's the same, ignore
return Ok(()); return Ok(());
} }
@ -263,7 +307,7 @@ impl VfsBuilder {
insert_index, insert_index,
VfsEntry::Symlink(VirtualSymlink { VfsEntry::Symlink(VirtualSymlink {
name: name.to_string(), name: name.to_string(),
dest_parts: dest dest_parts: relative_target
.components() .components()
.map(|c| c.as_os_str().to_string_lossy().to_string()) .map(|c| c.as_os_str().to_string_lossy().to_string())
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
@ -751,14 +795,14 @@ impl deno_io::fs::File for FileBackedVfsFile {
#[derive(Debug)] #[derive(Debug)]
pub struct FileBackedVfs { pub struct FileBackedVfs {
file: Mutex<Vec<u8>>, vfs_data: Cow<'static, [u8]>,
fs_root: VfsRoot, fs_root: VfsRoot,
} }
impl FileBackedVfs { impl FileBackedVfs {
pub fn new(file: Vec<u8>, fs_root: VfsRoot) -> Self { pub fn new(data: Cow<'static, [u8]>, fs_root: VfsRoot) -> Self {
Self { Self {
file: Mutex::new(file), vfs_data: data,
fs_root, fs_root,
} }
} }
@ -827,10 +871,15 @@ impl FileBackedVfs {
Ok(path) Ok(path)
} }
pub fn read_file_all(&self, file: &VirtualFile) -> std::io::Result<Vec<u8>> { pub fn read_file_all(
let mut buf = vec![0; file.len as usize]; &self,
self.read_file(file, 0, &mut buf)?; file: &VirtualFile,
Ok(buf) ) -> std::io::Result<Cow<'static, [u8]>> {
let read_range = self.get_read_range(file, 0, file.len)?;
match &self.vfs_data {
Cow::Borrowed(data) => Ok(Cow::Borrowed(&data[read_range])),
Cow::Owned(data) => Ok(Cow::Owned(data[read_range].to_vec())),
}
} }
pub fn read_file( pub fn read_file(
@ -839,18 +888,27 @@ impl FileBackedVfs {
pos: u64, pos: u64,
buf: &mut [u8], buf: &mut [u8],
) -> std::io::Result<usize> { ) -> std::io::Result<usize> {
let data = self.file.lock(); let read_range = self.get_read_range(file, pos, buf.len() as u64)?;
buf.copy_from_slice(&self.vfs_data[read_range]);
Ok(buf.len())
}
fn get_read_range(
&self,
file: &VirtualFile,
pos: u64,
len: u64,
) -> std::io::Result<Range<usize>> {
let data = &self.vfs_data;
let start = self.fs_root.start_file_offset + file.offset + pos; let start = self.fs_root.start_file_offset + file.offset + pos;
let end = start + buf.len() as u64; let end = start + len;
if end > data.len() as u64 { if end > data.len() as u64 {
return Err(std::io::Error::new( return Err(std::io::Error::new(
std::io::ErrorKind::UnexpectedEof, std::io::ErrorKind::UnexpectedEof,
"unexpected EOF", "unexpected EOF",
)); ));
} }
Ok(start as usize..end as usize)
buf.copy_from_slice(&data[start as usize..end as usize]);
Ok(buf.len())
} }
pub fn dir_entry(&self, path: &Path) -> std::io::Result<&VirtualDirectory> { pub fn dir_entry(&self, path: &Path) -> std::io::Result<&VirtualDirectory> {
@ -888,7 +946,7 @@ mod test {
#[track_caller] #[track_caller]
fn read_file(vfs: &FileBackedVfs, path: &Path) -> String { fn read_file(vfs: &FileBackedVfs, path: &Path) -> String {
let file = vfs.file_entry(path).unwrap(); let file = vfs.file_entry(path).unwrap();
String::from_utf8(vfs.read_file_all(file).unwrap()).unwrap() String::from_utf8(vfs.read_file_all(file).unwrap().into_owned()).unwrap()
} }
#[test] #[test]
@ -901,20 +959,23 @@ mod test {
let src_path = src_path.to_path_buf(); let src_path = src_path.to_path_buf();
let mut builder = VfsBuilder::new(src_path.clone()).unwrap(); let mut builder = VfsBuilder::new(src_path.clone()).unwrap();
builder builder
.add_file(&src_path.join("a.txt"), "data".into()) .add_file_with_data_inner(&src_path.join("a.txt"), "data".into())
.unwrap(); .unwrap();
builder builder
.add_file(&src_path.join("b.txt"), "data".into()) .add_file_with_data_inner(&src_path.join("b.txt"), "data".into())
.unwrap(); .unwrap();
assert_eq!(builder.files.len(), 1); // because duplicate data assert_eq!(builder.files.len(), 1); // because duplicate data
builder builder
.add_file(&src_path.join("c.txt"), "c".into()) .add_file_with_data_inner(&src_path.join("c.txt"), "c".into())
.unwrap(); .unwrap();
builder builder
.add_file(&src_path.join("sub_dir").join("d.txt"), "d".into()) .add_file_with_data_inner(
&src_path.join("sub_dir").join("d.txt"),
"d".into(),
)
.unwrap(); .unwrap();
builder builder
.add_file(&src_path.join("e.txt"), "e".into()) .add_file_with_data_inner(&src_path.join("e.txt"), "e".into())
.unwrap(); .unwrap();
builder builder
.add_symlink( .add_symlink(
@ -1031,7 +1092,7 @@ mod test {
( (
dest_path.to_path_buf(), dest_path.to_path_buf(),
FileBackedVfs::new( FileBackedVfs::new(
data, Cow::Owned(data),
VfsRoot { VfsRoot {
dir: root_dir, dir: root_dir,
root_path: dest_path.to_path_buf(), root_path: dest_path.to_path_buf(),
@ -1082,7 +1143,7 @@ mod test {
let temp_path = temp_dir.path().canonicalize(); let temp_path = temp_dir.path().canonicalize();
let mut builder = VfsBuilder::new(temp_path.to_path_buf()).unwrap(); let mut builder = VfsBuilder::new(temp_path.to_path_buf()).unwrap();
builder builder
.add_file( .add_file_with_data_inner(
temp_path.join("a.txt").as_path(), temp_path.join("a.txt").as_path(),
"0123456789".to_string().into_bytes(), "0123456789".to_string().into_bytes(),
) )

View file

@ -5,6 +5,7 @@ use crate::args::CompileFlags;
use crate::args::Flags; use crate::args::Flags;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::standalone::binary::StandaloneRelativeFileBaseUrl;
use crate::standalone::is_standalone_binary; use crate::standalone::is_standalone_binary;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
@ -14,7 +15,6 @@ use deno_core::error::AnyError;
use deno_core::resolve_url_or_path; use deno_core::resolve_url_or_path;
use deno_graph::GraphKind; use deno_graph::GraphKind;
use deno_terminal::colors; use deno_terminal::colors;
use eszip::EszipRelativeFileBaseUrl;
use rand::Rng; use rand::Rng;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
@ -29,7 +29,6 @@ pub async fn compile(
let factory = CliFactory::from_flags(flags); let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?; let cli_options = factory.cli_options()?;
let module_graph_creator = factory.module_graph_creator().await?; let module_graph_creator = factory.module_graph_creator().await?;
let parsed_source_cache = factory.parsed_source_cache();
let binary_writer = factory.create_compile_binary_writer().await?; let binary_writer = factory.create_compile_binary_writer().await?;
let http_client = factory.http_client_provider(); let http_client = factory.http_client_provider();
let module_specifier = cli_options.resolve_main_module()?; let module_specifier = cli_options.resolve_main_module()?;
@ -80,7 +79,7 @@ pub async fn compile(
let graph = if cli_options.type_check_mode().is_true() { let graph = if cli_options.type_check_mode().is_true() {
// In this case, the previous graph creation did type checking, which will // In this case, the previous graph creation did type checking, which will
// create a module graph with types information in it. We don't want to // create a module graph with types information in it. We don't want to
// store that in the eszip so create a code only module graph from scratch. // store that in the binary so create a code only module graph from scratch.
module_graph_creator module_graph_creator
.create_graph(GraphKind::CodeOnly, module_roots) .create_graph(GraphKind::CodeOnly, module_roots)
.await? .await?
@ -91,11 +90,6 @@ pub async fn compile(
let ts_config_for_emit = cli_options let ts_config_for_emit = cli_options
.resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?; .resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?;
check_warn_tsconfig(&ts_config_for_emit); check_warn_tsconfig(&ts_config_for_emit);
let (transpile_options, emit_options) =
crate::args::ts_config_to_transpile_and_emit_options(
ts_config_for_emit.ts_config,
)?;
let parser = parsed_source_cache.as_capturing_parser();
let root_dir_url = resolve_root_dir_from_specifiers( let root_dir_url = resolve_root_dir_from_specifiers(
cli_options.workspace().root_dir(), cli_options.workspace().root_dir(),
graph.specifiers().map(|(s, _)| s).chain( graph.specifiers().map(|(s, _)| s).chain(
@ -106,17 +100,6 @@ pub async fn compile(
), ),
); );
log::debug!("Binary root dir: {}", root_dir_url); log::debug!("Binary root dir: {}", root_dir_url);
let root_dir_url = EszipRelativeFileBaseUrl::new(&root_dir_url);
let eszip = eszip::EszipV2::from_graph(eszip::FromGraphOptions {
graph,
parser,
transpile_options,
emit_options,
// make all the modules relative to the root folder
relative_file_base: Some(root_dir_url),
npm_packages: None,
})?;
log::info!( log::info!(
"{} {} to {}", "{} {} to {}",
colors::green("Compile"), colors::green("Compile"),
@ -143,15 +126,18 @@ pub async fn compile(
let write_result = binary_writer let write_result = binary_writer
.write_bin( .write_bin(
file, file,
eszip, &graph,
root_dir_url, StandaloneRelativeFileBaseUrl::from(&root_dir_url),
module_specifier, module_specifier,
&compile_flags, &compile_flags,
cli_options, cli_options,
) )
.await .await
.with_context(|| { .with_context(|| {
format!("Writing temporary file '{}'", temp_path.display()) format!(
"Writing deno compile executable to temporary file '{}'",
temp_path.display()
)
}); });
// set it as executable // set it as executable

View file

@ -571,7 +571,7 @@ pub async fn cover_files(
| MediaType::Cjs | MediaType::Cjs
| MediaType::Mjs | MediaType::Mjs
| MediaType::Json => None, | MediaType::Json => None,
MediaType::Dts | MediaType::Dmts | MediaType::Dcts => Some(Vec::new()), MediaType::Dts | MediaType::Dmts | MediaType::Dcts => Some(String::new()),
MediaType::TypeScript MediaType::TypeScript
| MediaType::Jsx | MediaType::Jsx
| MediaType::Mts | MediaType::Mts
@ -593,8 +593,7 @@ pub async fn cover_files(
} }
}; };
let runtime_code: String = match transpiled_code { let runtime_code: String = match transpiled_code {
Some(code) => String::from_utf8(code) Some(code) => code,
.with_context(|| format!("Failed decoding {}", file.specifier))?,
None => original_source.to_string(), None => original_source.to_string(),
}; };

View file

@ -978,6 +978,7 @@ fn get_resolved_malva_config(
single_line_top_level_declarations: false, single_line_top_level_declarations: false,
selector_override_comment_directive: "deno-fmt-selector-override".into(), selector_override_comment_directive: "deno-fmt-selector-override".into(),
ignore_comment_directive: "deno-fmt-ignore".into(), ignore_comment_directive: "deno-fmt-ignore".into(),
ignore_file_comment_directive: "deno-fmt-ignore-file".into(),
}; };
FormatOptions { FormatOptions {
@ -1036,6 +1037,7 @@ fn get_resolved_markup_fmt_config(
svelte_directive_shorthand: Some(true), svelte_directive_shorthand: Some(true),
astro_attr_shorthand: Some(true), astro_attr_shorthand: Some(true),
ignore_comment_directive: "deno-fmt-ignore".into(), ignore_comment_directive: "deno-fmt-ignore".into(),
ignore_file_comment_directive: "deno-fmt-ignore-file".into(),
}; };
FormatOptions { FormatOptions {

View file

@ -24,32 +24,29 @@ pub fn init_project(init_flags: InitFlags) -> Result<(), AnyError> {
create_file( create_file(
&dir, &dir,
"main.ts", "main.ts",
r#"import { type Route, route, serveDir } from "@std/http"; r#"import { serveDir } from "@std/http";
const routes: Route[] = [ const userPagePattern = new URLPattern({ pathname: "/users/:id" });
{ const staticPathPattern = new URLPattern({ pathname: "/static/*" });
pattern: new URLPattern({ pathname: "/" }),
handler: () => new Response("Home page"),
},
{
pattern: new URLPattern({ pathname: "/users/:id" }),
handler: (_req, _info, params) => new Response(params?.pathname.groups.id),
},
{
pattern: new URLPattern({ pathname: "/static/*" }),
handler: (req) => serveDir(req),
},
];
function defaultHandler(_req: Request) {
return new Response("Not found", { status: 404 });
}
const handler = route(routes, defaultHandler);
export default { export default {
fetch(req) { fetch(req) {
return handler(req); const url = new URL(req.url);
if (url.pathname === "/") {
return new Response("Home page");
}
const userPageMatch = userPagePattern.exec(url);
if (userPageMatch) {
return new Response(userPageMatch.pathname.groups.id);
}
if (staticPathPattern.test(url)) {
return serveDir(req);
}
return new Response("Not found", { status: 404 });
}, },
} satisfies Deno.ServeDefaultExport; } satisfies Deno.ServeDefaultExport;
"#, "#,

View file

@ -1,32 +1,22 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
mod cache_deps;
pub use cache_deps::cache_top_level_deps;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::VersionReq;
use std::borrow::Cow;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use deno_ast::TextChange;
use deno_config::deno_json::FmtOptionsConfig;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::FutureExt; use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
use deno_core::serde_json; use deno_path_util::url_to_file_path;
use deno_core::ModuleSpecifier; use deno_semver::jsr::JsrPackageReqReference;
use deno_runtime::deno_node; use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use indexmap::IndexMap; use deno_semver::VersionReq;
use jsonc_parser::ast::ObjectProp; use jsonc_parser::cst::CstObject;
use jsonc_parser::ast::Value; use jsonc_parser::cst::CstObjectProp;
use yoke::Yoke; use jsonc_parser::cst::CstRootNode;
use jsonc_parser::json;
use crate::args::AddFlags; use crate::args::AddFlags;
use crate::args::CacheSetting; use crate::args::CacheSetting;
@ -38,236 +28,181 @@ use crate::file_fetcher::FileFetcher;
use crate::jsr::JsrFetchResolver; use crate::jsr::JsrFetchResolver;
use crate::npm::NpmFetchResolver; use crate::npm::NpmFetchResolver;
enum DenoConfigFormat { mod cache_deps;
Json,
Jsonc, pub use cache_deps::cache_top_level_deps;
#[derive(Debug, Copy, Clone)]
enum ConfigKind {
DenoJson,
PackageJson,
} }
impl DenoConfigFormat {
fn from_specifier(spec: &ModuleSpecifier) -> Result<Self, AnyError> {
let file_name = spec
.path_segments()
.ok_or_else(|| anyhow!("Empty path in deno config specifier: {spec}"))?
.last()
.unwrap();
match file_name {
"deno.json" => Ok(Self::Json),
"deno.jsonc" => Ok(Self::Jsonc),
_ => bail!("Unsupported deno config file: {file_name}"),
}
}
}
struct DenoConfig {
config: Arc<deno_config::deno_json::ConfigFile>,
format: DenoConfigFormat,
imports: IndexMap<String, String>,
}
fn deno_json_imports(
config: &deno_config::deno_json::ConfigFile,
) -> Result<IndexMap<String, String>, AnyError> {
Ok(
config
.json
.imports
.clone()
.map(|imports| {
serde_json::from_value(imports)
.map_err(|err| anyhow!("Malformed \"imports\" configuration: {err}"))
})
.transpose()?
.unwrap_or_default(),
)
}
impl DenoConfig {
fn from_options(options: &CliOptions) -> Result<Option<Self>, AnyError> {
let start_dir = &options.start_dir;
if let Some(config) = start_dir.maybe_deno_json() {
Ok(Some(Self {
imports: deno_json_imports(config)?,
config: config.clone(),
format: DenoConfigFormat::from_specifier(&config.specifier)?,
}))
} else {
Ok(None)
}
}
fn add(&mut self, selected: SelectedPackage) {
self.imports.insert(
selected.import_name,
format!("{}@{}", selected.package_name, selected.version_req),
);
}
fn remove(&mut self, package: &str) -> bool {
self.imports.shift_remove(package).is_some()
}
fn take_import_fields(
&mut self,
) -> Vec<(&'static str, IndexMap<String, String>)> {
vec![("imports", std::mem::take(&mut self.imports))]
}
}
impl NpmConfig {
fn from_options(options: &CliOptions) -> Result<Option<Self>, AnyError> {
let start_dir = &options.start_dir;
if let Some(pkg_json) = start_dir.maybe_pkg_json() {
Ok(Some(Self {
dependencies: pkg_json.dependencies.clone().unwrap_or_default(),
dev_dependencies: pkg_json.dev_dependencies.clone().unwrap_or_default(),
config: pkg_json.clone(),
fmt_options: None,
}))
} else {
Ok(None)
}
}
fn add(&mut self, selected: SelectedPackage, dev: bool) {
let (name, version) = package_json_dependency_entry(selected);
if dev {
self.dependencies.swap_remove(&name);
self.dev_dependencies.insert(name, version);
} else {
self.dev_dependencies.swap_remove(&name);
self.dependencies.insert(name, version);
}
}
fn remove(&mut self, package: &str) -> bool {
let in_deps = self.dependencies.shift_remove(package).is_some();
let in_dev_deps = self.dev_dependencies.shift_remove(package).is_some();
in_deps || in_dev_deps
}
fn take_import_fields(
&mut self,
) -> Vec<(&'static str, IndexMap<String, String>)> {
vec![
("dependencies", std::mem::take(&mut self.dependencies)),
(
"devDependencies",
std::mem::take(&mut self.dev_dependencies),
),
]
}
}
struct NpmConfig {
config: Arc<deno_node::PackageJson>,
fmt_options: Option<FmtOptionsConfig>,
dependencies: IndexMap<String, String>,
dev_dependencies: IndexMap<String, String>,
}
enum DenoOrPackageJson {
Deno(DenoConfig),
Npm(NpmConfig),
}
impl From<DenoConfig> for DenoOrPackageJson {
fn from(config: DenoConfig) -> Self {
Self::Deno(config)
}
}
impl From<NpmConfig> for DenoOrPackageJson {
fn from(config: NpmConfig) -> Self {
Self::Npm(config)
}
}
/// Wrapper around `jsonc_parser::ast::Object` that can be stored in a `Yoke`
#[derive(yoke::Yokeable)]
struct JsoncObjectView<'a>(jsonc_parser::ast::Object<'a>);
struct ConfigUpdater { struct ConfigUpdater {
config: DenoOrPackageJson, kind: ConfigKind,
// the `Yoke` is so we can carry the parsed object (which borrows from cst: CstRootNode,
// the source) along with the source itself root_object: CstObject,
ast: Yoke<JsoncObjectView<'static>, String>,
path: PathBuf, path: PathBuf,
modified: bool, modified: bool,
} }
impl ConfigUpdater { impl ConfigUpdater {
fn obj(&self) -> &jsonc_parser::ast::Object<'_> { fn new(
&self.ast.get().0 kind: ConfigKind,
} config_file_path: PathBuf,
fn contents(&self) -> &str { ) -> Result<Self, AnyError> {
self.ast.backing_cart() let config_file_contents = std::fs::read_to_string(&config_file_path)
}
async fn maybe_new(
config: Option<impl Into<DenoOrPackageJson>>,
) -> Result<Option<Self>, AnyError> {
if let Some(config) = config {
Ok(Some(Self::new(config.into()).await?))
} else {
Ok(None)
}
}
async fn new(config: DenoOrPackageJson) -> Result<Self, AnyError> {
let specifier = config.specifier();
if specifier.scheme() != "file" {
bail!("Can't update a remote configuration file");
}
let config_file_path = specifier.to_file_path().map_err(|_| {
anyhow!("Specifier {specifier:?} is an invalid file path")
})?;
let config_file_contents = {
let contents = tokio::fs::read_to_string(&config_file_path)
.await
.with_context(|| { .with_context(|| {
format!("Reading config file at: {}", config_file_path.display()) format!("Reading config file '{}'", config_file_path.display())
})?; })?;
if contents.trim().is_empty() { let cst = CstRootNode::parse(&config_file_contents, &Default::default())
"{}\n".into()
} else {
contents
}
};
let ast = Yoke::try_attach_to_cart(config_file_contents, |contents| {
let ast = jsonc_parser::parse_to_ast(
contents,
&Default::default(),
&Default::default(),
)
.with_context(|| { .with_context(|| {
format!("Failed to parse config file at {}", specifier) format!("Parsing config file '{}'", config_file_path.display())
})?;
let obj = match ast.value {
Some(Value::Object(obj)) => obj,
_ => bail!(
"Failed to update config file at {}, expected an object",
specifier
),
};
Ok(JsoncObjectView(obj))
})?; })?;
let root_object = cst.object_value_or_set();
Ok(Self { Ok(Self {
config, kind,
ast, cst,
root_object,
path: config_file_path, path: config_file_path,
modified: false, modified: false,
}) })
} }
fn add(&mut self, selected: SelectedPackage, dev: bool) { fn display_path(&self) -> String {
match &mut self.config { deno_path_util::url_from_file_path(&self.path)
DenoOrPackageJson::Deno(deno) => deno.add(selected), .map(|u| u.to_string())
DenoOrPackageJson::Npm(npm) => npm.add(selected, dev), .unwrap_or_else(|_| self.path.display().to_string())
} }
fn obj(&self) -> &CstObject {
&self.root_object
}
fn contents(&self) -> String {
self.cst.to_string()
}
fn add(&mut self, selected: SelectedPackage, dev: bool) {
fn insert_index(object: &CstObject, searching_name: &str) -> usize {
object
.properties()
.into_iter()
.take_while(|prop| {
let prop_name =
prop.name().and_then(|name| name.decoded_value().ok());
match prop_name {
Some(current_name) => {
searching_name.cmp(&current_name) == std::cmp::Ordering::Greater
}
None => true,
}
})
.count()
}
match self.kind {
ConfigKind::DenoJson => {
let imports = self.root_object.object_value_or_set("imports");
let value =
format!("{}@{}", selected.package_name, selected.version_req);
if let Some(prop) = imports.get(&selected.import_name) {
prop.set_value(json!(value));
} else {
let index = insert_index(&imports, &selected.import_name);
imports.insert(index, &selected.import_name, json!(value));
}
}
ConfigKind::PackageJson => {
let deps_prop = self.root_object.get("dependencies");
let dev_deps_prop = self.root_object.get("devDependencies");
let dependencies = if dev {
self
.root_object
.object_value("devDependencies")
.unwrap_or_else(|| {
let index = deps_prop
.as_ref()
.map(|p| p.property_index() + 1)
.unwrap_or_else(|| self.root_object.properties().len());
self
.root_object
.insert(index, "devDependencies", json!({}))
.object_value_or_set()
})
} else {
self
.root_object
.object_value("dependencies")
.unwrap_or_else(|| {
let index = dev_deps_prop
.as_ref()
.map(|p| p.property_index())
.unwrap_or_else(|| self.root_object.properties().len());
self
.root_object
.insert(index, "dependencies", json!({}))
.object_value_or_set()
})
};
let other_dependencies = if dev {
deps_prop.and_then(|p| p.value().and_then(|v| v.as_object()))
} else {
dev_deps_prop.and_then(|p| p.value().and_then(|v| v.as_object()))
};
let (alias, value) = package_json_dependency_entry(selected);
if let Some(other) = other_dependencies {
if let Some(prop) = other.get(&alias) {
remove_prop_and_maybe_parent_prop(prop);
}
}
if let Some(prop) = dependencies.get(&alias) {
prop.set_value(json!(value));
} else {
let index = insert_index(&dependencies, &alias);
dependencies.insert(index, &alias, json!(value));
}
}
}
self.modified = true; self.modified = true;
} }
fn remove(&mut self, package: &str) -> bool { fn remove(&mut self, package: &str) -> bool {
let removed = match &mut self.config { let removed = match self.kind {
DenoOrPackageJson::Deno(deno) => deno.remove(package), ConfigKind::DenoJson => {
DenoOrPackageJson::Npm(npm) => npm.remove(package), if let Some(prop) = self
.root_object
.object_value("imports")
.and_then(|i| i.get(package))
{
remove_prop_and_maybe_parent_prop(prop);
true
} else {
false
}
}
ConfigKind::PackageJson => {
let deps = [
self
.root_object
.object_value("dependencies")
.and_then(|deps| deps.get(package)),
self
.root_object
.object_value("devDependencies")
.and_then(|deps| deps.get(package)),
];
let removed = deps.iter().any(|d| d.is_some());
for dep in deps.into_iter().flatten() {
remove_prop_and_maybe_parent_prop(dep);
}
removed
}
}; };
if removed { if removed {
self.modified = true; self.modified = true;
@ -275,76 +210,28 @@ impl ConfigUpdater {
removed removed
} }
async fn commit(mut self) -> Result<(), AnyError> { fn commit(&self) -> Result<(), AnyError> {
if !self.modified { if !self.modified {
return Ok(()); return Ok(());
} }
let import_fields = self.config.take_import_fields(); let new_text = self.contents();
std::fs::write(&self.path, new_text).with_context(|| {
let fmt_config_options = self.config.fmt_options(); format!("failed writing to '{}'", self.path.display())
})?;
let new_text = update_config_file_content(
self.obj(),
self.contents(),
fmt_config_options,
import_fields.into_iter().map(|(k, v)| {
(
k,
if v.is_empty() {
None
} else {
Some(generate_imports(v.into_iter().collect()))
},
)
}),
self.config.file_name(),
);
tokio::fs::write(&self.path, new_text).await?;
Ok(()) Ok(())
} }
} }
impl DenoOrPackageJson { fn remove_prop_and_maybe_parent_prop(prop: CstObjectProp) {
fn specifier(&self) -> Cow<ModuleSpecifier> { let parent = prop.parent().unwrap().as_object().unwrap();
match self { prop.remove();
Self::Deno(d, ..) => Cow::Borrowed(&d.config.specifier), if parent.properties().is_empty() {
Self::Npm(n, ..) => Cow::Owned(n.config.specifier()), let parent_property = parent.parent().unwrap();
} let root_object = parent_property.parent().unwrap().as_object().unwrap();
} // remove the property
parent_property.remove();
fn fmt_options(&self) -> FmtOptionsConfig { root_object.ensure_multiline();
match self {
DenoOrPackageJson::Deno(deno, ..) => deno
.config
.to_fmt_config()
.ok()
.map(|f| f.options)
.unwrap_or_default(),
DenoOrPackageJson::Npm(config) => {
config.fmt_options.clone().unwrap_or_default()
}
}
}
fn take_import_fields(
&mut self,
) -> Vec<(&'static str, IndexMap<String, String>)> {
match self {
Self::Deno(d) => d.take_import_fields(),
Self::Npm(n) => n.take_import_fields(),
}
}
fn file_name(&self) -> &'static str {
match self {
DenoOrPackageJson::Deno(config) => match config.format {
DenoConfigFormat::Json => "deno.json",
DenoConfigFormat::Jsonc => "deno.jsonc",
},
DenoOrPackageJson::Npm(..) => "package.json",
}
} }
} }
@ -401,11 +288,27 @@ impl std::fmt::Display for AddCommandName {
fn load_configs( fn load_configs(
flags: &Arc<Flags>, flags: &Arc<Flags>,
has_jsr_specifiers: impl FnOnce() -> bool, has_jsr_specifiers: impl FnOnce() -> bool,
) -> Result<(CliFactory, Option<NpmConfig>, Option<DenoConfig>), AnyError> { ) -> Result<(CliFactory, Option<ConfigUpdater>, Option<ConfigUpdater>), AnyError>
{
let cli_factory = CliFactory::from_flags(flags.clone()); let cli_factory = CliFactory::from_flags(flags.clone());
let options = cli_factory.cli_options()?; let options = cli_factory.cli_options()?;
let npm_config = NpmConfig::from_options(options)?; let start_dir = &options.start_dir;
let (cli_factory, deno_config) = match DenoConfig::from_options(options)? { let npm_config = match start_dir.maybe_pkg_json() {
Some(pkg_json) => Some(ConfigUpdater::new(
ConfigKind::PackageJson,
pkg_json.path.clone(),
)?),
None => None,
};
let deno_config = match start_dir.maybe_deno_json() {
Some(deno_json) => Some(ConfigUpdater::new(
ConfigKind::DenoJson,
url_to_file_path(&deno_json.specifier)?,
)?),
None => None,
};
let (cli_factory, deno_config) = match deno_config {
Some(config) => (cli_factory, Some(config)), Some(config) => (cli_factory, Some(config)),
None if npm_config.is_some() && !has_jsr_specifiers() => { None if npm_config.is_some() && !has_jsr_specifiers() => {
(cli_factory, None) (cli_factory, None)
@ -413,11 +316,16 @@ fn load_configs(
_ => { _ => {
let factory = create_deno_json(flags, options)?; let factory = create_deno_json(flags, options)?;
let options = factory.cli_options()?.clone(); let options = factory.cli_options()?.clone();
let deno_json = options
.start_dir
.maybe_deno_json()
.expect("Just created deno.json");
( (
factory, factory,
Some( Some(ConfigUpdater::new(
DenoConfig::from_options(&options)?.expect("Just created deno.json"), ConfigKind::DenoJson,
), url_to_file_path(&deno_json.specifier)?,
)?),
) )
} }
}; };
@ -430,15 +338,13 @@ pub async fn add(
add_flags: AddFlags, add_flags: AddFlags,
cmd_name: AddCommandName, cmd_name: AddCommandName,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let (cli_factory, npm_config, deno_config) = load_configs(&flags, || { let (cli_factory, mut npm_config, mut deno_config) =
load_configs(&flags, || {
add_flags.packages.iter().any(|s| s.starts_with("jsr:")) add_flags.packages.iter().any(|s| s.starts_with("jsr:"))
})?; })?;
let mut npm_config = ConfigUpdater::maybe_new(npm_config).await?;
let mut deno_config = ConfigUpdater::maybe_new(deno_config).await?;
if let Some(deno) = &deno_config { if let Some(deno) = &deno_config {
let specifier = deno.config.specifier(); if deno.obj().get("importMap").is_some() {
if deno.obj().get_string("importMap").is_some() {
bail!( bail!(
concat!( concat!(
"`deno {}` is not supported when configuration file contains an \"importMap\" field. ", "`deno {}` is not supported when configuration file contains an \"importMap\" field. ",
@ -446,7 +352,7 @@ pub async fn add(
" at {}", " at {}",
), ),
cmd_name, cmd_name,
specifier deno.display_path(),
); );
} }
} }
@ -461,10 +367,14 @@ pub async fn add(
Default::default(), Default::default(),
None, None,
); );
let npmrc = cli_factory.cli_options().unwrap().npmrc();
deps_file_fetcher.set_download_log_level(log::Level::Trace); deps_file_fetcher.set_download_log_level(log::Level::Trace);
let deps_file_fetcher = Arc::new(deps_file_fetcher); let deps_file_fetcher = Arc::new(deps_file_fetcher);
let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone())); let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone()));
let npm_resolver = Arc::new(NpmFetchResolver::new(deps_file_fetcher)); let npm_resolver =
Arc::new(NpmFetchResolver::new(deps_file_fetcher, npmrc.clone()));
let mut selected_packages = Vec::with_capacity(add_flags.packages.len()); let mut selected_packages = Vec::with_capacity(add_flags.packages.len());
let mut package_reqs = Vec::with_capacity(add_flags.packages.len()); let mut package_reqs = Vec::with_capacity(add_flags.packages.len());
@ -558,18 +468,11 @@ pub async fn add(
} }
} }
let mut commit_futures = vec![];
if let Some(npm) = npm_config { if let Some(npm) = npm_config {
commit_futures.push(npm.commit()); npm.commit()?;
} }
if let Some(deno) = deno_config { if let Some(deno) = deno_config {
commit_futures.push(deno.commit()); deno.commit()?;
}
let commit_futures =
deno_core::futures::future::join_all(commit_futures).await;
for result in commit_futures {
result.context("Failed to update configuration file")?;
} }
npm_install_after_modification(flags, Some(jsr_resolver)).await?; npm_install_after_modification(flags, Some(jsr_resolver)).await?;
@ -754,33 +657,13 @@ impl AddRmPackageReq {
} }
} }
fn generate_imports(mut packages_to_version: Vec<(String, String)>) -> String {
packages_to_version.sort_by(|(k1, _), (k2, _)| k1.cmp(k2));
let mut contents = vec![];
let len = packages_to_version.len();
for (index, (package, version)) in packages_to_version.iter().enumerate() {
if index == 0 {
contents.push(String::new()); // force a newline at the start
}
// TODO(bartlomieju): fix it, once we start support specifying version on the cli
contents.push(format!("\"{}\": \"{}\"", package, version));
if index != len - 1 {
contents.push(",".to_string());
}
}
contents.join("\n")
}
pub async fn remove( pub async fn remove(
flags: Arc<Flags>, flags: Arc<Flags>,
remove_flags: RemoveFlags, remove_flags: RemoveFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let (_, npm_config, deno_config) = load_configs(&flags, || false)?; let (_, npm_config, deno_config) = load_configs(&flags, || false)?;
let mut configs = [ let mut configs = [npm_config, deno_config];
ConfigUpdater::maybe_new(npm_config).await?,
ConfigUpdater::maybe_new(deno_config).await?,
];
let mut removed_packages = vec![]; let mut removed_packages = vec![];
@ -817,7 +700,7 @@ pub async fn remove(
log::info!("Removed {}", crate::colors::green(package)); log::info!("Removed {}", crate::colors::green(package));
} }
for config in configs.into_iter().flatten() { for config in configs.into_iter().flatten() {
config.commit().await?; config.commit()?;
} }
npm_install_after_modification(flags, None).await?; npm_install_after_modification(flags, None).await?;
@ -844,90 +727,13 @@ async fn npm_install_after_modification(
// npm install // npm install
cache_deps::cache_top_level_deps(&cli_factory, jsr_resolver).await?; cache_deps::cache_top_level_deps(&cli_factory, jsr_resolver).await?;
if let Some(lockfile) = cli_factory.cli_options()?.maybe_lockfile() {
lockfile.write_if_changed()?;
}
Ok(()) Ok(())
} }
fn update_config_file_content<
I: IntoIterator<Item = (&'static str, Option<String>)>,
>(
obj: &jsonc_parser::ast::Object,
config_file_contents: &str,
fmt_options: FmtOptionsConfig,
entries: I,
file_name: &str,
) -> String {
let mut text_changes = vec![];
for (key, value) in entries {
match obj.properties.iter().enumerate().find_map(|(idx, k)| {
if k.name.as_str() == key {
Some((idx, k))
} else {
None
}
}) {
Some((
idx,
ObjectProp {
value: Value::Object(lit),
range,
..
},
)) => {
if let Some(value) = value {
text_changes.push(TextChange {
range: (lit.range.start + 1)..(lit.range.end - 1),
new_text: value,
})
} else {
text_changes.push(TextChange {
// remove field entirely, making sure to
// remove the comma if it's not the last field
range: range.start..(if idx == obj.properties.len() - 1 {
range.end
} else {
obj.properties[idx + 1].range.start
}),
new_text: "".to_string(),
})
}
}
// need to add field
None => {
if let Some(value) = value {
let insert_position = obj.range.end - 1;
text_changes.push(TextChange {
range: insert_position..insert_position,
// NOTE(bartlomieju): adding `\n` here to force the formatter to always
// produce a config file that is multiline, like so:
// ```
// {
// "imports": {
// "<package_name>": "<registry>:<package_name>@<semver>"
// }
// }
new_text: format!("\"{key}\": {{\n {value} }}"),
})
}
}
// we verified the shape of `imports`/`dependencies` above
Some(_) => unreachable!(),
}
}
let new_text =
deno_ast::apply_text_changes(config_file_contents, text_changes);
crate::tools::fmt::format_json(
&PathBuf::from(file_name),
&new_text,
&fmt_options,
)
.ok()
.map(|formatted_text| formatted_text.unwrap_or_else(|| new_text.clone()))
.unwrap_or(new_text)
}
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;

View file

@ -89,10 +89,6 @@ pub async fn cache_top_level_deps(
while let Some(info_future) = info_futures.next().await { while let Some(info_future) = info_futures.next().await {
if let Some((specifier, info)) = info_future { if let Some((specifier, info)) = info_future {
if info.export(".").is_some() {
roots.push(specifier.clone());
continue;
}
let exports = info.exports(); let exports = info.exports();
for (k, _) in exports { for (k, _) in exports {
if let Ok(spec) = specifier.join(k) { if let Ok(spec) = specifier.join(k) {

View file

@ -579,6 +579,10 @@ pub async fn upgrade(
let output_exe_path = let output_exe_path =
full_path_output_flag.as_ref().unwrap_or(&current_exe_path); full_path_output_flag.as_ref().unwrap_or(&current_exe_path);
#[cfg(windows)]
kill_running_deno_lsp_processes();
let output_result = if *output_exe_path == current_exe_path { let output_result = if *output_exe_path == current_exe_path {
replace_exe(&new_exe_path, output_exe_path) replace_exe(&new_exe_path, output_exe_path)
} else { } else {
@ -966,6 +970,34 @@ fn check_windows_access_denied_error(
}) })
} }
#[cfg(windows)]
fn kill_running_deno_lsp_processes() {
// limit this to `deno lsp` invocations to avoid killing important programs someone might be running
let is_debug = log::log_enabled!(log::Level::Debug);
let get_pipe = || {
if is_debug {
std::process::Stdio::inherit()
} else {
std::process::Stdio::null()
}
};
let _ = Command::new("powershell.exe")
.args([
"-Command",
r#"Get-WmiObject Win32_Process | Where-Object {
$_.Name -eq 'deno.exe' -and
$_.CommandLine -match '^(?:\"[^\"]+\"|\S+)\s+lsp\b'
} | ForEach-Object {
if ($_.Terminate()) {
Write-Host 'Terminated:' $_.ProcessId
}
}"#,
])
.stdout(get_pipe())
.stderr(get_pipe())
.output();
}
fn set_exe_permissions( fn set_exe_permissions(
current_exe_path: &Path, current_exe_path: &Path,
output_exe_path: &Path, output_exe_path: &Path,

View file

@ -516,7 +516,6 @@ delete Object.prototype.__proto__;
/** @typedef {{ /** @typedef {{
* ls: ts.LanguageService & { [k:string]: any }, * ls: ts.LanguageService & { [k:string]: any },
* compilerOptions: ts.CompilerOptions, * compilerOptions: ts.CompilerOptions,
* forceEnabledVerbatimModuleSyntax: boolean,
* }} LanguageServiceEntry */ * }} LanguageServiceEntry */
/** @type {{ unscoped: LanguageServiceEntry, byScope: Map<string, LanguageServiceEntry> }} */ /** @type {{ unscoped: LanguageServiceEntry, byScope: Map<string, LanguageServiceEntry> }} */
const languageServiceEntries = { const languageServiceEntries = {
@ -1026,7 +1025,7 @@ delete Object.prototype.__proto__;
: ts.sortAndDeduplicateDiagnostics( : ts.sortAndDeduplicateDiagnostics(
checkFiles.map((s) => program.getSemanticDiagnostics(s)).flat(), checkFiles.map((s) => program.getSemanticDiagnostics(s)).flat(),
)), )),
].filter(filterMapDiagnostic.bind(null, false)); ].filter(filterMapDiagnostic);
// emit the tsbuildinfo file // emit the tsbuildinfo file
// @ts-ignore: emitBuildInfo is not exposed (https://github.com/microsoft/TypeScript/issues/49871) // @ts-ignore: emitBuildInfo is not exposed (https://github.com/microsoft/TypeScript/issues/49871)
@ -1041,28 +1040,11 @@ delete Object.prototype.__proto__;
debug("<<< exec stop"); debug("<<< exec stop");
} }
/** /** @param {ts.Diagnostic} diagnostic */
* @param {boolean} isLsp function filterMapDiagnostic(diagnostic) {
* @param {ts.Diagnostic} diagnostic
*/
function filterMapDiagnostic(isLsp, diagnostic) {
if (IGNORED_DIAGNOSTICS.includes(diagnostic.code)) { if (IGNORED_DIAGNOSTICS.includes(diagnostic.code)) {
return false; return false;
} }
if (isLsp) {
// TS1484: `...` is a type and must be imported using a type-only import when 'verbatimModuleSyntax' is enabled.
// We force-enable `verbatimModuleSyntax` in the LSP so the `type`
// modifier is used when auto-importing types. But we don't want this
// diagnostic unless it was explicitly enabled by the user.
if (diagnostic.code == 1484) {
const entry = (lastRequestScope
? languageServiceEntries.byScope.get(lastRequestScope)
: null) ?? languageServiceEntries.unscoped;
if (entry.forceEnabledVerbatimModuleSyntax) {
return false;
}
}
}
// make the diagnostic for using an `export =` in an es module a warning // make the diagnostic for using an `export =` in an es module a warning
if (diagnostic.code === 1203) { if (diagnostic.code === 1203) {
diagnostic.category = ts.DiagnosticCategory.Warning; diagnostic.category = ts.DiagnosticCategory.Warning;
@ -1159,12 +1141,10 @@ delete Object.prototype.__proto__;
"strict": true, "strict": true,
"target": "esnext", "target": "esnext",
"useDefineForClassFields": true, "useDefineForClassFields": true,
"verbatimModuleSyntax": true,
"jsx": "react", "jsx": "react",
"jsxFactory": "React.createElement", "jsxFactory": "React.createElement",
"jsxFragmentFactory": "React.Fragment", "jsxFragmentFactory": "React.Fragment",
}), }),
forceEnabledVerbatimModuleSyntax: true,
}; };
setLogDebug(enableDebugLogging, "TSLS"); setLogDebug(enableDebugLogging, "TSLS");
debug("serverInit()"); debug("serverInit()");
@ -1230,17 +1210,8 @@ delete Object.prototype.__proto__;
const ls = oldEntry const ls = oldEntry
? oldEntry.ls ? oldEntry.ls
: ts.createLanguageService(host, documentRegistry); : ts.createLanguageService(host, documentRegistry);
let forceEnabledVerbatimModuleSyntax = false;
if (!config["verbatimModuleSyntax"]) {
config["verbatimModuleSyntax"] = true;
forceEnabledVerbatimModuleSyntax = true;
}
const compilerOptions = lspTsConfigToCompilerOptions(config); const compilerOptions = lspTsConfigToCompilerOptions(config);
newByScope.set(scope, { newByScope.set(scope, { ls, compilerOptions });
ls,
compilerOptions,
forceEnabledVerbatimModuleSyntax,
});
languageServiceEntries.byScope.delete(scope); languageServiceEntries.byScope.delete(scope);
} }
for (const oldEntry of languageServiceEntries.byScope.values()) { for (const oldEntry of languageServiceEntries.byScope.values()) {
@ -1305,7 +1276,7 @@ delete Object.prototype.__proto__;
...ls.getSemanticDiagnostics(specifier), ...ls.getSemanticDiagnostics(specifier),
...ls.getSuggestionDiagnostics(specifier), ...ls.getSuggestionDiagnostics(specifier),
...ls.getSyntacticDiagnostics(specifier), ...ls.getSyntacticDiagnostics(specifier),
].filter(filterMapDiagnostic.bind(null, true))); ].filter(filterMapDiagnostic));
} }
return respond(id, diagnosticMap); return respond(id, diagnosticMap);
} catch (e) { } catch (e) {
@ -1366,18 +1337,12 @@ delete Object.prototype.__proto__;
"console", "console",
"Console", "Console",
"ErrorConstructor", "ErrorConstructor",
"exports",
"gc", "gc",
"Global", "Global",
"ImportMeta", "ImportMeta",
"localStorage", "localStorage",
"module",
"NodeModule",
"NodeRequire",
"process",
"queueMicrotask", "queueMicrotask",
"RequestInit", "RequestInit",
"require",
"ResponseInit", "ResponseInit",
"sessionStorage", "sessionStorage",
"setImmediate", "setImmediate",

View file

@ -5,6 +5,7 @@ use crate::args::TypeCheckMode;
use crate::cache::FastInsecureHasher; use crate::cache::FastInsecureHasher;
use crate::node; use crate::node;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::npm::ResolvePkgFolderFromDenoReqError;
use crate::util::checksum; use crate::util::checksum;
use crate::util::path::mapped_specifier_for_tsc; use crate::util::path::mapped_specifier_for_tsc;
@ -35,6 +36,7 @@ use deno_runtime::deno_node::NodeResolver;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use node_resolver::errors::NodeJsErrorCode; use node_resolver::errors::NodeJsErrorCode;
use node_resolver::errors::NodeJsErrorCoded; use node_resolver::errors::NodeJsErrorCoded;
use node_resolver::errors::ResolvePkgSubpathFromDenoModuleError;
use node_resolver::NodeModuleKind; use node_resolver::NodeModuleKind;
use node_resolver::NodeResolution; use node_resolver::NodeResolution;
use node_resolver::NodeResolutionMode; use node_resolver::NodeResolutionMode;
@ -45,6 +47,7 @@ use std::fmt;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use thiserror::Error;
mod diagnostics; mod diagnostics;
@ -688,12 +691,30 @@ fn op_resolve_inner(
Some(ResolutionResolved { specifier, .. }) => { Some(ResolutionResolved { specifier, .. }) => {
resolve_graph_specifier_types(specifier, &referrer, state)? resolve_graph_specifier_types(specifier, &referrer, state)?
} }
_ => resolve_non_graph_specifier_types( _ => {
match resolve_non_graph_specifier_types(
&specifier, &specifier,
&referrer, &referrer,
referrer_kind, referrer_kind,
state, state,
)?, ) {
Ok(maybe_result) => maybe_result,
Err(
err @ ResolveNonGraphSpecifierTypesError::ResolvePkgFolderFromDenoReq(
ResolvePkgFolderFromDenoReqError::Managed(_),
),
) => {
// it's most likely requesting the jsxImportSource, which isn't loaded
// into the graph when not using jsx, so just ignore this error
if specifier.ends_with("/jsx-runtime") {
None
} else {
return Err(err.into());
}
}
Err(err) => return Err(err.into()),
}
}
}; };
let result = match maybe_result { let result = match maybe_result {
Some((specifier, media_type)) => { Some((specifier, media_type)) => {
@ -818,12 +839,23 @@ fn resolve_graph_specifier_types(
} }
} }
#[derive(Debug, Error)]
enum ResolveNonGraphSpecifierTypesError {
#[error(transparent)]
ResolvePkgFolderFromDenoReq(#[from] ResolvePkgFolderFromDenoReqError),
#[error(transparent)]
ResolvePkgSubpathFromDenoModule(#[from] ResolvePkgSubpathFromDenoModuleError),
}
fn resolve_non_graph_specifier_types( fn resolve_non_graph_specifier_types(
raw_specifier: &str, raw_specifier: &str,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind, referrer_kind: NodeModuleKind,
state: &State, state: &State,
) -> Result<Option<(ModuleSpecifier, MediaType)>, AnyError> { ) -> Result<
Option<(ModuleSpecifier, MediaType)>,
ResolveNonGraphSpecifierTypesError,
> {
let npm = match state.maybe_npm.as_ref() { let npm = match state.maybe_npm.as_ref() {
Some(npm) => npm, Some(npm) => npm,
None => return Ok(None), // we only support non-graph types for npm packages None => return Ok(None), // we only support non-graph types for npm packages

View file

@ -30,7 +30,7 @@ use tokio::sync::mpsc;
use tokio::sync::mpsc::UnboundedReceiver; use tokio::sync::mpsc::UnboundedReceiver;
use tokio::time::sleep; use tokio::time::sleep;
const CLEAR_SCREEN: &str = "\x1B[2J\x1B[1;1H"; const CLEAR_SCREEN: &str = "\x1B[H\x1B[2J\x1B[3J";
const DEBOUNCE_INTERVAL: Duration = Duration::from_millis(200); const DEBOUNCE_INTERVAL: Duration = Duration::from_millis(200);
struct DebouncedReceiver { struct DebouncedReceiver {

View file

@ -193,10 +193,16 @@ impl ProgressBarRenderer for TextOnlyProgressBarRenderer {
} }
}; };
// TODO(@marvinhagemeister): We're trying to reconstruct the original
// specifier from the resolved one, but we lack the information about
// private registries URLs and other things here.
let message = display_entry let message = display_entry
.message .message
.replace("https://registry.npmjs.org/", "npm:") .replace("https://registry.npmjs.org/", "npm:")
.replace("https://jsr.io/", "jsr:"); .replace("https://jsr.io/", "jsr:")
.replace("%2f", "/")
.replace("%2F", "/");
display_str.push_str( display_str.push_str(
&colors::gray(format!(" - {}{}\n", message, bytes_text)).to_string(), &colors::gray(format!(" - {}{}\n", message, bytes_text)).to_string(),
); );

View file

@ -103,6 +103,21 @@ pub fn arc_str_to_bytes(arc_str: Arc<str>) -> Arc<[u8]> {
unsafe { Arc::from_raw(raw as *const [u8]) } unsafe { Arc::from_raw(raw as *const [u8]) }
} }
/// Converts an `Arc<u8>` to an `Arc<str>` if able.
#[allow(dead_code)]
pub fn arc_u8_to_arc_str(
arc_u8: Arc<[u8]>,
) -> Result<Arc<str>, std::str::Utf8Error> {
// Check that the string is valid UTF-8.
std::str::from_utf8(&arc_u8)?;
// SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as
// Arc<str>. This is proven by the From<Arc<str>> impl for Arc<[u8]> from the
// standard library.
Ok(unsafe {
std::mem::transmute::<std::sync::Arc<[u8]>, std::sync::Arc<str>>(arc_u8)
})
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::sync::Arc; use std::sync::Arc;

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_broadcast_channel" name = "deno_broadcast_channel"
version = "0.167.0" version = "0.168.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_cache" name = "deno_cache"
version = "0.105.0" version = "0.106.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_canvas" name = "deno_canvas"
version = "0.42.0" version = "0.43.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2653,6 +2653,7 @@ const HSL_PATTERN = new SafeRegExp(
); );
function parseCssColor(colorString) { function parseCssColor(colorString) {
colorString = StringPrototypeToLowerCase(colorString);
if (colorKeywords.has(colorString)) { if (colorKeywords.has(colorString)) {
colorString = colorKeywords.get(colorString); colorString = colorKeywords.get(colorString);
} }

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_console" name = "deno_console"
version = "0.173.0" version = "0.174.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_cron" name = "deno_cron"
version = "0.53.0" version = "0.54.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_crypto" name = "deno_crypto"
version = "0.187.0" version = "0.188.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_fetch" name = "deno_fetch"
version = "0.197.0" version = "0.198.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_ffi" name = "deno_ffi"
version = "0.160.0" version = "0.161.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true
@ -21,6 +21,7 @@ dynasmrt = "1.2.3"
libffi = "=3.2.0" libffi = "=3.2.0"
libffi-sys = "=2.3.0" libffi-sys = "=2.3.0"
log.workspace = true log.workspace = true
num-bigint.workspace = true
serde.workspace = true serde.workspace = true
serde-value = "0.7" serde-value = "0.7"
serde_json = "1.0" serde_json = "1.0"

View file

@ -9,12 +9,14 @@ use crate::FfiPermissions;
use crate::ForeignFunction; use crate::ForeignFunction;
use deno_core::op2; use deno_core::op2;
use deno_core::serde_json::Value; use deno_core::serde_json::Value;
use deno_core::serde_v8::BigInt as V8BigInt;
use deno_core::serde_v8::ExternalPointer; use deno_core::serde_v8::ExternalPointer;
use deno_core::unsync::spawn_blocking; use deno_core::unsync::spawn_blocking;
use deno_core::v8; use deno_core::v8;
use deno_core::OpState; use deno_core::OpState;
use deno_core::ResourceId; use deno_core::ResourceId;
use libffi::middle::Arg; use libffi::middle::Arg;
use num_bigint::BigInt;
use serde::Serialize; use serde::Serialize;
use std::cell::RefCell; use std::cell::RefCell;
use std::ffi::c_void; use std::ffi::c_void;
@ -202,6 +204,7 @@ where
#[serde(untagged)] #[serde(untagged)]
pub enum FfiValue { pub enum FfiValue {
Value(Value), Value(Value),
BigInt(V8BigInt),
External(ExternalPointer), External(ExternalPointer),
} }
@ -251,18 +254,18 @@ fn ffi_call(
NativeType::I32 => { NativeType::I32 => {
FfiValue::Value(Value::from(cif.call::<i32>(fun_ptr, &call_args))) FfiValue::Value(Value::from(cif.call::<i32>(fun_ptr, &call_args)))
} }
NativeType::U64 => { NativeType::U64 => FfiValue::BigInt(V8BigInt::from(BigInt::from(
FfiValue::Value(Value::from(cif.call::<u64>(fun_ptr, &call_args))) cif.call::<u64>(fun_ptr, &call_args),
} ))),
NativeType::I64 => { NativeType::I64 => FfiValue::BigInt(V8BigInt::from(BigInt::from(
FfiValue::Value(Value::from(cif.call::<i64>(fun_ptr, &call_args))) cif.call::<i64>(fun_ptr, &call_args),
} ))),
NativeType::USize => { NativeType::USize => FfiValue::BigInt(V8BigInt::from(BigInt::from(
FfiValue::Value(Value::from(cif.call::<usize>(fun_ptr, &call_args))) cif.call::<usize>(fun_ptr, &call_args),
} ))),
NativeType::ISize => { NativeType::ISize => FfiValue::BigInt(V8BigInt::from(BigInt::from(
FfiValue::Value(Value::from(cif.call::<isize>(fun_ptr, &call_args))) cif.call::<isize>(fun_ptr, &call_args),
} ))),
NativeType::F32 => { NativeType::F32 => {
FfiValue::Value(Value::from(cif.call::<f32>(fun_ptr, &call_args))) FfiValue::Value(Value::from(cif.call::<f32>(fun_ptr, &call_args)))
} }

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_fs" name = "deno_fs"
version = "0.83.0" version = "0.84.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true
@ -31,7 +31,7 @@ serde.workspace = true
thiserror.workspace = true thiserror.workspace = true
[target.'cfg(unix)'.dependencies] [target.'cfg(unix)'.dependencies]
nix = { workspace = true, features = ["user"] } nix = { workspace = true, features = ["fs", "user"] }
[target.'cfg(windows)'.dependencies] [target.'cfg(windows)'.dependencies]
winapi = { workspace = true, features = ["winbase"] } winapi = { workspace = true, features = ["winbase"] }

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_http" name = "deno_http"
version = "0.171.0" version = "0.172.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_io" name = "deno_io"
version = "0.83.0" version = "0.84.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -183,9 +183,10 @@ fn from_raw(
) -> Result<(BiPipeRead, BiPipeWrite), std::io::Error> { ) -> Result<(BiPipeRead, BiPipeWrite), std::io::Error> {
use std::os::fd::FromRawFd; use std::os::fd::FromRawFd;
// Safety: The fd is part of a pair of connected sockets // Safety: The fd is part of a pair of connected sockets
let unix_stream = tokio::net::UnixStream::from_std(unsafe { let unix_stream =
std::os::unix::net::UnixStream::from_raw_fd(stream) unsafe { std::os::unix::net::UnixStream::from_raw_fd(stream) };
})?; unix_stream.set_nonblocking(true)?;
let unix_stream = tokio::net::UnixStream::from_std(unix_stream)?;
let (read, write) = unix_stream.into_split(); let (read, write) = unix_stream.into_split();
Ok((BiPipeRead { inner: read }, BiPipeWrite { inner: write })) Ok((BiPipeRead { inner: read }, BiPipeWrite { inner: write }))
} }
@ -280,7 +281,7 @@ pub fn bi_pipe_pair_raw(
// https://github.com/nix-rust/nix/issues/861 // https://github.com/nix-rust/nix/issues/861
let mut fds = [-1, -1]; let mut fds = [-1, -1];
#[cfg(not(target_os = "macos"))] #[cfg(not(target_os = "macos"))]
let flags = libc::SOCK_CLOEXEC | libc::SOCK_NONBLOCK; let flags = libc::SOCK_CLOEXEC;
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
let flags = 0; let flags = 0;
@ -301,13 +302,13 @@ pub fn bi_pipe_pair_raw(
if cfg!(target_os = "macos") { if cfg!(target_os = "macos") {
let fcntl = |fd: i32, flag: libc::c_int| -> Result<(), std::io::Error> { let fcntl = |fd: i32, flag: libc::c_int| -> Result<(), std::io::Error> {
// SAFETY: libc call, fd is valid // SAFETY: libc call, fd is valid
let flags = unsafe { libc::fcntl(fd, libc::F_GETFL) }; let flags = unsafe { libc::fcntl(fd, libc::F_GETFD) };
if flags == -1 { if flags == -1 {
return Err(fail(fds)); return Err(fail(fds));
} }
// SAFETY: libc call, fd is valid // SAFETY: libc call, fd is valid
let ret = unsafe { libc::fcntl(fd, libc::F_SETFL, flags | flag) }; let ret = unsafe { libc::fcntl(fd, libc::F_SETFD, flags | flag) };
if ret == -1 { if ret == -1 {
return Err(fail(fds)); return Err(fail(fds));
} }
@ -323,13 +324,9 @@ pub fn bi_pipe_pair_raw(
std::io::Error::last_os_error() std::io::Error::last_os_error()
} }
// SOCK_NONBLOCK is not supported on macOS.
(fcntl)(fds[0], libc::O_NONBLOCK)?;
(fcntl)(fds[1], libc::O_NONBLOCK)?;
// SOCK_CLOEXEC is not supported on macOS. // SOCK_CLOEXEC is not supported on macOS.
(fcntl)(fds[0], libc::FD_CLOEXEC)?; fcntl(fds[0], libc::FD_CLOEXEC)?;
(fcntl)(fds[1], libc::FD_CLOEXEC)?; fcntl(fds[1], libc::FD_CLOEXEC)?;
} }
let fd1 = fds[0]; let fd1 = fds[0];

View file

@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow; use std::borrow::Cow;
use std::fmt::Formatter;
use std::io; use std::io;
use std::rc::Rc; use std::rc::Rc;
use std::time::SystemTime; use std::time::SystemTime;
@ -21,6 +22,21 @@ pub enum FsError {
NotCapable(&'static str), NotCapable(&'static str),
} }
impl std::fmt::Display for FsError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
FsError::Io(err) => std::fmt::Display::fmt(err, f),
FsError::FileBusy => f.write_str("file busy"),
FsError::NotSupported => f.write_str("not supported"),
FsError::NotCapable(err) => {
f.write_str(&format!("requires {err} access"))
}
}
}
}
impl std::error::Error for FsError {}
impl FsError { impl FsError {
pub fn kind(&self) -> io::ErrorKind { pub fn kind(&self) -> io::ErrorKind {
match self { match self {
@ -55,20 +71,6 @@ impl From<io::ErrorKind> for FsError {
} }
} }
impl From<FsError> for deno_core::error::AnyError {
fn from(err: FsError) -> Self {
match err {
FsError::Io(err) => err.into(),
FsError::FileBusy => deno_core::error::resource_unavailable(),
FsError::NotSupported => deno_core::error::not_supported(),
FsError::NotCapable(err) => deno_core::error::custom_error(
"NotCapable",
format!("permission denied: {err}"),
),
}
}
}
impl From<JoinError> for FsError { impl From<JoinError> for FsError {
fn from(err: JoinError) -> Self { fn from(err: JoinError) -> Self {
if err.is_cancelled() { if err.is_cancelled() {

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_kv" name = "deno_kv"
version = "0.81.0" version = "0.82.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_napi" name = "deno_napi"
version = "0.104.0" version = "0.105.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true
@ -16,5 +16,14 @@ path = "lib.rs"
[dependencies] [dependencies]
deno_core.workspace = true deno_core.workspace = true
deno_permissions.workspace = true deno_permissions.workspace = true
libc.workspace = true
libloading = { version = "0.7" } libloading = { version = "0.7" }
log.workspace = true
napi_sym.workspace = true
thiserror.workspace = true thiserror.workspace = true
[target.'cfg(windows)'.dependencies]
windows-sys.workspace = true
[dev-dependencies]
libuv-sys-lite = "=1.48.2"

View file

@ -0,0 +1,114 @@
# napi
This directory contains source for Deno's Node-API implementation. It depends on
`napi_sym` and `deno_napi`.
Files are generally organized the same as in Node.js's implementation to ease in
ensuring compatibility.
## Adding a new function
Add the symbol name to
[`cli/napi_sym/symbol_exports.json`](../napi_sym/symbol_exports.json).
```diff
{
"symbols": [
...
"napi_get_undefined",
- "napi_get_null"
+ "napi_get_null",
+ "napi_get_boolean"
]
}
```
Determine where to place the implementation. `napi_get_boolean` is related to JS
values so we will place it in `js_native_api.rs`. If something is not clear,
just create a new file module.
See [`napi_sym`](../napi_sym/) for writing the implementation:
```rust
#[napi_sym::napi_sym]
fn napi_get_boolean(
env: *mut Env,
value: bool,
result: *mut napi_value,
) -> Result {
// ...
Ok(())
}
```
Update the generated symbol lists using the script:
```
deno run --allow-write tools/napi/generate_symbols_lists.js
```
Add a test in [`/tests/napi`](../../tests/napi/). You can also refer to Node.js
test suite for Node-API.
```js
// tests/napi/boolean_test.js
import { assertEquals, loadTestLibrary } from "./common.js";
const lib = loadTestLibrary();
Deno.test("napi get boolean", function () {
assertEquals(lib.test_get_boolean(true), true);
assertEquals(lib.test_get_boolean(false), false);
});
```
```rust
// tests/napi/src/boolean.rs
use napi_sys::Status::napi_ok;
use napi_sys::ValueType::napi_boolean;
use napi_sys::*;
extern "C" fn test_boolean(
env: napi_env,
info: napi_callback_info,
) -> napi_value {
let (args, argc, _) = crate::get_callback_info!(env, info, 1);
assert_eq!(argc, 1);
let mut ty = -1;
assert!(unsafe { napi_typeof(env, args[0], &mut ty) } == napi_ok);
assert_eq!(ty, napi_boolean);
// Use napi_get_boolean here...
value
}
pub fn init(env: napi_env, exports: napi_value) {
let properties = &[crate::new_property!(env, "test_boolean\0", test_boolean)];
unsafe {
napi_define_properties(env, exports, properties.len(), properties.as_ptr())
};
}
```
```diff
// tests/napi/src/lib.rs
+ mod boolean;
...
#[no_mangle]
unsafe extern "C" fn napi_register_module_v1(
env: napi_env,
exports: napi_value,
) -> napi_value {
...
+ boolean::init(env, exports);
exports
}
```
Run the test using `cargo test -p tests/napi`.

22
ext/napi/build.rs Normal file
View file

@ -0,0 +1,22 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
fn main() {
let symbols_file_name = match std::env::consts::OS {
"android" | "freebsd" | "openbsd" => {
"generated_symbol_exports_list_linux.def".to_string()
}
os => format!("generated_symbol_exports_list_{}.def", os),
};
let symbols_path = std::path::Path::new(".")
.join(symbols_file_name)
.canonicalize()
.expect(
"Missing symbols list! Generate using tools/napi/generate_symbols_lists.js",
);
println!("cargo:rustc-rerun-if-changed={}", symbols_path.display());
let path = std::path::PathBuf::from(std::env::var("OUT_DIR").unwrap())
.join("napi_symbol_path.txt");
std::fs::write(path, symbols_path.as_os_str().as_encoded_bytes()).unwrap();
}

View file

@ -5,7 +5,7 @@
const NAPI_VERSION: u32 = 9; const NAPI_VERSION: u32 = 9;
use deno_runtime::deno_napi::*; use crate::*;
use libc::INT_MAX; use libc::INT_MAX;
use super::util::check_new_from_utf8; use super::util::check_new_from_utf8;
@ -17,9 +17,9 @@ use super::util::napi_set_last_error;
use super::util::v8_name_from_property_descriptor; use super::util::v8_name_from_property_descriptor;
use crate::check_arg; use crate::check_arg;
use crate::check_env; use crate::check_env;
use deno_runtime::deno_napi::function::create_function; use crate::function::create_function;
use deno_runtime::deno_napi::function::create_function_template; use crate::function::create_function_template;
use deno_runtime::deno_napi::function::CallbackInfo; use crate::function::CallbackInfo;
use napi_sym::napi_sym; use napi_sym::napi_sym;
use std::ptr::NonNull; use std::ptr::NonNull;
@ -1083,7 +1083,7 @@ fn napi_create_string_latin1(
} }
#[napi_sym] #[napi_sym]
fn napi_create_string_utf8( pub(crate) fn napi_create_string_utf8(
env_ptr: *mut Env, env_ptr: *mut Env,
string: *const c_char, string: *const c_char,
length: usize, length: usize,
@ -1647,7 +1647,7 @@ fn napi_get_cb_info(
check_arg!(env, argc); check_arg!(env, argc);
let argc = unsafe { *argc as usize }; let argc = unsafe { *argc as usize };
for i in 0..argc { for i in 0..argc {
let mut arg = args.get(i as _); let arg = args.get(i as _);
unsafe { unsafe {
*argv.add(i) = arg.into(); *argv.add(i) = arg.into();
} }

View file

@ -5,6 +5,22 @@
#![allow(clippy::undocumented_unsafe_blocks)] #![allow(clippy::undocumented_unsafe_blocks)]
#![deny(clippy::missing_safety_doc)] #![deny(clippy::missing_safety_doc)]
//! Symbols to be exported are now defined in this JSON file.
//! The `#[napi_sym]` macro checks for missing entries and panics.
//!
//! `./tools/napi/generate_symbols_list.js` is used to generate the LINK `cli/exports.def` on Windows,
//! which is also checked into git.
//!
//! To add a new napi function:
//! 1. Place `#[napi_sym]` on top of your implementation.
//! 2. Add the function's identifier to this JSON list.
//! 3. Finally, run `tools/napi/generate_symbols_list.js` to update `ext/napi/generated_symbol_exports_list_*.def`.
pub mod js_native_api;
pub mod node_api;
pub mod util;
pub mod uv;
use core::ptr::NonNull; use core::ptr::NonNull;
use deno_core::op2; use deno_core::op2;
use deno_core::parking_lot::RwLock; use deno_core::parking_lot::RwLock;
@ -631,3 +647,34 @@ where
Ok(exports) Ok(exports)
} }
#[allow(clippy::print_stdout)]
pub fn print_linker_flags(name: &str) {
let symbols_path =
include_str!(concat!(env!("OUT_DIR"), "/napi_symbol_path.txt"));
#[cfg(target_os = "windows")]
println!("cargo:rustc-link-arg-bin={name}=/DEF:{}", symbols_path);
#[cfg(target_os = "macos")]
println!(
"cargo:rustc-link-arg-bin={name}=-Wl,-exported_symbols_list,{}",
symbols_path,
);
#[cfg(any(
target_os = "linux",
target_os = "freebsd",
target_os = "openbsd"
))]
println!(
"cargo:rustc-link-arg-bin={name}=-Wl,--export-dynamic-symbol-list={}",
symbols_path,
);
#[cfg(target_os = "android")]
println!(
"cargo:rustc-link-arg-bin={name}=-Wl,--export-dynamic-symbol-list={}",
symbols_path,
);
}

View file

@ -9,10 +9,10 @@ use super::util::napi_set_last_error;
use super::util::SendPtr; use super::util::SendPtr;
use crate::check_arg; use crate::check_arg;
use crate::check_env; use crate::check_env;
use crate::*;
use deno_core::parking_lot::Condvar; use deno_core::parking_lot::Condvar;
use deno_core::parking_lot::Mutex; use deno_core::parking_lot::Mutex;
use deno_core::V8CrossThreadTaskSpawner; use deno_core::V8CrossThreadTaskSpawner;
use deno_runtime::deno_napi::*;
use napi_sym::napi_sym; use napi_sym::napi_sym;
use std::sync::atomic::AtomicBool; use std::sync::atomic::AtomicBool;
use std::sync::atomic::AtomicU8; use std::sync::atomic::AtomicU8;
@ -488,7 +488,7 @@ impl AsyncWork {
} }
#[napi_sym] #[napi_sym]
fn napi_create_async_work( pub(crate) fn napi_create_async_work(
env: *mut Env, env: *mut Env,
async_resource: napi_value, async_resource: napi_value,
async_resource_name: napi_value, async_resource_name: napi_value,
@ -537,7 +537,10 @@ fn napi_create_async_work(
} }
#[napi_sym] #[napi_sym]
fn napi_delete_async_work(env: *mut Env, work: napi_async_work) -> napi_status { pub(crate) fn napi_delete_async_work(
env: *mut Env,
work: napi_async_work,
) -> napi_status {
let env = check_env!(env); let env = check_env!(env);
check_arg!(env, work); check_arg!(env, work);
@ -560,7 +563,10 @@ fn napi_get_uv_event_loop(
} }
#[napi_sym] #[napi_sym]
fn napi_queue_async_work(env: *mut Env, work: napi_async_work) -> napi_status { pub(crate) fn napi_queue_async_work(
env: *mut Env,
work: napi_async_work,
) -> napi_status {
let env = check_env!(env); let env = check_env!(env);
check_arg!(env, work); check_arg!(env, work);
@ -897,7 +903,7 @@ fn napi_create_threadsafe_function(
}; };
let resource_name = resource_name.to_rust_string_lossy(&mut env.scope()); let resource_name = resource_name.to_rust_string_lossy(&mut env.scope());
let mut tsfn = Box::new(TsFn { let tsfn = Box::new(TsFn {
env, env,
func, func,
max_queue_size, max_queue_size,

View file

@ -2,7 +2,7 @@
[package] [package]
name = "napi_sym" name = "napi_sym"
version = "0.103.0" version = "0.104.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,8 @@
A proc_macro for Deno's Node-API implementation. It does the following things: A proc_macro for Deno's Node-API implementation. It does the following things:
- Marks the symbol as `#[no_mangle]` and rewrites it as `pub extern "C" $name`. - Marks the symbol as `#[no_mangle]` and rewrites it as
`unsafe extern "C" $name`.
- Asserts that the function symbol is present in - Asserts that the function symbol is present in
[`symbol_exports.json`](./symbol_exports.json). [`symbol_exports.json`](./symbol_exports.json).
- Maps `deno_napi::Result` to raw `napi_result`. - Maps `deno_napi::Result` to raw `napi_result`.

View file

@ -1,9 +1,9 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_runtime::deno_napi::*; use crate::*;
use libc::INT_MAX; use libc::INT_MAX;
#[repr(transparent)] #[repr(transparent)]
pub struct SendPtr<T>(pub *const T); pub(crate) struct SendPtr<T>(pub *const T);
impl<T> SendPtr<T> { impl<T> SendPtr<T> {
// silly function to get around `clippy::redundant_locals` // silly function to get around `clippy::redundant_locals`
@ -37,7 +37,7 @@ impl Drop for BufferFinalizer {
} }
} }
pub extern "C" fn backing_store_deleter_callback( pub(crate) extern "C" fn backing_store_deleter_callback(
data: *mut c_void, data: *mut c_void,
_byte_length: usize, _byte_length: usize,
deleter_data: *mut c_void, deleter_data: *mut c_void,
@ -50,7 +50,7 @@ pub extern "C" fn backing_store_deleter_callback(
drop(finalizer); drop(finalizer);
} }
pub fn make_external_backing_store( pub(crate) fn make_external_backing_store(
env: *mut Env, env: *mut Env,
data: *mut c_void, data: *mut c_void,
byte_length: usize, byte_length: usize,
@ -90,9 +90,7 @@ macro_rules! check_env {
macro_rules! return_error_status_if_false { macro_rules! return_error_status_if_false {
($env: expr, $condition: expr, $status: ident) => { ($env: expr, $condition: expr, $status: ident) => {
if !$condition { if !$condition {
return Err( return Err($crate::util::napi_set_last_error($env, $status).into());
$crate::napi::util::napi_set_last_error($env, $status).into(),
);
} }
}; };
} }
@ -101,7 +99,7 @@ macro_rules! return_error_status_if_false {
macro_rules! return_status_if_false { macro_rules! return_status_if_false {
($env: expr, $condition: expr, $status: ident) => { ($env: expr, $condition: expr, $status: ident) => {
if !$condition { if !$condition {
return $crate::napi::util::napi_set_last_error($env, $status); return $crate::util::napi_set_last_error($env, $status);
} }
}; };
} }
@ -222,7 +220,7 @@ macro_rules! check_arg {
($env: expr, $ptr: expr) => { ($env: expr, $ptr: expr) => {
$crate::return_status_if_false!( $crate::return_status_if_false!(
$env, $env,
!$crate::napi::util::Nullable::is_null(&$ptr), !$crate::util::Nullable::is_null(&$ptr),
napi_invalid_arg napi_invalid_arg
); );
}; };
@ -230,17 +228,17 @@ macro_rules! check_arg {
#[macro_export] #[macro_export]
macro_rules! napi_wrap { macro_rules! napi_wrap {
( $( # $attr:tt )* fn $name:ident $( < $( $x:lifetime ),* > )? ( $env:ident : & $( $lt:lifetime )? mut Env $( , $ident:ident : $ty:ty )* $(,)? ) -> napi_status $body:block ) => { ( $( # [ $attr:meta ] )* $vis:vis fn $name:ident $( < $( $x:lifetime ),* > )? ( $env:ident : & $( $lt:lifetime )? mut Env $( , $ident:ident : $ty:ty )* $(,)? ) -> napi_status $body:block ) => {
$( # $attr )* $( # [ $attr ] )*
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn $name $( < $( $x ),* > )? ( env_ptr : *mut Env , $( $ident : $ty ),* ) -> napi_status { $vis unsafe extern "C" fn $name $( < $( $x ),* > )? ( env_ptr : *mut Env , $( $ident : $ty ),* ) -> napi_status {
let env: & $( $lt )? mut Env = $crate::check_env!(env_ptr); let env: & $( $lt )? mut Env = $crate::check_env!(env_ptr);
if env.last_exception.is_some() { if env.last_exception.is_some() {
return napi_pending_exception; return napi_pending_exception;
} }
$crate::napi::util::napi_clear_last_error(env); $crate::util::napi_clear_last_error(env);
let scope_env = unsafe { &mut *env_ptr }; let scope_env = unsafe { &mut *env_ptr };
let scope = &mut scope_env.scope(); let scope = &mut scope_env.scope();
@ -259,21 +257,21 @@ macro_rules! napi_wrap {
let env = unsafe { &mut *env_ptr }; let env = unsafe { &mut *env_ptr };
let global = v8::Global::new(env.isolate(), exception); let global = v8::Global::new(env.isolate(), exception);
env.last_exception = Some(global); env.last_exception = Some(global);
return $crate::napi::util::napi_set_last_error(env_ptr, napi_pending_exception); return $crate::util::napi_set_last_error(env_ptr, napi_pending_exception);
} }
if result != napi_ok { if result != napi_ok {
return $crate::napi::util::napi_set_last_error(env_ptr, result); return $crate::util::napi_set_last_error(env_ptr, result);
} }
return result; return result;
} }
}; };
( $( # $attr:tt )* fn $name:ident $( < $( $x:lifetime ),* > )? ( $( $ident:ident : $ty:ty ),* $(,)? ) -> napi_status $body:block ) => { ( $( # [ $attr:meta ] )* $vis:vis fn $name:ident $( < $( $x:lifetime ),* > )? ( $( $ident:ident : $ty:ty ),* $(,)? ) -> napi_status $body:block ) => {
$( # $attr )* $( # [ $attr ] )*
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn $name $( < $( $x ),* > )? ( $( $ident : $ty ),* ) -> napi_status { $vis unsafe extern "C" fn $name $( < $( $x ),* > )? ( $( $ident : $ty ),* ) -> napi_status {
#[inline(always)] #[inline(always)]
fn inner $( < $( $x ),* > )? ( $( $ident : $ty ),* ) -> napi_status $body fn inner $( < $( $x ),* > )? ( $( $ident : $ty ),* ) -> napi_status $body

View file

@ -1,7 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::*;
use deno_core::parking_lot::Mutex; use deno_core::parking_lot::Mutex;
use deno_runtime::deno_napi::*;
use std::mem::MaybeUninit; use std::mem::MaybeUninit;
use std::ptr::addr_of_mut; use std::ptr::addr_of_mut;
@ -16,10 +16,10 @@ fn assert_ok(res: c_int) -> c_int {
res res
} }
use crate::napi::js_native_api::napi_create_string_utf8; use js_native_api::napi_create_string_utf8;
use crate::napi::node_api::napi_create_async_work; use node_api::napi_create_async_work;
use crate::napi::node_api::napi_delete_async_work; use node_api::napi_delete_async_work;
use crate::napi::node_api::napi_queue_async_work; use node_api::napi_queue_async_work;
use std::ffi::c_int; use std::ffi::c_int;
const UV_MUTEX_SIZE: usize = { const UV_MUTEX_SIZE: usize = {

Some files were not shown because too many files have changed in this diff Show more