1
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2024-12-11 01:58:05 -05:00

Merge remote-tracking branch 'upstream/main' into support_create_connection

This commit is contained in:
Satya Rohith 2024-10-17 01:32:10 +05:30
commit be6a1baa06
No known key found for this signature in database
GPG key ID: B2705CF40523EB05
212 changed files with 3398 additions and 1591 deletions

View file

@ -2,6 +2,11 @@ name: cargo_publish
on: workflow_dispatch
# Ensures only one publish is running at a time
concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true
jobs:
build:
name: cargo publish

View file

@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
// Bump this number when you want to purge the cache.
// Note: the tools/release/01_bump_crate_versions.ts script will update this version
// automatically via regex, so ensure that this line maintains this format.
const cacheVersion = 18;
const cacheVersion = 19;
const ubuntuX86Runner = "ubuntu-22.04";
const ubuntuX86XlRunner = "ubuntu-22.04-xl";
@ -751,11 +751,11 @@ const ci = {
].join("\n"),
run: [
"cd target/release",
"shasum -a 256 deno > deno-${{ matrix.arch }}-unknown-linux-gnu.sha256sum",
"zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno",
"shasum -a 256 deno-${{ matrix.arch }}-unknown-linux-gnu.zip > deno-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum",
"strip denort",
"shasum -a 256 denort > denort-${{ matrix.arch }}-unknown-linux-gnu.sha256sum",
"zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort",
"shasum -a 256 denort-${{ matrix.arch }}-unknown-linux-gnu.zip > denort-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum",
"./deno types > lib.deno.d.ts",
].join("\n"),
},
@ -779,11 +779,11 @@ const ci = {
"--p12-file=<(echo $APPLE_CODESIGN_KEY | base64 -d) " +
"--entitlements-xml-file=cli/entitlements.plist",
"cd target/release",
"shasum -a 256 deno > deno-${{ matrix.arch }}-apple-darwin.sha256sum",
"zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno",
"shasum -a 256 deno-${{ matrix.arch }}-apple-darwin.zip > deno-${{ matrix.arch }}-apple-darwin.zip.sha256sum",
"strip denort",
"shasum -a 256 denort > denort-${{ matrix.arch }}-apple-darwin.sha256sum",
"zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort",
"shasum -a 256 denort-${{ matrix.arch }}-apple-darwin.zip > denort-${{ matrix.arch }}-apple-darwin.zip.sha256sum",
]
.join("\n"),
},
@ -797,10 +797,10 @@ const ci = {
].join("\n"),
shell: "pwsh",
run: [
"Get-FileHash target/release/deno.exe -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.sha256sum",
"Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip",
"Get-FileHash target/release/denort.exe -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.sha256sum",
"Get-FileHash target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum",
"Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip",
"Get-FileHash target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum",
].join("\n"),
},
{
@ -1045,25 +1045,25 @@ const ci = {
with: {
files: [
"target/release/deno-x86_64-pc-windows-msvc.zip",
"target/release/deno-x86_64-pc-windows-msvc.sha256sum",
"target/release/deno-x86_64-pc-windows-msvc.zip.sha256sum",
"target/release/denort-x86_64-pc-windows-msvc.zip",
"target/release/denort-x86_64-pc-windows-msvc.sha256sum",
"target/release/denort-x86_64-pc-windows-msvc.zip.sha256sum",
"target/release/deno-x86_64-unknown-linux-gnu.zip",
"target/release/deno-x86_64-unknown-linux-gnu.sha256sum",
"target/release/deno-x86_64-unknown-linux-gnu.zip.sha256sum",
"target/release/denort-x86_64-unknown-linux-gnu.zip",
"target/release/denort-x86_64-unknown-linux-gnu.sha256sum",
"target/release/denort-x86_64-unknown-linux-gnu.zip.sha256sum",
"target/release/deno-x86_64-apple-darwin.zip",
"target/release/deno-x86_64-apple-darwin.sha256sum",
"target/release/deno-x86_64-apple-darwin.zip.sha256sum",
"target/release/denort-x86_64-apple-darwin.zip",
"target/release/denort-x86_64-apple-darwin.sha256sum",
"target/release/denort-x86_64-apple-darwin.zip.sha256sum",
"target/release/deno-aarch64-unknown-linux-gnu.zip",
"target/release/deno-aarch64-unknown-linux-gnu.sha256sum",
"target/release/deno-aarch64-unknown-linux-gnu.zip.sha256sum",
"target/release/denort-aarch64-unknown-linux-gnu.zip",
"target/release/denort-aarch64-unknown-linux-gnu.sha256sum",
"target/release/denort-aarch64-unknown-linux-gnu.zip.sha256sum",
"target/release/deno-aarch64-apple-darwin.zip",
"target/release/deno-aarch64-apple-darwin.sha256sum",
"target/release/deno-aarch64-apple-darwin.zip.sha256sum",
"target/release/denort-aarch64-apple-darwin.zip",
"target/release/denort-aarch64-apple-darwin.sha256sum",
"target/release/denort-aarch64-apple-darwin.zip.sha256sum",
"target/release/deno_src.tar.gz",
"target/release/lib.deno.d.ts",
].join("\n"),

View file

@ -361,8 +361,8 @@ jobs:
path: |-
~/.cargo/registry/index
~/.cargo/registry/cache
key: '18-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '18-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
key: '19-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '19-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
if: '!(matrix.skip)'
- name: Restore cache build output (PR)
uses: actions/cache/restore@v4
@ -375,7 +375,7 @@ jobs:
!./target/*/*.zip
!./target/*/*.tar.gz
key: never_saved
restore-keys: '18-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
restore-keys: '19-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
- name: Apply and update mtime cache
if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
uses: ./.github/mtime_cache
@ -442,11 +442,11 @@ jobs:
github.repository == 'denoland/deno')
run: |-
cd target/release
shasum -a 256 deno > deno-${{ matrix.arch }}-unknown-linux-gnu.sha256sum
zip -r deno-${{ matrix.arch }}-unknown-linux-gnu.zip deno
shasum -a 256 deno-${{ matrix.arch }}-unknown-linux-gnu.zip > deno-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum
strip denort
shasum -a 256 denort > denort-${{ matrix.arch }}-unknown-linux-gnu.sha256sum
zip -r denort-${{ matrix.arch }}-unknown-linux-gnu.zip denort
shasum -a 256 denort-${{ matrix.arch }}-unknown-linux-gnu.zip > denort-${{ matrix.arch }}-unknown-linux-gnu.zip.sha256sum
./deno types > lib.deno.d.ts
- name: Pre-release (mac)
if: |-
@ -461,11 +461,11 @@ jobs:
echo "Key is $(echo $APPLE_CODESIGN_KEY | base64 -d | wc -c) bytes"
rcodesign sign target/release/deno --code-signature-flags=runtime --p12-password="$APPLE_CODESIGN_PASSWORD" --p12-file=<(echo $APPLE_CODESIGN_KEY | base64 -d) --entitlements-xml-file=cli/entitlements.plist
cd target/release
shasum -a 256 deno > deno-${{ matrix.arch }}-apple-darwin.sha256sum
zip -r deno-${{ matrix.arch }}-apple-darwin.zip deno
shasum -a 256 deno-${{ matrix.arch }}-apple-darwin.zip > deno-${{ matrix.arch }}-apple-darwin.zip.sha256sum
strip denort
shasum -a 256 denort > denort-${{ matrix.arch }}-apple-darwin.sha256sum
zip -r denort-${{ matrix.arch }}-apple-darwin.zip denort
shasum -a 256 denort-${{ matrix.arch }}-apple-darwin.zip > denort-${{ matrix.arch }}-apple-darwin.zip.sha256sum
- name: Pre-release (windows)
if: |-
!(matrix.skip) && (matrix.os == 'windows' &&
@ -474,10 +474,10 @@ jobs:
github.repository == 'denoland/deno')
shell: pwsh
run: |-
Get-FileHash target/release/deno.exe -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.sha256sum
Compress-Archive -CompressionLevel Optimal -Force -Path target/release/deno.exe -DestinationPath target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip
Get-FileHash target/release/denort.exe -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.sha256sum
Get-FileHash target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/deno-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum
Compress-Archive -CompressionLevel Optimal -Force -Path target/release/denort.exe -DestinationPath target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip
Get-FileHash target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip -Algorithm SHA256 | Format-List > target/release/denort-${{ matrix.arch }}-pc-windows-msvc.zip.sha256sum
- name: Upload canary to dl.deno.land
if: |-
!(matrix.skip) && (matrix.job == 'test' &&
@ -652,25 +652,25 @@ jobs:
with:
files: |-
target/release/deno-x86_64-pc-windows-msvc.zip
target/release/deno-x86_64-pc-windows-msvc.sha256sum
target/release/deno-x86_64-pc-windows-msvc.zip.sha256sum
target/release/denort-x86_64-pc-windows-msvc.zip
target/release/denort-x86_64-pc-windows-msvc.sha256sum
target/release/denort-x86_64-pc-windows-msvc.zip.sha256sum
target/release/deno-x86_64-unknown-linux-gnu.zip
target/release/deno-x86_64-unknown-linux-gnu.sha256sum
target/release/deno-x86_64-unknown-linux-gnu.zip.sha256sum
target/release/denort-x86_64-unknown-linux-gnu.zip
target/release/denort-x86_64-unknown-linux-gnu.sha256sum
target/release/denort-x86_64-unknown-linux-gnu.zip.sha256sum
target/release/deno-x86_64-apple-darwin.zip
target/release/deno-x86_64-apple-darwin.sha256sum
target/release/deno-x86_64-apple-darwin.zip.sha256sum
target/release/denort-x86_64-apple-darwin.zip
target/release/denort-x86_64-apple-darwin.sha256sum
target/release/denort-x86_64-apple-darwin.zip.sha256sum
target/release/deno-aarch64-unknown-linux-gnu.zip
target/release/deno-aarch64-unknown-linux-gnu.sha256sum
target/release/deno-aarch64-unknown-linux-gnu.zip.sha256sum
target/release/denort-aarch64-unknown-linux-gnu.zip
target/release/denort-aarch64-unknown-linux-gnu.sha256sum
target/release/denort-aarch64-unknown-linux-gnu.zip.sha256sum
target/release/deno-aarch64-apple-darwin.zip
target/release/deno-aarch64-apple-darwin.sha256sum
target/release/deno-aarch64-apple-darwin.zip.sha256sum
target/release/denort-aarch64-apple-darwin.zip
target/release/denort-aarch64-apple-darwin.sha256sum
target/release/denort-aarch64-apple-darwin.zip.sha256sum
target/release/deno_src.tar.gz
target/release/lib.deno.d.ts
body_path: target/release/release-notes.md
@ -685,7 +685,7 @@ jobs:
!./target/*/*.zip
!./target/*/*.sha256sum
!./target/*/*.tar.gz
key: '18-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
key: '19-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
publish-canary:
name: publish canary
runs-on: ubuntu-22.04

243
Cargo.lock generated
View file

@ -1188,6 +1188,7 @@ dependencies = [
"deno_task_shell",
"deno_terminal 0.2.0",
"deno_tower_lsp",
"dhat",
"dissimilar",
"dotenvy",
"dprint-plugin-json",
@ -1340,6 +1341,7 @@ version = "0.165.0"
dependencies = [
"async-trait",
"deno_core",
"thiserror",
"tokio",
"uuid",
]
@ -1353,6 +1355,7 @@ dependencies = [
"rusqlite",
"serde",
"sha2",
"thiserror",
"tokio",
]
@ -1384,6 +1387,7 @@ dependencies = [
"deno_webgpu",
"image",
"serde",
"thiserror",
]
[[package]]
@ -1419,9 +1423,9 @@ dependencies = [
[[package]]
name = "deno_core"
version = "0.311.0"
version = "0.313.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e09bd55da542fa1fde753aff617c355b5d782e763ab2a19e4371a56d7844cac"
checksum = "29f36be738d78e39b6603a6b07f1cf91e28baf3681f87205f07482999e0d0bc2"
dependencies = [
"anyhow",
"bincode",
@ -1464,6 +1468,7 @@ dependencies = [
"chrono",
"deno_core",
"saffron",
"thiserror",
"tokio",
]
@ -1504,9 +1509,9 @@ dependencies = [
[[package]]
name = "deno_doc"
version = "0.153.0"
version = "0.154.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6925db7ad16bee4bdcb7e654d2475e2fbd5e1d7dd4c6ee5f030ee858b4a2a8ee"
checksum = "17e204e45b0d79750880114e37b34abe19ad0710d8435a8da8f23a528fe98de4"
dependencies = [
"anyhow",
"cfg-if",
@ -1523,18 +1528,8 @@ dependencies = [
"regex",
"serde",
"serde_json",
"syntect",
"termcolor",
"tree-sitter-bash",
"tree-sitter-css",
"tree-sitter-highlight",
"tree-sitter-html",
"tree-sitter-javascript",
"tree-sitter-json",
"tree-sitter-md",
"tree-sitter-regex",
"tree-sitter-rust",
"tree-sitter-typescript",
"tree-sitter-xml",
]
[[package]]
@ -1583,6 +1578,8 @@ dependencies = [
"serde",
"serde-value",
"serde_json",
"thiserror",
"tokio",
"winapi",
]
@ -1897,9 +1894,9 @@ dependencies = [
[[package]]
name = "deno_npm"
version = "0.25.3"
version = "0.25.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8050bcc2513046cbc0134ae1bc0f3b251a58b95012f3b81e0ea09a7f069c301b"
checksum = "e6b4dc4a9f1cff63d5638e7d93042f24f46300d1cc77b86f3caaa699a7ddccf7"
dependencies = [
"anyhow",
"async-trait",
@ -1916,9 +1913,9 @@ dependencies = [
[[package]]
name = "deno_ops"
version = "0.187.0"
version = "0.189.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e040fd4def8a67538fe38c9955fd970efc9f44284bd69d44f8992a456afd665d"
checksum = "e8f998ad1d5b36064109367ffe67b1088385eb3d8025efc95e445bc013a147a2"
dependencies = [
"proc-macro-rules",
"proc-macro2",
@ -1991,6 +1988,7 @@ dependencies = [
name = "deno_runtime"
version = "0.180.0"
dependencies = [
"color-print",
"deno_ast",
"deno_broadcast_channel",
"deno_cache",
@ -2113,6 +2111,7 @@ dependencies = [
"rustls-tokio-stream",
"rustls-webpki",
"serde",
"thiserror",
"tokio",
"webpki-roots",
]
@ -2158,6 +2157,7 @@ dependencies = [
"deno_console",
"deno_core",
"deno_webidl",
"thiserror",
"urlpattern",
]
@ -2230,6 +2230,7 @@ dependencies = [
"deno_core",
"deno_web",
"rusqlite",
"thiserror",
]
[[package]]
@ -2415,6 +2416,22 @@ version = "1.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6e854126756c496b8c81dec88f9a706b15b875c5849d4097a3854476b9fdf94"
[[package]]
name = "dhat"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98cd11d84628e233de0ce467de10b8633f4ddaecafadefc86e13b84b8739b827"
dependencies = [
"backtrace",
"lazy_static",
"mintex",
"parking_lot",
"rustc-hash 1.1.0",
"serde",
"serde_json",
"thousands",
]
[[package]]
name = "diff"
version = "0.1.13"
@ -4425,6 +4442,12 @@ dependencies = [
"simd-adler32",
]
[[package]]
name = "mintex"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bec4598fddb13cc7b528819e697852653252b760f1228b7642679bf2ff2cd07"
[[package]]
name = "mio"
version = "0.8.11"
@ -4733,6 +4756,28 @@ version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
[[package]]
name = "onig"
version = "6.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c4b31c8722ad9171c6d77d3557db078cab2bd50afcc9d09c8b315c59df8ca4f"
dependencies = [
"bitflags 1.3.2",
"libc",
"once_cell",
"onig_sys",
]
[[package]]
name = "onig_sys"
version = "69.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b829e3d7e9cc74c7e315ee8edb185bf4190da5acde74afd7fc59c35b1f086e7"
dependencies = [
"cc",
"pkg-config",
]
[[package]]
name = "opaque-debug"
version = "0.3.1"
@ -6166,9 +6211,9 @@ dependencies = [
[[package]]
name = "serde_v8"
version = "0.220.0"
version = "0.222.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e7a65d91d79acc82aa229aeb084f4a39bda269069bc1520df40f679495388e4"
checksum = "27130b5cd87f6f06228940a1f3a7ecc988ea13d1bede1398a48d74cb59dabc9a"
dependencies = [
"num-bigint",
"serde",
@ -7021,6 +7066,26 @@ dependencies = [
"syn 2.0.72",
]
[[package]]
name = "syntect"
version = "5.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "874dcfa363995604333cf947ae9f751ca3af4522c60886774c4963943b4746b1"
dependencies = [
"bincode",
"bitflags 1.3.2",
"flate2",
"fnv",
"once_cell",
"onig",
"regex-syntax",
"serde",
"serde_derive",
"serde_json",
"thiserror",
"walkdir",
]
[[package]]
name = "tap"
version = "1.0.1"
@ -7153,24 +7218,30 @@ dependencies = [
[[package]]
name = "thiserror"
version = "1.0.61"
version = "1.0.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709"
checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.61"
version = "1.0.64"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533"
checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.72",
]
[[package]]
name = "thousands"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3bf63baf9f5039dadc247375c29eb13706706cfde997d0330d05aa63a77d8820"
[[package]]
name = "thread_local"
version = "1.1.8"
@ -7452,128 +7523,6 @@ dependencies = [
"once_cell",
]
[[package]]
name = "tree-sitter"
version = "0.22.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df7cc499ceadd4dcdf7ec6d4cbc34ece92c3fa07821e287aedecd4416c516dca"
dependencies = [
"cc",
"regex",
]
[[package]]
name = "tree-sitter-bash"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5244703ad2e08a616d859a0557d7aa290adcd5e0990188a692e628ffe9dce40"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-css"
version = "0.21.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e08e324b1cf60fd3291774b49724c66de2ce8fcf4d358d0b4b82e37b41b1c9b"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-highlight"
version = "0.22.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eaca0fe34fa96eec6aaa8e63308dbe1bafe65a6317487c287f93938959b21907"
dependencies = [
"lazy_static",
"regex",
"thiserror",
"tree-sitter",
]
[[package]]
name = "tree-sitter-html"
version = "0.20.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8766b5ad3721517f8259e6394aefda9c686aebf7a8c74ab8624f2c3b46902fd5"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-javascript"
version = "0.21.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8710a71bc6779e33811a8067bdda3ed08bed1733296ff915e44faf60f8c533d7"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-json"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b737dcb73c35d74b7d64a5f3dde158113c86a012bf3cee2bfdf2150d23b05db"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-md"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c3cfd068f2527250bbd8ff407431164e12b17863e7eafb76e311dd3f96965a"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-regex"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ff1286fe9651b2797484839ffa37aa76c8618d4ccb6836d7e31765dfd60c0d5"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-rust"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "277690f420bf90741dea984f3da038ace46c4fe6047cba57a66822226cde1c93"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-typescript"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ecb35d98a688378e56c18c9c159824fd16f730ccbea19aacf4f206e5d5438ed9"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-xml"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65c3a1b08e9842143f84fde1a18ac40ee77ca80a80b14077e4ca67a3b4808b8b"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "triomphe"
version = "0.1.13"

View file

@ -46,12 +46,12 @@ repository = "https://github.com/denoland/deno"
[workspace.dependencies]
deno_ast = { version = "=0.42.2", features = ["transpiling"] }
deno_core = { version = "0.311.0" }
deno_core = { version = "0.313.0" }
deno_bench_util = { version = "0.165.0", path = "./bench_util" }
deno_lockfile = "=0.23.1"
deno_media_type = { version = "0.1.4", features = ["module_specifier"] }
deno_npm = "=0.25.3"
deno_npm = "=0.25.4"
deno_path_util = "=0.2.1"
deno_permissions = { version = "0.31.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.180.0", path = "./runtime" }
@ -106,6 +106,7 @@ cbc = { version = "=0.1.2", features = ["alloc"] }
# Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS.
# Instead use util::time::utc_now()
chrono = { version = "0.4", default-features = false, features = ["std", "serde"] }
color-print = "0.3.5"
console_static_text = "=0.8.1"
dashmap = "5.5.3"
data-encoding = "2.3.3"

View file

@ -38,6 +38,11 @@ path = "./bench/lsp_bench_standalone.rs"
[features]
default = ["upgrade", "__vendored_zlib_ng"]
# A feature that enables heap profiling with dhat on Linux.
# 1. Compile with `cargo build --profile=release-with-debug --features=dhat-heap`
# 2. Run the executable. It will output a dhat-heap.json file.
# 3. Open the json file in https://nnethercote.github.io/dh_view/dh_view.html
dhat-heap = ["dhat"]
# A feature that enables the upgrade subcommand and the background check for
# available updates (of deno binary). This is typically disabled for (Linux)
# distribution packages.
@ -67,7 +72,7 @@ deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposa
deno_cache_dir = { workspace = true }
deno_config = { version = "=0.37.1", features = ["workspace", "sync"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "0.153.0", features = ["html"] }
deno_doc = { version = "0.154.0", default-features = false, features = ["rust", "html", "syntect"] }
deno_graph = { version = "=0.83.3" }
deno_lint = { version = "=0.67.0", features = ["docs"] }
deno_lockfile.workspace = true
@ -94,10 +99,11 @@ chrono = { workspace = true, features = ["now"] }
clap = { version = "=4.5.16", features = ["env", "string", "wrap_help", "error-context"] }
clap_complete = "=4.5.24"
clap_complete_fig = "=4.5.2"
color-print = "0.3.5"
color-print.workspace = true
console_static_text.workspace = true
dashmap.workspace = true
data-encoding.workspace = true
dhat = { version = "0.3.3", optional = true }
dissimilar = "=1.0.4"
dotenvy = "0.15.7"
dprint-plugin-json = "=0.19.3"

View file

@ -575,7 +575,8 @@ fn parse_packages_allowed_scripts(s: &str) -> Result<String, AnyError> {
pub struct UnstableConfig {
// TODO(bartlomieju): remove in Deno 2.5
pub legacy_flag_enabled: bool, // --unstable
pub bare_node_builtins: bool, // --unstable-bare-node-builts
pub bare_node_builtins: bool,
pub detect_cjs: bool,
pub sloppy_imports: bool,
pub features: Vec<String>, // --unstabe-kv --unstable-cron
}
@ -1342,7 +1343,7 @@ pub fn flags_from_vec(args: Vec<OsString>) -> clap::error::Result<Flags> {
}
match subcommand.as_str() {
"add" => add_parse(&mut flags, &mut m),
"add" => add_parse(&mut flags, &mut m)?,
"remove" => remove_parse(&mut flags, &mut m),
"bench" => bench_parse(&mut flags, &mut m)?,
"bundle" => bundle_parse(&mut flags, &mut m),
@ -1528,7 +1529,7 @@ pub fn clap_root() -> Command {
);
run_args(Command::new("deno"), true)
.args(unstable_args(UnstableArgsConfig::ResolutionAndRuntime))
.with_unstable_args(UnstableArgsConfig::ResolutionAndRuntime)
.next_line_help(false)
.bin_name("deno")
.styles(
@ -1630,7 +1631,7 @@ fn command(
) -> Command {
Command::new(name)
.about(about)
.args(unstable_args(unstable_args_config))
.with_unstable_args(unstable_args_config)
}
fn help_subcommand(app: &Command) -> Command {
@ -1658,10 +1659,10 @@ fn add_subcommand() -> Command {
"add",
cstr!(
"Add dependencies to your configuration file.
<p(245)>deno add @std/path</>
<p(245)>deno add jsr:@std/path</>
You can add multiple dependencies at once:
<p(245)>deno add @std/path @std/assert</>"
<p(245)>deno add jsr:@std/path jsr:@std/assert</>"
),
UnstableArgsConfig::None,
)
@ -1675,6 +1676,7 @@ You can add multiple dependencies at once:
.action(ArgAction::Append),
)
.arg(add_dev_arg())
.arg(allow_scripts_arg())
})
}
@ -1717,7 +1719,7 @@ If you specify a directory instead of a file, the path is expanded to all contai
UnstableArgsConfig::ResolutionAndRuntime,
)
.defer(|cmd| {
runtime_args(cmd, true, false)
runtime_args(cmd, true, false, true)
.arg(check_arg(true))
.arg(
Arg::new("json")
@ -1881,7 +1883,7 @@ On the first invocation with deno will download the proper binary and cache it i
UnstableArgsConfig::ResolutionAndRuntime,
)
.defer(|cmd| {
runtime_args(cmd, true, false)
runtime_args(cmd, true, false, true)
.arg(check_arg(true))
.arg(
Arg::new("include")
@ -2202,7 +2204,7 @@ This command has implicit access to all permissions.
UnstableArgsConfig::ResolutionAndRuntime,
)
.defer(|cmd| {
runtime_args(cmd, false, true)
runtime_args(cmd, false, true, true)
.arg(check_arg(false))
.arg(executable_ext_arg())
.arg(
@ -2468,7 +2470,7 @@ in the package cache. If no dependency is specified, installs all dependencies l
If the <p(245)>--entrypoint</> flag is passed, installs the dependencies of the specified entrypoint(s).
<p(245)>deno install</>
<p(245)>deno install @std/bytes</>
<p(245)>deno install jsr:@std/bytes</>
<p(245)>deno install npm:chalk</>
<p(245)>deno install --entrypoint entry1.ts entry2.ts</>
@ -2501,7 +2503,7 @@ The installation root is determined, in order of precedence:
These must be added to the path manually if required."), UnstableArgsConfig::ResolutionAndRuntime)
.visible_alias("i")
.defer(|cmd| {
permission_args(runtime_args(cmd, false, true), Some("global"))
permission_args(runtime_args(cmd, false, true, false), Some("global"))
.arg(check_arg(true))
.arg(allow_scripts_arg())
.arg(
@ -2767,8 +2769,13 @@ It is especially useful for quick prototyping and checking snippets of code.
TypeScript is supported, however it is not type-checked, only transpiled."
), UnstableArgsConfig::ResolutionAndRuntime)
.defer(|cmd| runtime_args(cmd, true, true)
.arg(check_arg(false))
.defer(|cmd| {
let cmd = compile_args_without_check_args(cmd);
let cmd = inspect_args(cmd);
let cmd = permission_args(cmd, None);
let cmd = runtime_misc_args(cmd);
cmd
.arg(
Arg::new("eval-file")
.long("eval-file")
@ -2787,7 +2794,7 @@ TypeScript is supported, however it is not type-checked, only transpiled."
.after_help(cstr!("<y>Environment variables:</>
<g>DENO_REPL_HISTORY</> Set REPL history file path. History file is disabled when the value is empty.
<p(245)>[default: $DENO_DIR/deno_history.txt]</>"))
)
})
.arg(env_file_arg())
.arg(
Arg::new("args")
@ -2799,7 +2806,7 @@ TypeScript is supported, however it is not type-checked, only transpiled."
}
fn run_args(command: Command, top_level: bool) -> Command {
runtime_args(command, true, true)
runtime_args(command, true, true, true)
.arg(check_arg(false))
.arg(watch_arg(true))
.arg(hmr_arg(true))
@ -2855,7 +2862,7 @@ Start a server defined in server.ts:
Start a server defined in server.ts, watching for changes and running on port 5050:
<p(245)>deno serve --watch --port 5050 server.ts</>
<y>Read more:</> <c>https://docs.deno.com/go/serve</>"), UnstableArgsConfig::ResolutionAndRuntime), true, true)
<y>Read more:</> <c>https://docs.deno.com/go/serve</>"), UnstableArgsConfig::ResolutionAndRuntime), true, true, true)
.arg(
Arg::new("port")
.long("port")
@ -2929,7 +2936,7 @@ or <c>**/__tests__/**</>:
UnstableArgsConfig::ResolutionAndRuntime
)
.defer(|cmd|
runtime_args(cmd, true, true)
runtime_args(cmd, true, true, true)
.arg(check_arg(true))
.arg(
Arg::new("ignore")
@ -3642,6 +3649,7 @@ fn runtime_args(
app: Command,
include_perms: bool,
include_inspector: bool,
include_allow_scripts: bool,
) -> Command {
let app = compile_args(app);
let app = if include_perms {
@ -3654,6 +3662,15 @@ fn runtime_args(
} else {
app
};
let app = if include_allow_scripts {
app.arg(allow_scripts_arg())
} else {
app
};
runtime_misc_args(app)
}
fn runtime_misc_args(app: Command) -> Command {
app
.arg(frozen_lockfile_arg())
.arg(cached_only_arg())
@ -4135,23 +4152,29 @@ enum UnstableArgsConfig {
ResolutionAndRuntime,
}
struct UnstableArgsIter {
idx: usize,
cfg: UnstableArgsConfig,
trait CommandExt {
fn with_unstable_args(self, cfg: UnstableArgsConfig) -> Self;
}
impl Iterator for UnstableArgsIter {
type Item = Arg;
impl CommandExt for Command {
fn with_unstable_args(self, cfg: UnstableArgsConfig) -> Self {
let mut next_display_order = {
let mut value = 1000;
move || {
value += 1;
value
}
};
fn next(&mut self) -> Option<Self::Item> {
let arg = if self.idx == 0 {
let mut cmd = self.arg(
Arg::new("unstable")
.long("unstable")
.help(cstr!("Enable all unstable features and APIs. Instead of using this flag, consider enabling individual unstable features
<p(245)>To view the list of individual unstable feature flags, run this command again with --help=unstable</>"))
.action(ArgAction::SetTrue)
.hide(matches!(self.cfg, UnstableArgsConfig::None))
} else if self.idx == 1 {
.hide(matches!(cfg, UnstableArgsConfig::None))
.display_order(next_display_order())
).arg(
Arg::new("unstable-bare-node-builtins")
.long("unstable-bare-node-builtins")
.help("Enable unstable bare node builtins feature")
@ -4159,20 +4182,36 @@ impl Iterator for UnstableArgsIter {
.value_parser(FalseyValueParser::new())
.action(ArgAction::SetTrue)
.hide(true)
.long_help(match self.cfg {
.long_help(match cfg {
UnstableArgsConfig::None => None,
UnstableArgsConfig::ResolutionOnly
| UnstableArgsConfig::ResolutionAndRuntime => Some("true"),
})
.help_heading(UNSTABLE_HEADING)
} else if self.idx == 2 {
.display_order(next_display_order()),
).arg(
Arg::new("unstable-detect-cjs")
.long("unstable-detect-cjs")
.help("Reads the package.json type field in a project to treat .js files as .cjs")
.value_parser(FalseyValueParser::new())
.action(ArgAction::SetTrue)
.hide(true)
.long_help(match cfg {
UnstableArgsConfig::None => None,
UnstableArgsConfig::ResolutionOnly
| UnstableArgsConfig::ResolutionAndRuntime => Some("true"),
})
.help_heading(UNSTABLE_HEADING)
.display_order(next_display_order())
).arg(
Arg::new("unstable-byonm")
.long("unstable-byonm")
.value_parser(FalseyValueParser::new())
.action(ArgAction::SetTrue)
.hide(true)
.help_heading(UNSTABLE_HEADING)
} else if self.idx == 3 {
.display_order(next_display_order()),
).arg(
Arg::new("unstable-sloppy-imports")
.long("unstable-sloppy-imports")
.help("Enable unstable resolving of specifiers by extension probing, .js to .ts, and directory probing")
@ -4180,13 +4219,16 @@ impl Iterator for UnstableArgsIter {
.value_parser(FalseyValueParser::new())
.action(ArgAction::SetTrue)
.hide(true)
.long_help(match self.cfg {
.long_help(match cfg {
UnstableArgsConfig::None => None,
UnstableArgsConfig::ResolutionOnly | UnstableArgsConfig::ResolutionAndRuntime => Some("true")
})
.help_heading(UNSTABLE_HEADING)
} else if self.idx > 3 {
let granular_flag = crate::UNSTABLE_GRANULAR_FLAGS.get(self.idx - 4)?;
.display_order(next_display_order())
);
for granular_flag in crate::UNSTABLE_GRANULAR_FLAGS.iter() {
cmd = cmd.arg(
Arg::new(format!("unstable-{}", granular_flag.name))
.long(format!("unstable-{}", granular_flag.name))
.help(granular_flag.help_text)
@ -4195,7 +4237,7 @@ impl Iterator for UnstableArgsIter {
.help_heading(UNSTABLE_HEADING)
// we don't render long help, so using it here as a sort of metadata
.long_help(if granular_flag.show_in_help {
match self.cfg {
match cfg {
UnstableArgsConfig::None | UnstableArgsConfig::ResolutionOnly => {
None
}
@ -4204,16 +4246,12 @@ impl Iterator for UnstableArgsIter {
} else {
None
})
} else {
return None;
};
self.idx += 1;
Some(arg.display_order(self.idx + 1000))
}
.display_order(next_display_order()),
);
}
fn unstable_args(cfg: UnstableArgsConfig) -> impl IntoIterator<Item = Arg> {
UnstableArgsIter { idx: 0, cfg }
cmd
}
}
fn allow_scripts_arg_parse(
@ -4235,8 +4273,13 @@ fn allow_scripts_arg_parse(
Ok(())
}
fn add_parse(flags: &mut Flags, matches: &mut ArgMatches) {
fn add_parse(
flags: &mut Flags,
matches: &mut ArgMatches,
) -> clap::error::Result<()> {
allow_scripts_arg_parse(flags, matches)?;
flags.subcommand = DenoSubcommand::Add(add_parse_inner(matches, None));
Ok(())
}
fn add_parse_inner(
@ -4262,7 +4305,7 @@ fn bench_parse(
) -> clap::error::Result<()> {
flags.type_check_mode = TypeCheckMode::Local;
runtime_args_parse(flags, matches, true, false)?;
runtime_args_parse(flags, matches, true, false, true)?;
ext_arg_parse(flags, matches);
// NOTE: `deno bench` always uses `--no-prompt`, tests shouldn't ever do
@ -4352,7 +4395,7 @@ fn compile_parse(
matches: &mut ArgMatches,
) -> clap::error::Result<()> {
flags.type_check_mode = TypeCheckMode::Local;
runtime_args_parse(flags, matches, true, false)?;
runtime_args_parse(flags, matches, true, false, true)?;
let mut script = matches.remove_many::<String>("script_arg").unwrap();
let source_file = script.next().unwrap();
@ -4527,7 +4570,7 @@ fn eval_parse(
flags: &mut Flags,
matches: &mut ArgMatches,
) -> clap::error::Result<()> {
runtime_args_parse(flags, matches, false, true)?;
runtime_args_parse(flags, matches, false, true, false)?;
unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime);
flags.allow_all();
@ -4620,7 +4663,7 @@ fn install_parse(
flags: &mut Flags,
matches: &mut ArgMatches,
) -> clap::error::Result<()> {
runtime_args_parse(flags, matches, true, true)?;
runtime_args_parse(flags, matches, true, true, false)?;
let global = matches.get_flag("global");
if global {
@ -4846,8 +4889,18 @@ fn repl_parse(
flags: &mut Flags,
matches: &mut ArgMatches,
) -> clap::error::Result<()> {
runtime_args_parse(flags, matches, true, true)?;
unsafely_ignore_certificate_errors_parse(flags, matches);
unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime);
compile_args_without_check_parse(flags, matches)?;
cached_only_arg_parse(flags, matches);
frozen_lockfile_arg_parse(flags, matches);
permission_args_parse(flags, matches)?;
inspect_arg_parse(flags, matches);
location_arg_parse(flags, matches);
v8_flags_arg_parse(flags, matches);
seed_arg_parse(flags, matches);
enable_testing_features_arg_parse(flags, matches);
env_file_arg_parse(flags, matches);
strace_ops_parse(flags, matches);
let eval_files = matches
.remove_many::<String>("eval-file")
@ -4879,7 +4932,7 @@ fn run_parse(
mut app: Command,
bare: bool,
) -> clap::error::Result<()> {
runtime_args_parse(flags, matches, true, true)?;
runtime_args_parse(flags, matches, true, true, true)?;
ext_arg_parse(flags, matches);
flags.code_cache_enabled = !matches.get_flag("no-code-cache");
@ -4920,7 +4973,7 @@ fn serve_parse(
let worker_count = parallel_arg_parse(matches).map(|v| v.get());
runtime_args_parse(flags, matches, true, true)?;
runtime_args_parse(flags, matches, true, true, true)?;
// If the user didn't pass --allow-net, add this port to the network
// allowlist. If the host is 0.0.0.0, we add :{port} and allow the same network perms
// as if it was passed to --allow-net directly.
@ -5015,7 +5068,7 @@ fn test_parse(
matches: &mut ArgMatches,
) -> clap::error::Result<()> {
flags.type_check_mode = TypeCheckMode::Local;
runtime_args_parse(flags, matches, true, true)?;
runtime_args_parse(flags, matches, true, true, true)?;
ext_arg_parse(flags, matches);
// NOTE: `deno test` always uses `--no-prompt`, tests shouldn't ever do
@ -5380,6 +5433,7 @@ fn runtime_args_parse(
matches: &mut ArgMatches,
include_perms: bool,
include_inspector: bool,
include_allow_scripts: bool,
) -> clap::error::Result<()> {
unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime);
compile_args_parse(flags, matches)?;
@ -5391,6 +5445,9 @@ fn runtime_args_parse(
if include_inspector {
inspect_arg_parse(flags, matches);
}
if include_allow_scripts {
allow_scripts_arg_parse(flags, matches)?;
}
location_arg_parse(flags, matches);
v8_flags_arg_parse(flags, matches);
seed_arg_parse(flags, matches);
@ -5662,6 +5719,7 @@ fn unstable_args_parse(
flags.unstable_config.bare_node_builtins =
matches.get_flag("unstable-bare-node-builtins");
flags.unstable_config.detect_cjs = matches.get_flag("unstable-detect-cjs");
flags.unstable_config.sloppy_imports =
matches.get_flag("unstable-sloppy-imports");
@ -7390,7 +7448,7 @@ mod tests {
#[test]
fn repl_with_flags() {
#[rustfmt::skip]
let r = flags_from_vec(svec!["deno", "repl", "-A", "--import-map", "import_map.json", "--no-remote", "--config", "tsconfig.json", "--no-check", "--reload", "--lock", "lock.json", "--cert", "example.crt", "--cached-only", "--location", "https:foo", "--v8-flags=--help", "--seed", "1", "--inspect=127.0.0.1:9229", "--unsafely-ignore-certificate-errors", "--env=.example.env"]);
let r = flags_from_vec(svec!["deno", "repl", "-A", "--import-map", "import_map.json", "--no-remote", "--config", "tsconfig.json", "--reload", "--lock", "lock.json", "--cert", "example.crt", "--cached-only", "--location", "https:foo", "--v8-flags=--help", "--seed", "1", "--inspect=127.0.0.1:9229", "--unsafely-ignore-certificate-errors", "--env=.example.env"]);
assert_eq!(
r.unwrap(),
Flags {
@ -7438,7 +7496,6 @@ mod tests {
allow_write: Some(vec![]),
..Default::default()
},
type_check_mode: TypeCheckMode::None,
..Flags::default()
}
);
@ -7460,7 +7517,6 @@ mod tests {
eval: None,
is_default_command: false,
}),
type_check_mode: TypeCheckMode::None,
..Flags::default()
}
);
@ -8862,8 +8918,12 @@ mod tests {
#[test]
fn test_no_colon_in_value_name() {
let app =
runtime_args(Command::new("test_inspect_completion_value"), true, true);
let app = runtime_args(
Command::new("test_inspect_completion_value"),
true,
true,
false,
);
let inspect_args = app
.get_arguments()
.filter(|arg| arg.get_id() == "inspect")

View file

@ -1576,6 +1576,11 @@ impl CliOptions {
|| self.workspace().has_unstable("bare-node-builtins")
}
pub fn unstable_detect_cjs(&self) -> bool {
self.flags.unstable_config.detect_cjs
|| self.workspace().has_unstable("detect-cjs")
}
fn byonm_enabled(&self) -> bool {
// check if enabled via unstable
self.node_modules_dir().ok().flatten() == Some(NodeModulesDirMode::Manual)
@ -1620,21 +1625,17 @@ impl CliOptions {
});
if !from_config_file.is_empty() {
// collect unstable granular flags
let mut all_valid_unstable_flags: Vec<&str> =
crate::UNSTABLE_GRANULAR_FLAGS
let all_valid_unstable_flags: Vec<&str> = crate::UNSTABLE_GRANULAR_FLAGS
.iter()
.map(|granular_flag| granular_flag.name)
.collect();
let mut another_unstable_flags = Vec::from([
.chain([
"sloppy-imports",
"byonm",
"bare-node-builtins",
"fmt-component",
]);
// add more unstable flags to the same vector holding granular flags
all_valid_unstable_flags.append(&mut another_unstable_flags);
"detect-cjs",
])
.collect();
// check and warn if the unstable flag of config file isn't supported, by
// iterating through the vector holding the unstable flags

57
cli/cache/mod.rs vendored
View file

@ -9,10 +9,13 @@ use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::FileOrRedirect;
use crate::npm::CliNpmResolver;
use crate::resolver::CliNodeResolver;
use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::atomic_write_file_with_retries_and_fs;
use crate::util::fs::AtomicWriteFileFsAdapter;
use crate::util::path::specifier_has_extension;
use crate::util::text_encoding::arc_str_to_bytes;
use crate::util::text_encoding::from_utf8_lossy_owned;
use deno_ast::MediaType;
use deno_core::futures;
@ -57,6 +60,7 @@ pub use fast_check::FastCheckCache;
pub use incremental::IncrementalCache;
pub use module_info::ModuleInfoCache;
pub use node::NodeAnalysisCache;
pub use parsed_source::EsmOrCjsChecker;
pub use parsed_source::LazyGraphSourceParser;
pub use parsed_source::ParsedSourceCache;
@ -177,37 +181,46 @@ pub struct FetchCacherOptions {
pub permissions: PermissionsContainer,
/// If we're publishing for `deno publish`.
pub is_deno_publish: bool,
pub unstable_detect_cjs: bool,
}
/// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides
/// a concise interface to the DENO_DIR when building module graphs.
pub struct FetchCacher {
file_fetcher: Arc<FileFetcher>,
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
esm_or_cjs_checker: Arc<EsmOrCjsChecker>,
file_fetcher: Arc<FileFetcher>,
global_http_cache: Arc<GlobalHttpCache>,
node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>,
permissions: PermissionsContainer,
cache_info_enabled: bool,
is_deno_publish: bool,
unstable_detect_cjs: bool,
cache_info_enabled: bool,
}
impl FetchCacher {
pub fn new(
esm_or_cjs_checker: Arc<EsmOrCjsChecker>,
file_fetcher: Arc<FileFetcher>,
global_http_cache: Arc<GlobalHttpCache>,
node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>,
options: FetchCacherOptions,
) -> Self {
Self {
file_fetcher,
esm_or_cjs_checker,
global_http_cache,
node_resolver,
npm_resolver,
module_info_cache,
file_header_overrides: options.file_header_overrides,
permissions: options.permissions,
is_deno_publish: options.is_deno_publish,
unstable_detect_cjs: options.unstable_detect_cjs,
cache_info_enabled: false,
}
}
@ -282,6 +295,46 @@ impl Loader for FetchCacher {
},
))));
}
if self.unstable_detect_cjs && specifier_has_extension(specifier, "js") {
if let Ok(Some(pkg_json)) =
self.node_resolver.get_closest_package_json(specifier)
{
if pkg_json.typ == "commonjs" {
if let Ok(path) = specifier.to_file_path() {
if let Ok(bytes) = std::fs::read(&path) {
let text: Arc<str> = from_utf8_lossy_owned(bytes).into();
let is_es_module = match self.esm_or_cjs_checker.is_esm(
specifier,
text.clone(),
MediaType::JavaScript,
) {
Ok(value) => value,
Err(err) => {
return Box::pin(futures::future::ready(Err(err.into())));
}
};
if !is_es_module {
self.node_resolver.mark_cjs_resolution(specifier.clone());
return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::External {
specifier: specifier.clone(),
},
))));
} else {
return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::Module {
specifier: specifier.clone(),
content: arc_str_to_bytes(text),
maybe_headers: None,
},
))));
}
}
}
}
}
}
}
if self.is_deno_publish

View file

@ -5,6 +5,7 @@ use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_ast::ParseDiagnostic;
use deno_ast::ParsedSource;
use deno_core::parking_lot::Mutex;
use deno_graph::CapturingModuleParser;
@ -149,3 +150,42 @@ impl deno_graph::ParsedSourceStore for ParsedSourceCache {
}
}
}
pub struct EsmOrCjsChecker {
parsed_source_cache: Arc<ParsedSourceCache>,
}
impl EsmOrCjsChecker {
pub fn new(parsed_source_cache: Arc<ParsedSourceCache>) -> Self {
Self {
parsed_source_cache,
}
}
pub fn is_esm(
&self,
specifier: &ModuleSpecifier,
source: Arc<str>,
media_type: MediaType,
) -> Result<bool, ParseDiagnostic> {
// todo(dsherret): add a file cache here to avoid parsing with swc on each run
let source = match self.parsed_source_cache.get_parsed_source(specifier) {
Some(source) => source.clone(),
None => {
let source = deno_ast::parse_program(deno_ast::ParseParams {
specifier: specifier.clone(),
text: source,
media_type,
capture_tokens: true, // capture because it's used for cjs export analysis
scope_analysis: false,
maybe_syntax: None,
})?;
self
.parsed_source_cache
.set_parsed_source(specifier.clone(), source.clone());
source
}
};
Ok(source.is_module())
}
}

View file

@ -14,6 +14,7 @@ use crate::cache::CodeCache;
use crate::cache::DenoDir;
use crate::cache::DenoDirProvider;
use crate::cache::EmitCache;
use crate::cache::EsmOrCjsChecker;
use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache;
use crate::cache::LocalHttpCache;
@ -171,6 +172,7 @@ struct CliFactoryServices {
http_client_provider: Deferred<Arc<HttpClientProvider>>,
emit_cache: Deferred<Arc<EmitCache>>,
emitter: Deferred<Arc<Emitter>>,
esm_or_cjs_checker: Deferred<Arc<EsmOrCjsChecker>>,
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
main_graph_container: Deferred<Arc<MainModuleGraphContainer>>,
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
@ -298,6 +300,12 @@ impl CliFactory {
.get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly))
}
pub fn esm_or_cjs_checker(&self) -> &Arc<EsmOrCjsChecker> {
self.services.esm_or_cjs_checker.get_or_init(|| {
Arc::new(EsmOrCjsChecker::new(self.parsed_source_cache().clone()))
})
}
pub fn global_http_cache(&self) -> Result<&Arc<GlobalHttpCache>, AnyError> {
self.services.global_http_cache.get_or_try_init(|| {
Ok(Arc::new(GlobalHttpCache::new(
@ -579,6 +587,7 @@ impl CliFactory {
node_analysis_cache,
self.fs().clone(),
node_resolver,
Some(self.parsed_source_cache().clone()),
);
Ok(Arc::new(NodeCodeTranslator::new(
@ -619,8 +628,10 @@ impl CliFactory {
Ok(Arc::new(ModuleGraphBuilder::new(
cli_options.clone(),
self.caches()?.clone(),
self.esm_or_cjs_checker().clone(),
self.fs().clone(),
self.resolver().await?.clone(),
self.cli_node_resolver().await?.clone(),
self.npm_resolver().await?.clone(),
self.module_info_cache()?.clone(),
self.parsed_source_cache().clone(),
@ -792,6 +803,7 @@ impl CliFactory {
Ok(CliMainWorkerFactory::new(
self.blob_store().clone(),
self.cjs_resolutions().clone(),
if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone())
} else {
@ -896,6 +908,7 @@ impl CliFactory {
node_ipc: cli_options.node_ipc_fd(),
serve_port: cli_options.serve_port(),
serve_host: cli_options.serve_host(),
unstable_detect_cjs: cli_options.unstable_detect_cjs(),
})
}
}

View file

@ -6,6 +6,7 @@ use crate::args::CliLockfile;
use crate::args::CliOptions;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::cache;
use crate::cache::EsmOrCjsChecker;
use crate::cache::GlobalHttpCache;
use crate::cache::ModuleInfoCache;
use crate::cache::ParsedSourceCache;
@ -14,6 +15,7 @@ use crate::errors::get_error_class_name;
use crate::file_fetcher::FileFetcher;
use crate::npm::CliNpmResolver;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliNodeResolver;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::tools::check;
@ -379,8 +381,10 @@ pub struct BuildFastCheckGraphOptions<'a> {
pub struct ModuleGraphBuilder {
options: Arc<CliOptions>,
caches: Arc<cache::Caches>,
esm_or_cjs_checker: Arc<EsmOrCjsChecker>,
fs: Arc<dyn FileSystem>,
resolver: Arc<CliGraphResolver>,
node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
@ -396,8 +400,10 @@ impl ModuleGraphBuilder {
pub fn new(
options: Arc<CliOptions>,
caches: Arc<cache::Caches>,
esm_or_cjs_checker: Arc<EsmOrCjsChecker>,
fs: Arc<dyn FileSystem>,
resolver: Arc<CliGraphResolver>,
node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
@ -410,8 +416,10 @@ impl ModuleGraphBuilder {
Self {
options,
caches,
esm_or_cjs_checker,
fs,
resolver,
node_resolver,
npm_resolver,
module_info_cache,
parsed_source_cache,
@ -691,8 +699,10 @@ impl ModuleGraphBuilder {
permissions: PermissionsContainer,
) -> cache::FetchCacher {
cache::FetchCacher::new(
self.esm_or_cjs_checker.clone(),
self.file_fetcher.clone(),
self.global_http_cache.clone(),
self.node_resolver.clone(),
self.npm_resolver.clone(),
self.module_info_cache.clone(),
cache::FetchCacherOptions {
@ -702,6 +712,7 @@ impl ModuleGraphBuilder {
self.options.sub_command(),
crate::args::DenoSubcommand::Publish { .. }
),
unstable_detect_cjs: self.options.unstable_detect_cjs(),
},
)
}

View file

@ -470,14 +470,22 @@ impl HttpClient {
}
}
pub async fn download_with_progress(
pub async fn download_with_progress_and_retries(
&self,
url: Url,
maybe_header: Option<(HeaderName, HeaderValue)>,
progress_guard: &UpdateGuard,
) -> Result<Option<Vec<u8>>, DownloadError> {
self
.download_inner(url, maybe_header, Some(progress_guard))
crate::util::retry::retry(
|| {
self.download_inner(
url.clone(),
maybe_header.clone(),
Some(progress_guard),
)
},
|e| matches!(e, DownloadError::BadResponse(_) | DownloadError::Fetch(_)),
)
.await
}

View file

@ -3939,7 +3939,7 @@ pub struct OutliningSpan {
kind: OutliningSpanKind,
}
const FOLD_END_PAIR_CHARACTERS: &[u8] = &[b'}', b']', b')', b'`'];
const FOLD_END_PAIR_CHARACTERS: &[u8] = b"}])`";
impl OutliningSpan {
pub fn to_folding_range(

View file

@ -47,8 +47,7 @@ use deno_core::error::JsError;
use deno_core::futures::FutureExt;
use deno_core::unsync::JoinHandle;
use deno_npm::resolution::SnapshotFromLockfileError;
use deno_runtime::fmt_errors::format_js_error_with_suggestions;
use deno_runtime::fmt_errors::FixSuggestion;
use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics;
use deno_terminal::colors;
use factory::CliFactory;
@ -62,6 +61,10 @@ use std::ops::Deref;
use std::path::PathBuf;
use std::sync::Arc;
#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;
/// Ensures that all subcommands return an i32 exit code and an [`AnyError`] error type.
trait SubcommandOutput {
fn output(self) -> Result<i32, AnyError>;
@ -362,104 +365,12 @@ fn exit_with_message(message: &str, code: i32) -> ! {
std::process::exit(code);
}
fn get_suggestions_for_terminal_errors(e: &JsError) -> Vec<FixSuggestion> {
if let Some(msg) = &e.message {
if msg.contains("module is not defined")
|| msg.contains("exports is not defined")
{
return vec![
FixSuggestion::info(
"Deno does not support CommonJS modules without `.cjs` extension.",
),
FixSuggestion::hint(
"Rewrite this module to ESM or change the file extension to `.cjs`.",
),
];
} else if msg.contains("openKv is not a function") {
return vec![
FixSuggestion::info("Deno.openKv() is an unstable API."),
FixSuggestion::hint(
"Run again with `--unstable-kv` flag to enable this API.",
),
];
} else if msg.contains("cron is not a function") {
return vec![
FixSuggestion::info("Deno.cron() is an unstable API."),
FixSuggestion::hint(
"Run again with `--unstable-cron` flag to enable this API.",
),
];
} else if msg.contains("WebSocketStream is not defined") {
return vec![
FixSuggestion::info("new WebSocketStream() is an unstable API."),
FixSuggestion::hint(
"Run again with `--unstable-net` flag to enable this API.",
),
];
} else if msg.contains("Temporal is not defined") {
return vec![
FixSuggestion::info("Temporal is an unstable API."),
FixSuggestion::hint(
"Run again with `--unstable-temporal` flag to enable this API.",
),
];
} else if msg.contains("BroadcastChannel is not defined") {
return vec![
FixSuggestion::info("BroadcastChannel is an unstable API."),
FixSuggestion::hint(
"Run again with `--unstable-broadcast-channel` flag to enable this API.",
),
];
} else if msg.contains("window is not defined") {
return vec![
FixSuggestion::info("window global is not available in Deno 2."),
FixSuggestion::hint("Replace `window` with `globalThis`."),
];
} else if msg.contains("UnsafeWindowSurface is not a constructor") {
return vec![
FixSuggestion::info("Deno.UnsafeWindowSurface is an unstable API."),
FixSuggestion::hint(
"Run again with `--unstable-webgpu` flag to enable this API.",
),
];
// Try to capture errors like:
// ```
// Uncaught Error: Cannot find module '../build/Release/canvas.node'
// Require stack:
// - /.../deno/npm/registry.npmjs.org/canvas/2.11.2/lib/bindings.js
// - /.../.cache/deno/npm/registry.npmjs.org/canvas/2.11.2/lib/canvas.js
// ```
} else if msg.contains("Cannot find module")
&& msg.contains("Require stack")
&& msg.contains(".node'")
{
return vec![
FixSuggestion::info_multiline(
&[
"Trying to execute an npm package using Node-API addons,",
"these packages require local `node_modules` directory to be present."
]
),
FixSuggestion::hint_multiline(
&[
"Add `\"nodeModulesDir\": \"auto\" option to `deno.json`, and then run",
"`deno install --allow-scripts=npm:<package> --entrypoint <script>` to setup `node_modules` directory."
]
)
];
}
}
vec![]
}
fn exit_for_error(error: AnyError) -> ! {
let mut error_string = format!("{error:?}");
let mut error_code = 1;
if let Some(e) = error.downcast_ref::<JsError>() {
let suggestions = get_suggestions_for_terminal_errors(e);
error_string = format_js_error_with_suggestions(e, suggestions);
error_string = format_js_error(e);
} else if let Some(SnapshotFromLockfileError::IntegrityCheckFailed(e)) =
error.downcast_ref::<SnapshotFromLockfileError>()
{
@ -480,6 +391,9 @@ pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) {
}
pub fn main() {
#[cfg(feature = "dhat-heap")]
let profiler = dhat::Profiler::new_heap();
setup_panic_hook();
util::unix::raise_fd_limit();
@ -500,7 +414,12 @@ pub fn main() {
run_subcommand(Arc::new(flags)).await
};
match create_and_run_current_thread_with_maybe_metrics(future) {
let result = create_and_run_current_thread_with_maybe_metrics(future);
#[cfg(feature = "dhat-heap")]
drop(profiler);
match result {
Ok(exit_code) => std::process::exit(exit_code),
Err(err) => exit_for_error(err),
}

View file

@ -331,15 +331,23 @@ impl<TGraphContainer: ModuleGraphContainer>
maybe_referrer: Option<&ModuleSpecifier>,
requested_module_type: RequestedModuleType,
) -> Result<ModuleSource, AnyError> {
let code_source = if let Some(result) = self
let code_source = match self.load_prepared_module(specifier).await? {
Some(code_source) => code_source,
None => {
if self.shared.npm_module_loader.if_in_npm_package(specifier) {
self
.shared
.npm_module_loader
.load_if_in_npm_package(specifier, maybe_referrer)
.await
{
result?
.load(specifier, maybe_referrer)
.await?
} else {
self.load_prepared_module(specifier, maybe_referrer).await?
let mut msg = format!("Loading unprepared module: {specifier}");
if let Some(referrer) = maybe_referrer {
msg = format!("{}, imported from: {}", msg, referrer.as_str());
}
return Err(anyhow!(msg));
}
}
};
let code = if self.shared.is_inspecting {
// we need the code with the source map in order for
@ -514,17 +522,12 @@ impl<TGraphContainer: ModuleGraphContainer>
async fn load_prepared_module(
&self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,
) -> Result<ModuleCodeStringSource, AnyError> {
) -> Result<Option<ModuleCodeStringSource>, AnyError> {
// Note: keep this in sync with the sync version below
let graph = self.graph_container.graph();
match self.load_prepared_module_or_defer_emit(
&graph,
specifier,
maybe_referrer,
) {
Ok(CodeOrDeferredEmit::Code(code_source)) => Ok(code_source),
Ok(CodeOrDeferredEmit::DeferredEmit {
match self.load_prepared_module_or_defer_emit(&graph, specifier)? {
Some(CodeOrDeferredEmit::Code(code_source)) => Ok(Some(code_source)),
Some(CodeOrDeferredEmit::DeferredEmit {
specifier,
media_type,
source,
@ -537,30 +540,25 @@ impl<TGraphContainer: ModuleGraphContainer>
// at this point, we no longer need the parsed source in memory, so free it
self.parsed_source_cache.free(specifier);
Ok(ModuleCodeStringSource {
Ok(Some(ModuleCodeStringSource {
code: ModuleSourceCode::Bytes(transpile_result),
found_url: specifier.clone(),
media_type,
})
}))
}
Err(err) => Err(err),
None => Ok(None),
}
}
fn load_prepared_module_sync(
&self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,
) -> Result<ModuleCodeStringSource, AnyError> {
) -> Result<Option<ModuleCodeStringSource>, AnyError> {
// Note: keep this in sync with the async version above
let graph = self.graph_container.graph();
match self.load_prepared_module_or_defer_emit(
&graph,
specifier,
maybe_referrer,
) {
Ok(CodeOrDeferredEmit::Code(code_source)) => Ok(code_source),
Ok(CodeOrDeferredEmit::DeferredEmit {
match self.load_prepared_module_or_defer_emit(&graph, specifier)? {
Some(CodeOrDeferredEmit::Code(code_source)) => Ok(Some(code_source)),
Some(CodeOrDeferredEmit::DeferredEmit {
specifier,
media_type,
source,
@ -572,13 +570,13 @@ impl<TGraphContainer: ModuleGraphContainer>
// at this point, we no longer need the parsed source in memory, so free it
self.parsed_source_cache.free(specifier);
Ok(ModuleCodeStringSource {
Ok(Some(ModuleCodeStringSource {
code: ModuleSourceCode::Bytes(transpile_result),
found_url: specifier.clone(),
media_type,
})
}))
}
Err(err) => Err(err),
None => Ok(None),
}
}
@ -586,8 +584,7 @@ impl<TGraphContainer: ModuleGraphContainer>
&self,
graph: &'graph ModuleGraph,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,
) -> Result<CodeOrDeferredEmit<'graph>, AnyError> {
) -> Result<Option<CodeOrDeferredEmit<'graph>>, AnyError> {
if specifier.scheme() == "node" {
// Node built-in modules should be handled internally.
unreachable!("Deno bug. {} was misconfigured internally.", specifier);
@ -599,11 +596,11 @@ impl<TGraphContainer: ModuleGraphContainer>
media_type,
specifier,
..
})) => Ok(CodeOrDeferredEmit::Code(ModuleCodeStringSource {
})) => Ok(Some(CodeOrDeferredEmit::Code(ModuleCodeStringSource {
code: ModuleSourceCode::String(source.clone().into()),
found_url: specifier.clone(),
media_type: *media_type,
})),
}))),
Some(deno_graph::Module::Js(JsModule {
source,
media_type,
@ -624,11 +621,11 @@ impl<TGraphContainer: ModuleGraphContainer>
| MediaType::Cts
| MediaType::Jsx
| MediaType::Tsx => {
return Ok(CodeOrDeferredEmit::DeferredEmit {
return Ok(Some(CodeOrDeferredEmit::DeferredEmit {
specifier,
media_type: *media_type,
source,
});
}));
}
MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => {
panic!("Unexpected media type {media_type} for {specifier}")
@ -638,24 +635,18 @@ impl<TGraphContainer: ModuleGraphContainer>
// at this point, we no longer need the parsed source in memory, so free it
self.parsed_source_cache.free(specifier);
Ok(CodeOrDeferredEmit::Code(ModuleCodeStringSource {
Ok(Some(CodeOrDeferredEmit::Code(ModuleCodeStringSource {
code: ModuleSourceCode::String(code),
found_url: specifier.clone(),
media_type: *media_type,
}))
})))
}
Some(
deno_graph::Module::External(_)
| deno_graph::Module::Node(_)
| deno_graph::Module::Npm(_),
)
| None => {
let mut msg = format!("Loading unprepared module: {specifier}");
if let Some(referrer) = maybe_referrer {
msg = format!("{}, imported from: {}", msg, referrer.as_str());
}
Err(anyhow!(msg))
}
| None => Ok(None),
}
}
}
@ -828,7 +819,7 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
"wasm" | "file" | "http" | "https" | "data" | "blob" => (),
_ => return None,
}
let source = self.0.load_prepared_module_sync(&specifier, None).ok()?;
let source = self.0.load_prepared_module_sync(&specifier).ok()??;
source_map_from_code(source.code.as_bytes())
}

View file

@ -1694,15 +1694,14 @@ fn napi_get_new_target(
}
#[napi_sym]
fn napi_call_function(
env_ptr: *mut Env,
recv: napi_value,
func: napi_value,
fn napi_call_function<'s>(
env: &'s mut Env,
recv: napi_value<'s>,
func: napi_value<'s>,
argc: usize,
argv: *const napi_value,
result: *mut napi_value,
argv: *const napi_value<'s>,
result: *mut napi_value<'s>,
) -> napi_status {
let env = check_env!(env_ptr);
check_arg!(env, recv);
let args = if argc > 0 {
check_arg!(env, argv);
@ -1716,11 +1715,11 @@ fn napi_call_function(
let Some(func) =
func.and_then(|f| v8::Local::<v8::Function>::try_from(f).ok())
else {
return napi_set_last_error(env, napi_function_expected);
return napi_function_expected;
};
let Some(v) = func.call(&mut env.scope(), recv.unwrap(), args) else {
return napi_set_last_error(env_ptr, napi_generic_failure);
return napi_generic_failure;
};
if !result.is_null() {
@ -1729,7 +1728,7 @@ fn napi_call_function(
}
}
return napi_clear_last_error(env_ptr);
napi_ok
}
#[napi_sym]

View file

@ -692,7 +692,7 @@ impl Drop for TsFn {
if let Some(finalizer) = self.thread_finalize_cb {
unsafe {
(finalizer)(self.env as _, self.thread_finalize_data, ptr::null_mut());
(finalizer)(self.env as _, self.thread_finalize_data, self.context);
}
}
}

View file

@ -5,6 +5,7 @@ use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_graph::ParsedSourceStore;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis;
@ -16,6 +17,7 @@ use serde::Serialize;
use crate::cache::CacheDBHash;
use crate::cache::NodeAnalysisCache;
use crate::cache::ParsedSourceCache;
use crate::resolver::CliNodeResolver;
use crate::util::fs::canonicalize_path_maybe_not_exists;
@ -56,6 +58,7 @@ pub struct CliCjsCodeAnalyzer {
cache: NodeAnalysisCache,
fs: deno_fs::FileSystemRc,
node_resolver: Arc<CliNodeResolver>,
parsed_source_cache: Option<Arc<ParsedSourceCache>>,
}
impl CliCjsCodeAnalyzer {
@ -63,11 +66,13 @@ impl CliCjsCodeAnalyzer {
cache: NodeAnalysisCache,
fs: deno_fs::FileSystemRc,
node_resolver: Arc<CliNodeResolver>,
parsed_source_cache: Option<Arc<ParsedSourceCache>>,
) -> Self {
Self {
cache,
fs,
node_resolver,
parsed_source_cache,
}
}
@ -107,17 +112,25 @@ impl CliCjsCodeAnalyzer {
}
}
let maybe_parsed_source = self
.parsed_source_cache
.as_ref()
.and_then(|c| c.remove_parsed_source(specifier));
let analysis = deno_core::unsync::spawn_blocking({
let specifier = specifier.clone();
let source: Arc<str> = source.into();
move || -> Result<_, deno_ast::ParseDiagnostic> {
let parsed_source = deno_ast::parse_program(deno_ast::ParseParams {
let parsed_source =
maybe_parsed_source.map(Ok).unwrap_or_else(|| {
deno_ast::parse_program(deno_ast::ParseParams {
specifier,
text: source,
media_type,
capture_tokens: true,
scope_analysis: false,
maybe_syntax: None,
})
})?;
if parsed_source.is_script() {
let analysis = parsed_source.analyze_cjs();

View file

@ -40,7 +40,7 @@ pub fn maybe_auth_header_for_npm_registry(
header::AUTHORIZATION,
header::HeaderValue::from_str(&format!(
"Basic {}",
BASE64_STANDARD.encode(&format!(
BASE64_STANDARD.encode(format!(
"{}:{}",
username.unwrap(),
password.unwrap()

View file

@ -202,10 +202,13 @@ impl RegistryInfoDownloader {
let guard = self.progress_bar.update(package_url.as_str());
let name = name.to_string();
async move {
let maybe_bytes = downloader
.http_client_provider
.get_or_create()?
.download_with_progress(package_url, maybe_auth_header, &guard)
let client = downloader.http_client_provider.get_or_create()?;
let maybe_bytes = client
.download_with_progress_and_retries(
package_url,
maybe_auth_header,
&guard,
)
.await?;
match maybe_bytes {
Some(bytes) => {

View file

@ -172,7 +172,7 @@ impl TarballCache {
let guard = tarball_cache.progress_bar.update(&dist.tarball);
let result = tarball_cache.http_client_provider
.get_or_create()?
.download_with_progress(tarball_uri, maybe_auth_header, &guard)
.download_with_progress_and_retries(tarball_uri, maybe_auth_header, &guard)
.await;
let maybe_bytes = match result {
Ok(maybe_bytes) => maybe_bytes,

View file

@ -2,6 +2,8 @@
use super::bin_entries::BinEntries;
use crate::args::LifecycleScriptsConfig;
use crate::task_runner::TaskStdio;
use crate::util::progress_bar::ProgressBar;
use deno_core::anyhow::Context;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_runtime::deno_io::FromRawIoHandle;
@ -148,6 +150,7 @@ impl<'a> LifecycleScripts<'a> {
snapshot: &NpmResolutionSnapshot,
packages: &[NpmResolutionPackage],
root_node_modules_dir_path: Option<&Path>,
progress_bar: &ProgressBar,
) -> Result<(), AnyError> {
self.warn_not_run_scripts()?;
let get_package_path =
@ -201,7 +204,15 @@ impl<'a> LifecycleScripts<'a> {
{
continue;
}
let exit_code = crate::task_runner::run_task(
let _guard = progress_bar.update_with_prompt(
crate::util::progress_bar::ProgressMessagePrompt::Initialize,
&format!("{}: running '{script_name}' script", package.id.nv),
);
let crate::task_runner::TaskResult {
exit_code,
stderr,
stdout,
} = crate::task_runner::run_task(
crate::task_runner::RunTaskOptions {
task_name: script_name,
script,
@ -211,15 +222,37 @@ impl<'a> LifecycleScripts<'a> {
init_cwd,
argv: &[],
root_node_modules_dir: root_node_modules_dir_path,
stdio: Some(crate::task_runner::TaskIo {
stderr: TaskStdio::piped(),
stdout: TaskStdio::piped(),
}),
},
)
.await?;
let stdout = stdout.unwrap();
let stderr = stderr.unwrap();
if exit_code != 0 {
log::warn!(
"error: script '{}' in '{}' failed with exit code {}",
"error: script '{}' in '{}' failed with exit code {}{}{}",
script_name,
package.id.nv,
exit_code,
if !stdout.trim_ascii().is_empty() {
format!(
"\nstdout:\n{}\n",
String::from_utf8_lossy(&stdout).trim()
)
} else {
String::new()
},
if !stderr.trim_ascii().is_empty() {
format!(
"\nstderr:\n{}\n",
String::from_utf8_lossy(&stderr).trim()
)
} else {
String::new()
},
);
failed_packages.push(&package.id.nv);
// assume if earlier script fails, later ones will fail too

View file

@ -713,6 +713,7 @@ async fn sync_resolution_with_fs(
snapshot,
&package_partitions.packages,
Some(root_node_modules_dir_path),
progress_bar,
)
.await?;

View file

@ -43,7 +43,6 @@ use node_resolver::NodeModuleKind;
use node_resolver::NodeResolution;
use node_resolver::NodeResolutionMode;
use node_resolver::PackageJson;
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
@ -53,7 +52,9 @@ use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::node::CliNodeCodeTranslator;
use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef;
use crate::util::path::specifier_has_extension;
use crate::util::sync::AtomicFlag;
use crate::util::text_encoding::from_utf8_lossy_owned;
pub struct ModuleCodeStringSource {
pub code: ModuleSourceCode,
@ -215,7 +216,7 @@ impl CliNodeResolver {
referrer: &ModuleSpecifier,
mode: NodeResolutionMode,
) -> Result<NodeResolution, NodeResolveError> {
let referrer_kind = if self.cjs_resolutions.contains(referrer) {
let referrer_kind = if self.cjs_resolutions.is_known_cjs(referrer) {
NodeModuleKind::Cjs
} else {
NodeModuleKind::Esm
@ -310,9 +311,7 @@ impl CliNodeResolver {
if self.in_npm_package(&specifier) {
let resolution =
self.node_resolver.url_to_node_resolution(specifier)?;
if let NodeResolution::CommonJs(specifier) = &resolution {
self.cjs_resolutions.insert(specifier.clone());
}
let resolution = self.handle_node_resolution(resolution);
return Ok(Some(resolution.into_url()));
}
}
@ -333,12 +332,17 @@ impl CliNodeResolver {
) -> NodeResolution {
if let NodeResolution::CommonJs(specifier) = &resolution {
// remember that this was a common js resolution
self.cjs_resolutions.insert(specifier.clone());
self.mark_cjs_resolution(specifier.clone());
}
resolution
}
pub fn mark_cjs_resolution(&self, specifier: ModuleSpecifier) {
self.cjs_resolutions.insert(specifier);
}
}
// todo(dsherret): move to module_loader.rs
#[derive(Clone)]
pub struct NpmModuleLoader {
cjs_resolutions: Arc<CjsResolutionStore>,
@ -362,18 +366,9 @@ impl NpmModuleLoader {
}
}
pub async fn load_if_in_npm_package(
&self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,
) -> Option<Result<ModuleCodeStringSource, AnyError>> {
if self.node_resolver.in_npm_package(specifier)
|| (specifier.scheme() == "file" && specifier.path().ends_with(".cjs"))
{
Some(self.load(specifier, maybe_referrer).await)
} else {
None
}
pub fn if_in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
self.node_resolver.in_npm_package(specifier)
|| self.cjs_resolutions.is_known_cjs(specifier)
}
pub async fn load(
@ -418,16 +413,9 @@ impl NpmModuleLoader {
}
})?;
let code = if self.cjs_resolutions.contains(specifier)
|| (specifier.scheme() == "file" && specifier.path().ends_with(".cjs"))
{
let code = if self.cjs_resolutions.is_known_cjs(specifier) {
// translate cjs to esm if it's cjs and inject node globals
let code = match String::from_utf8_lossy(&code) {
Cow::Owned(code) => code,
// SAFETY: `String::from_utf8_lossy` guarantees that the result is valid
// UTF-8 if `Cow::Borrowed` is returned.
Cow::Borrowed(_) => unsafe { String::from_utf8_unchecked(code) },
};
let code = from_utf8_lossy_owned(code);
ModuleSourceCode::String(
self
.node_code_translator
@ -452,8 +440,12 @@ impl NpmModuleLoader {
pub struct CjsResolutionStore(DashSet<ModuleSpecifier>);
impl CjsResolutionStore {
pub fn contains(&self, specifier: &ModuleSpecifier) -> bool {
self.0.contains(specifier)
pub fn is_known_cjs(&self, specifier: &ModuleSpecifier) -> bool {
if specifier.scheme() != "file" {
return false;
}
specifier_has_extension(specifier, "cjs") || self.0.contains(specifier)
}
pub fn insert(&self, specifier: ModuleSpecifier) {

View file

@ -528,6 +528,7 @@
"bare-node-builtins",
"byonm",
"cron",
"detect-cjs",
"ffi",
"fs",
"http",

View file

@ -468,7 +468,11 @@ impl<'a> DenoCompileBinaryWriter<'a> {
self
.http_client_provider
.get_or_create()?
.download_with_progress(download_url.parse()?, None, &progress)
.download_with_progress_and_retries(
download_url.parse()?,
None,
&progress,
)
.await?
};
let bytes = match maybe_bytes {
@ -622,6 +626,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
unstable_config: UnstableConfig {
legacy_flag_enabled: false,
bare_node_builtins: cli_options.unstable_bare_node_builtins(),
detect_cjs: cli_options.unstable_detect_cjs(),
sloppy_imports: cli_options.unstable_sloppy_imports(),
features: cli_options.unstable_features(),
},

View file

@ -586,6 +586,7 @@ pub async fn run(
node_analysis_cache,
fs.clone(),
cli_node_resolver.clone(),
None,
);
let node_code_translator = Arc::new(NodeCodeTranslator::new(
cjs_esm_code_analyzer,
@ -651,7 +652,7 @@ pub async fn run(
workspace_resolver,
node_resolver: cli_node_resolver.clone(),
npm_module_loader: Arc::new(NpmModuleLoader::new(
cjs_resolutions,
cjs_resolutions.clone(),
node_code_translator,
fs.clone(),
cli_node_resolver,
@ -696,6 +697,7 @@ pub async fn run(
});
let worker_factory = CliMainWorkerFactory::new(
Arc::new(BlobStore::default()),
cjs_resolutions,
// Code cache is not supported for standalone binary yet.
None,
feature_checker,
@ -738,6 +740,7 @@ pub async fn run(
node_ipc: None,
serve_port: None,
serve_host: None,
unstable_detect_cjs: metadata.unstable_config.detect_cjs,
},
);

View file

@ -16,8 +16,11 @@ use deno_task_shell::ExecutableCommand;
use deno_task_shell::ExecuteResult;
use deno_task_shell::ShellCommand;
use deno_task_shell::ShellCommandContext;
use deno_task_shell::ShellPipeReader;
use deno_task_shell::ShellPipeWriter;
use lazy_regex::Lazy;
use regex::Regex;
use tokio::task::JoinHandle;
use tokio::task::LocalSet;
use crate::npm::CliNpmResolver;
@ -36,6 +39,35 @@ pub fn get_script_with_args(script: &str, argv: &[String]) -> String {
script.trim().to_owned()
}
pub struct TaskStdio(Option<ShellPipeReader>, ShellPipeWriter);
impl TaskStdio {
pub fn stdout() -> Self {
Self(None, ShellPipeWriter::stdout())
}
pub fn stderr() -> Self {
Self(None, ShellPipeWriter::stderr())
}
pub fn piped() -> Self {
let (r, w) = deno_task_shell::pipe();
Self(Some(r), w)
}
}
pub struct TaskIo {
pub stdout: TaskStdio,
pub stderr: TaskStdio,
}
impl Default for TaskIo {
fn default() -> Self {
Self {
stderr: TaskStdio::stderr(),
stdout: TaskStdio::stdout(),
}
}
}
pub struct RunTaskOptions<'a> {
pub task_name: &'a str,
pub script: &'a str,
@ -45,24 +77,69 @@ pub struct RunTaskOptions<'a> {
pub argv: &'a [String],
pub custom_commands: HashMap<String, Rc<dyn ShellCommand>>,
pub root_node_modules_dir: Option<&'a Path>,
pub stdio: Option<TaskIo>,
}
pub type TaskCustomCommands = HashMap<String, Rc<dyn ShellCommand>>;
pub async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> {
pub struct TaskResult {
pub exit_code: i32,
pub stdout: Option<Vec<u8>>,
pub stderr: Option<Vec<u8>>,
}
pub async fn run_task(
opts: RunTaskOptions<'_>,
) -> Result<TaskResult, AnyError> {
let script = get_script_with_args(opts.script, opts.argv);
let seq_list = deno_task_shell::parser::parse(&script)
.with_context(|| format!("Error parsing script '{}'.", opts.task_name))?;
let env_vars =
prepare_env_vars(opts.env_vars, opts.init_cwd, opts.root_node_modules_dir);
let state =
deno_task_shell::ShellState::new(env_vars, opts.cwd, opts.custom_commands);
let stdio = opts.stdio.unwrap_or_default();
let (
TaskStdio(stdout_read, stdout_write),
TaskStdio(stderr_read, stderr_write),
) = (stdio.stdout, stdio.stderr);
fn read(reader: ShellPipeReader) -> JoinHandle<Result<Vec<u8>, AnyError>> {
tokio::task::spawn_blocking(move || {
let mut buf = Vec::new();
reader.pipe_to(&mut buf)?;
Ok(buf)
})
}
let stdout = stdout_read.map(read);
let stderr = stderr_read.map(read);
let local = LocalSet::new();
let future = deno_task_shell::execute(
let future = async move {
let exit_code = deno_task_shell::execute_with_pipes(
seq_list,
env_vars,
opts.cwd,
opts.custom_commands,
);
Ok(local.run_until(future).await)
state,
ShellPipeReader::stdin(),
stdout_write,
stderr_write,
)
.await;
Ok::<_, AnyError>(TaskResult {
exit_code,
stdout: if let Some(stdout) = stdout {
Some(stdout.await??)
} else {
None
},
stderr: if let Some(stderr) = stderr {
Some(stderr.await??)
} else {
None
},
})
};
local.run_until(future).await
}
fn prepare_env_vars(

View file

@ -54,6 +54,16 @@ pub async fn compile(
);
}
if cli_options.unstable_detect_cjs() {
log::warn!(
concat!(
"{} --unstable-detect-cjs is not properly supported in deno compile. ",
"The compiled executable may encounter runtime errors.",
),
crate::colors::yellow("Warning"),
);
}
let output_path = resolve_compile_executable_output_path(
http_client,
&compile_flags,

View file

@ -17,6 +17,7 @@ use deno_graph::Module;
use deno_graph::ModuleError;
use deno_graph::ModuleGraph;
use deno_graph::Resolution;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
@ -47,6 +48,7 @@ pub async fn info(
let module_graph_creator = factory.module_graph_creator().await?;
let npm_resolver = factory.npm_resolver().await?;
let maybe_lockfile = cli_options.maybe_lockfile();
let npmrc = cli_options.npmrc();
let resolver = factory.workspace_resolver().await?;
let maybe_import_specifier =
@ -88,7 +90,8 @@ pub async fn info(
JSON_SCHEMA_VERSION.into(),
);
}
add_npm_packages_to_json(&mut json_graph, npm_resolver.as_ref());
add_npm_packages_to_json(&mut json_graph, npm_resolver.as_ref(), npmrc);
display::write_json_to_stdout(&json_graph)?;
} else {
let mut output = String::new();
@ -185,6 +188,7 @@ fn print_cache_info(
fn add_npm_packages_to_json(
json: &mut serde_json::Value,
npm_resolver: &dyn CliNpmResolver,
npmrc: &ResolvedNpmRc,
) {
let Some(npm_resolver) = npm_resolver.as_managed() else {
return; // does not include byonm to deno info's output
@ -195,13 +199,11 @@ fn add_npm_packages_to_json(
let json = json.as_object_mut().unwrap();
let modules = json.get_mut("modules").and_then(|m| m.as_array_mut());
if let Some(modules) = modules {
if modules.len() == 1
&& modules[0].get("kind").and_then(|k| k.as_str()) == Some("npm")
{
for module in modules.iter_mut() {
if matches!(module.get("kind").and_then(|k| k.as_str()), Some("npm")) {
// If there is only one module and it's "external", then that means
// someone provided an npm specifier as a cli argument. In this case,
// we want to show which npm package the cli argument resolved to.
let module = &mut modules[0];
let maybe_package = module
.get("specifier")
.and_then(|k| k.as_str())
@ -217,23 +219,8 @@ fn add_npm_packages_to_json(
.insert("npmPackage".to_string(), pkg.id.as_serialized().into());
}
}
} else {
// Filter out npm package references from the modules and instead
// have them only listed as dependencies. This is done because various
// npm specifiers modules in the graph are really just unresolved
// references. So there could be listed multiple npm specifiers
// that would resolve to a single npm package.
for i in (0..modules.len()).rev() {
if matches!(
modules[i].get("kind").and_then(|k| k.as_str()),
Some("npm") | Some("external")
) {
modules.remove(i);
}
}
}
for module in modules.iter_mut() {
let dependencies = module
.get_mut("dependencies")
.and_then(|d| d.as_array_mut());
@ -265,7 +252,7 @@ fn add_npm_packages_to_json(
let mut json_packages = serde_json::Map::with_capacity(sorted_packages.len());
for pkg in sorted_packages {
let mut kv = serde_json::Map::new();
kv.insert("name".to_string(), pkg.id.nv.name.to_string().into());
kv.insert("name".to_string(), pkg.id.nv.name.clone().into());
kv.insert("version".to_string(), pkg.id.nv.version.to_string().into());
let mut deps = pkg.dependencies.values().collect::<Vec<_>>();
deps.sort();
@ -274,6 +261,8 @@ fn add_npm_packages_to_json(
.map(|id| serde_json::Value::String(id.as_serialized()))
.collect::<Vec<_>>();
kv.insert("dependencies".to_string(), deps.into());
let registry_url = npmrc.get_registry_url(&pkg.id.nv.name);
kv.insert("registryUrl".to_string(), registry_url.to_string().into());
json_packages.insert(pkg.id.as_serialized(), kv.into());
}

View file

@ -58,9 +58,9 @@ pub fn install() -> Result<(), AnyError> {
let f = std::fs::File::create(kernel_json_path)?;
serde_json::to_writer_pretty(f, &json_data)?;
install_icon(&user_data_dir, "logo-32x32.png", DENO_ICON_32)?;
install_icon(&user_data_dir, "logo-64x64.png", DENO_ICON_64)?;
install_icon(&user_data_dir, "logo-svg.svg", DENO_ICON_SVG)?;
install_icon(&kernel_dir, "logo-32x32.png", DENO_ICON_32)?;
install_icon(&kernel_dir, "logo-64x64.png", DENO_ICON_64)?;
install_icon(&kernel_dir, "logo-svg.svg", DENO_ICON_SVG)?;
log::info!("✅ Deno kernelspec installed successfully.");
Ok(())

View file

@ -363,7 +363,14 @@ fn package_json_dependency_entry(
selected: SelectedPackage,
) -> (String, String) {
if let Some(npm_package) = selected.package_name.strip_prefix("npm:") {
if selected.import_name == npm_package {
(npm_package.into(), selected.version_req)
} else {
(
selected.import_name,
format!("npm:{}@{}", npm_package, selected.version_req),
)
}
} else if let Some(jsr_package) = selected.package_name.strip_prefix("jsr:") {
let jsr_package = jsr_package.strip_prefix('@').unwrap_or(jsr_package);
let scope_replaced = jsr_package.replace('/', "__");
@ -393,14 +400,17 @@ impl std::fmt::Display for AddCommandName {
fn load_configs(
flags: &Arc<Flags>,
has_jsr_specifiers: impl FnOnce() -> bool,
) -> Result<(CliFactory, Option<NpmConfig>, Option<DenoConfig>), AnyError> {
let cli_factory = CliFactory::from_flags(flags.clone());
let options = cli_factory.cli_options()?;
let npm_config = NpmConfig::from_options(options)?;
let (cli_factory, deno_config) = match DenoConfig::from_options(options)? {
Some(config) => (cli_factory, Some(config)),
None if npm_config.is_some() => (cli_factory, None),
None => {
None if npm_config.is_some() && !has_jsr_specifiers() => {
(cli_factory, None)
}
_ => {
let factory = create_deno_json(flags, options)?;
let options = factory.cli_options()?.clone();
(
@ -420,7 +430,9 @@ pub async fn add(
add_flags: AddFlags,
cmd_name: AddCommandName,
) -> Result<(), AnyError> {
let (cli_factory, npm_config, deno_config) = load_configs(&flags)?;
let (cli_factory, npm_config, deno_config) = load_configs(&flags, || {
add_flags.packages.iter().any(|s| s.starts_with("jsr:"))
})?;
let mut npm_config = ConfigUpdater::maybe_new(npm_config).await?;
let mut deno_config = ConfigUpdater::maybe_new(deno_config).await?;
@ -458,7 +470,7 @@ pub async fn add(
let mut package_reqs = Vec::with_capacity(add_flags.packages.len());
for entry_text in add_flags.packages.iter() {
let req = AddPackageReq::parse(entry_text).with_context(|| {
let req = AddRmPackageReq::parse(entry_text).with_context(|| {
format!("Failed to parse package required: {}", entry_text)
})?;
@ -584,10 +596,10 @@ enum PackageAndVersion {
async fn find_package_and_select_version_for_req(
jsr_resolver: Arc<JsrFetchResolver>,
npm_resolver: Arc<NpmFetchResolver>,
add_package_req: AddPackageReq,
add_package_req: AddRmPackageReq,
) -> Result<PackageAndVersion, AnyError> {
match add_package_req.value {
AddPackageReqValue::Jsr(req) => {
AddRmPackageReqValue::Jsr(req) => {
let jsr_prefixed_name = format!("jsr:{}", &req.name);
let Some(nv) = jsr_resolver.req_to_nv(&req).await else {
if npm_resolver.req_to_nv(&req).await.is_some() {
@ -605,9 +617,11 @@ async fn find_package_and_select_version_for_req(
});
};
let range_symbol = if req.version_req.version_text().starts_with('~') {
'~'
"~"
} else if req.version_req.version_text() == nv.version.to_string() {
""
} else {
'^'
"^"
};
Ok(PackageAndVersion::Selected(SelectedPackage {
import_name: add_package_req.alias,
@ -616,7 +630,7 @@ async fn find_package_and_select_version_for_req(
selected_version: nv.version.to_string(),
}))
}
AddPackageReqValue::Npm(req) => {
AddRmPackageReqValue::Npm(req) => {
let npm_prefixed_name = format!("npm:{}", &req.name);
let Some(nv) = npm_resolver.req_to_nv(&req).await else {
return Ok(PackageAndVersion::NotFound {
@ -625,11 +639,15 @@ async fn find_package_and_select_version_for_req(
package_req: req,
});
};
let range_symbol = if req.version_req.version_text().starts_with('~') {
'~'
"~"
} else if req.version_req.version_text() == nv.version.to_string() {
""
} else {
'^'
"^"
};
Ok(PackageAndVersion::Selected(SelectedPackage {
import_name: add_package_req.alias,
package_name: npm_prefixed_name,
@ -641,18 +659,18 @@ async fn find_package_and_select_version_for_req(
}
#[derive(Debug, PartialEq, Eq)]
enum AddPackageReqValue {
enum AddRmPackageReqValue {
Jsr(PackageReq),
Npm(PackageReq),
}
#[derive(Debug, PartialEq, Eq)]
struct AddPackageReq {
struct AddRmPackageReq {
alias: String,
value: AddPackageReqValue,
value: AddRmPackageReqValue,
}
impl AddPackageReq {
impl AddRmPackageReq {
pub fn parse(entry_text: &str) -> Result<Result<Self, PackageReq>, AnyError> {
enum Prefix {
Jsr,
@ -707,9 +725,9 @@ impl AddPackageReq {
let req_ref =
JsrPackageReqReference::from_str(&format!("jsr:{}", entry_text))?;
let package_req = req_ref.into_inner().req;
Ok(Ok(AddPackageReq {
Ok(Ok(AddRmPackageReq {
alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()),
value: AddPackageReqValue::Jsr(package_req),
value: AddRmPackageReqValue::Jsr(package_req),
}))
}
Prefix::Npm => {
@ -727,9 +745,9 @@ impl AddPackageReq {
deno_semver::RangeSetOrTag::Tag("latest".into()),
);
}
Ok(Ok(AddPackageReq {
Ok(Ok(AddRmPackageReq {
alias: maybe_alias.unwrap_or_else(|| package_req.name.to_string()),
value: AddPackageReqValue::Npm(package_req),
value: AddRmPackageReqValue::Npm(package_req),
}))
}
}
@ -741,6 +759,9 @@ fn generate_imports(mut packages_to_version: Vec<(String, String)>) -> String {
let mut contents = vec![];
let len = packages_to_version.len();
for (index, (package, version)) in packages_to_version.iter().enumerate() {
if index == 0 {
contents.push(String::new()); // force a newline at the start
}
// TODO(bartlomieju): fix it, once we start support specifying version on the cli
contents.push(format!("\"{}\": \"{}\"", package, version));
if index != len - 1 {
@ -754,7 +775,7 @@ pub async fn remove(
flags: Arc<Flags>,
remove_flags: RemoveFlags,
) -> Result<(), AnyError> {
let (_, npm_config, deno_config) = load_configs(&flags)?;
let (_, npm_config, deno_config) = load_configs(&flags, || false)?;
let mut configs = [
ConfigUpdater::maybe_new(npm_config).await?,
@ -764,12 +785,28 @@ pub async fn remove(
let mut removed_packages = vec![];
for package in &remove_flags.packages {
let mut removed = false;
let req = AddRmPackageReq::parse(package).with_context(|| {
format!("Failed to parse package required: {}", package)
})?;
let mut parsed_pkg_name = None;
for config in configs.iter_mut().flatten() {
removed |= config.remove(package);
match &req {
Ok(rm_pkg) => {
if config.remove(&rm_pkg.alias) && parsed_pkg_name.is_none() {
parsed_pkg_name = Some(rm_pkg.alias.clone());
}
if removed {
removed_packages.push(package.clone());
}
Err(pkg) => {
// An alias or a package name without registry/version
// constraints. Try to remove the package anyway.
if config.remove(&pkg.name) && parsed_pkg_name.is_none() {
parsed_pkg_name = Some(pkg.name.clone());
}
}
}
}
if let Some(pkg) = parsed_pkg_name {
removed_packages.push(pkg);
}
}
@ -898,48 +935,52 @@ mod test {
#[test]
fn test_parse_add_package_req() {
assert_eq!(
AddPackageReq::parse("jsr:foo").unwrap().unwrap(),
AddPackageReq {
AddRmPackageReq::parse("jsr:foo").unwrap().unwrap(),
AddRmPackageReq {
alias: "foo".to_string(),
value: AddPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
}
);
assert_eq!(
AddPackageReq::parse("alias@jsr:foo").unwrap().unwrap(),
AddPackageReq {
AddRmPackageReq::parse("alias@jsr:foo").unwrap().unwrap(),
AddRmPackageReq {
alias: "alias".to_string(),
value: AddPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
}
);
assert_eq!(
AddPackageReq::parse("@alias/pkg@npm:foo").unwrap().unwrap(),
AddPackageReq {
AddRmPackageReq::parse("@alias/pkg@npm:foo")
.unwrap()
.unwrap(),
AddRmPackageReq {
alias: "@alias/pkg".to_string(),
value: AddPackageReqValue::Npm(
value: AddRmPackageReqValue::Npm(
PackageReq::from_str("foo@latest").unwrap()
)
}
);
assert_eq!(
AddPackageReq::parse("@alias/pkg@jsr:foo").unwrap().unwrap(),
AddPackageReq {
AddRmPackageReq::parse("@alias/pkg@jsr:foo")
.unwrap()
.unwrap(),
AddRmPackageReq {
alias: "@alias/pkg".to_string(),
value: AddPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
value: AddRmPackageReqValue::Jsr(PackageReq::from_str("foo").unwrap())
}
);
assert_eq!(
AddPackageReq::parse("alias@jsr:foo@^1.5.0")
AddRmPackageReq::parse("alias@jsr:foo@^1.5.0")
.unwrap()
.unwrap(),
AddPackageReq {
AddRmPackageReq {
alias: "alias".to_string(),
value: AddPackageReqValue::Jsr(
value: AddRmPackageReqValue::Jsr(
PackageReq::from_str("foo@^1.5.0").unwrap()
)
}
);
assert_eq!(
AddPackageReq::parse("@scope/pkg@tag")
AddRmPackageReq::parse("@scope/pkg@tag")
.unwrap()
.unwrap_err()
.to_string(),

View file

@ -182,6 +182,7 @@ async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> {
&task_runner::get_script_with_args(script, cli_options.argv()),
);
Ok(
task_runner::run_task(task_runner::RunTaskOptions {
task_name,
script,
@ -191,8 +192,11 @@ async fn run_task(opts: RunTaskOptions<'_>) -> Result<i32, AnyError> {
init_cwd: opts.cli_options.initial_cwd(),
argv: cli_options.argv(),
root_node_modules_dir: npm_resolver.root_node_modules_path(),
stdio: None,
})
.await
.await?
.exit_code,
)
}
fn output_task(task_name: &str, script: &str) {

View file

@ -913,7 +913,7 @@ async fn download_package(
// text above which will stay alive after the progress bars are complete
let progress = progress_bar.update("");
let maybe_bytes = client
.download_with_progress(download_url.clone(), None, &progress)
.download_with_progress_and_retries(download_url.clone(), None, &progress)
.await
.with_context(|| format!("Failed downloading {download_url}. The version you requested may not have been built for the current architecture."))?;
Ok(maybe_bytes)

View file

@ -254,7 +254,11 @@ impl ExportCollector {
let mut import_specifiers = vec![];
if let Some(default_export) = &self.default_export {
if !symbols_to_exclude.contains(default_export) {
// If the default export conflicts with a named export, a named one
// takes precedence.
if !symbols_to_exclude.contains(default_export)
&& !self.named_exports.contains(default_export)
{
import_specifiers.push(ast::ImportSpecifier::Default(
ast::ImportDefaultSpecifier {
span: DUMMY_SP,
@ -1137,6 +1141,30 @@ Deno.test("file:///README.md$6-12.js", async ()=>{
media_type: MediaType::JavaScript,
}],
},
// https://github.com/denoland/deno/issues/26009
Test {
input: Input {
source: r#"
/**
* ```ts
* console.log(Foo)
* ```
*/
export class Foo {}
export default Foo
"#,
specifier: "file:///main.ts",
},
expected: vec![Expected {
source: r#"import { Foo } from "file:///main.ts";
Deno.test("file:///main.ts$3-6.ts", async ()=>{
console.log(Foo);
});
"#,
specifier: "file:///main.ts$3-6.ts",
media_type: MediaType::TypeScript,
}],
},
];
for test in tests {
@ -1326,6 +1354,28 @@ assertEquals(add(1, 2), 3);
media_type: MediaType::JavaScript,
}],
},
// https://github.com/denoland/deno/issues/26009
Test {
input: Input {
source: r#"
/**
* ```ts
* console.log(Foo)
* ```
*/
export class Foo {}
export default Foo
"#,
specifier: "file:///main.ts",
},
expected: vec![Expected {
source: r#"import { Foo } from "file:///main.ts";
console.log(Foo);
"#,
specifier: "file:///main.ts$3-6.ts",
media_type: MediaType::TypeScript,
}],
},
];
for test in tests {
@ -1581,6 +1631,16 @@ declare global {
named_expected: atom_set!(),
default_expected: None,
},
// The identifier `Foo` conflicts, but `ExportCollector` doesn't do
// anything about it. It is handled by `to_import_specifiers` method.
Test {
input: r#"
export class Foo {}
export default Foo
"#,
named_expected: atom_set!("Foo"),
default_expected: Some("Foo".into()),
},
];
for test in tests {

View file

@ -160,10 +160,10 @@ fn atomic_write_file(
data: &[u8],
) -> std::io::Result<()> {
fs.write_file(temp_file_path, data)?;
fs.rename_file(temp_file_path, file_path).map_err(|err| {
fs.rename_file(temp_file_path, file_path)
.inspect_err(|_err| {
// clean up the created temp file on error
let _ = fs.remove_file(temp_file_path);
err
})
}
@ -277,7 +277,7 @@ pub fn write_file_2<T: AsRef<[u8]>>(
/// Similar to `std::fs::canonicalize()` but strips UNC prefixes on Windows.
pub fn canonicalize_path(path: &Path) -> Result<PathBuf, Error> {
Ok(deno_core::strip_unc_prefix(path.canonicalize()?))
Ok(deno_path_util::strip_unc_prefix(path.canonicalize()?))
}
/// Canonicalizes a path which might be non-existent by going up the

View file

@ -14,6 +14,7 @@ pub mod logger;
pub mod path;
pub mod progress_bar;
pub mod result;
pub mod retry;
pub mod sync;
pub mod text_encoding;
pub mod unix;

41
cli/util/retry.rs Normal file
View file

@ -0,0 +1,41 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::future::Future;
use std::time::Duration;
pub fn retry<
F: FnMut() -> Fut,
T,
E,
Fut: Future<Output = Result<T, E>>,
ShouldRetry: FnMut(&E) -> bool,
>(
mut f: F,
mut should_retry: ShouldRetry,
) -> impl Future<Output = Result<T, E>> {
const WAITS: [Duration; 3] = [
Duration::from_millis(100),
Duration::from_millis(250),
Duration::from_millis(500),
];
let mut waits = WAITS.into_iter();
async move {
let mut first_result = None;
loop {
let result = f().await;
match result {
Ok(r) => return Ok(r),
Err(e) if !should_retry(&e) => return Err(e),
_ => {}
}
if first_result.is_none() {
first_result = Some(result);
}
let Some(wait) = waits.next() else {
return first_result.unwrap();
};
tokio::time::sleep(wait).await;
}
}
}

View file

@ -1,6 +1,8 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::ops::Range;
use std::sync::Arc;
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
@ -9,6 +11,15 @@ use deno_core::ModuleSourceCode;
static SOURCE_MAP_PREFIX: &[u8] =
b"//# sourceMappingURL=data:application/json;base64,";
pub fn from_utf8_lossy_owned(bytes: Vec<u8>) -> String {
match String::from_utf8_lossy(&bytes) {
Cow::Owned(code) => code,
// SAFETY: `String::from_utf8_lossy` guarantees that the result is valid
// UTF-8 if `Cow::Borrowed` is returned.
Cow::Borrowed(_) => unsafe { String::from_utf8_unchecked(bytes) },
}
}
pub fn source_map_from_code(code: &[u8]) -> Option<Vec<u8>> {
let range = find_source_map_range(code)?;
let source_map_range = &code[range];
@ -85,6 +96,13 @@ fn find_source_map_range(code: &[u8]) -> Option<Range<usize>> {
}
}
/// Converts an `Arc<str>` to an `Arc<[u8]>`.
pub fn arc_str_to_bytes(arc_str: Arc<str>) -> Arc<[u8]> {
let raw = Arc::into_raw(arc_str);
// SAFETY: This is safe because they have the same memory layout.
unsafe { Arc::from_raw(raw as *const [u8]) }
}
#[cfg(test)]
mod tests {
use std::sync::Arc;

View file

@ -51,9 +51,11 @@ use crate::args::DenoSubcommand;
use crate::args::StorageKeyResolver;
use crate::errors;
use crate::npm::CliNpmResolver;
use crate::resolver::CjsResolutionStore;
use crate::util::checksum;
use crate::util::file_watcher::WatcherCommunicator;
use crate::util::file_watcher::WatcherRestartMode;
use crate::util::path::specifier_has_extension;
use crate::version;
pub struct ModuleLoaderAndSourceMapGetter {
@ -120,11 +122,13 @@ pub struct CliMainWorkerOptions {
pub node_ipc: Option<i64>,
pub serve_port: Option<u16>,
pub serve_host: Option<String>,
pub unstable_detect_cjs: bool,
}
struct SharedWorkerState {
blob_store: Arc<BlobStore>,
broadcast_channel: InMemoryBroadcastChannel,
cjs_resolution_store: Arc<CjsResolutionStore>,
code_cache: Option<Arc<dyn code_cache::CodeCache>>,
compiled_wasm_module_store: CompiledWasmModuleStore,
feature_checker: Arc<FeatureChecker>,
@ -422,6 +426,7 @@ impl CliMainWorkerFactory {
#[allow(clippy::too_many_arguments)]
pub fn new(
blob_store: Arc<BlobStore>,
cjs_resolution_store: Arc<CjsResolutionStore>,
code_cache: Option<Arc<dyn code_cache::CodeCache>>,
feature_checker: Arc<FeatureChecker>,
fs: Arc<dyn deno_fs::FileSystem>,
@ -441,6 +446,7 @@ impl CliMainWorkerFactory {
shared: Arc::new(SharedWorkerState {
blob_store,
broadcast_channel: Default::default(),
cjs_resolution_store,
code_cache,
compiled_wasm_module_store: Default::default(),
feature_checker,
@ -486,6 +492,9 @@ impl CliMainWorkerFactory {
stdio: deno_runtime::deno_io::Stdio,
) -> Result<CliMainWorker, AnyError> {
let shared = &self.shared;
let ModuleLoaderAndSourceMapGetter { module_loader } = shared
.module_loader_factory
.create_for_main(permissions.clone());
let (main_module, is_main_cjs) = if let Ok(package_ref) =
NpmPackageReqReference::from_specifier(&main_module)
{
@ -526,13 +535,29 @@ impl CliMainWorkerFactory {
let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_));
(node_resolution.into_url(), is_main_cjs)
} else {
let is_cjs = main_module.path().ends_with(".cjs");
let is_maybe_cjs_js_ext = self.shared.options.unstable_detect_cjs
&& specifier_has_extension(&main_module, "js")
&& self
.shared
.node_resolver
.get_closest_package_json(&main_module)
.ok()
.flatten()
.map(|pkg_json| pkg_json.typ == "commonjs")
.unwrap_or(false);
let is_cjs = if is_maybe_cjs_js_ext {
// fill the cjs resolution store by preparing the module load
module_loader
.prepare_load(&main_module, None, false)
.await?;
self.shared.cjs_resolution_store.is_known_cjs(&main_module)
} else {
main_module.scheme() == "file"
&& specifier_has_extension(&main_module, "cjs")
};
(main_module, is_cjs)
};
let ModuleLoaderAndSourceMapGetter { module_loader } = shared
.module_loader_factory
.create_for_main(permissions.clone());
let maybe_inspector_server = shared.maybe_inspector_server.clone();
let create_web_worker_cb =

View file

@ -16,5 +16,6 @@ path = "lib.rs"
[dependencies]
async-trait.workspace = true
deno_core.workspace = true
thiserror.workspace = true
tokio.workspace = true
uuid.workspace = true

View file

@ -3,13 +3,13 @@
use std::sync::Arc;
use async_trait::async_trait;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use tokio::sync::broadcast;
use tokio::sync::mpsc;
use uuid::Uuid;
use crate::BroadcastChannel;
use crate::BroadcastChannelError;
#[derive(Clone)]
pub struct InMemoryBroadcastChannel(Arc<Mutex<broadcast::Sender<Message>>>);
@ -41,7 +41,7 @@ impl Default for InMemoryBroadcastChannel {
impl BroadcastChannel for InMemoryBroadcastChannel {
type Resource = InMemoryBroadcastChannelResource;
fn subscribe(&self) -> Result<Self::Resource, AnyError> {
fn subscribe(&self) -> Result<Self::Resource, BroadcastChannelError> {
let (cancel_tx, cancel_rx) = mpsc::unbounded_channel();
let broadcast_rx = self.0.lock().subscribe();
let rx = tokio::sync::Mutex::new((broadcast_rx, cancel_rx));
@ -53,7 +53,10 @@ impl BroadcastChannel for InMemoryBroadcastChannel {
})
}
fn unsubscribe(&self, resource: &Self::Resource) -> Result<(), AnyError> {
fn unsubscribe(
&self,
resource: &Self::Resource,
) -> Result<(), BroadcastChannelError> {
Ok(resource.cancel_tx.send(())?)
}
@ -62,7 +65,7 @@ impl BroadcastChannel for InMemoryBroadcastChannel {
resource: &Self::Resource,
name: String,
data: Vec<u8>,
) -> Result<(), AnyError> {
) -> Result<(), BroadcastChannelError> {
let name = Arc::new(name);
let data = Arc::new(data);
let uuid = resource.uuid;
@ -73,7 +76,7 @@ impl BroadcastChannel for InMemoryBroadcastChannel {
async fn recv(
&self,
resource: &Self::Resource,
) -> Result<Option<crate::Message>, AnyError> {
) -> Result<Option<crate::Message>, BroadcastChannelError> {
let mut g = resource.rx.lock().await;
let (broadcast_rx, cancel_rx) = &mut *g;
loop {

View file

@ -10,34 +10,69 @@ use std::path::PathBuf;
use std::rc::Rc;
use async_trait::async_trait;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::JsBuffer;
use deno_core::OpState;
use deno_core::Resource;
use deno_core::ResourceId;
use tokio::sync::broadcast::error::SendError as BroadcastSendError;
use tokio::sync::mpsc::error::SendError as MpscSendError;
pub const UNSTABLE_FEATURE_NAME: &str = "broadcast-channel";
#[derive(Debug, thiserror::Error)]
pub enum BroadcastChannelError {
#[error(transparent)]
Resource(deno_core::error::AnyError),
#[error(transparent)]
MPSCSendError(MpscSendError<Box<dyn std::fmt::Debug + Send + Sync>>),
#[error(transparent)]
BroadcastSendError(
BroadcastSendError<Box<dyn std::fmt::Debug + Send + Sync>>,
),
#[error(transparent)]
Other(deno_core::error::AnyError),
}
impl<T: std::fmt::Debug + Send + Sync + 'static> From<MpscSendError<T>>
for BroadcastChannelError
{
fn from(value: MpscSendError<T>) -> Self {
BroadcastChannelError::MPSCSendError(MpscSendError(Box::new(value.0)))
}
}
impl<T: std::fmt::Debug + Send + Sync + 'static> From<BroadcastSendError<T>>
for BroadcastChannelError
{
fn from(value: BroadcastSendError<T>) -> Self {
BroadcastChannelError::BroadcastSendError(BroadcastSendError(Box::new(
value.0,
)))
}
}
#[async_trait]
pub trait BroadcastChannel: Clone {
type Resource: Resource;
fn subscribe(&self) -> Result<Self::Resource, AnyError>;
fn subscribe(&self) -> Result<Self::Resource, BroadcastChannelError>;
fn unsubscribe(&self, resource: &Self::Resource) -> Result<(), AnyError>;
fn unsubscribe(
&self,
resource: &Self::Resource,
) -> Result<(), BroadcastChannelError>;
async fn send(
&self,
resource: &Self::Resource,
name: String,
data: Vec<u8>,
) -> Result<(), AnyError>;
) -> Result<(), BroadcastChannelError>;
async fn recv(
&self,
resource: &Self::Resource,
) -> Result<Option<Message>, AnyError>;
) -> Result<Option<Message>, BroadcastChannelError>;
}
pub type Message = (String, Vec<u8>);
@ -46,7 +81,7 @@ pub type Message = (String, Vec<u8>);
#[smi]
pub fn op_broadcast_subscribe<BC>(
state: &mut OpState,
) -> Result<ResourceId, AnyError>
) -> Result<ResourceId, BroadcastChannelError>
where
BC: BroadcastChannel + 'static,
{
@ -62,11 +97,14 @@ where
pub fn op_broadcast_unsubscribe<BC>(
state: &mut OpState,
#[smi] rid: ResourceId,
) -> Result<(), AnyError>
) -> Result<(), BroadcastChannelError>
where
BC: BroadcastChannel + 'static,
{
let resource = state.resource_table.get::<BC::Resource>(rid)?;
let resource = state
.resource_table
.get::<BC::Resource>(rid)
.map_err(BroadcastChannelError::Resource)?;
let bc = state.borrow::<BC>();
bc.unsubscribe(&resource)
}
@ -77,11 +115,15 @@ pub async fn op_broadcast_send<BC>(
#[smi] rid: ResourceId,
#[string] name: String,
#[buffer] buf: JsBuffer,
) -> Result<(), AnyError>
) -> Result<(), BroadcastChannelError>
where
BC: BroadcastChannel + 'static,
{
let resource = state.borrow().resource_table.get::<BC::Resource>(rid)?;
let resource = state
.borrow()
.resource_table
.get::<BC::Resource>(rid)
.map_err(BroadcastChannelError::Resource)?;
let bc = state.borrow().borrow::<BC>().clone();
bc.send(&resource, name, buf.to_vec()).await
}
@ -91,11 +133,15 @@ where
pub async fn op_broadcast_recv<BC>(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
) -> Result<Option<Message>, AnyError>
) -> Result<Option<Message>, BroadcastChannelError>
where
BC: BroadcastChannel + 'static,
{
let resource = state.borrow().resource_table.get::<BC::Resource>(rid)?;
let resource = state
.borrow()
.resource_table
.get::<BC::Resource>(rid)
.map_err(BroadcastChannelError::Resource)?;
let bc = state.borrow().borrow::<BC>().clone();
bc.recv(&resource).await
}

View file

@ -19,4 +19,5 @@ deno_core.workspace = true
rusqlite.workspace = true
serde.workspace = true
sha2.workspace = true
thiserror.workspace = true
tokio.workspace = true

60
ext/cache/lib.rs vendored
View file

@ -7,7 +7,6 @@ use std::sync::Arc;
use async_trait::async_trait;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
@ -19,6 +18,20 @@ use deno_core::ResourceId;
mod sqlite;
pub use sqlite::SqliteBackedCache;
#[derive(Debug, thiserror::Error)]
pub enum CacheError {
#[error(transparent)]
Sqlite(#[from] rusqlite::Error),
#[error(transparent)]
JoinError(#[from] tokio::task::JoinError),
#[error(transparent)]
Resource(deno_core::error::AnyError),
#[error(transparent)]
Other(deno_core::error::AnyError),
#[error(transparent)]
Io(#[from] std::io::Error),
}
#[derive(Clone)]
pub struct CreateCache<C: Cache + 'static>(pub Arc<dyn Fn() -> C>);
@ -92,26 +105,31 @@ pub struct CacheDeleteRequest {
pub trait Cache: Clone + 'static {
type CacheMatchResourceType: Resource;
async fn storage_open(&self, cache_name: String) -> Result<i64, AnyError>;
async fn storage_has(&self, cache_name: String) -> Result<bool, AnyError>;
async fn storage_delete(&self, cache_name: String) -> Result<bool, AnyError>;
async fn storage_open(&self, cache_name: String) -> Result<i64, CacheError>;
async fn storage_has(&self, cache_name: String) -> Result<bool, CacheError>;
async fn storage_delete(
&self,
cache_name: String,
) -> Result<bool, CacheError>;
/// Put a resource into the cache.
async fn put(
&self,
request_response: CachePutRequest,
resource: Option<Rc<dyn Resource>>,
) -> Result<(), AnyError>;
) -> Result<(), CacheError>;
async fn r#match(
&self,
request: CacheMatchRequest,
) -> Result<
Option<(CacheMatchResponseMeta, Option<Self::CacheMatchResourceType>)>,
AnyError,
CacheError,
>;
async fn delete(&self, request: CacheDeleteRequest)
-> Result<bool, AnyError>;
async fn delete(
&self,
request: CacheDeleteRequest,
) -> Result<bool, CacheError>;
}
#[op2(async)]
@ -119,7 +137,7 @@ pub trait Cache: Clone + 'static {
pub async fn op_cache_storage_open<CA>(
state: Rc<RefCell<OpState>>,
#[string] cache_name: String,
) -> Result<i64, AnyError>
) -> Result<i64, CacheError>
where
CA: Cache,
{
@ -131,7 +149,7 @@ where
pub async fn op_cache_storage_has<CA>(
state: Rc<RefCell<OpState>>,
#[string] cache_name: String,
) -> Result<bool, AnyError>
) -> Result<bool, CacheError>
where
CA: Cache,
{
@ -143,7 +161,7 @@ where
pub async fn op_cache_storage_delete<CA>(
state: Rc<RefCell<OpState>>,
#[string] cache_name: String,
) -> Result<bool, AnyError>
) -> Result<bool, CacheError>
where
CA: Cache,
{
@ -155,13 +173,19 @@ where
pub async fn op_cache_put<CA>(
state: Rc<RefCell<OpState>>,
#[serde] request_response: CachePutRequest,
) -> Result<(), AnyError>
) -> Result<(), CacheError>
where
CA: Cache,
{
let cache = get_cache::<CA>(&state)?;
let resource = match request_response.response_rid {
Some(rid) => Some(state.borrow_mut().resource_table.take_any(rid)?),
Some(rid) => Some(
state
.borrow_mut()
.resource_table
.take_any(rid)
.map_err(CacheError::Resource)?,
),
None => None,
};
cache.put(request_response, resource).await
@ -172,7 +196,7 @@ where
pub async fn op_cache_match<CA>(
state: Rc<RefCell<OpState>>,
#[serde] request: CacheMatchRequest,
) -> Result<Option<CacheMatchResponse>, AnyError>
) -> Result<Option<CacheMatchResponse>, CacheError>
where
CA: Cache,
{
@ -191,7 +215,7 @@ where
pub async fn op_cache_delete<CA>(
state: Rc<RefCell<OpState>>,
#[serde] request: CacheDeleteRequest,
) -> Result<bool, AnyError>
) -> Result<bool, CacheError>
where
CA: Cache,
{
@ -199,7 +223,7 @@ where
cache.delete(request).await
}
pub fn get_cache<CA>(state: &Rc<RefCell<OpState>>) -> Result<CA, AnyError>
pub fn get_cache<CA>(state: &Rc<RefCell<OpState>>) -> Result<CA, CacheError>
where
CA: Cache,
{
@ -211,7 +235,9 @@ where
state.put(cache);
Ok(state.borrow::<CA>().clone())
} else {
Err(type_error("CacheStorage is not available in this context"))
Err(CacheError::Other(type_error(
"CacheStorage is not available in this context",
)))
}
}

50
ext/cache/sqlite.rs vendored
View file

@ -30,6 +30,7 @@ use crate::serialize_headers;
use crate::vary_header_matches;
use crate::Cache;
use crate::CacheDeleteRequest;
use crate::CacheError;
use crate::CacheMatchRequest;
use crate::CacheMatchResponseMeta;
use crate::CachePutRequest;
@ -102,7 +103,7 @@ impl Cache for SqliteBackedCache {
/// Open a cache storage. Internally, this creates a row in the
/// sqlite db if the cache doesn't exist and returns the internal id
/// of the cache.
async fn storage_open(&self, cache_name: String) -> Result<i64, AnyError> {
async fn storage_open(&self, cache_name: String) -> Result<i64, CacheError> {
let db = self.connection.clone();
let cache_storage_dir = self.cache_storage_dir.clone();
spawn_blocking(move || {
@ -121,14 +122,14 @@ impl Cache for SqliteBackedCache {
)?;
let responses_dir = get_responses_dir(cache_storage_dir, cache_id);
std::fs::create_dir_all(responses_dir)?;
Ok::<i64, AnyError>(cache_id)
Ok::<i64, CacheError>(cache_id)
})
.await?
}
/// Check if a cache with the provided name exists.
/// Note: this doesn't check the disk, it only checks the sqlite db.
async fn storage_has(&self, cache_name: String) -> Result<bool, AnyError> {
async fn storage_has(&self, cache_name: String) -> Result<bool, CacheError> {
let db = self.connection.clone();
spawn_blocking(move || {
let db = db.lock();
@ -140,13 +141,16 @@ impl Cache for SqliteBackedCache {
Ok(count > 0)
},
)?;
Ok::<bool, AnyError>(cache_exists)
Ok::<bool, CacheError>(cache_exists)
})
.await?
}
/// Delete a cache storage. Internally, this deletes the row in the sqlite db.
async fn storage_delete(&self, cache_name: String) -> Result<bool, AnyError> {
async fn storage_delete(
&self,
cache_name: String,
) -> Result<bool, CacheError> {
let db = self.connection.clone();
let cache_storage_dir = self.cache_storage_dir.clone();
spawn_blocking(move || {
@ -167,7 +171,7 @@ impl Cache for SqliteBackedCache {
std::fs::remove_dir_all(cache_dir)?;
}
}
Ok::<bool, AnyError>(maybe_cache_id.is_some())
Ok::<bool, CacheError>(maybe_cache_id.is_some())
})
.await?
}
@ -176,10 +180,12 @@ impl Cache for SqliteBackedCache {
&self,
request_response: CachePutRequest,
resource: Option<Rc<dyn Resource>>,
) -> Result<(), AnyError> {
) -> Result<(), CacheError> {
let db = self.connection.clone();
let cache_storage_dir = self.cache_storage_dir.clone();
let now = SystemTime::now().duration_since(UNIX_EPOCH)?;
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("SystemTime is before unix epoch");
if let Some(resource) = resource {
let body_key = hash(&format!(
@ -193,7 +199,11 @@ impl Cache for SqliteBackedCache {
let mut file = tokio::fs::File::create(response_path).await?;
let mut buf = BufMutView::new(64 * 1024);
loop {
let (size, buf2) = resource.clone().read_byob(buf).await?;
let (size, buf2) = resource
.clone()
.read_byob(buf)
.await
.map_err(CacheError::Other)?;
if size == 0 {
break;
}
@ -224,7 +234,7 @@ impl Cache for SqliteBackedCache {
request: CacheMatchRequest,
) -> Result<
Option<(CacheMatchResponseMeta, Option<CacheResponseResource>)>,
AnyError,
CacheError,
> {
let db = self.connection.clone();
let cache_storage_dir = self.cache_storage_dir.clone();
@ -290,19 +300,17 @@ impl Cache for SqliteBackedCache {
}
Err(err) => return Err(err.into()),
};
return Ok(Some((cache_meta, Some(CacheResponseResource::new(file)))));
Ok(Some((cache_meta, Some(CacheResponseResource::new(file)))))
}
Some((cache_meta, None)) => {
return Ok(Some((cache_meta, None)));
}
None => return Ok(None),
Some((cache_meta, None)) => Ok(Some((cache_meta, None))),
None => Ok(None),
}
}
async fn delete(
&self,
request: CacheDeleteRequest,
) -> Result<bool, AnyError> {
) -> Result<bool, CacheError> {
let db = self.connection.clone();
spawn_blocking(move || {
// TODO(@satyarohith): remove the response body from disk if one exists
@ -311,17 +319,17 @@ impl Cache for SqliteBackedCache {
"DELETE FROM request_response_list WHERE cache_id = ?1 AND request_url = ?2",
(request.cache_id, &request.request_url),
)?;
Ok::<bool, AnyError>(rows_effected > 0)
Ok::<bool, CacheError>(rows_effected > 0)
})
.await?
}
}
async fn insert_cache_asset(
db: Arc<Mutex<rusqlite::Connection>>,
db: Arc<Mutex<Connection>>,
put: CachePutRequest,
response_body_key: Option<String>,
) -> Result<Option<String>, deno_core::anyhow::Error> {
) -> Result<Option<String>, CacheError> {
spawn_blocking(move || {
let maybe_response_body = {
let db = db.lock();
@ -339,7 +347,7 @@ async fn insert_cache_asset(
response_body_key,
put.response_status,
put.response_status_text,
SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs(),
SystemTime::now().duration_since(UNIX_EPOCH).expect("SystemTime is before unix epoch").as_secs(),
),
|row| {
let response_body_key: Option<String> = row.get(0)?;
@ -347,7 +355,7 @@ async fn insert_cache_asset(
},
)?
};
Ok::<Option<String>, AnyError>(maybe_response_body)
Ok::<Option<String>, CacheError>(maybe_response_body)
}).await?
}

View file

@ -18,3 +18,4 @@ deno_core.workspace = true
deno_webgpu.workspace = true
image = { version = "0.24.7", default-features = false, features = ["png"] }
serde = { workspace = true, features = ["derive"] }
thiserror.workspace = true

View file

@ -1,7 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::ToJsBuffer;
use image::imageops::FilterType;
@ -13,6 +11,14 @@ use serde::Deserialize;
use serde::Serialize;
use std::path::PathBuf;
#[derive(Debug, thiserror::Error)]
pub enum CanvasError {
#[error("Color type '{0:?}' not supported")]
UnsupportedColorType(ColorType),
#[error(transparent)]
Image(#[from] image::ImageError),
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
enum ImageResizeQuality {
@ -43,7 +49,7 @@ struct ImageProcessArgs {
fn op_image_process(
#[buffer] buf: &[u8],
#[serde] args: ImageProcessArgs,
) -> Result<ToJsBuffer, AnyError> {
) -> ToJsBuffer {
let view =
RgbaImage::from_vec(args.width, args.height, buf.to_vec()).unwrap();
@ -105,7 +111,7 @@ fn op_image_process(
}
}
Ok(image_out.to_vec().into())
image_out.to_vec().into()
}
#[derive(Debug, Serialize)]
@ -117,17 +123,16 @@ struct DecodedPng {
#[op2]
#[serde]
fn op_image_decode_png(#[buffer] buf: &[u8]) -> Result<DecodedPng, AnyError> {
fn op_image_decode_png(
#[buffer] buf: &[u8],
) -> Result<DecodedPng, CanvasError> {
let png = image::codecs::png::PngDecoder::new(buf)?;
let (width, height) = png.dimensions();
// TODO(@crowlKats): maybe use DynamicImage https://docs.rs/image/0.24.7/image/enum.DynamicImage.html ?
if png.color_type() != ColorType::Rgba8 {
return Err(type_error(format!(
"Color type '{:?}' not supported",
png.color_type()
)));
return Err(CanvasError::UnsupportedColorType(png.color_type()));
}
// read_image will assert that the buffer is the correct size, so we need to fill it with zeros

View file

@ -84,6 +84,7 @@ const {
NumberIsInteger,
NumberIsNaN,
NumberParseInt,
NumberParseFloat,
NumberPrototypeToFixed,
NumberPrototypeToString,
NumberPrototypeValueOf,
@ -3010,20 +3011,18 @@ function inspectArgs(args, inspectOptions = { __proto__: null }) {
} else if (ArrayPrototypeIncludes(["d", "i"], char)) {
// Format as an integer.
const value = args[a++];
if (typeof value == "bigint") {
formattedArg = `${value}n`;
} else if (typeof value == "number") {
formattedArg = `${NumberParseInt(String(value))}`;
} else {
if (typeof value === "symbol") {
formattedArg = "NaN";
} else {
formattedArg = `${NumberParseInt(value)}`;
}
} else if (char == "f") {
// Format as a floating point value.
const value = args[a++];
if (typeof value == "number") {
formattedArg = `${value}`;
} else {
if (typeof value === "symbol") {
formattedArg = "NaN";
} else {
formattedArg = `${NumberParseFloat(value)}`;
}
} else if (ArrayPrototypeIncludes(["O", "o"], char)) {
// Format as an object.
@ -3257,7 +3256,7 @@ class Console {
const stringifyValue = (value) =>
inspectValueWithQuotes(value, {
...getDefaultInspectOptions(),
...getConsoleInspectOptions(noColorStdout()),
depth: 1,
compact: true,
});

View file

@ -19,4 +19,5 @@ async-trait.workspace = true
chrono = { workspace = true, features = ["now"] }
deno_core.workspace = true
saffron.workspace = true
thiserror.workspace = true
tokio.workspace = true

View file

@ -1,17 +1,17 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::CronError;
use async_trait::async_trait;
use deno_core::error::AnyError;
pub trait CronHandler {
type EH: CronHandle + 'static;
fn create(&self, spec: CronSpec) -> Result<Self::EH, AnyError>;
fn create(&self, spec: CronSpec) -> Result<Self::EH, CronError>;
}
#[async_trait(?Send)]
pub trait CronHandle {
async fn next(&self, prev_success: bool) -> Result<bool, AnyError>;
async fn next(&self, prev_success: bool) -> Result<bool, CronError>;
fn close(&self);
}

View file

@ -7,16 +7,13 @@ use std::borrow::Cow;
use std::cell::RefCell;
use std::rc::Rc;
pub use crate::interface::*;
use deno_core::error::get_custom_error_class;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::OpState;
use deno_core::Resource;
use deno_core::ResourceId;
pub use crate::interface::*;
pub const UNSTABLE_FEATURE_NAME: &str = "cron";
deno_core::extension!(deno_cron,
@ -49,6 +46,28 @@ impl<EH: CronHandle + 'static> Resource for CronResource<EH> {
}
}
#[derive(Debug, thiserror::Error)]
pub enum CronError {
#[error(transparent)]
Resource(deno_core::error::AnyError),
#[error("Cron name cannot exceed 64 characters: current length {0}")]
NameExceeded(usize),
#[error("Invalid cron name: only alphanumeric characters, whitespace, hyphens, and underscores are allowed")]
NameInvalid,
#[error("Cron with this name already exists")]
AlreadyExists,
#[error("Too many crons")]
TooManyCrons,
#[error("Invalid cron schedule")]
InvalidCron,
#[error("Invalid backoff schedule")]
InvalidBackoff,
#[error(transparent)]
AcquireError(#[from] tokio::sync::AcquireError),
#[error(transparent)]
Other(deno_core::error::AnyError),
}
#[op2]
#[smi]
fn op_cron_create<C>(
@ -56,7 +75,7 @@ fn op_cron_create<C>(
#[string] name: String,
#[string] cron_schedule: String,
#[serde] backoff_schedule: Option<Vec<u32>>,
) -> Result<ResourceId, AnyError>
) -> Result<ResourceId, CronError>
where
C: CronHandler + 'static,
{
@ -90,7 +109,7 @@ async fn op_cron_next<C>(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
prev_success: bool,
) -> Result<bool, AnyError>
) -> Result<bool, CronError>
where
C: CronHandler + 'static,
{
@ -102,7 +121,7 @@ where
if get_custom_error_class(&err) == Some("BadResource") {
return Ok(false);
} else {
return Err(err);
return Err(CronError::Resource(err));
}
}
};
@ -112,17 +131,14 @@ where
cron_handler.next(prev_success).await
}
fn validate_cron_name(name: &str) -> Result<(), AnyError> {
fn validate_cron_name(name: &str) -> Result<(), CronError> {
if name.len() > 64 {
return Err(type_error(format!(
"Cron name cannot exceed 64 characters: current length {}",
name.len()
)));
return Err(CronError::NameExceeded(name.len()));
}
if !name.chars().all(|c| {
c.is_ascii_whitespace() || c.is_ascii_alphanumeric() || c == '_' || c == '-'
}) {
return Err(type_error("Invalid cron name: only alphanumeric characters, whitespace, hyphens, and underscores are allowed"));
return Err(CronError::NameInvalid);
}
Ok(())
}

View file

@ -10,8 +10,6 @@ use std::rc::Weak;
use std::sync::Arc;
use async_trait::async_trait;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::futures::FutureExt;
use deno_core::unsync::spawn;
@ -21,6 +19,7 @@ use tokio::sync::mpsc::WeakSender;
use tokio::sync::OwnedSemaphorePermit;
use tokio::sync::Semaphore;
use crate::CronError;
use crate::CronHandle;
use crate::CronHandler;
use crate::CronSpec;
@ -81,7 +80,7 @@ impl LocalCronHandler {
async fn cron_loop(
runtime_state: Rc<RefCell<RuntimeState>>,
mut cron_schedule_rx: mpsc::Receiver<(String, bool)>,
) -> Result<(), AnyError> {
) -> Result<(), CronError> {
loop {
let earliest_deadline = runtime_state
.borrow()
@ -154,7 +153,7 @@ impl LocalCronHandler {
impl RuntimeState {
fn get_ready_crons(
&mut self,
) -> Result<Vec<(String, WeakSender<()>)>, AnyError> {
) -> Result<Vec<(String, WeakSender<()>)>, CronError> {
let now = chrono::Utc::now().timestamp_millis() as u64;
let ready = {
@ -191,7 +190,7 @@ impl RuntimeState {
impl CronHandler for LocalCronHandler {
type EH = CronExecutionHandle;
fn create(&self, spec: CronSpec) -> Result<Self::EH, AnyError> {
fn create(&self, spec: CronSpec) -> Result<Self::EH, CronError> {
// Ensure that the cron loop is started.
self.cron_loop_join_handle.get_or_init(|| {
let (cron_schedule_tx, cron_schedule_rx) =
@ -208,17 +207,17 @@ impl CronHandler for LocalCronHandler {
let mut runtime_state = self.runtime_state.borrow_mut();
if runtime_state.crons.len() > MAX_CRONS {
return Err(type_error("Too many crons"));
return Err(CronError::TooManyCrons);
}
if runtime_state.crons.contains_key(&spec.name) {
return Err(type_error("Cron with this name already exists"));
return Err(CronError::AlreadyExists);
}
// Validate schedule expression.
spec
.cron_schedule
.parse::<saffron::Cron>()
.map_err(|_| type_error("Invalid cron schedule"))?;
.map_err(|_| CronError::InvalidCron)?;
// Validate backoff_schedule.
if let Some(backoff_schedule) = &spec.backoff_schedule {
@ -263,7 +262,7 @@ struct Inner {
#[async_trait(?Send)]
impl CronHandle for CronExecutionHandle {
async fn next(&self, prev_success: bool) -> Result<bool, AnyError> {
async fn next(&self, prev_success: bool) -> Result<bool, CronError> {
self.inner.borrow_mut().permit.take();
if self
@ -300,7 +299,7 @@ impl CronHandle for CronExecutionHandle {
}
}
fn compute_next_deadline(cron_expression: &str) -> Result<u64, AnyError> {
fn compute_next_deadline(cron_expression: &str) -> Result<u64, CronError> {
let now = chrono::Utc::now();
if let Ok(test_schedule) = env::var("DENO_CRON_TEST_SCHEDULE_OFFSET") {
@ -311,19 +310,21 @@ fn compute_next_deadline(cron_expression: &str) -> Result<u64, AnyError> {
let cron = cron_expression
.parse::<saffron::Cron>()
.map_err(|_| anyhow::anyhow!("invalid cron expression"))?;
.map_err(|_| CronError::InvalidCron)?;
let Some(next_deadline) = cron.next_after(now) else {
return Err(anyhow::anyhow!("invalid cron expression"));
return Err(CronError::InvalidCron);
};
Ok(next_deadline.timestamp_millis() as u64)
}
fn validate_backoff_schedule(backoff_schedule: &[u32]) -> Result<(), AnyError> {
fn validate_backoff_schedule(
backoff_schedule: &[u32],
) -> Result<(), CronError> {
if backoff_schedule.len() > MAX_BACKOFF_COUNT {
return Err(type_error("Invalid backoff schedule"));
return Err(CronError::InvalidBackoff);
}
if backoff_schedule.iter().any(|s| *s > MAX_BACKOFF_MS) {
return Err(type_error("Invalid backoff schedule"));
return Err(CronError::InvalidBackoff);
}
Ok(())
}

View file

@ -61,6 +61,15 @@ const _mimeType = Symbol("mime type");
const _body = Symbol("body");
const _brand = webidl.brand;
// it's slightly faster to cache these
const webidlConvertersBodyInitDomString =
webidl.converters["BodyInit_DOMString?"];
const webidlConvertersUSVString = webidl.converters["USVString"];
const webidlConvertersUnsignedShort = webidl.converters["unsigned short"];
const webidlConvertersAny = webidl.converters["any"];
const webidlConvertersByteString = webidl.converters["ByteString"];
const webidlConvertersHeadersInit = webidl.converters["HeadersInit"];
/**
* @typedef InnerResponse
* @property {"basic" | "cors" | "default" | "error" | "opaque" | "opaqueredirect"} type
@ -259,8 +268,8 @@ class Response {
*/
static redirect(url, status = 302) {
const prefix = "Failed to execute 'Response.redirect'";
url = webidl.converters["USVString"](url, prefix, "Argument 1");
status = webidl.converters["unsigned short"](status, prefix, "Argument 2");
url = webidlConvertersUSVString(url, prefix, "Argument 1");
status = webidlConvertersUnsignedShort(status, prefix, "Argument 2");
const baseURL = getLocationHref();
const parsedURL = new URL(url, baseURL);
@ -286,8 +295,8 @@ class Response {
*/
static json(data = undefined, init = { __proto__: null }) {
const prefix = "Failed to execute 'Response.json'";
data = webidl.converters.any(data);
init = webidl.converters["ResponseInit_fast"](init, prefix, "Argument 2");
data = webidlConvertersAny(data);
init = webidlConvertersResponseInitFast(init, prefix, "Argument 2");
const str = serializeJSValueToJSONString(data);
const res = extractBody(str);
@ -313,8 +322,8 @@ class Response {
}
const prefix = "Failed to construct 'Response'";
body = webidl.converters["BodyInit_DOMString?"](body, prefix, "Argument 1");
init = webidl.converters["ResponseInit_fast"](init, prefix, "Argument 2");
body = webidlConvertersBodyInitDomString(body, prefix, "Argument 1");
init = webidlConvertersResponseInitFast(init, prefix, "Argument 2");
this[_response] = newInnerResponse();
this[_headers] = headersFromHeaderList(
@ -443,22 +452,24 @@ webidl.converters["Response"] = webidl.createInterfaceConverter(
"Response",
ResponsePrototype,
);
webidl.converters["ResponseInit"] = webidl.createDictionaryConverter(
const webidlConvertersResponseInit = webidl.converters["ResponseInit"] = webidl
.createDictionaryConverter(
"ResponseInit",
[{
key: "status",
defaultValue: 200,
converter: webidl.converters["unsigned short"],
converter: webidlConvertersUnsignedShort,
}, {
key: "statusText",
defaultValue: "",
converter: webidl.converters["ByteString"],
converter: webidlConvertersByteString,
}, {
key: "headers",
converter: webidl.converters["HeadersInit"],
converter: webidlConvertersHeadersInit,
}],
);
webidl.converters["ResponseInit_fast"] = function (
const webidlConvertersResponseInitFast = webidl
.converters["ResponseInit_fast"] = function (
init,
prefix,
context,
@ -471,18 +482,18 @@ webidl.converters["ResponseInit_fast"] = function (
if (typeof init === "object" && !core.isProxy(init)) {
// Not a proxy fast path
const status = init.status !== undefined
? webidl.converters["unsigned short"](init.status)
? webidlConvertersUnsignedShort(init.status)
: 200;
const statusText = init.statusText !== undefined
? webidl.converters["ByteString"](init.statusText)
? webidlConvertersByteString(init.statusText)
: "";
const headers = init.headers !== undefined
? webidl.converters["HeadersInit"](init.headers)
? webidlConvertersHeadersInit(init.headers)
: undefined;
return { status, statusText, headers };
}
// Slow default path
return webidl.converters["ResponseInit"](init, prefix, context, opts);
return webidlConvertersResponseInit(init, prefix, context, opts);
};
/**

View file

@ -24,6 +24,8 @@ log.workspace = true
serde.workspace = true
serde-value = "0.7"
serde_json = "1.0"
thiserror.workspace = true
tokio.workspace = true
[target.'cfg(windows)'.dependencies]
winapi = { workspace = true, features = ["errhandlingapi", "minwindef", "ntdef", "winbase", "winnt"] }

View file

@ -7,9 +7,6 @@ use crate::symbol::NativeType;
use crate::symbol::Symbol;
use crate::FfiPermissions;
use crate::ForeignFunction;
use deno_core::anyhow::anyhow;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::serde_json::Value;
use deno_core::serde_v8::ExternalPointer;
@ -24,6 +21,20 @@ use std::ffi::c_void;
use std::future::Future;
use std::rc::Rc;
#[derive(Debug, thiserror::Error)]
pub enum CallError {
#[error(transparent)]
IR(#[from] IRError),
#[error("Nonblocking FFI call failed: {0}")]
NonblockingCallFailure(#[source] tokio::task::JoinError),
#[error("Invalid FFI symbol name: '{0}'")]
InvalidSymbol(String),
#[error(transparent)]
Permission(deno_core::error::AnyError),
#[error(transparent)]
Callback(#[from] super::CallbackError),
}
// SAFETY: Makes an FFI call
unsafe fn ffi_call_rtype_struct(
cif: &libffi::middle::Cif,
@ -45,7 +56,7 @@ pub(crate) fn ffi_call_sync<'scope>(
args: v8::FunctionCallbackArguments,
symbol: &Symbol,
out_buffer: Option<OutBuffer>,
) -> Result<NativeValue, AnyError>
) -> Result<NativeValue, CallError>
where
'scope: 'scope,
{
@ -201,7 +212,7 @@ fn ffi_call(
parameter_types: &[NativeType],
result_type: NativeType,
out_buffer: Option<OutBuffer>,
) -> Result<FfiValue, AnyError> {
) -> FfiValue {
let call_args: Vec<Arg> = call_args
.iter()
.enumerate()
@ -214,7 +225,7 @@ fn ffi_call(
// SAFETY: types in the `Cif` match the actual calling convention and
// types of symbol.
unsafe {
Ok(match result_type {
match result_type {
NativeType::Void => {
cif.call::<()>(fun_ptr, &call_args);
FfiValue::Value(Value::from(()))
@ -267,7 +278,7 @@ fn ffi_call(
ffi_call_rtype_struct(cif, &fun_ptr, call_args, out_buffer.unwrap().0);
FfiValue::Value(Value::Null)
}
})
}
}
}
@ -280,14 +291,16 @@ pub fn op_ffi_call_ptr_nonblocking<FP>(
#[serde] def: ForeignFunction,
parameters: v8::Local<v8::Array>,
out_buffer: Option<v8::Local<v8::TypedArray>>,
) -> Result<impl Future<Output = Result<FfiValue, AnyError>>, AnyError>
) -> Result<impl Future<Output = Result<FfiValue, CallError>>, CallError>
where
FP: FfiPermissions + 'static,
{
{
let mut state = state.borrow_mut();
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(CallError::Permission)?;
};
let symbol = PtrSymbol::new(pointer, &def)?;
@ -309,7 +322,7 @@ where
Ok(async move {
let result = join_handle
.await
.map_err(|err| anyhow!("Nonblocking FFI call failed: {}", err))??;
.map_err(CallError::NonblockingCallFailure)?;
// SAFETY: Same return type declared to libffi; trust user to have it right beyond that.
Ok(result)
})
@ -325,16 +338,17 @@ pub fn op_ffi_call_nonblocking(
#[string] symbol: String,
parameters: v8::Local<v8::Array>,
out_buffer: Option<v8::Local<v8::TypedArray>>,
) -> Result<impl Future<Output = Result<FfiValue, AnyError>>, AnyError> {
) -> Result<impl Future<Output = Result<FfiValue, CallError>>, CallError> {
let symbol = {
let state = state.borrow();
let resource = state.resource_table.get::<DynamicLibraryResource>(rid)?;
let resource = state
.resource_table
.get::<DynamicLibraryResource>(rid)
.map_err(CallError::Permission)?;
let symbols = &resource.symbols;
*symbols
.get(&symbol)
.ok_or_else(|| {
type_error(format!("Invalid FFI symbol name: '{symbol}'"))
})?
.ok_or_else(|| CallError::InvalidSymbol(symbol))?
.clone()
};
@ -362,7 +376,7 @@ pub fn op_ffi_call_nonblocking(
Ok(async move {
let result = join_handle
.await
.map_err(|err| anyhow!("Nonblocking FFI call failed: {}", err))??;
.map_err(CallError::NonblockingCallFailure)?;
// SAFETY: Same return type declared to libffi; trust user to have it right beyond that.
Ok(result)
})
@ -377,14 +391,16 @@ pub fn op_ffi_call_ptr<FP>(
#[serde] def: ForeignFunction,
parameters: v8::Local<v8::Array>,
out_buffer: Option<v8::Local<v8::TypedArray>>,
) -> Result<FfiValue, AnyError>
) -> Result<FfiValue, CallError>
where
FP: FfiPermissions + 'static,
{
{
let mut state = state.borrow_mut();
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(CallError::Permission)?;
};
let symbol = PtrSymbol::new(pointer, &def)?;
@ -399,7 +415,7 @@ where
&def.parameters,
def.result.clone(),
out_buffer_ptr,
)?;
);
// SAFETY: Same return type declared to libffi; trust user to have it right beyond that.
Ok(result)
}

View file

@ -3,7 +3,6 @@
use crate::symbol::NativeType;
use crate::FfiPermissions;
use crate::ForeignFunction;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::v8;
use deno_core::v8::TryCatch;
@ -34,6 +33,16 @@ thread_local! {
static LOCAL_THREAD_ID: RefCell<u32> = const { RefCell::new(0) };
}
#[derive(Debug, thiserror::Error)]
pub enum CallbackError {
#[error(transparent)]
Resource(deno_core::error::AnyError),
#[error(transparent)]
Permission(deno_core::error::AnyError),
#[error(transparent)]
Other(deno_core::error::AnyError),
}
#[derive(Clone)]
pub struct PtrSymbol {
pub cif: libffi::middle::Cif,
@ -44,7 +53,7 @@ impl PtrSymbol {
pub fn new(
fn_ptr: *mut c_void,
def: &ForeignFunction,
) -> Result<Self, AnyError> {
) -> Result<Self, CallbackError> {
let ptr = libffi::middle::CodePtr::from_ptr(fn_ptr as _);
let cif = libffi::middle::Cif::new(
def
@ -52,8 +61,13 @@ impl PtrSymbol {
.clone()
.into_iter()
.map(libffi::middle::Type::try_from)
.collect::<Result<Vec<_>, _>>()?,
def.result.clone().try_into()?,
.collect::<Result<Vec<_>, _>>()
.map_err(CallbackError::Other)?,
def
.result
.clone()
.try_into()
.map_err(CallbackError::Other)?,
);
Ok(Self { cif, ptr })
@ -522,10 +536,12 @@ unsafe fn do_ffi_callback(
pub fn op_ffi_unsafe_callback_ref(
state: Rc<RefCell<OpState>>,
#[smi] rid: ResourceId,
) -> Result<impl Future<Output = Result<(), AnyError>>, AnyError> {
) -> Result<impl Future<Output = ()>, CallbackError> {
let state = state.borrow();
let callback_resource =
state.resource_table.get::<UnsafeCallbackResource>(rid)?;
let callback_resource = state
.resource_table
.get::<UnsafeCallbackResource>(rid)
.map_err(CallbackError::Resource)?;
Ok(async move {
let info: &mut CallbackInfo =
@ -536,7 +552,6 @@ pub fn op_ffi_unsafe_callback_ref(
.into_future()
.or_cancel(callback_resource.cancel.clone())
.await;
Ok(())
})
}
@ -552,12 +567,14 @@ pub fn op_ffi_unsafe_callback_create<FP, 'scope>(
scope: &mut v8::HandleScope<'scope>,
#[serde] args: RegisterCallbackArgs,
cb: v8::Local<v8::Function>,
) -> Result<v8::Local<'scope, v8::Value>, AnyError>
) -> Result<v8::Local<'scope, v8::Value>, CallbackError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(CallbackError::Permission)?;
let thread_id: u32 = LOCAL_THREAD_ID.with(|s| {
let value = *s.borrow();
@ -593,8 +610,10 @@ where
.parameters
.into_iter()
.map(libffi::middle::Type::try_from)
.collect::<Result<Vec<_>, _>>()?,
libffi::middle::Type::try_from(args.result)?,
.collect::<Result<Vec<_>, _>>()
.map_err(CallbackError::Other)?,
libffi::middle::Type::try_from(args.result)
.map_err(CallbackError::Other)?,
);
// SAFETY: CallbackInfo is leaked, is not null and stays valid as long as the callback exists.
@ -624,14 +643,16 @@ pub fn op_ffi_unsafe_callback_close(
state: &mut OpState,
scope: &mut v8::HandleScope,
#[smi] rid: ResourceId,
) -> Result<(), AnyError> {
) -> Result<(), CallbackError> {
// SAFETY: This drops the closure and the callback info associated with it.
// Any retained function pointers to the closure become dangling pointers.
// It is up to the user to know that it is safe to call the `close()` on the
// UnsafeCallback instance.
unsafe {
let callback_resource =
state.resource_table.take::<UnsafeCallbackResource>(rid)?;
let callback_resource = state
.resource_table
.take::<UnsafeCallbackResource>(rid)
.map_err(CallbackError::Resource)?;
let info = Box::from_raw(callback_resource.info);
let _ = v8::Global::from_raw(scope, info.callback);
let _ = v8::Global::from_raw(scope, info.context);

View file

@ -6,8 +6,6 @@ use crate::symbol::Symbol;
use crate::turbocall;
use crate::turbocall::Turbocall;
use crate::FfiPermissions;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::v8;
use deno_core::GarbageCollected;
@ -21,6 +19,22 @@ use std::collections::HashMap;
use std::ffi::c_void;
use std::rc::Rc;
#[derive(Debug, thiserror::Error)]
pub enum DlfcnError {
#[error("Failed to register symbol {symbol}: {error}")]
RegisterSymbol {
symbol: String,
#[source]
error: dlopen2::Error,
},
#[error(transparent)]
Dlopen(#[from] dlopen2::Error),
#[error(transparent)]
Permission(deno_core::error::AnyError),
#[error(transparent)]
Other(deno_core::error::AnyError),
}
pub struct DynamicLibraryResource {
lib: Library,
pub symbols: HashMap<String, Box<Symbol>>,
@ -37,7 +51,7 @@ impl Resource for DynamicLibraryResource {
}
impl DynamicLibraryResource {
pub fn get_static(&self, symbol: String) -> Result<*mut c_void, AnyError> {
pub fn get_static(&self, symbol: String) -> Result<*mut c_void, DlfcnError> {
// By default, Err returned by this function does not tell
// which symbol wasn't exported. So we'll modify the error
// message to include the name of symbol.
@ -45,9 +59,7 @@ impl DynamicLibraryResource {
// SAFETY: The obtained T symbol is the size of a pointer.
match unsafe { self.lib.symbol::<*mut c_void>(&symbol) } {
Ok(value) => Ok(Ok(value)),
Err(err) => Err(generic_error(format!(
"Failed to register symbol {symbol}: {err}"
))),
Err(error) => Err(DlfcnError::RegisterSymbol { symbol, error }),
}?
}
}
@ -116,12 +128,14 @@ pub fn op_ffi_load<'scope, FP>(
scope: &mut v8::HandleScope<'scope>,
state: &mut OpState,
#[serde] args: FfiLoadArgs,
) -> Result<v8::Local<'scope, v8::Value>, AnyError>
) -> Result<v8::Local<'scope, v8::Value>, DlfcnError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
let path = permissions.check_partial_with_path(&args.path)?;
let path = permissions
.check_partial_with_path(&args.path)
.map_err(DlfcnError::Permission)?;
let lib = Library::open(&path).map_err(|e| {
dlopen2::Error::OpeningLibraryError(std::io::Error::new(
@ -152,15 +166,16 @@ where
// SAFETY: The obtained T symbol is the size of a pointer.
match unsafe { resource.lib.symbol::<*const c_void>(symbol) } {
Ok(value) => Ok(value),
Err(err) => if foreign_fn.optional {
Err(error) => if foreign_fn.optional {
let null: v8::Local<v8::Value> = v8::null(scope).into();
let func_key = v8::String::new(scope, &symbol_key).unwrap();
obj.set(scope, func_key.into(), null);
break 'register_symbol;
} else {
Err(generic_error(format!(
"Failed to register symbol {symbol}: {err}"
)))
Err(DlfcnError::RegisterSymbol {
symbol: symbol.to_owned(),
error,
})
},
}?;
@ -171,8 +186,13 @@ where
.clone()
.into_iter()
.map(libffi::middle::Type::try_from)
.collect::<Result<Vec<_>, _>>()?,
foreign_fn.result.clone().try_into()?,
.collect::<Result<Vec<_>, _>>()
.map_err(DlfcnError::Other)?,
foreign_fn
.result
.clone()
.try_into()
.map_err(DlfcnError::Other)?,
);
let func_key = v8::String::new(scope, &symbol_key).unwrap();

View file

@ -1,13 +1,55 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::symbol::NativeType;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::v8;
use libffi::middle::Arg;
use std::ffi::c_void;
use std::ptr;
#[derive(Debug, thiserror::Error)]
pub enum IRError {
#[error("Invalid FFI u8 type, expected boolean")]
InvalidU8ExpectedBoolean,
#[error("Invalid FFI u8 type, expected unsigned integer")]
InvalidU8ExpectedUnsignedInteger,
#[error("Invalid FFI i8 type, expected integer")]
InvalidI8,
#[error("Invalid FFI u16 type, expected unsigned integer")]
InvalidU16,
#[error("Invalid FFI i16 type, expected integer")]
InvalidI16,
#[error("Invalid FFI u32 type, expected unsigned integer")]
InvalidU32,
#[error("Invalid FFI i32 type, expected integer")]
InvalidI32,
#[error("Invalid FFI u64 type, expected unsigned integer")]
InvalidU64,
#[error("Invalid FFI i64 type, expected integer")]
InvalidI64,
#[error("Invalid FFI usize type, expected unsigned integer")]
InvalidUsize,
#[error("Invalid FFI isize type, expected integer")]
InvalidIsize,
#[error("Invalid FFI f32 type, expected number")]
InvalidF32,
#[error("Invalid FFI f64 type, expected number")]
InvalidF64,
#[error("Invalid FFI pointer type, expected null, or External")]
InvalidPointerType,
#[error(
"Invalid FFI buffer type, expected null, ArrayBuffer, or ArrayBufferView"
)]
InvalidBufferType,
#[error("Invalid FFI ArrayBufferView, expected data in the buffer")]
InvalidArrayBufferView,
#[error("Invalid FFI ArrayBuffer, expected data in buffer")]
InvalidArrayBuffer,
#[error("Invalid FFI struct type, expected ArrayBuffer, or ArrayBufferView")]
InvalidStructType,
#[error("Invalid FFI function type, expected null, or External")]
InvalidFunctionType,
}
pub struct OutBuffer(pub *mut u8);
// SAFETY: OutBuffer is allocated by us in 00_ffi.js and is guaranteed to be
@ -126,9 +168,9 @@ unsafe impl Send for NativeValue {}
#[inline]
pub fn ffi_parse_bool_arg(
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
let bool_value = v8::Local::<v8::Boolean>::try_from(arg)
.map_err(|_| type_error("Invalid FFI u8 type, expected boolean"))?
.map_err(|_| IRError::InvalidU8ExpectedBoolean)?
.is_true();
Ok(NativeValue { bool_value })
}
@ -136,9 +178,9 @@ pub fn ffi_parse_bool_arg(
#[inline]
pub fn ffi_parse_u8_arg(
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
let u8_value = v8::Local::<v8::Uint32>::try_from(arg)
.map_err(|_| type_error("Invalid FFI u8 type, expected unsigned integer"))?
.map_err(|_| IRError::InvalidU8ExpectedUnsignedInteger)?
.value() as u8;
Ok(NativeValue { u8_value })
}
@ -146,9 +188,9 @@ pub fn ffi_parse_u8_arg(
#[inline]
pub fn ffi_parse_i8_arg(
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
let i8_value = v8::Local::<v8::Int32>::try_from(arg)
.map_err(|_| type_error("Invalid FFI i8 type, expected integer"))?
.map_err(|_| IRError::InvalidI8)?
.value() as i8;
Ok(NativeValue { i8_value })
}
@ -156,9 +198,9 @@ pub fn ffi_parse_i8_arg(
#[inline]
pub fn ffi_parse_u16_arg(
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
let u16_value = v8::Local::<v8::Uint32>::try_from(arg)
.map_err(|_| type_error("Invalid FFI u16 type, expected unsigned integer"))?
.map_err(|_| IRError::InvalidU16)?
.value() as u16;
Ok(NativeValue { u16_value })
}
@ -166,9 +208,9 @@ pub fn ffi_parse_u16_arg(
#[inline]
pub fn ffi_parse_i16_arg(
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
let i16_value = v8::Local::<v8::Int32>::try_from(arg)
.map_err(|_| type_error("Invalid FFI i16 type, expected integer"))?
.map_err(|_| IRError::InvalidI16)?
.value() as i16;
Ok(NativeValue { i16_value })
}
@ -176,9 +218,9 @@ pub fn ffi_parse_i16_arg(
#[inline]
pub fn ffi_parse_u32_arg(
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
let u32_value = v8::Local::<v8::Uint32>::try_from(arg)
.map_err(|_| type_error("Invalid FFI u32 type, expected unsigned integer"))?
.map_err(|_| IRError::InvalidU32)?
.value();
Ok(NativeValue { u32_value })
}
@ -186,9 +228,9 @@ pub fn ffi_parse_u32_arg(
#[inline]
pub fn ffi_parse_i32_arg(
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
let i32_value = v8::Local::<v8::Int32>::try_from(arg)
.map_err(|_| type_error("Invalid FFI i32 type, expected integer"))?
.map_err(|_| IRError::InvalidI32)?
.value();
Ok(NativeValue { i32_value })
}
@ -197,7 +239,7 @@ pub fn ffi_parse_i32_arg(
pub fn ffi_parse_u64_arg(
scope: &mut v8::HandleScope,
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
// Order of checking:
// 1. BigInt: Uncommon and not supported by Fast API, so optimise slow call for this case.
// 2. Number: Common, supported by Fast API, so let that be the optimal case.
@ -207,9 +249,7 @@ pub fn ffi_parse_u64_arg(
} else if let Ok(value) = v8::Local::<v8::Number>::try_from(arg) {
value.integer_value(scope).unwrap() as u64
} else {
return Err(type_error(
"Invalid FFI u64 type, expected unsigned integer",
));
return Err(IRError::InvalidU64);
};
Ok(NativeValue { u64_value })
}
@ -218,7 +258,7 @@ pub fn ffi_parse_u64_arg(
pub fn ffi_parse_i64_arg(
scope: &mut v8::HandleScope,
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
// Order of checking:
// 1. BigInt: Uncommon and not supported by Fast API, so optimise slow call for this case.
// 2. Number: Common, supported by Fast API, so let that be the optimal case.
@ -228,7 +268,7 @@ pub fn ffi_parse_i64_arg(
} else if let Ok(value) = v8::Local::<v8::Number>::try_from(arg) {
value.integer_value(scope).unwrap()
} else {
return Err(type_error("Invalid FFI i64 type, expected integer"));
return Err(IRError::InvalidI64);
};
Ok(NativeValue { i64_value })
}
@ -237,7 +277,7 @@ pub fn ffi_parse_i64_arg(
pub fn ffi_parse_usize_arg(
scope: &mut v8::HandleScope,
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
// Order of checking:
// 1. BigInt: Uncommon and not supported by Fast API, so optimise slow call for this case.
// 2. Number: Common, supported by Fast API, so let that be the optimal case.
@ -247,7 +287,7 @@ pub fn ffi_parse_usize_arg(
} else if let Ok(value) = v8::Local::<v8::Number>::try_from(arg) {
value.integer_value(scope).unwrap() as usize
} else {
return Err(type_error("Invalid FFI usize type, expected integer"));
return Err(IRError::InvalidUsize);
};
Ok(NativeValue { usize_value })
}
@ -256,7 +296,7 @@ pub fn ffi_parse_usize_arg(
pub fn ffi_parse_isize_arg(
scope: &mut v8::HandleScope,
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
// Order of checking:
// 1. BigInt: Uncommon and not supported by Fast API, so optimise slow call for this case.
// 2. Number: Common, supported by Fast API, so let that be the optimal case.
@ -266,7 +306,7 @@ pub fn ffi_parse_isize_arg(
} else if let Ok(value) = v8::Local::<v8::Number>::try_from(arg) {
value.integer_value(scope).unwrap() as isize
} else {
return Err(type_error("Invalid FFI isize type, expected integer"));
return Err(IRError::InvalidIsize);
};
Ok(NativeValue { isize_value })
}
@ -274,9 +314,9 @@ pub fn ffi_parse_isize_arg(
#[inline]
pub fn ffi_parse_f32_arg(
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
let f32_value = v8::Local::<v8::Number>::try_from(arg)
.map_err(|_| type_error("Invalid FFI f32 type, expected number"))?
.map_err(|_| IRError::InvalidF32)?
.value() as f32;
Ok(NativeValue { f32_value })
}
@ -284,9 +324,9 @@ pub fn ffi_parse_f32_arg(
#[inline]
pub fn ffi_parse_f64_arg(
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
let f64_value = v8::Local::<v8::Number>::try_from(arg)
.map_err(|_| type_error("Invalid FFI f64 type, expected number"))?
.map_err(|_| IRError::InvalidF64)?
.value();
Ok(NativeValue { f64_value })
}
@ -295,15 +335,13 @@ pub fn ffi_parse_f64_arg(
pub fn ffi_parse_pointer_arg(
_scope: &mut v8::HandleScope,
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
let pointer = if let Ok(value) = v8::Local::<v8::External>::try_from(arg) {
value.value()
} else if arg.is_null() {
ptr::null_mut()
} else {
return Err(type_error(
"Invalid FFI pointer type, expected null, or External",
));
return Err(IRError::InvalidPointerType);
};
Ok(NativeValue { pointer })
}
@ -312,7 +350,7 @@ pub fn ffi_parse_pointer_arg(
pub fn ffi_parse_buffer_arg(
scope: &mut v8::HandleScope,
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
// Order of checking:
// 1. ArrayBuffer: Fairly common and not supported by Fast API, optimise this case.
// 2. ArrayBufferView: Common and supported by Fast API
@ -328,9 +366,7 @@ pub fn ffi_parse_buffer_arg(
let byte_offset = value.byte_offset();
let pointer = value
.buffer(scope)
.ok_or_else(|| {
type_error("Invalid FFI ArrayBufferView, expected data in the buffer")
})?
.ok_or(IRError::InvalidArrayBufferView)?
.data();
if let Some(non_null) = pointer {
// SAFETY: Pointer is non-null, and V8 guarantees that the byte_offset
@ -342,9 +378,7 @@ pub fn ffi_parse_buffer_arg(
} else if arg.is_null() {
ptr::null_mut()
} else {
return Err(type_error(
"Invalid FFI buffer type, expected null, ArrayBuffer, or ArrayBufferView",
));
return Err(IRError::InvalidBufferType);
};
Ok(NativeValue { pointer })
}
@ -353,7 +387,7 @@ pub fn ffi_parse_buffer_arg(
pub fn ffi_parse_struct_arg(
scope: &mut v8::HandleScope,
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
// Order of checking:
// 1. ArrayBuffer: Fairly common and not supported by Fast API, optimise this case.
// 2. ArrayBufferView: Common and supported by Fast API
@ -362,31 +396,23 @@ pub fn ffi_parse_struct_arg(
if let Some(non_null) = value.data() {
non_null.as_ptr()
} else {
return Err(type_error(
"Invalid FFI ArrayBuffer, expected data in buffer",
));
return Err(IRError::InvalidArrayBuffer);
}
} else if let Ok(value) = v8::Local::<v8::ArrayBufferView>::try_from(arg) {
let byte_offset = value.byte_offset();
let pointer = value
.buffer(scope)
.ok_or_else(|| {
type_error("Invalid FFI ArrayBufferView, expected data in the buffer")
})?
.ok_or(IRError::InvalidArrayBufferView)?
.data();
if let Some(non_null) = pointer {
// SAFETY: Pointer is non-null, and V8 guarantees that the byte_offset
// is within the buffer backing store.
unsafe { non_null.as_ptr().add(byte_offset) }
} else {
return Err(type_error(
"Invalid FFI ArrayBufferView, expected data in buffer",
));
return Err(IRError::InvalidArrayBufferView);
}
} else {
return Err(type_error(
"Invalid FFI struct type, expected ArrayBuffer, or ArrayBufferView",
));
return Err(IRError::InvalidStructType);
};
Ok(NativeValue { pointer })
}
@ -395,15 +421,13 @@ pub fn ffi_parse_struct_arg(
pub fn ffi_parse_function_arg(
_scope: &mut v8::HandleScope,
arg: v8::Local<v8::Value>,
) -> Result<NativeValue, AnyError> {
) -> Result<NativeValue, IRError> {
let pointer = if let Ok(value) = v8::Local::<v8::External>::try_from(arg) {
value.value()
} else if arg.is_null() {
ptr::null_mut()
} else {
return Err(type_error(
"Invalid FFI function type, expected null, or External",
));
return Err(IRError::InvalidFunctionType);
};
Ok(NativeValue { pointer })
}
@ -412,7 +436,7 @@ pub fn ffi_parse_args<'scope>(
scope: &mut v8::HandleScope<'scope>,
args: v8::Local<v8::Array>,
parameter_types: &[NativeType],
) -> Result<Vec<NativeValue>, AnyError>
) -> Result<Vec<NativeValue>, IRError>
where
'scope: 'scope,
{

View file

@ -29,6 +29,13 @@ use repr::*;
use symbol::NativeType;
use symbol::Symbol;
pub use call::CallError;
pub use callback::CallbackError;
pub use dlfcn::DlfcnError;
pub use ir::IRError;
pub use r#static::StaticError;
pub use repr::ReprError;
#[cfg(not(target_pointer_width = "64"))]
compile_error!("platform not supported");

View file

@ -1,9 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::FfiPermissions;
use deno_core::error::range_error;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::v8;
use deno_core::OpState;
@ -12,16 +9,58 @@ use std::ffi::c_void;
use std::ffi::CStr;
use std::ptr;
#[derive(Debug, thiserror::Error)]
pub enum ReprError {
#[error("Invalid pointer to offset, pointer is null")]
InvalidOffset,
#[error("Invalid ArrayBuffer pointer, pointer is null")]
InvalidArrayBuffer,
#[error("Destination length is smaller than source length")]
DestinationLengthTooShort,
#[error("Invalid CString pointer, pointer is null")]
InvalidCString,
#[error("Invalid CString pointer, string exceeds max length")]
CStringTooLong,
#[error("Invalid bool pointer, pointer is null")]
InvalidBool,
#[error("Invalid u8 pointer, pointer is null")]
InvalidU8,
#[error("Invalid i8 pointer, pointer is null")]
InvalidI8,
#[error("Invalid u16 pointer, pointer is null")]
InvalidU16,
#[error("Invalid i16 pointer, pointer is null")]
InvalidI16,
#[error("Invalid u32 pointer, pointer is null")]
InvalidU32,
#[error("Invalid i32 pointer, pointer is null")]
InvalidI32,
#[error("Invalid u64 pointer, pointer is null")]
InvalidU64,
#[error("Invalid i64 pointer, pointer is null")]
InvalidI64,
#[error("Invalid f32 pointer, pointer is null")]
InvalidF32,
#[error("Invalid f64 pointer, pointer is null")]
InvalidF64,
#[error("Invalid pointer pointer, pointer is null")]
InvalidPointer,
#[error(transparent)]
Permission(deno_core::error::AnyError),
}
#[op2(fast)]
pub fn op_ffi_ptr_create<FP>(
state: &mut OpState,
#[bigint] ptr_number: usize,
) -> Result<*mut c_void, AnyError>
) -> Result<*mut c_void, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
Ok(ptr_number as *mut c_void)
}
@ -31,12 +70,14 @@ pub fn op_ffi_ptr_equals<FP>(
state: &mut OpState,
a: *const c_void,
b: *const c_void,
) -> Result<bool, AnyError>
) -> Result<bool, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
Ok(a == b)
}
@ -45,12 +86,14 @@ where
pub fn op_ffi_ptr_of<FP>(
state: &mut OpState,
#[anybuffer] buf: *const u8,
) -> Result<*mut c_void, AnyError>
) -> Result<*mut c_void, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
Ok(buf as *mut c_void)
}
@ -59,12 +102,14 @@ where
pub fn op_ffi_ptr_of_exact<FP>(
state: &mut OpState,
buf: v8::Local<v8::ArrayBufferView>,
) -> Result<*mut c_void, AnyError>
) -> Result<*mut c_void, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
let Some(buf) = buf.get_backing_store() else {
return Ok(0 as _);
@ -80,15 +125,17 @@ pub fn op_ffi_ptr_offset<FP>(
state: &mut OpState,
ptr: *mut c_void,
#[number] offset: isize,
) -> Result<*mut c_void, AnyError>
) -> Result<*mut c_void, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid pointer to offset, pointer is null"));
return Err(ReprError::InvalidOffset);
}
// TODO(mmastrac): Create a RawPointer that can safely do pointer math.
@ -110,12 +157,14 @@ unsafe extern "C" fn noop_deleter_callback(
pub fn op_ffi_ptr_value<FP>(
state: &mut OpState,
ptr: *mut c_void,
) -> Result<usize, AnyError>
) -> Result<usize, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
Ok(ptr as usize)
}
@ -127,15 +176,17 @@ pub fn op_ffi_get_buf<FP, 'scope>(
ptr: *mut c_void,
#[number] offset: isize,
#[number] len: usize,
) -> Result<v8::Local<'scope, v8::ArrayBuffer>, AnyError>
) -> Result<v8::Local<'scope, v8::ArrayBuffer>, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid ArrayBuffer pointer, pointer is null"));
return Err(ReprError::InvalidArrayBuffer);
}
// SAFETY: Trust the user to have provided a real pointer, offset, and a valid matching size to it. Since this is a foreign pointer, we should not do any deletion.
@ -144,7 +195,7 @@ where
ptr.offset(offset),
len,
noop_deleter_callback,
std::ptr::null_mut(),
ptr::null_mut(),
)
}
.make_shared();
@ -159,19 +210,19 @@ pub fn op_ffi_buf_copy_into<FP>(
#[number] offset: isize,
#[anybuffer] dst: &mut [u8],
#[number] len: usize,
) -> Result<(), AnyError>
) -> Result<(), ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if src.is_null() {
Err(type_error("Invalid ArrayBuffer pointer, pointer is null"))
Err(ReprError::InvalidArrayBuffer)
} else if dst.len() < len {
Err(range_error(
"Destination length is smaller than source length",
))
Err(ReprError::DestinationLengthTooShort)
} else {
let src = src as *const c_void;
@ -190,24 +241,24 @@ pub fn op_ffi_cstr_read<FP, 'scope>(
state: &mut OpState,
ptr: *mut c_void,
#[number] offset: isize,
) -> Result<v8::Local<'scope, v8::String>, AnyError>
) -> Result<v8::Local<'scope, v8::String>, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid CString pointer, pointer is null"));
return Err(ReprError::InvalidCString);
}
let cstr =
// SAFETY: Pointer and offset are user provided.
unsafe { CStr::from_ptr(ptr.offset(offset) as *const c_char) }.to_bytes();
let value = v8::String::new_from_utf8(scope, cstr, v8::NewStringType::Normal)
.ok_or_else(|| {
type_error("Invalid CString pointer, string exceeds max length")
})?;
.ok_or_else(|| ReprError::CStringTooLong)?;
Ok(value)
}
@ -216,15 +267,17 @@ pub fn op_ffi_read_bool<FP>(
state: &mut OpState,
ptr: *mut c_void,
#[number] offset: isize,
) -> Result<bool, AnyError>
) -> Result<bool, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid bool pointer, pointer is null"));
return Err(ReprError::InvalidBool);
}
// SAFETY: ptr and offset are user provided.
@ -236,15 +289,17 @@ pub fn op_ffi_read_u8<FP>(
state: &mut OpState,
ptr: *mut c_void,
#[number] offset: isize,
) -> Result<u32, AnyError>
) -> Result<u32, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid u8 pointer, pointer is null"));
return Err(ReprError::InvalidU8);
}
// SAFETY: ptr and offset are user provided.
@ -258,15 +313,17 @@ pub fn op_ffi_read_i8<FP>(
state: &mut OpState,
ptr: *mut c_void,
#[number] offset: isize,
) -> Result<i32, AnyError>
) -> Result<i32, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid i8 pointer, pointer is null"));
return Err(ReprError::InvalidI8);
}
// SAFETY: ptr and offset are user provided.
@ -280,15 +337,17 @@ pub fn op_ffi_read_u16<FP>(
state: &mut OpState,
ptr: *mut c_void,
#[number] offset: isize,
) -> Result<u32, AnyError>
) -> Result<u32, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid u16 pointer, pointer is null"));
return Err(ReprError::InvalidU16);
}
// SAFETY: ptr and offset are user provided.
@ -302,15 +361,17 @@ pub fn op_ffi_read_i16<FP>(
state: &mut OpState,
ptr: *mut c_void,
#[number] offset: isize,
) -> Result<i32, AnyError>
) -> Result<i32, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid i16 pointer, pointer is null"));
return Err(ReprError::InvalidI16);
}
// SAFETY: ptr and offset are user provided.
@ -324,15 +385,17 @@ pub fn op_ffi_read_u32<FP>(
state: &mut OpState,
ptr: *mut c_void,
#[number] offset: isize,
) -> Result<u32, AnyError>
) -> Result<u32, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid u32 pointer, pointer is null"));
return Err(ReprError::InvalidU32);
}
// SAFETY: ptr and offset are user provided.
@ -344,15 +407,17 @@ pub fn op_ffi_read_i32<FP>(
state: &mut OpState,
ptr: *mut c_void,
#[number] offset: isize,
) -> Result<i32, AnyError>
) -> Result<i32, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid i32 pointer, pointer is null"));
return Err(ReprError::InvalidI32);
}
// SAFETY: ptr and offset are user provided.
@ -367,15 +432,17 @@ pub fn op_ffi_read_u64<FP>(
// Note: The representation of 64-bit integers is function-wide. We cannot
// choose to take this parameter as a number while returning a bigint.
#[bigint] offset: isize,
) -> Result<u64, AnyError>
) -> Result<u64, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid u64 pointer, pointer is null"));
return Err(ReprError::InvalidU64);
}
let value =
@ -393,15 +460,17 @@ pub fn op_ffi_read_i64<FP>(
// Note: The representation of 64-bit integers is function-wide. We cannot
// choose to take this parameter as a number while returning a bigint.
#[bigint] offset: isize,
) -> Result<i64, AnyError>
) -> Result<i64, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid i64 pointer, pointer is null"));
return Err(ReprError::InvalidI64);
}
let value =
@ -416,15 +485,17 @@ pub fn op_ffi_read_f32<FP>(
state: &mut OpState,
ptr: *mut c_void,
#[number] offset: isize,
) -> Result<f32, AnyError>
) -> Result<f32, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid f32 pointer, pointer is null"));
return Err(ReprError::InvalidF32);
}
// SAFETY: ptr and offset are user provided.
@ -436,15 +507,17 @@ pub fn op_ffi_read_f64<FP>(
state: &mut OpState,
ptr: *mut c_void,
#[number] offset: isize,
) -> Result<f64, AnyError>
) -> Result<f64, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid f64 pointer, pointer is null"));
return Err(ReprError::InvalidF64);
}
// SAFETY: ptr and offset are user provided.
@ -456,15 +529,17 @@ pub fn op_ffi_read_ptr<FP>(
state: &mut OpState,
ptr: *mut c_void,
#[number] offset: isize,
) -> Result<*mut c_void, AnyError>
) -> Result<*mut c_void, ReprError>
where
FP: FfiPermissions + 'static,
{
let permissions = state.borrow_mut::<FP>();
permissions.check_partial_no_path()?;
permissions
.check_partial_no_path()
.map_err(ReprError::Permission)?;
if ptr.is_null() {
return Err(type_error("Invalid pointer pointer, pointer is null"));
return Err(ReprError::InvalidPointer);
}
// SAFETY: ptr and offset are user provided.

View file

@ -2,14 +2,24 @@
use crate::dlfcn::DynamicLibraryResource;
use crate::symbol::NativeType;
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::v8;
use deno_core::OpState;
use deno_core::ResourceId;
use std::ptr;
#[derive(Debug, thiserror::Error)]
pub enum StaticError {
#[error(transparent)]
Dlfcn(super::DlfcnError),
#[error("Invalid FFI static type 'void'")]
InvalidTypeVoid,
#[error("Invalid FFI static type 'struct'")]
InvalidTypeStruct,
#[error(transparent)]
Resource(deno_core::error::AnyError),
}
#[op2]
pub fn op_ffi_get_static<'scope>(
scope: &mut v8::HandleScope<'scope>,
@ -18,24 +28,27 @@ pub fn op_ffi_get_static<'scope>(
#[string] name: String,
#[serde] static_type: NativeType,
optional: bool,
) -> Result<v8::Local<'scope, v8::Value>, AnyError> {
let resource = state.resource_table.get::<DynamicLibraryResource>(rid)?;
) -> Result<v8::Local<'scope, v8::Value>, StaticError> {
let resource = state
.resource_table
.get::<DynamicLibraryResource>(rid)
.map_err(StaticError::Resource)?;
let data_ptr = match resource.get_static(name) {
Ok(data_ptr) => Ok(data_ptr),
Ok(data_ptr) => data_ptr,
Err(err) => {
if optional {
let null: v8::Local<v8::Value> = v8::null(scope).into();
return Ok(null);
} else {
Err(err)
return Err(StaticError::Dlfcn(err));
}
}
}?;
};
Ok(match static_type {
NativeType::Void => {
return Err(type_error("Invalid FFI static type 'void'"));
return Err(StaticError::InvalidTypeVoid);
}
NativeType::Bool => {
// SAFETY: ptr is user provided
@ -132,7 +145,7 @@ pub fn op_ffi_get_static<'scope>(
external
}
NativeType::Struct(_) => {
return Err(type_error("Invalid FFI static type 'struct'"));
return Err(StaticError::InvalidTypeStruct);
}
})
}

View file

@ -929,7 +929,7 @@ fn exists(path: &Path) -> bool {
}
fn realpath(path: &Path) -> FsResult<PathBuf> {
Ok(deno_core::strip_unc_prefix(path.canonicalize()?))
Ok(deno_path_util::strip_unc_prefix(path.canonicalize()?))
}
fn read_dir(path: &Path) -> FsResult<Vec<FsDirEntry>> {

View file

@ -119,7 +119,7 @@ fn encodings_iter_inner<'s>(
};
Some(Ok((encoding, qval)))
})
.map(|r| r?) // flatten Result<Result<...
.flatten()
}
#[cfg(test)]

View file

@ -296,7 +296,7 @@ where
let authority: v8::Local<v8::Value> = match request_properties.authority {
Some(authority) => v8::String::new_from_utf8(
scope,
authority.as_ref(),
authority.as_bytes(),
v8::NewStringType::Normal,
)
.unwrap()
@ -305,13 +305,23 @@ where
};
// Only extract the path part - we handle authority elsewhere
let path = match &request_parts.uri.path_and_query() {
Some(path_and_query) => path_and_query.to_string(),
None => "".to_owned(),
let path = match request_parts.uri.path_and_query() {
Some(path_and_query) => {
let path = path_and_query.as_str();
if matches!(path.as_bytes().first(), Some(b'/' | b'*')) {
Cow::Borrowed(path)
} else {
Cow::Owned(format!("/{}", path))
}
}
None => Cow::Borrowed(""),
};
let path: v8::Local<v8::Value> =
v8::String::new_from_utf8(scope, path.as_ref(), v8::NewStringType::Normal)
let path: v8::Local<v8::Value> = v8::String::new_from_utf8(
scope,
path.as_bytes(),
v8::NewStringType::Normal,
)
.unwrap()
.into();

View file

@ -34,8 +34,8 @@ pub struct HttpConnectionProperties {
pub stream_type: NetworkStreamType,
}
pub struct HttpRequestProperties {
pub authority: Option<String>,
pub struct HttpRequestProperties<'a> {
pub authority: Option<Cow<'a, str>>,
}
/// Pluggable trait to determine listen, connection and request properties
@ -84,11 +84,11 @@ pub trait HttpPropertyExtractor {
) -> NetworkStream;
/// Determines the request properties.
fn request_properties(
connection_properties: &HttpConnectionProperties,
uri: &Uri,
headers: &HeaderMap,
) -> HttpRequestProperties;
fn request_properties<'a>(
connection_properties: &'a HttpConnectionProperties,
uri: &'a Uri,
headers: &'a HeaderMap,
) -> HttpRequestProperties<'a>;
}
pub struct DefaultHttpPropertyExtractor {}
@ -180,18 +180,17 @@ impl HttpPropertyExtractor for DefaultHttpPropertyExtractor {
}
}
fn request_properties(
connection_properties: &HttpConnectionProperties,
uri: &Uri,
headers: &HeaderMap,
) -> HttpRequestProperties {
fn request_properties<'a>(
connection_properties: &'a HttpConnectionProperties,
uri: &'a Uri,
headers: &'a HeaderMap,
) -> HttpRequestProperties<'a> {
let authority = req_host(
uri,
headers,
connection_properties.stream_type,
connection_properties.local_port.unwrap_or_default(),
)
.map(|s| s.into_owned());
);
HttpRequestProperties { authority }
}

View file

@ -2,7 +2,6 @@
use std::rc::Rc;
use deno_core::error::AnyError;
use deno_core::AsyncRefCell;
use deno_core::AsyncResult;
use deno_core::CancelHandle;
@ -71,13 +70,16 @@ impl BiPipeResource {
pub async fn read(
self: Rc<Self>,
data: &mut [u8],
) -> Result<usize, AnyError> {
) -> Result<usize, std::io::Error> {
let mut rd = RcRef::map(&self, |r| &r.read_half).borrow_mut().await;
let cancel_handle = RcRef::map(&self, |r| &r.cancel);
Ok(rd.read(data).try_or_cancel(cancel_handle).await?)
rd.read(data).try_or_cancel(cancel_handle).await
}
pub async fn write(self: Rc<Self>, data: &[u8]) -> Result<usize, AnyError> {
pub async fn write(
self: Rc<Self>,
data: &[u8],
) -> Result<usize, std::io::Error> {
let mut wr = RcRef::map(self, |r| &r.write_half).borrow_mut().await;
let nwritten = wr.write(data).await?;
wr.flush().await?;
@ -270,8 +272,8 @@ impl_async_write!(for BiPipe -> self.write_end);
/// Creates both sides of a bidirectional pipe, returning the raw
/// handles to the underlying OS resources.
pub fn bi_pipe_pair_raw() -> Result<(RawBiPipeHandle, RawBiPipeHandle), AnyError>
{
pub fn bi_pipe_pair_raw(
) -> Result<(RawBiPipeHandle, RawBiPipeHandle), std::io::Error> {
#[cfg(unix)]
{
// SockFlag is broken on macOS
@ -293,7 +295,7 @@ pub fn bi_pipe_pair_raw() -> Result<(RawBiPipeHandle, RawBiPipeHandle), AnyError
)
};
if ret != 0 {
return Err(std::io::Error::last_os_error().into());
return Err(std::io::Error::last_os_error());
}
if cfg!(target_os = "macos") {
@ -389,7 +391,7 @@ pub fn bi_pipe_pair_raw() -> Result<(RawBiPipeHandle, RawBiPipeHandle), AnyError
continue;
}
return Err(err.into());
return Err(err);
}
break (path, hd1);
@ -411,7 +413,7 @@ pub fn bi_pipe_pair_raw() -> Result<(RawBiPipeHandle, RawBiPipeHandle), AnyError
0,
);
if hd2 == INVALID_HANDLE_VALUE {
return Err(io::Error::last_os_error().into());
return Err(io::Error::last_os_error());
}
// Will not block because we have create the pair.
@ -419,7 +421,7 @@ pub fn bi_pipe_pair_raw() -> Result<(RawBiPipeHandle, RawBiPipeHandle), AnyError
let err = std::io::Error::last_os_error();
if err.raw_os_error() != Some(ERROR_PIPE_CONNECTED as i32) {
CloseHandle(hd2);
return Err(err.into());
return Err(err);
}
}

View file

@ -6,10 +6,6 @@ use std::rc::Rc;
use std::time::SystemTime;
use std::time::UNIX_EPOCH;
use deno_core::error::custom_error;
use deno_core::error::not_supported;
use deno_core::error::resource_unavailable;
use deno_core::error::AnyError;
use deno_core::BufMutView;
use deno_core::BufView;
use deno_core::OpState;
@ -59,15 +55,16 @@ impl From<io::ErrorKind> for FsError {
}
}
impl From<FsError> for AnyError {
impl From<FsError> for deno_core::error::AnyError {
fn from(err: FsError) -> Self {
match err {
FsError::Io(err) => AnyError::from(err),
FsError::FileBusy => resource_unavailable(),
FsError::NotSupported => not_supported(),
FsError::NotCapable(err) => {
custom_error("NotCapable", format!("permission denied: {err}"))
}
FsError::Io(err) => err.into(),
FsError::FileBusy => deno_core::error::resource_unavailable(),
FsError::NotSupported => deno_core::error::not_supported(),
FsError::NotCapable(err) => deno_core::error::custom_error(
"NotCapable",
format!("permission denied: {err}"),
),
}
}
}
@ -266,9 +263,9 @@ impl FileResource {
state: &OpState,
rid: ResourceId,
f: F,
) -> Result<R, AnyError>
) -> Result<R, deno_core::error::AnyError>
where
F: FnOnce(Rc<FileResource>) -> Result<R, AnyError>,
F: FnOnce(Rc<FileResource>) -> Result<R, deno_core::error::AnyError>,
{
let resource = state.resource_table.get::<FileResource>(rid)?;
f(resource)
@ -277,7 +274,7 @@ impl FileResource {
pub fn get_file(
state: &OpState,
rid: ResourceId,
) -> Result<Rc<dyn File>, AnyError> {
) -> Result<Rc<dyn File>, deno_core::error::AnyError> {
let resource = state.resource_table.get::<FileResource>(rid)?;
Ok(resource.file())
}
@ -286,9 +283,9 @@ impl FileResource {
state: &OpState,
rid: ResourceId,
f: F,
) -> Result<R, AnyError>
) -> Result<R, deno_core::error::AnyError>
where
F: FnOnce(Rc<dyn File>) -> Result<R, AnyError>,
F: FnOnce(Rc<dyn File>) -> Result<R, deno_core::error::AnyError>,
{
Self::with_resource(state, rid, |r| f(r.file.clone()))
}
@ -303,10 +300,7 @@ impl deno_core::Resource for FileResource {
Cow::Borrowed(&self.name)
}
fn read(
self: Rc<Self>,
limit: usize,
) -> deno_core::AsyncResult<deno_core::BufView> {
fn read(self: Rc<Self>, limit: usize) -> deno_core::AsyncResult<BufView> {
Box::pin(async move {
self
.file
@ -319,8 +313,8 @@ impl deno_core::Resource for FileResource {
fn read_byob(
self: Rc<Self>,
buf: deno_core::BufMutView,
) -> deno_core::AsyncResult<(usize, deno_core::BufMutView)> {
buf: BufMutView,
) -> deno_core::AsyncResult<(usize, BufMutView)> {
Box::pin(async move {
self
.file
@ -333,17 +327,14 @@ impl deno_core::Resource for FileResource {
fn write(
self: Rc<Self>,
buf: deno_core::BufView,
buf: BufView,
) -> deno_core::AsyncResult<deno_core::WriteOutcome> {
Box::pin(async move {
self.file.clone().write(buf).await.map_err(|err| err.into())
})
}
fn write_all(
self: Rc<Self>,
buf: deno_core::BufView,
) -> deno_core::AsyncResult<()> {
fn write_all(self: Rc<Self>, buf: BufView) -> deno_core::AsyncResult<()> {
Box::pin(async move {
self
.file

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::unsync::spawn_blocking;
use deno_core::unsync::TaskQueue;
@ -48,6 +47,7 @@ use winapi::um::processenv::GetStdHandle;
#[cfg(windows)]
use winapi::um::winbase;
use deno_core::futures::TryFutureExt;
#[cfg(windows)]
use parking_lot::Condvar;
#[cfg(windows)]
@ -348,13 +348,13 @@ where
RcRef::map(self, |r| &r.stream).borrow_mut()
}
async fn write(self: Rc<Self>, data: &[u8]) -> Result<usize, AnyError> {
async fn write(self: Rc<Self>, data: &[u8]) -> Result<usize, io::Error> {
let mut stream = self.borrow_mut().await;
let nwritten = stream.write(data).await?;
Ok(nwritten)
}
async fn shutdown(self: Rc<Self>) -> Result<(), AnyError> {
async fn shutdown(self: Rc<Self>) -> Result<(), io::Error> {
let mut stream = self.borrow_mut().await;
stream.shutdown().await?;
Ok(())
@ -396,7 +396,7 @@ where
self.cancel_handle.cancel()
}
async fn read(self: Rc<Self>, data: &mut [u8]) -> Result<usize, AnyError> {
async fn read(self: Rc<Self>, data: &mut [u8]) -> Result<usize, io::Error> {
let mut rd = self.borrow_mut().await;
let nread = rd.read(data).try_or_cancel(self.cancel_handle()).await?;
Ok(nread)
@ -417,7 +417,7 @@ impl Resource for ChildStdinResource {
deno_core::impl_writable!();
fn shutdown(self: Rc<Self>) -> AsyncResult<()> {
Box::pin(self.shutdown())
Box::pin(self.shutdown().map_err(|e| e.into()))
}
}
@ -1010,7 +1010,7 @@ pub fn op_print(
state: &mut OpState,
#[string] msg: &str,
is_err: bool,
) -> Result<(), AnyError> {
) -> Result<(), deno_core::error::AnyError> {
let rid = if is_err { 2 } else { 1 };
FileResource::with_file(state, rid, move |file| {
Ok(file.write_all_sync(msg.as_bytes())?)

View file

@ -250,7 +250,7 @@ pub fn op_tls_cert_resolver_resolve_error(
#[string] sni: String,
#[string] error: String,
) {
lookup.resolve(sni, Err(anyhow!(error)))
lookup.resolve(sni, Err(error))
}
#[op2]

View file

@ -2024,7 +2024,9 @@ pub fn op_node_export_public_key_pem(
_ => unreachable!("export_der would have errored"),
};
let mut out = vec![0; 2048];
let pem_len = der::pem::encapsulated_len(label, LineEnding::LF, data.len())
.map_err(|_| type_error("very large data"))?;
let mut out = vec![0; pem_len];
let mut writer = PemWriter::new(label, LineEnding::LF, &mut out)?;
writer.write(&data)?;
let len = writer.finish()?;
@ -2063,7 +2065,9 @@ pub fn op_node_export_private_key_pem(
_ => unreachable!("export_der would have errored"),
};
let mut out = vec![0; 2048];
let pem_len = der::pem::encapsulated_len(label, LineEnding::LF, data.len())
.map_err(|_| type_error("very large data"))?;
let mut out = vec![0; pem_len];
let mut writer = PemWriter::new(label, LineEnding::LF, &mut out)?;
writer.write(&data)?;
let len = writer.finish()?;

View file

@ -488,14 +488,12 @@ pub async fn op_http2_client_get_response_body_chunk(
loop {
let result = poll_fn(|cx| poll_data_or_trailers(cx, &mut body)).await;
if let Err(err) = result {
let reason = err.reason();
if let Some(reason) = reason {
if reason == Reason::CANCEL {
return Ok((None, false, true));
match err.reason() {
Some(Reason::NO_ERROR) => return Ok((None, true, false)),
Some(Reason::CANCEL) => return Ok((None, false, true)),
_ => return Err(err.into()),
}
}
return Err(err.into());
}
match result.unwrap() {
DataOrTrailers::Data(data) => {
return Ok((Some(data.to_vec()), false, false));

View file

@ -295,7 +295,7 @@ where
let path = ensure_read_permission::<P>(state, &path)?;
let fs = state.borrow::<FileSystemRc>();
let canonicalized_path =
deno_core::strip_unc_prefix(fs.realpath_sync(&path)?);
deno_path_util::strip_unc_prefix(fs.realpath_sync(&path)?);
Ok(canonicalized_path.to_string_lossy().into_owned())
}

View file

@ -52,7 +52,7 @@ where
let path = ensure_read_permission::<P>(state, &path)?;
let fs = state.borrow::<FileSystemRc>();
let canonicalized_path =
deno_core::strip_unc_prefix(fs.realpath_sync(&path)?);
deno_path_util::strip_unc_prefix(fs.realpath_sync(&path)?);
Url::from_file_path(canonicalized_path)
.map_err(|e| generic_error(format!("URL from Path-String: {:#?}", e)))?
};

View file

@ -66,14 +66,19 @@ export function createWritableStdioStream(writer, name, warmup = false) {
// We cannot call `writer?.isTerminal()` eagerly here
let getIsTTY = () => writer?.isTerminal();
const getColumns = () =>
stream._columns ||
(writer?.isTerminal() ? Deno.consoleSize?.().columns : undefined);
ObjectDefineProperties(stream, {
columns: {
__proto__: null,
enumerable: true,
configurable: true,
get: () =>
writer?.isTerminal() ? Deno.consoleSize?.().columns : undefined,
get: () => getColumns(),
set: (value) => {
stream._columns = value;
},
},
rows: {
__proto__: null,

View file

@ -132,6 +132,8 @@ export function fork(
rm = 2;
}
execArgv.splice(index, rm);
} else if (flag.startsWith("--no-warnings")) {
execArgv[index] = "--quiet";
} else {
index++;
}

View file

@ -52,7 +52,7 @@ import { urlToHttpOptions } from "ext:deno_node/internal/url.ts";
import { kEmptyObject, once } from "ext:deno_node/internal/util.mjs";
import { constants, TCP } from "ext:deno_node/internal_binding/tcp_wrap.ts";
import { notImplemented } from "ext:deno_node/_utils.ts";
import { isWindows } from "ext:deno_node/_util/os.ts";
import {
connResetException,
ERR_HTTP_HEADERS_SENT,
@ -1677,9 +1677,8 @@ export class ServerImpl extends EventEmitter {
port = options.port | 0;
}
// TODO(bnoordhuis) Node prefers [::] when host is omitted,
// we on the other hand default to 0.0.0.0.
let hostname = options.host ?? "0.0.0.0";
// Use 0.0.0.0 for Windows, and [::] for other platforms.
let hostname = options.host ?? (isWindows ? "0.0.0.0" : "[::]");
if (hostname == "localhost") {
hostname = "127.0.0.1";
}

View file

@ -882,6 +882,7 @@ export class ClientHttp2Stream extends Duplex {
trailersReady: false,
endAfterHeaders: false,
shutdownWritableCalled: false,
serverEndedCall: false,
};
this[kDenoResponse] = undefined;
this[kDenoRid] = undefined;
@ -1109,7 +1110,9 @@ export class ClientHttp2Stream extends Duplex {
}
debugHttp2(">>> chunk", chunk, finished, this[kDenoResponse].bodyRid);
if (chunk === null) {
if (finished || chunk === null) {
this[kState].serverEndedCall = true;
const trailerList = await op_http2_client_get_response_trailers(
this[kDenoResponse].bodyRid,
);
@ -1237,8 +1240,10 @@ export class ClientHttp2Stream extends Duplex {
this[kSession] = undefined;
session[kMaybeDestroy]();
if (callback) {
callback(err);
}
}
[kMaybeDestroy](code = constants.NGHTTP2_NO_ERROR) {
debugHttp2(
@ -1280,6 +1285,9 @@ function shutdownWritable(stream, callback, streamRid) {
if (state.flags & STREAM_FLAGS_HAS_TRAILERS) {
onStreamTrailers(stream);
callback();
} else if (state.serverEndedCall) {
debugHttp2(">>> stream finished");
callback();
} else {
op_http2_client_send_data(streamRid, new Uint8Array(), true)
.then(() => {

View file

@ -2,10 +2,59 @@
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// Copyright Feross Aboukhadijeh, and other contributors. All rights reserved. MIT license.
// TODO(petamoriken): enable prefer-primordials for node polyfills
// deno-lint-ignore-file prefer-primordials
import { core } from "ext:core/mod.js";
import { core, primordials } from "ext:core/mod.js";
const {
isAnyArrayBuffer,
isArrayBuffer,
isDataView,
isSharedArrayBuffer,
isTypedArray,
} = core;
const {
ArrayBufferPrototypeGetByteLength,
ArrayBufferPrototypeGetDetached,
ArrayIsArray,
ArrayPrototypeSlice,
BigInt,
DataViewPrototypeGetByteLength,
Float32Array,
Float64Array,
MathFloor,
MathMin,
Number,
NumberIsInteger,
NumberIsNaN,
NumberMAX_SAFE_INTEGER,
NumberMIN_SAFE_INTEGER,
NumberPrototypeToString,
ObjectCreate,
ObjectDefineProperty,
ObjectPrototypeIsPrototypeOf,
ObjectSetPrototypeOf,
RangeError,
SafeRegExp,
String,
StringFromCharCode,
StringPrototypeCharCodeAt,
StringPrototypeIncludes,
StringPrototypeReplace,
StringPrototypeToLowerCase,
StringPrototypeTrim,
SymbolFor,
SymbolToPrimitive,
TypeError,
TypeErrorPrototype,
TypedArrayPrototypeCopyWithin,
TypedArrayPrototypeFill,
TypedArrayPrototypeGetBuffer,
TypedArrayPrototypeGetByteLength,
TypedArrayPrototypeGetByteOffset,
TypedArrayPrototypeSet,
TypedArrayPrototypeSlice,
TypedArrayPrototypeSubarray,
Uint8Array,
Uint8ArrayPrototype,
} = primordials;
import { op_is_ascii, op_is_utf8, op_transcode } from "ext:core/ops";
import { TextDecoder, TextEncoder } from "ext:deno_web/08_text_encoding.js";
@ -24,11 +73,6 @@ import {
hexToBytes,
utf16leToBytes,
} from "ext:deno_node/internal_binding/_utils.ts";
import {
isAnyArrayBuffer,
isArrayBufferView,
isTypedArray,
} from "ext:deno_node/internal/util/types.ts";
import { normalizeEncoding } from "ext:deno_node/internal/util.mjs";
import { validateBuffer } from "ext:deno_node/internal/validators.mjs";
import { isUint8Array } from "ext:deno_node/internal/util/types.ts";
@ -50,9 +94,13 @@ const utf8Encoder = new TextEncoder();
// Temporary buffers to convert numbers.
const float32Array = new Float32Array(1);
const uInt8Float32Array = new Uint8Array(float32Array.buffer);
const uInt8Float32Array = new Uint8Array(
TypedArrayPrototypeGetBuffer(float32Array),
);
const float64Array = new Float64Array(1);
const uInt8Float64Array = new Uint8Array(float64Array.buffer);
const uInt8Float64Array = new Uint8Array(
TypedArrayPrototypeGetBuffer(float64Array),
);
// Check endianness.
float32Array[0] = -1; // 0xBF800000
@ -64,10 +112,7 @@ export const kMaxLength = 2147483647;
export const kStringMaxLength = 536870888;
const MAX_UINT32 = 2 ** 32;
const customInspectSymbol =
typeof Symbol === "function" && typeof Symbol["for"] === "function"
? Symbol["for"]("nodejs.util.inspect.custom")
: null;
const customInspectSymbol = SymbolFor("nodejs.util.inspect.custom");
export const INSPECT_MAX_BYTES = 50;
@ -76,23 +121,25 @@ export const constants = {
MAX_STRING_LENGTH: kStringMaxLength,
};
Object.defineProperty(Buffer.prototype, "parent", {
ObjectDefineProperty(Buffer.prototype, "parent", {
__proto__: null,
enumerable: true,
get: function () {
if (!Buffer.isBuffer(this)) {
if (!BufferIsBuffer(this)) {
return void 0;
}
return this.buffer;
return TypedArrayPrototypeGetBuffer(this);
},
});
Object.defineProperty(Buffer.prototype, "offset", {
ObjectDefineProperty(Buffer.prototype, "offset", {
__proto__: null,
enumerable: true,
get: function () {
if (!Buffer.isBuffer(this)) {
if (!BufferIsBuffer(this)) {
return void 0;
}
return this.byteOffset;
return TypedArrayPrototypeGetByteOffset(this);
},
});
@ -103,10 +150,21 @@ function createBuffer(length) {
);
}
const buf = new Uint8Array(length);
Object.setPrototypeOf(buf, Buffer.prototype);
ObjectSetPrototypeOf(buf, BufferPrototype);
return buf;
}
/**
* @param {ArrayBufferLike} O
* @returns {boolean}
*/
function isDetachedBuffer(O) {
if (isSharedArrayBuffer(O)) {
return false;
}
return ArrayBufferPrototypeGetDetached(O);
}
export function Buffer(arg, encodingOrOffset, length) {
if (typeof arg === "number") {
if (typeof encodingOrOffset === "string") {
@ -133,6 +191,7 @@ function _from(value, encodingOrOffset, length) {
return fromArrayBuffer(value, encodingOrOffset, length);
}
// deno-lint-ignore prefer-primordials
const valueOf = value.valueOf && value.valueOf();
if (
valueOf != null &&
@ -147,8 +206,8 @@ function _from(value, encodingOrOffset, length) {
return b;
}
if (typeof value[Symbol.toPrimitive] === "function") {
const primitive = value[Symbol.toPrimitive]("string");
if (typeof value[SymbolToPrimitive] === "function") {
const primitive = value[SymbolToPrimitive]("string");
if (typeof primitive === "string") {
return fromString(primitive, encodingOrOffset);
}
@ -162,13 +221,19 @@ function _from(value, encodingOrOffset, length) {
);
}
Buffer.from = function from(value, encodingOrOffset, length) {
const BufferFrom = Buffer.from = function from(
value,
encodingOrOffset,
length,
) {
return _from(value, encodingOrOffset, length);
};
Object.setPrototypeOf(Buffer.prototype, Uint8Array.prototype);
const BufferPrototype = Buffer.prototype;
Object.setPrototypeOf(Buffer, Uint8Array);
ObjectSetPrototypeOf(Buffer.prototype, Uint8ArrayPrototype);
ObjectSetPrototypeOf(Buffer, Uint8Array);
function assertSize(size) {
validateNumber(size, "size", 0, kMaxLength);
@ -186,6 +251,7 @@ function _alloc(size, fill, encoding) {
encoding,
);
}
// deno-lint-ignore prefer-primordials
return buffer.fill(fill, encoding);
}
return buffer;
@ -212,13 +278,14 @@ function fromString(string, encoding) {
if (typeof encoding !== "string" || encoding === "") {
encoding = "utf8";
}
if (!Buffer.isEncoding(encoding)) {
if (!BufferIsEncoding(encoding)) {
throw new codes.ERR_UNKNOWN_ENCODING(encoding);
}
const length = byteLength(string, encoding) | 0;
let buf = createBuffer(length);
const actual = buf.write(string, encoding);
if (actual !== length) {
// deno-lint-ignore prefer-primordials
buf = buf.slice(0, actual);
}
return buf;
@ -226,11 +293,12 @@ function fromString(string, encoding) {
function fromArrayLike(obj) {
const buf = new Uint8Array(obj);
Object.setPrototypeOf(buf, Buffer.prototype);
ObjectSetPrototypeOf(buf, BufferPrototype);
return buf;
}
function fromObject(obj) {
// deno-lint-ignore prefer-primordials
if (obj.length !== undefined || isAnyArrayBuffer(obj.buffer)) {
if (typeof obj.length !== "number") {
return createBuffer(0);
@ -239,7 +307,7 @@ function fromObject(obj) {
return fromArrayLike(obj);
}
if (obj.type === "Buffer" && Array.isArray(obj.data)) {
if (obj.type === "Buffer" && ArrayIsArray(obj.data)) {
return fromArrayLike(obj.data);
}
}
@ -248,7 +316,7 @@ function checked(length) {
if (length >= kMaxLength) {
throw new RangeError(
"Attempt to allocate Buffer larger than maximum size: 0x" +
kMaxLength.toString(16) + " bytes",
NumberPrototypeToString(kMaxLength, 16) + " bytes",
);
}
return length | 0;
@ -256,25 +324,33 @@ function checked(length) {
export function SlowBuffer(length) {
assertSize(length);
return Buffer.alloc(+length);
return _alloc(+length);
}
Object.setPrototypeOf(SlowBuffer.prototype, Uint8Array.prototype);
ObjectSetPrototypeOf(SlowBuffer.prototype, Uint8ArrayPrototype);
Object.setPrototypeOf(SlowBuffer, Uint8Array);
ObjectSetPrototypeOf(SlowBuffer, Uint8Array);
Buffer.isBuffer = function isBuffer(b) {
return b != null && b._isBuffer === true && b !== Buffer.prototype;
const BufferIsBuffer = Buffer.isBuffer = function isBuffer(b) {
return b != null && b._isBuffer === true && b !== BufferPrototype;
};
Buffer.compare = function compare(a, b) {
if (isInstance(a, Uint8Array)) {
a = Buffer.from(a, a.offset, a.byteLength);
const BufferCompare = Buffer.compare = function compare(a, b) {
if (isUint8Array(a)) {
a = BufferFrom(
a,
TypedArrayPrototypeGetByteOffset(a),
TypedArrayPrototypeGetByteLength(a),
);
}
if (isInstance(b, Uint8Array)) {
b = Buffer.from(b, b.offset, b.byteLength);
if (isUint8Array(b)) {
b = BufferFrom(
b,
TypedArrayPrototypeGetByteOffset(b),
TypedArrayPrototypeGetByteLength(b),
);
}
if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {
if (!BufferIsBuffer(a) || !BufferIsBuffer(b)) {
throw new TypeError(
'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array',
);
@ -284,7 +360,7 @@ Buffer.compare = function compare(a, b) {
}
let x = a.length;
let y = b.length;
for (let i = 0, len = Math.min(x, y); i < len; ++i) {
for (let i = 0, len = MathMin(x, y); i < len; ++i) {
if (a[i] !== b[i]) {
x = a[i];
y = b[i];
@ -300,18 +376,18 @@ Buffer.compare = function compare(a, b) {
return 0;
};
Buffer.isEncoding = function isEncoding(encoding) {
const BufferIsEncoding = Buffer.isEncoding = function isEncoding(encoding) {
return typeof encoding === "string" && encoding.length !== 0 &&
normalizeEncoding(encoding) !== undefined;
};
Buffer.concat = function concat(list, length) {
if (!Array.isArray(list)) {
if (!ArrayIsArray(list)) {
throw new codes.ERR_INVALID_ARG_TYPE("list", "Array", list);
}
if (list.length === 0) {
return Buffer.alloc(0);
return _alloc(0);
}
if (length === undefined) {
@ -325,7 +401,7 @@ Buffer.concat = function concat(list, length) {
validateOffset(length, "length");
}
const buffer = Buffer.allocUnsafe(length);
const buffer = _allocUnsafe(length);
let pos = 0;
for (let i = 0; i < list.length; i++) {
const buf = list[i];
@ -346,7 +422,7 @@ Buffer.concat = function concat(list, length) {
// Zero-fill the remaining bytes if the specified `length` was more than
// the actual total length, i.e. if we have some remaining allocated bytes
// there were not initialized.
buffer.fill(0, pos, length);
TypedArrayPrototypeFill(buffer, 0, pos, length);
}
return buffer;
@ -354,7 +430,18 @@ Buffer.concat = function concat(list, length) {
function byteLength(string, encoding) {
if (typeof string !== "string") {
if (isArrayBufferView(string) || isAnyArrayBuffer(string)) {
if (isTypedArray(string)) {
return TypedArrayPrototypeGetByteLength(string);
}
if (isDataView(string)) {
return DataViewPrototypeGetByteLength(string);
}
if (isArrayBuffer(string)) {
return ArrayBufferPrototypeGetByteLength(string);
}
if (isSharedArrayBuffer(string)) {
// TODO(petamoriken): add SharedArayBuffer to primordials
// deno-lint-ignore prefer-primordials
return string.byteLength;
}
@ -463,6 +550,7 @@ Buffer.prototype.toString = function toString(encoding, start, end) {
throw new codes.ERR_UNKNOWN_ENCODING(encoding);
}
// deno-lint-ignore prefer-primordials
return ops.slice(this, start, end);
};
@ -479,23 +567,30 @@ Buffer.prototype.equals = function equals(b) {
if (this === b) {
return true;
}
return Buffer.compare(this, b) === 0;
return BufferCompare(this, b) === 0;
};
Buffer.prototype.inspect = function inspect() {
const SPACER_PATTERN = new SafeRegExp(/(.{2})/g);
Buffer.prototype[customInspectSymbol] =
Buffer.prototype.inspect =
function inspect() {
let str = "";
const max = INSPECT_MAX_BYTES;
str = this.toString("hex", 0, max).replace(/(.{2})/g, "$1 ").trim();
str = StringPrototypeTrim(
StringPrototypeReplace(
// deno-lint-ignore prefer-primordials
this.toString("hex", 0, max),
SPACER_PATTERN,
"$1 ",
),
);
if (this.length > max) {
str += " ... ";
}
return "<Buffer " + str + ">";
};
if (customInspectSymbol) {
Buffer.prototype[customInspectSymbol] = Buffer.prototype.inspect;
}
Buffer.prototype.compare = function compare(
target,
start,
@ -503,10 +598,14 @@ Buffer.prototype.compare = function compare(
thisStart,
thisEnd,
) {
if (isInstance(target, Uint8Array)) {
target = Buffer.from(target, target.offset, target.byteLength);
if (isUint8Array(target)) {
target = BufferFrom(
target,
TypedArrayPrototypeGetByteOffset(target),
TypedArrayPrototypeGetByteLength(target),
);
}
if (!Buffer.isBuffer(target)) {
if (!BufferIsBuffer(target)) {
throw new codes.ERR_INVALID_ARG_TYPE(
"target",
["Buffer", "Uint8Array"],
@ -563,8 +662,9 @@ Buffer.prototype.compare = function compare(
}
let x = thisEnd - thisStart;
let y = end - start;
const len = Math.min(x, y);
const thisCopy = this.slice(thisStart, thisEnd);
const len = MathMin(x, y);
const thisCopy = TypedArrayPrototypeSlice(this, thisStart, thisEnd);
// deno-lint-ignore prefer-primordials
const targetCopy = target.slice(start, end);
for (let i = 0; i < len; ++i) {
if (thisCopy[i] !== targetCopy[i]) {
@ -594,7 +694,8 @@ function bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
byteOffset = -0x80000000;
}
byteOffset = +byteOffset;
if (Number.isNaN(byteOffset)) {
if (NumberIsNaN(byteOffset)) {
// deno-lint-ignore prefer-primordials
byteOffset = dir ? 0 : (buffer.length || buffer.byteLength);
}
dir = !!dir;
@ -614,6 +715,7 @@ function bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
if (ops === undefined) {
throw new codes.ERR_UNKNOWN_ENCODING(encoding);
}
// deno-lint-ignore prefer-primordials
return ops.indexOf(buffer, val, byteOffset, dir);
}
@ -630,6 +732,7 @@ function bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
}
Buffer.prototype.includes = function includes(val, byteOffset, encoding) {
// deno-lint-ignore prefer-primordials
return this.indexOf(val, byteOffset, encoding) !== -1;
};
@ -649,7 +752,7 @@ Buffer.prototype.asciiSlice = function asciiSlice(offset, length) {
if (offset === 0 && length === this.length) {
return bytesToAscii(this);
} else {
return bytesToAscii(this.slice(offset, length));
return bytesToAscii(TypedArrayPrototypeSlice(this, offset, length));
}
};
@ -664,7 +767,9 @@ Buffer.prototype.base64Slice = function base64Slice(
if (offset === 0 && length === this.length) {
return forgivingBase64Encode(this);
} else {
return forgivingBase64Encode(this.slice(offset, length));
return forgivingBase64Encode(
TypedArrayPrototypeSlice(this, offset, length),
);
}
};
@ -683,7 +788,9 @@ Buffer.prototype.base64urlSlice = function base64urlSlice(
if (offset === 0 && length === this.length) {
return forgivingBase64UrlEncode(this);
} else {
return forgivingBase64UrlEncode(this.slice(offset, length));
return forgivingBase64UrlEncode(
TypedArrayPrototypeSlice(this, offset, length),
);
}
};
@ -728,7 +835,7 @@ Buffer.prototype.ucs2Slice = function ucs2Slice(offset, length) {
if (offset === 0 && length === this.length) {
return bytesToUtf16le(this);
} else {
return bytesToUtf16le(this.slice(offset, length));
return bytesToUtf16le(TypedArrayPrototypeSlice(this, offset, length));
}
};
@ -747,9 +854,9 @@ Buffer.prototype.utf8Slice = function utf8Slice(string, offset, length) {
Buffer.prototype.utf8Write = function utf8Write(string, offset, length) {
offset = offset || 0;
const maxLength = Math.min(length || Infinity, this.length - offset);
const maxLength = MathMin(length || Infinity, this.length - offset);
const buf = offset || maxLength < this.length
? this.subarray(offset, maxLength + offset)
? TypedArrayPrototypeSubarray(this, offset, maxLength + offset)
: this;
return utf8Encoder.encodeInto(string, buf).written;
};
@ -801,7 +908,7 @@ Buffer.prototype.write = function write(string, offset, length, encoding) {
Buffer.prototype.toJSON = function toJSON() {
return {
type: "Buffer",
data: Array.prototype.slice.call(this._arr || this, 0),
data: ArrayPrototypeSlice(this._arr || this, 0),
};
};
function fromArrayBuffer(obj, byteOffset, length) {
@ -810,11 +917,12 @@ function fromArrayBuffer(obj, byteOffset, length) {
byteOffset = 0;
} else {
byteOffset = +byteOffset;
if (Number.isNaN(byteOffset)) {
if (NumberIsNaN(byteOffset)) {
byteOffset = 0;
}
}
// deno-lint-ignore prefer-primordials
const maxLength = obj.byteLength - byteOffset;
if (maxLength < 0) {
@ -836,7 +944,7 @@ function fromArrayBuffer(obj, byteOffset, length) {
}
const buffer = new Uint8Array(obj, byteOffset, length);
Object.setPrototypeOf(buffer, Buffer.prototype);
ObjectSetPrototypeOf(buffer, BufferPrototype);
return buffer;
}
@ -844,6 +952,7 @@ function _base64Slice(buf, start, end) {
if (start === 0 && end === buf.length) {
return forgivingBase64Encode(buf);
} else {
// deno-lint-ignore prefer-primordials
return forgivingBase64Encode(buf.slice(start, end));
}
}
@ -852,9 +961,10 @@ const decoder = new TextDecoder();
function _utf8Slice(buf, start, end) {
try {
// deno-lint-ignore prefer-primordials
return decoder.decode(buf.slice(start, end));
} catch (err) {
if (err instanceof TypeError) {
if (ObjectPrototypeIsPrototypeOf(TypeErrorPrototype, err)) {
throw new NodeError("ERR_STRING_TOO_LONG", "String too long");
}
throw err;
@ -863,9 +973,9 @@ function _utf8Slice(buf, start, end) {
function _latin1Slice(buf, start, end) {
let ret = "";
end = Math.min(buf.length, end);
end = MathMin(buf.length, end);
for (let i = start; i < end; ++i) {
ret += String.fromCharCode(buf[i]);
ret += StringFromCharCode(buf[i]);
}
return ret;
}
@ -994,7 +1104,6 @@ Buffer.prototype.readUint32BE = Buffer.prototype.readUInt32BE = readUInt32BE;
Buffer.prototype.readBigUint64LE =
Buffer.prototype.readBigUInt64LE =
defineBigIntMethod(
function readBigUInt64LE(offset) {
offset = offset >>> 0;
validateNumber(offset, "offset");
@ -1008,13 +1117,11 @@ Buffer.prototype.readBigUint64LE =
this[++offset] * 2 ** 24;
const hi = this[++offset] + this[++offset] * 2 ** 8 +
this[++offset] * 2 ** 16 + last * 2 ** 24;
return BigInt(lo) + (BigInt(hi) << BigInt(32));
},
);
return BigInt(lo) + (BigInt(hi) << 32n);
};
Buffer.prototype.readBigUint64BE =
Buffer.prototype.readBigUInt64BE =
defineBigIntMethod(
function readBigUInt64BE(offset) {
offset = offset >>> 0;
validateNumber(offset, "offset");
@ -1027,9 +1134,8 @@ Buffer.prototype.readBigUint64BE =
this[++offset] * 2 ** 8 + this[++offset];
const lo = this[++offset] * 2 ** 24 + this[++offset] * 2 ** 16 +
this[++offset] * 2 ** 8 + last;
return (BigInt(hi) << BigInt(32)) + BigInt(lo);
},
);
return (BigInt(hi) << 32n) + BigInt(lo);
};
Buffer.prototype.readIntLE = function readIntLE(
offset,
@ -1148,8 +1254,7 @@ Buffer.prototype.readInt32BE = function readInt32BE(offset = 0) {
last;
};
Buffer.prototype.readBigInt64LE = defineBigIntMethod(
function readBigInt64LE(offset) {
Buffer.prototype.readBigInt64LE = function readBigInt64LE(offset) {
offset = offset >>> 0;
validateNumber(offset, "offset");
const first = this[offset];
@ -1159,16 +1264,14 @@ Buffer.prototype.readBigInt64LE = defineBigIntMethod(
}
const val = this[offset + 4] + this[offset + 5] * 2 ** 8 +
this[offset + 6] * 2 ** 16 + (last << 24);
return (BigInt(val) << BigInt(32)) +
return (BigInt(val) << 32n) +
BigInt(
first + this[++offset] * 2 ** 8 + this[++offset] * 2 ** 16 +
this[++offset] * 2 ** 24,
);
},
);
};
Buffer.prototype.readBigInt64BE = defineBigIntMethod(
function readBigInt64BE(offset) {
Buffer.prototype.readBigInt64BE = function readBigInt64BE(offset) {
offset = offset >>> 0;
validateNumber(offset, "offset");
const first = this[offset];
@ -1178,13 +1281,12 @@ Buffer.prototype.readBigInt64BE = defineBigIntMethod(
}
const val = (first << 24) + this[++offset] * 2 ** 16 +
this[++offset] * 2 ** 8 + this[++offset];
return (BigInt(val) << BigInt(32)) +
return (BigInt(val) << 32n) +
BigInt(
this[++offset] * 2 ** 24 + this[++offset] * 2 ** 16 +
this[++offset] * 2 ** 8 + last,
);
},
);
};
Buffer.prototype.readFloatLE = function readFloatLE(offset) {
return bigEndian
@ -1293,7 +1395,7 @@ Buffer.prototype.writeUint32BE =
function wrtBigUInt64LE(buf, value, offset, min, max) {
checkIntBI(value, min, max, buf, offset, 7);
let lo = Number(value & BigInt(4294967295));
let lo = Number(value & 4294967295n);
buf[offset++] = lo;
lo = lo >> 8;
buf[offset++] = lo;
@ -1301,7 +1403,7 @@ function wrtBigUInt64LE(buf, value, offset, min, max) {
buf[offset++] = lo;
lo = lo >> 8;
buf[offset++] = lo;
let hi = Number(value >> BigInt(32) & BigInt(4294967295));
let hi = Number(value >> 32n & 4294967295n);
buf[offset++] = hi;
hi = hi >> 8;
buf[offset++] = hi;
@ -1314,7 +1416,7 @@ function wrtBigUInt64LE(buf, value, offset, min, max) {
function wrtBigUInt64BE(buf, value, offset, min, max) {
checkIntBI(value, min, max, buf, offset, 7);
let lo = Number(value & BigInt(4294967295));
let lo = Number(value & 4294967295n);
buf[offset + 7] = lo;
lo = lo >> 8;
buf[offset + 6] = lo;
@ -1322,7 +1424,7 @@ function wrtBigUInt64BE(buf, value, offset, min, max) {
buf[offset + 5] = lo;
lo = lo >> 8;
buf[offset + 4] = lo;
let hi = Number(value >> BigInt(32) & BigInt(4294967295));
let hi = Number(value >> 32n & 4294967295n);
buf[offset + 3] = hi;
hi = hi >> 8;
buf[offset + 2] = hi;
@ -1335,31 +1437,27 @@ function wrtBigUInt64BE(buf, value, offset, min, max) {
Buffer.prototype.writeBigUint64LE =
Buffer.prototype.writeBigUInt64LE =
defineBigIntMethod(
function writeBigUInt64LE(value, offset = 0) {
return wrtBigUInt64LE(
this,
value,
offset,
BigInt(0),
BigInt("0xffffffffffffffff"),
);
},
0n,
0xffffffffffffffffn,
);
};
Buffer.prototype.writeBigUint64BE =
Buffer.prototype.writeBigUInt64BE =
defineBigIntMethod(
function writeBigUInt64BE(value, offset = 0) {
return wrtBigUInt64BE(
this,
value,
offset,
BigInt(0),
BigInt("0xffffffffffffffff"),
);
},
0n,
0xffffffffffffffffn,
);
};
Buffer.prototype.writeIntLE = function writeIntLE(
value,
@ -1450,29 +1548,25 @@ Buffer.prototype.writeInt32BE = function writeInt32BE(value, offset = 0) {
return writeU_Int32BE(this, value, offset, -0x80000000, 0x7fffffff);
};
Buffer.prototype.writeBigInt64LE = defineBigIntMethod(
function writeBigInt64LE(value, offset = 0) {
Buffer.prototype.writeBigInt64LE = function writeBigInt64LE(value, offset = 0) {
return wrtBigUInt64LE(
this,
value,
offset,
-BigInt("0x8000000000000000"),
BigInt("0x7fffffffffffffff"),
);
},
-0x8000000000000000n,
0x7fffffffffffffffn,
);
};
Buffer.prototype.writeBigInt64BE = defineBigIntMethod(
function writeBigInt64BE(value, offset = 0) {
Buffer.prototype.writeBigInt64BE = function writeBigInt64BE(value, offset = 0) {
return wrtBigUInt64BE(
this,
value,
offset,
-BigInt("0x8000000000000000"),
BigInt("0x7fffffffffffffff"),
);
},
-0x8000000000000000n,
0x7fffffffffffffffn,
);
};
Buffer.prototype.writeFloatLE = function writeFloatLE(
value,
@ -1600,14 +1694,12 @@ Buffer.prototype.copy = function copy(
}
const len = sourceEnd - sourceStart;
if (
this === target && typeof Uint8Array.prototype.copyWithin === "function"
) {
this.copyWithin(targetStart, sourceStart, sourceEnd);
if (this === target) {
TypedArrayPrototypeCopyWithin(this, targetStart, sourceStart, sourceEnd);
} else {
Uint8Array.prototype.set.call(
TypedArrayPrototypeSet(
target,
this.subarray(sourceStart, sourceEnd),
TypedArrayPrototypeSubarray(this, sourceStart, sourceEnd),
targetStart,
);
}
@ -1627,11 +1719,11 @@ Buffer.prototype.fill = function fill(val, start, end, encoding) {
if (encoding !== void 0 && typeof encoding !== "string") {
throw new TypeError("encoding must be a string");
}
if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
if (typeof encoding === "string" && !BufferIsEncoding(encoding)) {
throw new TypeError("Unknown encoding: " + encoding);
}
if (val.length === 1) {
const code = val.charCodeAt(0);
const code = StringPrototypeCharCodeAt(val, 0);
if (encoding === "utf8" && code < 128 || encoding === "latin1") {
val = code;
}
@ -1658,7 +1750,7 @@ Buffer.prototype.fill = function fill(val, start, end, encoding) {
this[i] = val;
}
} else {
const bytes = Buffer.isBuffer(val) ? val : Buffer.from(val, encoding);
const bytes = BufferIsBuffer(val) ? val : BufferFrom(val, encoding);
const len = bytes.length;
if (len === 0) {
throw new codes.ERR_INVALID_ARG_VALUE(
@ -1685,7 +1777,7 @@ function checkIntBI(value, min, max, buf, offset, byteLength2) {
const n = typeof min === "bigint" ? "n" : "";
let range;
if (byteLength2 > 3) {
if (min === 0 || min === BigInt(0)) {
if (min === 0 || min === 0n) {
range = `>= 0${n} and < 2${n} ** ${(byteLength2 + 1) * 8}${n}`;
} else {
range = `>= -(2${n} ** ${(byteLength2 + 1) * 8 - 1}${n}) and < 2 ** ${
@ -1710,7 +1802,7 @@ function checkIntBI(value, min, max, buf, offset, byteLength2) {
function blitBuffer(src, dst, offset, byteLength = Infinity) {
const srcLength = src.length;
// Establish the number of bytes to be written
const bytesToWrite = Math.min(
const bytesToWrite = MathMin(
// If byte length is defined in the call, then it sets an upper bound,
// otherwise it is Infinity and is never chosen.
byteLength,
@ -1730,15 +1822,9 @@ function blitBuffer(src, dst, offset, byteLength = Infinity) {
return bytesToWrite;
}
function isInstance(obj, type) {
return obj instanceof type ||
obj != null && obj.constructor != null &&
obj.constructor.name != null && obj.constructor.name === type.name;
}
const hexSliceLookupTable = function () {
const alphabet = "0123456789abcdef";
const table = new Array(256);
const table = [];
for (let i = 0; i < 16; ++i) {
const i16 = i * 16;
for (let j = 0; j < 16; ++j) {
@ -1748,14 +1834,6 @@ const hexSliceLookupTable = function () {
return table;
}();
function defineBigIntMethod(fn) {
return typeof BigInt === "undefined" ? BufferBigIntNotDefined : fn;
}
function BufferBigIntNotDefined() {
throw new Error("BigInt not supported");
}
export function readUInt48LE(buf, offset = 0) {
validateNumber(offset, "offset");
const first = buf[offset];
@ -2079,10 +2157,10 @@ export function byteLengthUtf8(str) {
function base64ByteLength(str, bytes) {
// Handle padding
if (str.charCodeAt(bytes - 1) === 0x3D) {
if (StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D) {
bytes--;
}
if (bytes > 1 && str.charCodeAt(bytes - 1) === 0x3D) {
if (bytes > 1 && StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D) {
bytes--;
}
@ -2090,7 +2168,7 @@ function base64ByteLength(str, bytes) {
return (bytes * 3) >>> 2;
}
export const encodingsMap = Object.create(null);
export const encodingsMap = ObjectCreate(null);
for (let i = 0; i < encodings.length; ++i) {
encodingsMap[encodings[i]] = i;
}
@ -2220,7 +2298,7 @@ export const encodingOps = {
};
export function getEncodingOps(encoding) {
encoding = String(encoding).toLowerCase();
encoding = StringPrototypeToLowerCase(String(encoding));
switch (encoding.length) {
case 4:
if (encoding === "utf8") return encodingOps.utf8;
@ -2260,6 +2338,14 @@ export function getEncodingOps(encoding) {
}
}
/**
* @param {Buffer} source
* @param {Buffer} target
* @param {number} targetStart
* @param {number} sourceStart
* @param {number} sourceEnd
* @returns {number}
*/
export function _copyActual(
source,
target,
@ -2278,6 +2364,7 @@ export function _copyActual(
}
if (sourceStart !== 0 || sourceEnd < source.length) {
// deno-lint-ignore prefer-primordials
source = new Uint8Array(source.buffer, source.byteOffset + sourceStart, nb);
}
@ -2287,7 +2374,7 @@ export function _copyActual(
}
export function boundsError(value, length, type) {
if (Math.floor(value) !== value) {
if (MathFloor(value) !== value) {
validateNumber(value, type);
throw new codes.ERR_OUT_OF_RANGE(type || "offset", "an integer", value);
}
@ -2310,7 +2397,7 @@ export function validateNumber(value, name, min = undefined, max) {
if (
(min != null && value < min) || (max != null && value > max) ||
((min != null || max != null) && Number.isNaN(value))
((min != null || max != null) && NumberIsNaN(value))
) {
throw new codes.ERR_OUT_OF_RANGE(
name,
@ -2344,11 +2431,11 @@ function checkInt(value, min, max, buf, offset, byteLength) {
export function toInteger(n, defaultVal) {
n = +n;
if (
!Number.isNaN(n) &&
n >= Number.MIN_SAFE_INTEGER &&
n <= Number.MAX_SAFE_INTEGER
!NumberIsNaN(n) &&
n >= NumberMIN_SAFE_INTEGER &&
n <= NumberMAX_SAFE_INTEGER
) {
return ((n % 1) === 0 ? n : Math.floor(n));
return ((n % 1) === 0 ? n : MathFloor(n));
}
return defaultVal;
}
@ -2421,7 +2508,7 @@ export function writeU_Int48BE(buf, value, offset, min, max) {
value = +value;
checkInt(value, min, max, buf, offset, 5);
const newVal = Math.floor(value * 2 ** -32);
const newVal = MathFloor(value * 2 ** -32);
buf[offset++] = newVal >>> 8;
buf[offset++] = newVal;
buf[offset + 3] = value;
@ -2439,7 +2526,7 @@ export function writeU_Int40BE(buf, value, offset, min, max) {
value = +value;
checkInt(value, min, max, buf, offset, 4);
buf[offset++] = Math.floor(value * 2 ** -32);
buf[offset++] = MathFloor(value * 2 ** -32);
buf[offset + 3] = value;
value = value >>> 8;
buf[offset + 2] = value;
@ -2482,12 +2569,12 @@ export function validateOffset(
value,
name,
min = 0,
max = Number.MAX_SAFE_INTEGER,
max = NumberMAX_SAFE_INTEGER,
) {
if (typeof value !== "number") {
throw new codes.ERR_INVALID_ARG_TYPE(name, "number", value);
}
if (!Number.isInteger(value)) {
if (!NumberIsInteger(value)) {
throw new codes.ERR_OUT_OF_RANGE(name, "an integer", value);
}
if (value < min || value > max) {
@ -2500,7 +2587,7 @@ export function writeU_Int48LE(buf, value, offset, min, max) {
value = +value;
checkInt(value, min, max, buf, offset, 5);
const newVal = Math.floor(value * 2 ** -32);
const newVal = MathFloor(value * 2 ** -32);
buf[offset++] = value;
value = value >>> 8;
buf[offset++] = value;
@ -2526,7 +2613,7 @@ export function writeU_Int40LE(buf, value, offset, min, max) {
buf[offset++] = value;
value = value >>> 8;
buf[offset++] = value;
buf[offset++] = Math.floor(newVal * 2 ** -32);
buf[offset++] = MathFloor(newVal * 2 ** -32);
return offset;
}
@ -2560,14 +2647,14 @@ export function writeU_Int24LE(buf, value, offset, min, max) {
export function isUtf8(input) {
if (isTypedArray(input)) {
if (input.buffer.detached) {
if (isDetachedBuffer(TypedArrayPrototypeGetBuffer(input))) {
throw new ERR_INVALID_STATE("Cannot validate on a detached buffer");
}
return op_is_utf8(input);
}
if (isAnyArrayBuffer(input)) {
if (input.detached) {
if (isDetachedBuffer(input)) {
throw new ERR_INVALID_STATE("Cannot validate on a detached buffer");
}
return op_is_utf8(new Uint8Array(input));
@ -2582,14 +2669,14 @@ export function isUtf8(input) {
export function isAscii(input) {
if (isTypedArray(input)) {
if (input.buffer.detached) {
if (isDetachedBuffer(TypedArrayPrototypeGetBuffer(input))) {
throw new ERR_INVALID_STATE("Cannot validate on a detached buffer");
}
return op_is_ascii(input);
}
if (isAnyArrayBuffer(input)) {
if (input.detached) {
if (isDetachedBuffer(input)) {
throw new ERR_INVALID_STATE("Cannot validate on a detached buffer");
}
return op_is_ascii(new Uint8Array(input));
@ -2636,7 +2723,7 @@ export function transcode(source, fromEnco, toEnco) {
const result = op_transcode(new Uint8Array(source), fromEnco, toEnco);
return Buffer.from(result, toEnco);
} catch (err) {
if (err.message.includes("Unable to transcode Buffer")) {
if (StringPrototypeIncludes(err.message, "Unable to transcode Buffer")) {
throw illegalArgumentError;
} else {
throw err;

View file

@ -1191,8 +1191,12 @@ function toDenoArgs(args: string[]): string[] {
}
if (flagInfo === undefined) {
if (arg === "--no-warnings") {
denoArgs.push("--quiet");
} else {
// Not a known flag that expects a value. Just copy it to the output.
denoArgs.push(arg);
}
continue;
}

View file

@ -416,20 +416,10 @@ export function emitInvalidHostnameWarning(hostname: string) {
);
}
let dnsOrder = getOptionValue("--dns-result-order") || "ipv4first";
let dnsOrder = getOptionValue("--dns-result-order") || "verbatim";
export function getDefaultVerbatim() {
switch (dnsOrder) {
case "verbatim": {
return true;
}
case "ipv4first": {
return false;
}
default: {
return false;
}
}
return dnsOrder !== "ipv4first";
}
/**

View file

@ -5,10 +5,11 @@
import { Buffer } from "node:buffer";
function assert(cond) {
if (!cond) {
throw new Error("assertion failed");
function toDataView(ab: ArrayBufferLike | ArrayBufferView): DataView {
if (ArrayBuffer.isView(ab)) {
return new DataView(ab.buffer, ab.byteOffset, ab.byteLength);
}
return new DataView(ab);
}
/** Compare to array buffers or data views in a way that timing based attacks
@ -21,13 +22,11 @@ function stdTimingSafeEqual(
return false;
}
if (!(a instanceof DataView)) {
a = new DataView(ArrayBuffer.isView(a) ? a.buffer : a);
a = toDataView(a);
}
if (!(b instanceof DataView)) {
b = new DataView(ArrayBuffer.isView(b) ? b.buffer : b);
b = toDataView(b);
}
assert(a instanceof DataView);
assert(b instanceof DataView);
const length = a.byteLength;
let out = 0;
let i = -1;
@ -41,7 +40,11 @@ export const timingSafeEqual = (
a: Buffer | DataView | ArrayBuffer,
b: Buffer | DataView | ArrayBuffer,
): boolean => {
if (a instanceof Buffer) a = new DataView(a.buffer);
if (a instanceof Buffer) b = new DataView(a.buffer);
if (a instanceof Buffer) {
a = new DataView(a.buffer, a.byteOffset, a.byteLength);
}
if (b instanceof Buffer) {
b = new DataView(b.buffer, b.byteOffset, b.byteLength);
}
return stdTimingSafeEqual(a, b);
};

View file

@ -75,11 +75,18 @@ export function getaddrinfo(
const recordTypes: ("A" | "AAAA")[] = [];
if (family === 0 || family === 4) {
recordTypes.push("A");
}
if (family === 0 || family === 6) {
if (family === 6) {
recordTypes.push("AAAA");
} else if (family === 4) {
recordTypes.push("A");
} else if (family === 0 && hostname === "localhost") {
// Ipv6 is preferred over Ipv4 for localhost
recordTypes.push("AAAA");
recordTypes.push("A");
} else if (family === 0) {
// Only get Ipv4 addresses for the other hostnames
// This simulates what `getaddrinfo` does when the family is not specified
recordTypes.push("A");
}
(async () => {

View file

@ -303,8 +303,8 @@ export class TCP extends ConnectionWrap {
* @param noDelay
* @return An error status code.
*/
setNoDelay(_noDelay: boolean): number {
// TODO(bnoordhuis) https://github.com/denoland/deno/pull/13103
setNoDelay(noDelay: boolean): number {
this[kStreamBaseField].setNoDelay(noDelay);
return 0;
}

View file

@ -1879,23 +1879,13 @@ function _setupListenHandle(
// Try to bind to the unspecified IPv6 address, see if IPv6 is available
if (!address && typeof fd !== "number") {
// TODO(@bartlomieju): differs from Node which tries to bind to IPv6 first
// when no address is provided.
//
// Forcing IPv4 as a workaround for Deno not aligning with Node on
// implicit binding on Windows.
//
// REF: https://github.com/denoland/deno/issues/10762
// rval = _createServerHandle(DEFAULT_IPV6_ADDR, port, 6, fd, flags);
// if (typeof rval === "number") {
// rval = null;
if (isWindows) {
address = DEFAULT_IPV4_ADDR;
addressType = 4;
// } else {
// address = DEFAULT_IPV6_ADDR;
// addressType = 6;
// }
} else {
address = DEFAULT_IPV6_ADDR;
addressType = 6;
}
}
if (rval === null) {

View file

@ -39,6 +39,7 @@ import {
formatWithOptions,
inspect,
stripVTControlCharacters,
styleText,
} from "ext:deno_node/internal/util/inspect.mjs";
import { codes } from "ext:deno_node/internal/error_codes.ts";
import types from "node:util/types";
@ -63,6 +64,7 @@ export {
parseArgs,
promisify,
stripVTControlCharacters,
styleText,
types,
};
@ -354,4 +356,5 @@ export default {
debuglog,
debug: debuglog,
isDeepStrictEqual,
styleText,
};

View file

@ -21,5 +21,6 @@ rustls-pemfile.workspace = true
rustls-tokio-stream.workspace = true
rustls-webpki.workspace = true
serde.workspace = true
thiserror.workspace = true
tokio.workspace = true
webpki-roots.workspace = true

View file

@ -9,17 +9,12 @@ pub use rustls_tokio_stream::*;
pub use webpki;
pub use webpki_roots;
use deno_core::anyhow::anyhow;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use rustls::client::danger::HandshakeSignatureValid;
use rustls::client::danger::ServerCertVerified;
use rustls::client::danger::ServerCertVerifier;
use rustls::client::WebPkiServerVerifier;
use rustls::ClientConfig;
use rustls::DigitallySignedStruct;
use rustls::Error;
use rustls::RootCertStore;
use rustls_pemfile::certs;
use rustls_pemfile::ec_private_keys;
@ -35,12 +30,30 @@ use std::sync::Arc;
mod tls_key;
pub use tls_key::*;
#[derive(Debug, thiserror::Error)]
pub enum TlsError {
#[error(transparent)]
Rustls(#[from] rustls::Error),
#[error("Unable to add pem file to certificate store: {0}")]
UnableAddPemFileToCert(std::io::Error),
#[error("Unable to decode certificate")]
CertInvalid,
#[error("No certificates found in certificate data")]
CertsNotFound,
#[error("No keys found in key data")]
KeysNotFound,
#[error("Unable to decode key")]
KeyDecode,
}
/// Lazily resolves the root cert store.
///
/// This was done because the root cert store is not needed in all cases
/// and takes a bit of time to initialize.
pub trait RootCertStoreProvider: Send + Sync {
fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError>;
fn get_or_try_init(
&self,
) -> Result<&RootCertStore, deno_core::error::AnyError>;
}
// This extension has no runtime apis, it only exports some shared native functions.
@ -77,7 +90,7 @@ impl ServerCertVerifier for NoCertificateVerification {
server_name: &rustls::pki_types::ServerName<'_>,
ocsp_response: &[u8],
now: rustls::pki_types::UnixTime,
) -> Result<ServerCertVerified, Error> {
) -> Result<ServerCertVerified, rustls::Error> {
if self.ic_allowlist.is_empty() {
return Ok(ServerCertVerified::assertion());
}
@ -89,7 +102,9 @@ impl ServerCertVerifier for NoCertificateVerification {
_ => {
// NOTE(bartlomieju): `ServerName` is a non-exhaustive enum
// so we have this catch all errors here.
return Err(Error::General("Unknown `ServerName` variant".to_string()));
return Err(rustls::Error::General(
"Unknown `ServerName` variant".to_string(),
));
}
};
if self.ic_allowlist.contains(&dns_name_or_ip_address) {
@ -110,7 +125,7 @@ impl ServerCertVerifier for NoCertificateVerification {
message: &[u8],
cert: &rustls::pki_types::CertificateDer,
dss: &DigitallySignedStruct,
) -> Result<HandshakeSignatureValid, Error> {
) -> Result<HandshakeSignatureValid, rustls::Error> {
if self.ic_allowlist.is_empty() {
return Ok(HandshakeSignatureValid::assertion());
}
@ -126,7 +141,7 @@ impl ServerCertVerifier for NoCertificateVerification {
message: &[u8],
cert: &rustls::pki_types::CertificateDer,
dss: &DigitallySignedStruct,
) -> Result<HandshakeSignatureValid, Error> {
) -> Result<HandshakeSignatureValid, rustls::Error> {
if self.ic_allowlist.is_empty() {
return Ok(HandshakeSignatureValid::assertion());
}
@ -178,7 +193,7 @@ pub fn create_client_config(
unsafely_ignore_certificate_errors: Option<Vec<String>>,
maybe_cert_chain_and_key: TlsKeys,
socket_use: SocketUse,
) -> Result<ClientConfig, AnyError> {
) -> Result<ClientConfig, TlsError> {
if let Some(ic_allowlist) = unsafely_ignore_certificate_errors {
let client_config = ClientConfig::builder()
.dangerous()
@ -214,10 +229,7 @@ pub fn create_client_config(
root_cert_store.add(cert)?;
}
Err(e) => {
return Err(anyhow!(
"Unable to add pem file to certificate store: {}",
e
));
return Err(TlsError::UnableAddPemFileToCert(e));
}
}
}
@ -255,74 +267,61 @@ fn add_alpn(client: &mut ClientConfig, socket_use: SocketUse) {
pub fn load_certs(
reader: &mut dyn BufRead,
) -> Result<Vec<CertificateDer<'static>>, AnyError> {
) -> Result<Vec<CertificateDer<'static>>, TlsError> {
let certs: Result<Vec<_>, _> = certs(reader).collect();
let certs = certs
.map_err(|_| custom_error("InvalidData", "Unable to decode certificate"))?;
let certs = certs.map_err(|_| TlsError::CertInvalid)?;
if certs.is_empty() {
return Err(cert_not_found_err());
return Err(TlsError::CertsNotFound);
}
Ok(certs)
}
fn key_decode_err() -> AnyError {
custom_error("InvalidData", "Unable to decode key")
}
fn key_not_found_err() -> AnyError {
custom_error("InvalidData", "No keys found in key data")
}
fn cert_not_found_err() -> AnyError {
custom_error("InvalidData", "No certificates found in certificate data")
}
/// Starts with -----BEGIN RSA PRIVATE KEY-----
fn load_rsa_keys(
mut bytes: &[u8],
) -> Result<Vec<PrivateKeyDer<'static>>, AnyError> {
) -> Result<Vec<PrivateKeyDer<'static>>, TlsError> {
let keys: Result<Vec<_>, _> = rsa_private_keys(&mut bytes).collect();
let keys = keys.map_err(|_| key_decode_err())?;
let keys = keys.map_err(|_| TlsError::KeyDecode)?;
Ok(keys.into_iter().map(PrivateKeyDer::Pkcs1).collect())
}
/// Starts with -----BEGIN EC PRIVATE KEY-----
fn load_ec_keys(
mut bytes: &[u8],
) -> Result<Vec<PrivateKeyDer<'static>>, AnyError> {
) -> Result<Vec<PrivateKeyDer<'static>>, TlsError> {
let keys: Result<Vec<_>, std::io::Error> =
ec_private_keys(&mut bytes).collect();
let keys2 = keys.map_err(|_| key_decode_err())?;
let keys2 = keys.map_err(|_| TlsError::KeyDecode)?;
Ok(keys2.into_iter().map(PrivateKeyDer::Sec1).collect())
}
/// Starts with -----BEGIN PRIVATE KEY-----
fn load_pkcs8_keys(
mut bytes: &[u8],
) -> Result<Vec<PrivateKeyDer<'static>>, AnyError> {
) -> Result<Vec<PrivateKeyDer<'static>>, TlsError> {
let keys: Result<Vec<_>, std::io::Error> =
pkcs8_private_keys(&mut bytes).collect();
let keys2 = keys.map_err(|_| key_decode_err())?;
let keys2 = keys.map_err(|_| TlsError::KeyDecode)?;
Ok(keys2.into_iter().map(PrivateKeyDer::Pkcs8).collect())
}
fn filter_invalid_encoding_err(
to_be_filtered: Result<HandshakeSignatureValid, Error>,
) -> Result<HandshakeSignatureValid, Error> {
to_be_filtered: Result<HandshakeSignatureValid, rustls::Error>,
) -> Result<HandshakeSignatureValid, rustls::Error> {
match to_be_filtered {
Err(Error::InvalidCertificate(rustls::CertificateError::BadEncoding)) => {
Ok(HandshakeSignatureValid::assertion())
}
Err(rustls::Error::InvalidCertificate(
rustls::CertificateError::BadEncoding,
)) => Ok(HandshakeSignatureValid::assertion()),
res => res,
}
}
pub fn load_private_keys(
bytes: &[u8],
) -> Result<Vec<PrivateKeyDer<'static>>, AnyError> {
) -> Result<Vec<PrivateKeyDer<'static>>, TlsError> {
let mut keys = load_rsa_keys(bytes)?;
if keys.is_empty() {
@ -334,7 +333,7 @@ pub fn load_private_keys(
}
if keys.is_empty() {
return Err(key_not_found_err());
return Err(TlsError::KeysNotFound);
}
Ok(keys)

View file

@ -11,8 +11,6 @@
//! key lookup can handle closing one end of the pair, in which case they will just
//! attempt to clean up the associated resources.
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::futures::future::poll_fn;
use deno_core::futures::future::Either;
use deno_core::futures::FutureExt;
@ -33,7 +31,19 @@ use tokio::sync::oneshot;
use webpki::types::CertificateDer;
use webpki::types::PrivateKeyDer;
type ErrorType = Rc<AnyError>;
#[derive(Debug, thiserror::Error)]
pub enum TlsKeyError {
#[error(transparent)]
Rustls(#[from] rustls::Error),
#[error("Failed: {0}")]
Failed(ErrorType),
#[error(transparent)]
JoinError(#[from] tokio::task::JoinError),
#[error(transparent)]
RecvError(#[from] tokio::sync::broadcast::error::RecvError),
}
type ErrorType = Arc<Box<str>>;
/// A TLS certificate/private key pair.
/// see https://docs.rs/rustls-pki-types/latest/rustls_pki_types/#cloning-private-keys
@ -114,7 +124,7 @@ impl TlsKeyResolver {
&self,
sni: String,
alpn: Vec<Vec<u8>>,
) -> Result<Arc<ServerConfig>, AnyError> {
) -> Result<Arc<ServerConfig>, TlsKeyError> {
let key = self.resolve(sni).await?;
let mut tls_config = ServerConfig::builder()
@ -183,7 +193,7 @@ impl TlsKeyResolver {
pub fn resolve(
&self,
sni: String,
) -> impl Future<Output = Result<TlsKey, AnyError>> {
) -> impl Future<Output = Result<TlsKey, TlsKeyError>> {
let mut cache = self.inner.cache.borrow_mut();
let mut recv = match cache.get(&sni) {
None => {
@ -194,7 +204,7 @@ impl TlsKeyResolver {
}
Some(TlsKeyState::Resolving(recv)) => recv.resubscribe(),
Some(TlsKeyState::Resolved(res)) => {
return Either::Left(ready(res.clone().map_err(|_| anyhow!("Failed"))));
return Either::Left(ready(res.clone().map_err(TlsKeyError::Failed)));
}
};
drop(cache);
@ -212,7 +222,7 @@ impl TlsKeyResolver {
// Someone beat us to it
}
}
res.map_err(|_| anyhow!("Failed"))
res.map_err(TlsKeyError::Failed)
});
Either::Right(async move { handle.await? })
}
@ -247,13 +257,13 @@ impl TlsKeyLookup {
}
/// Resolve a previously polled item.
pub fn resolve(&self, sni: String, res: Result<TlsKey, AnyError>) {
pub fn resolve(&self, sni: String, res: Result<TlsKey, String>) {
_ = self
.pending
.borrow_mut()
.remove(&sni)
.unwrap()
.send(res.map_err(Rc::new));
.send(res.map_err(|e| Arc::new(e.into_boxed_str())));
}
}

View file

@ -15,6 +15,7 @@ path = "lib.rs"
[dependencies]
deno_core.workspace = true
thiserror.workspace = true
urlpattern = "0.3.0"
[dev-dependencies]

View file

@ -15,6 +15,8 @@ use std::path::PathBuf;
use crate::urlpattern::op_urlpattern_parse;
use crate::urlpattern::op_urlpattern_process_match_input;
pub use urlpattern::UrlPatternError;
deno_core::extension!(
deno_url,
deps = [deno_webidl],

View file

@ -1,7 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::op2;
use urlpattern::quirks;
@ -9,21 +7,23 @@ use urlpattern::quirks::MatchInput;
use urlpattern::quirks::StringOrInit;
use urlpattern::quirks::UrlPattern;
#[derive(Debug, thiserror::Error)]
#[error(transparent)]
pub struct UrlPatternError(urlpattern::Error);
#[op2]
#[serde]
pub fn op_urlpattern_parse(
#[serde] input: StringOrInit,
#[string] base_url: Option<String>,
#[serde] options: urlpattern::UrlPatternOptions,
) -> Result<UrlPattern, AnyError> {
let init = urlpattern::quirks::process_construct_pattern_input(
input,
base_url.as_deref(),
)
.map_err(|e| type_error(e.to_string()))?;
) -> Result<UrlPattern, UrlPatternError> {
let init =
quirks::process_construct_pattern_input(input, base_url.as_deref())
.map_err(UrlPatternError)?;
let pattern = urlpattern::quirks::parse_pattern(init, options)
.map_err(|e| type_error(e.to_string()))?;
let pattern =
quirks::parse_pattern(init, options).map_err(UrlPatternError)?;
Ok(pattern)
}
@ -33,14 +33,14 @@ pub fn op_urlpattern_parse(
pub fn op_urlpattern_process_match_input(
#[serde] input: StringOrInit,
#[string] base_url: Option<String>,
) -> Result<Option<(MatchInput, quirks::Inputs)>, AnyError> {
let res = urlpattern::quirks::process_match_input(input, base_url.as_deref())
.map_err(|e| type_error(e.to_string()))?;
) -> Result<Option<(MatchInput, quirks::Inputs)>, UrlPatternError> {
let res = quirks::process_match_input(input, base_url.as_deref())
.map_err(UrlPatternError)?;
let (input, inputs) = match res {
Some((input, inputs)) => (input, inputs),
None => return Ok(None),
};
Ok(urlpattern::quirks::parse_match_input(input).map(|input| (input, inputs)))
Ok(quirks::parse_match_input(input).map(|input| (input, inputs)))
}

View file

@ -44,7 +44,7 @@ mod macros {
#[cfg(all(not(target_arch = "wasm32"), windows))]
wgpu_types::Backend::Dx12 => $($c)*.$method::<wgpu_core::api::Dx12> $params,
#[cfg(any(
all(unix, not(target_os = "macos"), not(target_os = "ios")),
all(not(target_os = "macos"), not(target_os = "ios")),
feature = "angle",
target_arch = "wasm32"
))]

View file

@ -349,6 +349,7 @@ pub fn create_ws_client_config(
TlsKeys::Null,
socket_use,
)
.map_err(|e| e.into())
}
/// Headers common to both http/1.1 and h2 requests.

View file

@ -17,3 +17,4 @@ path = "lib.rs"
deno_core.workspace = true
deno_web.workspace = true
rusqlite.workspace = true
thiserror.workspace = true

View file

@ -2,10 +2,8 @@
// NOTE to all: use **cached** prepared statements when interfacing with SQLite.
use std::fmt;
use std::path::PathBuf;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::OpState;
use rusqlite::params;
@ -14,6 +12,18 @@ use rusqlite::OptionalExtension;
pub use rusqlite;
#[derive(Debug, thiserror::Error)]
pub enum WebStorageError {
#[error("LocalStorage is not supported in this context.")]
ContextNotSupported,
#[error(transparent)]
Sqlite(#[from] rusqlite::Error),
#[error(transparent)]
Io(std::io::Error),
#[error("Exceeded maximum storage size")]
StorageExceeded,
}
#[derive(Clone)]
struct OriginStorageDir(PathBuf);
@ -51,15 +61,13 @@ struct SessionStorage(Connection);
fn get_webstorage(
state: &mut OpState,
persistent: bool,
) -> Result<&Connection, AnyError> {
) -> Result<&Connection, WebStorageError> {
let conn = if persistent {
if state.try_borrow::<LocalStorage>().is_none() {
let path = state.try_borrow::<OriginStorageDir>().ok_or_else(|| {
DomExceptionNotSupportedError::new(
"LocalStorage is not supported in this context.",
)
})?;
std::fs::create_dir_all(&path.0)?;
let path = state
.try_borrow::<OriginStorageDir>()
.ok_or(WebStorageError::ContextNotSupported)?;
std::fs::create_dir_all(&path.0).map_err(WebStorageError::Io)?;
let conn = Connection::open(path.0.join("local_storage"))?;
// Enable write-ahead-logging and tweak some other stuff.
let initial_pragmas = "
@ -106,7 +114,7 @@ fn get_webstorage(
pub fn op_webstorage_length(
state: &mut OpState,
persistent: bool,
) -> Result<u32, AnyError> {
) -> Result<u32, WebStorageError> {
let conn = get_webstorage(state, persistent)?;
let mut stmt = conn.prepare_cached("SELECT COUNT(*) FROM data")?;
@ -121,7 +129,7 @@ pub fn op_webstorage_key(
state: &mut OpState,
#[smi] index: u32,
persistent: bool,
) -> Result<Option<String>, AnyError> {
) -> Result<Option<String>, WebStorageError> {
let conn = get_webstorage(state, persistent)?;
let mut stmt =
@ -135,14 +143,9 @@ pub fn op_webstorage_key(
}
#[inline]
fn size_check(input: usize) -> Result<(), AnyError> {
fn size_check(input: usize) -> Result<(), WebStorageError> {
if input >= MAX_STORAGE_BYTES {
return Err(
deno_web::DomExceptionQuotaExceededError::new(
"Exceeded maximum storage size",
)
.into(),
);
return Err(WebStorageError::StorageExceeded);
}
Ok(())
@ -154,7 +157,7 @@ pub fn op_webstorage_set(
#[string] key: &str,
#[string] value: &str,
persistent: bool,
) -> Result<(), AnyError> {
) -> Result<(), WebStorageError> {
let conn = get_webstorage(state, persistent)?;
size_check(key.len() + value.len())?;
@ -178,7 +181,7 @@ pub fn op_webstorage_get(
state: &mut OpState,
#[string] key_name: String,
persistent: bool,
) -> Result<Option<String>, AnyError> {
) -> Result<Option<String>, WebStorageError> {
let conn = get_webstorage(state, persistent)?;
let mut stmt = conn.prepare_cached("SELECT value FROM data WHERE key = ?")?;
@ -194,7 +197,7 @@ pub fn op_webstorage_remove(
state: &mut OpState,
#[string] key_name: &str,
persistent: bool,
) -> Result<(), AnyError> {
) -> Result<(), WebStorageError> {
let conn = get_webstorage(state, persistent)?;
let mut stmt = conn.prepare_cached("DELETE FROM data WHERE key = ?")?;
@ -207,7 +210,7 @@ pub fn op_webstorage_remove(
pub fn op_webstorage_clear(
state: &mut OpState,
persistent: bool,
) -> Result<(), AnyError> {
) -> Result<(), WebStorageError> {
let conn = get_webstorage(state, persistent)?;
let mut stmt = conn.prepare_cached("DELETE FROM data")?;
@ -221,7 +224,7 @@ pub fn op_webstorage_clear(
pub fn op_webstorage_iterate_keys(
state: &mut OpState,
persistent: bool,
) -> Result<Vec<String>, AnyError> {
) -> Result<Vec<String>, WebStorageError> {
let conn = get_webstorage(state, persistent)?;
let mut stmt = conn.prepare_cached("SELECT key FROM data")?;
@ -232,31 +235,3 @@ pub fn op_webstorage_iterate_keys(
Ok(keys)
}
// Legacy hand-rolled error type; superseded by `WebStorageError::ContextNotSupported`.
#[derive(Debug)]
pub struct DomExceptionNotSupportedError {
  // Human-readable message rendered by the Display impl below.
  pub msg: String,
}
impl DomExceptionNotSupportedError {
  // Constructs the error from a message, taking an owned copy.
  pub fn new(msg: &str) -> Self {
    DomExceptionNotSupportedError {
      msg: msg.to_string(),
    }
  }
}
impl fmt::Display for DomExceptionNotSupportedError {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    // `pad` honors any width/alignment flags given in the format string.
    f.pad(&self.msg)
  }
}
impl std::error::Error for DomExceptionNotSupportedError {}
// Maps this error (when found inside an AnyError) onto the JS error class
// name used by the runtime's error classification; None if it isn't one.
pub fn get_not_supported_error_class_name(
  e: &AnyError,
) -> Option<&'static str> {
  e.downcast_ref::<DomExceptionNotSupportedError>()
    .map(|_| "DOMExceptionNotSupportedError")
}

View file

@ -100,6 +100,7 @@ deno_websocket.workspace = true
deno_webstorage.workspace = true
node_resolver = { workspace = true, features = ["sync"] }
color-print.workspace = true
dlopen2.workspace = true
encoding_rs.workspace = true
fastwebsockets.workspace = true

View file

@ -9,10 +9,22 @@
//! Diagnostics are compile-time type errors, whereas JsErrors are runtime
//! exceptions.
use deno_broadcast_channel::BroadcastChannelError;
use deno_cache::CacheError;
use deno_canvas::CanvasError;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url;
use deno_core::ModuleResolutionError;
use deno_cron::CronError;
use deno_ffi::CallError;
use deno_ffi::CallbackError;
use deno_ffi::DlfcnError;
use deno_ffi::IRError;
use deno_ffi::ReprError;
use deno_ffi::StaticError;
use deno_tls::TlsError;
use deno_webstorage::WebStorageError;
use std::env;
use std::error::Error;
use std::io;
@ -153,12 +165,169 @@ pub fn get_nix_error_class(error: &nix::Error) -> &'static str {
}
}
fn get_ffi_repr_error_class(e: &ReprError) -> &'static str {
match e {
ReprError::InvalidOffset => "TypeError",
ReprError::InvalidArrayBuffer => "TypeError",
ReprError::DestinationLengthTooShort => "RangeError",
ReprError::InvalidCString => "TypeError",
ReprError::CStringTooLong => "TypeError",
ReprError::InvalidBool => "TypeError",
ReprError::InvalidU8 => "TypeError",
ReprError::InvalidI8 => "TypeError",
ReprError::InvalidU16 => "TypeError",
ReprError::InvalidI16 => "TypeError",
ReprError::InvalidU32 => "TypeError",
ReprError::InvalidI32 => "TypeError",
ReprError::InvalidU64 => "TypeError",
ReprError::InvalidI64 => "TypeError",
ReprError::InvalidF32 => "TypeError",
ReprError::InvalidF64 => "TypeError",
ReprError::InvalidPointer => "TypeError",
ReprError::Permission(e) => get_error_class_name(e).unwrap_or("Error"),
}
}
fn get_ffi_dlfcn_error_class(e: &DlfcnError) -> &'static str {
match e {
DlfcnError::RegisterSymbol { .. } => "Error",
DlfcnError::Dlopen(_) => "Error",
DlfcnError::Permission(e) => get_error_class_name(e).unwrap_or("Error"),
DlfcnError::Other(e) => get_error_class_name(e).unwrap_or("Error"),
}
}
fn get_ffi_static_error_class(e: &StaticError) -> &'static str {
match e {
StaticError::Dlfcn(e) => get_ffi_dlfcn_error_class(e),
StaticError::InvalidTypeVoid => "TypeError",
StaticError::InvalidTypeStruct => "TypeError",
StaticError::Resource(e) => get_error_class_name(e).unwrap_or("Error"),
}
}
fn get_ffi_callback_error_class(e: &CallbackError) -> &'static str {
match e {
CallbackError::Resource(e) => get_error_class_name(e).unwrap_or("Error"),
CallbackError::Other(e) => get_error_class_name(e).unwrap_or("Error"),
CallbackError::Permission(e) => get_error_class_name(e).unwrap_or("Error"),
}
}
fn get_ffi_call_error_class(e: &CallError) -> &'static str {
match e {
CallError::IR(_) => "TypeError",
CallError::NonblockingCallFailure(_) => "Error",
CallError::InvalidSymbol(_) => "TypeError",
CallError::Permission(e) => get_error_class_name(e).unwrap_or("Error"),
CallError::Callback(e) => get_ffi_callback_error_class(e),
}
}
fn get_webstorage_class_name(e: &WebStorageError) -> &'static str {
match e {
WebStorageError::ContextNotSupported => "DOMExceptionNotSupportedError",
WebStorageError::Sqlite(_) => todo!(),
WebStorageError::Io(e) => get_io_error_class(e),
WebStorageError::StorageExceeded => "DOMExceptionQuotaExceededError",
}
}
/// JS error class for TLS configuration / certificate errors.
fn get_tls_error_class(e: &TlsError) -> &'static str {
  match e {
    // Certificate / key material problems surface as invalid data.
    TlsError::CertInvalid
    | TlsError::CertsNotFound
    | TlsError::KeysNotFound
    | TlsError::KeyDecode => "InvalidData",
    // Failures reading a PEM file map onto the io error's class.
    TlsError::UnableAddPemFileToCert(inner) => get_io_error_class(inner),
    TlsError::Rustls(_) => "Error",
  }
}
pub fn get_cron_error_class(e: &CronError) -> &'static str {
match e {
CronError::Resource(e) => {
deno_core::error::get_custom_error_class(e).unwrap_or("Error")
}
CronError::NameExceeded(_) => "TypeError",
CronError::NameInvalid => "TypeError",
CronError::AlreadyExists => "TypeError",
CronError::TooManyCrons => "TypeError",
CronError::InvalidCron => "TypeError",
CronError::InvalidBackoff => "TypeError",
CronError::AcquireError(_) => "Error",
CronError::Other(e) => get_error_class_name(e).unwrap_or("Error"),
}
}
/// JS error class for canvas / image decoding errors.
fn get_canvas_error(e: &CanvasError) -> &'static str {
  match e {
    // Decoding failures are plain errors; a bad color type is caller error.
    CanvasError::Image(_) => "Error",
    CanvasError::UnsupportedColorType(_) => "TypeError",
  }
}
pub fn get_cache_error(error: &CacheError) -> &'static str {
match error {
CacheError::Sqlite(_) => "Error",
CacheError::JoinError(_) => "Error",
CacheError::Resource(err) => {
deno_core::error::get_custom_error_class(err).unwrap_or("Error")
}
CacheError::Other(e) => get_error_class_name(e).unwrap_or("Error"),
CacheError::Io(err) => get_io_error_class(err),
}
}
fn get_broadcast_channel_error(error: &BroadcastChannelError) -> &'static str {
match error {
BroadcastChannelError::Resource(err) => {
deno_core::error::get_custom_error_class(err).unwrap()
}
BroadcastChannelError::MPSCSendError(_) => "Error",
BroadcastChannelError::BroadcastSendError(_) => "Error",
BroadcastChannelError::Other(err) => {
get_error_class_name(err).unwrap_or("Error")
}
}
}
pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> {
deno_core::error::get_custom_error_class(e)
.or_else(|| deno_webgpu::error::get_error_class_name(e))
.or_else(|| deno_web::get_error_class_name(e))
.or_else(|| deno_webstorage::get_not_supported_error_class_name(e))
.or_else(|| deno_websocket::get_network_error_class_name(e))
.or_else(|| e.downcast_ref::<IRError>().map(|_| "TypeError"))
.or_else(|| e.downcast_ref::<ReprError>().map(get_ffi_repr_error_class))
.or_else(|| {
e.downcast_ref::<DlfcnError>()
.map(get_ffi_dlfcn_error_class)
})
.or_else(|| {
e.downcast_ref::<StaticError>()
.map(get_ffi_static_error_class)
})
.or_else(|| {
e.downcast_ref::<CallbackError>()
.map(get_ffi_callback_error_class)
})
.or_else(|| e.downcast_ref::<CallError>().map(get_ffi_call_error_class))
.or_else(|| e.downcast_ref::<TlsError>().map(get_tls_error_class))
.or_else(|| e.downcast_ref::<CronError>().map(get_cron_error_class))
.or_else(|| e.downcast_ref::<CanvasError>().map(get_canvas_error))
.or_else(|| e.downcast_ref::<CacheError>().map(get_cache_error))
.or_else(|| {
e.downcast_ref::<BroadcastChannelError>()
.map(get_broadcast_channel_error)
})
.or_else(|| {
e.downcast_ref::<WebStorageError>()
.map(get_webstorage_class_name)
})
.or_else(|| {
e.downcast_ref::<deno_url::UrlPatternError>()
.map(|_| "TypeError")
})
.or_else(|| {
e.downcast_ref::<dlopen2::Error>()
.map(get_dlopen_error_class)

View file

@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
//! This mod provides DenoError to unify errors across Deno.
use color_print::cstr;
use deno_core::error::format_frame;
use deno_core::error::JsError;
use deno_terminal::colors::cyan;
@ -282,6 +283,113 @@ fn format_js_error_inner(
s
}
/// Inspects a terminal `JsError`'s message and returns fix suggestions to
/// print below it — e.g. pointing at the `--unstable-*` flag that enables a
/// missing API, or explaining CommonJS support. Returns an empty vector
/// when no known message pattern matches.
fn get_suggestions_for_terminal_errors(e: &JsError) -> Vec<FixSuggestion> {
  if let Some(msg) = &e.message {
    if msg.contains("module is not defined")
      || msg.contains("exports is not defined")
      || msg.contains("require is not defined")
    {
      return vec![
        FixSuggestion::info_multiline(&[
          cstr!("Deno supports CommonJS modules in <u>.cjs</> files, or when there's a <u>package.json</>"),
          cstr!("with <i>\"type\": \"commonjs\"</> option and <i>--unstable-detect-cjs</> flag is used.")
        ]),
        FixSuggestion::hint_multiline(&[
          "Rewrite this module to ESM,",
          cstr!("or change the file extension to <u>.cjs</u>,"),
          cstr!("or add <u>package.json</> next to the file with <i>\"type\": \"commonjs\"</> option"),
          cstr!("and pass <i>--unstable-detect-cjs</> flag."),
        ]),
        FixSuggestion::hint("See https://docs.deno.com/go/commonjs for details"),
      ];
    } else if msg.contains("openKv is not a function") {
      return vec![
        FixSuggestion::info("Deno.openKv() is an unstable API."),
        FixSuggestion::hint(
          "Run again with `--unstable-kv` flag to enable this API.",
        ),
      ];
    } else if msg.contains("cron is not a function") {
      return vec![
        FixSuggestion::info("Deno.cron() is an unstable API."),
        FixSuggestion::hint(
          "Run again with `--unstable-cron` flag to enable this API.",
        ),
      ];
    } else if msg.contains("WebSocketStream is not defined") {
      return vec![
        FixSuggestion::info("new WebSocketStream() is an unstable API."),
        FixSuggestion::hint(
          "Run again with `--unstable-net` flag to enable this API.",
        ),
      ];
    } else if msg.contains("Temporal is not defined") {
      return vec![
        FixSuggestion::info("Temporal is an unstable API."),
        FixSuggestion::hint(
          "Run again with `--unstable-temporal` flag to enable this API.",
        ),
      ];
    } else if msg.contains("BroadcastChannel is not defined") {
      return vec![
        FixSuggestion::info("BroadcastChannel is an unstable API."),
        FixSuggestion::hint(
          "Run again with `--unstable-broadcast-channel` flag to enable this API.",
        ),
      ];
    } else if msg.contains("window is not defined") {
      return vec![
        FixSuggestion::info("window global is not available in Deno 2."),
        FixSuggestion::hint("Replace `window` with `globalThis`."),
      ];
    } else if msg.contains("UnsafeWindowSurface is not a constructor") {
      return vec![
        FixSuggestion::info("Deno.UnsafeWindowSurface is an unstable API."),
        FixSuggestion::hint(
          "Run again with `--unstable-webgpu` flag to enable this API.",
        ),
      ];
      // Try to capture errors like:
      // ```
      // Uncaught Error: Cannot find module '../build/Release/canvas.node'
      // Require stack:
      // - /.../deno/npm/registry.npmjs.org/canvas/2.11.2/lib/bindings.js
      // - /.../.cache/deno/npm/registry.npmjs.org/canvas/2.11.2/lib/canvas.js
      // ```
    } else if msg.contains("Cannot find module")
      && msg.contains("Require stack")
      && msg.contains(".node'")
    {
      return vec![
        FixSuggestion::info_multiline(
          &[
            "Trying to execute an npm package using Node-API addons,",
            "these packages require local `node_modules` directory to be present."
          ]
        ),
        // Fix: the first hint line was missing the closing backtick after
        // `"auto"`, producing unbalanced backticks in the printed hint.
        FixSuggestion::hint_multiline(
          &[
            "Add `\"nodeModulesDir\": \"auto\"` option to `deno.json`, and then run",
            "`deno install --allow-scripts=npm:<package> --entrypoint <script>` to setup `node_modules` directory."
          ]
        )
      ];
    } else if msg.contains("document is not defined") {
      return vec![
        FixSuggestion::info(cstr!(
          "<u>document</> global is not available in Deno."
        )),
        FixSuggestion::hint_multiline(&[
          cstr!("Use a library like <u>happy-dom</>, <u>deno_dom</>, <u>linkedom</> or <u>JSDom</>"),
          cstr!("and setup the <u>document</> global according to the library documentation."),
        ]),
      ];
    }
  }
  vec![]
}
/// Format a [`JsError`] for terminal output.
pub fn format_js_error(js_error: &JsError) -> String {
let circular =
@ -289,21 +397,7 @@ pub fn format_js_error(js_error: &JsError) -> String {
reference,
index: 1,
});
format_js_error_inner(js_error, circular, true, vec![])
}
/// Format a [`JsError`] for terminal output, printing additional suggestions.
pub fn format_js_error_with_suggestions(
js_error: &JsError,
suggestions: Vec<FixSuggestion>,
) -> String {
let circular =
find_recursive_cause(js_error).map(|reference| IndexedErrorReference {
reference,
index: 1,
});
let suggestions = get_suggestions_for_terminal_errors(js_error);
format_js_error_inner(js_error, circular, true, suggestions)
}

Some files were not shown because too many files have changed in this diff Show more