diff --git a/.dprint.json b/.dprint.json
index 082eb30058..3b2376f786 100644
--- a/.dprint.json
+++ b/.dprint.json
@@ -65,11 +65,15 @@
     "tests/wpt/runner/expectation.json",
     "tests/wpt/runner/manifest.json",
     "tests/wpt/suite",
-    "third_party"
+    "third_party",
+    "tests/specs/run/shebang_with_json_imports_tsc",
+    "tests/specs/run/shebang_with_json_imports_swc",
+    "tests/specs/run/ext_flag_takes_precedence_over_extension",
+    "tests/specs/run/error_syntax_empty_trailing_line/error_syntax_empty_trailing_line.mjs"
   ],
   "plugins": [
-    "https://plugins.dprint.dev/typescript-0.93.0.wasm",
-    "https://plugins.dprint.dev/json-0.19.3.wasm",
+    "https://plugins.dprint.dev/typescript-0.93.2.wasm",
+    "https://plugins.dprint.dev/json-0.19.4.wasm",
     "https://plugins.dprint.dev/markdown-0.17.8.wasm",
     "https://plugins.dprint.dev/toml-0.6.3.wasm",
     "https://plugins.dprint.dev/exec-0.5.0.json@8d9972eee71fa1590e04873540421f3eda7674d0f1aae3d7c788615e7b7413d0",
diff --git a/.github/workflows/cargo_publish.yml b/.github/workflows/cargo_publish.yml
index 8d164ea1bf..3af97f4662 100644
--- a/.github/workflows/cargo_publish.yml
+++ b/.github/workflows/cargo_publish.yml
@@ -10,7 +10,7 @@ concurrency:
 jobs:
   build:
     name: cargo publish
-    runs-on: ubuntu-20.04-xl
+    runs-on: ubuntu-24.04-xl
     timeout-minutes: 90

     env:
@@ -33,7 +33,7 @@ jobs:
       - uses: dsherret/rust-toolchain-file@v1

       - name: Install deno
-        uses: denoland/setup-deno@v1
+        uses: denoland/setup-deno@v2
         with:
           deno-version: v1.x

diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts
index 98272abe4e..5ed02d3cde 100755
--- a/.github/workflows/ci.generate.ts
+++ b/.github/workflows/ci.generate.ts
@@ -5,15 +5,16 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 21;
+const cacheVersion = 25;

-const ubuntuX86Runner = "ubuntu-22.04";
-const ubuntuX86XlRunner = "ubuntu-22.04-xl";
+const ubuntuX86Runner = "ubuntu-24.04";
+const ubuntuX86XlRunner = "ubuntu-24.04-xl";
 const ubuntuARMRunner = "ubicloud-standard-16-arm";
 const windowsX86Runner = "windows-2022";
 const windowsX86XlRunner = "windows-2022-xl";
 const macosX86Runner = "macos-13";
 const macosArmRunner = "macos-14";
+const selfHostedMacosArmRunner = "self-hosted";

 const Runners = {
   linuxX86: {
@@ -40,7 +41,8 @@ const Runners = {
   macosArm: {
     os: "macos",
     arch: "aarch64",
-    runner: macosArmRunner,
+    runner:
+      `\${{ github.repository == 'denoland/deno' && startsWith(github.ref, 'refs/tags/') && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`,
   },
   windowsX86: {
     os: "windows",
@@ -59,7 +61,7 @@ const prCacheKeyPrefix =
   `${cacheVersion}-cargo-target-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ matrix.profile }}-\${{ matrix.job }}-`;

 // Note that you may need to add more version to the `apt-get remove` line below if you change this
-const llvmVersion = 18;
+const llvmVersion = 19;
 const installPkgsCommand =
   `sudo apt-get install --no-install-recommends clang-${llvmVersion} lld-${llvmVersion} clang-tools-${llvmVersion} clang-format-${llvmVersion} clang-tidy-${llvmVersion}`;
 const sysRootStep = {
@@ -71,7 +73,7 @@ export DEBIAN_FRONTEND=noninteractive
 sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null
 # Remove older clang before we install
 sudo apt-get -qq remove \
-  'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' > /dev/null 2> /dev/null
+  'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null

 # Install clang-XXX, lld-XXX, and debootstrap.
 echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-${llvmVersion} main" |
@@ -86,7 +88,7 @@ ${installPkgsCommand} || echo 'Failed. Trying again.' && sudo apt-get clean && s
 (yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true

 echo "Decompressing sysroot..."
-wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20240528/sysroot-\`uname -m\`.tar.xz -O /tmp/sysroot.tar.xz
+wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20241030/sysroot-\`uname -m\`.tar.xz -O /tmp/sysroot.tar.xz
 cd /
 xzcat /tmp/sysroot.tar.xz | sudo tar -x
 sudo mount --rbind /dev /sysroot/dev
@@ -193,7 +195,7 @@ const installNodeStep = {
 };
 const installDenoStep = {
   name: "Install Deno",
-  uses: "denoland/setup-deno@v1",
+  uses: "denoland/setup-deno@v2",
   with: { "deno-version": "v1.x" },
 };

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 161b8719c8..39a3afe769 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -62,18 +62,18 @@ jobs:
           profile: debug
         - os: macos
           arch: x86_64
-          runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || ''macos-13'' }}'
+          runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || ''macos-13'' }}'
           job: test
           profile: release
           skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
         - os: macos
           arch: aarch64
-          runner: macos-14
+          runner: '${{ github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
           job: test
           profile: debug
         - os: macos
           arch: aarch64
-          runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || ''macos-14'' }}'
+          runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
           job: test
           profile: release
           skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
@@ -84,33 +84,33 @@ jobs:
           profile: debug
         - os: windows
           arch: x86_64
-          runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || github.repository == ''denoland/deno'' && ''windows-2022-xl'' || ''windows-2022'' }}'
+          runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && ''windows-2022-xl'' || ''windows-2022'' }}'
           job: test
           profile: release
           skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
         - os: linux
           arch: x86_64
-          runner: '${{ github.repository == ''denoland/deno'' && ''ubuntu-22.04-xl'' || ''ubuntu-22.04'' }}'
+          runner: '${{ github.repository == ''denoland/deno'' && ''ubuntu-24.04-xl'' || ''ubuntu-24.04'' }}'
           job: test
           profile: release
           use_sysroot: true
           wpt: '${{ !startsWith(github.ref, ''refs/tags/'') }}'
         - os: linux
           arch: x86_64
-          runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench''))) && ''ubuntu-22.04'' || github.repository == ''denoland/deno'' && ''ubuntu-22.04-xl'' || ''ubuntu-22.04'' }}'
+          runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench''))) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && ''ubuntu-24.04-xl'' || ''ubuntu-24.04'' }}'
           job: bench
           profile: release
           use_sysroot: true
           skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench'')) }}'
         - os: linux
           arch: x86_64
-          runner: ubuntu-22.04
+          runner: ubuntu-24.04
           job: test
           profile: debug
           use_sysroot: true
         - os: linux
           arch: x86_64
-          runner: ubuntu-22.04
+          runner: ubuntu-24.04
           job: lint
           profile: debug
         - os: linux
@@ -178,7 +178,7 @@ jobs:
         if: '!(matrix.skip)'
       - if: '!(matrix.skip) && (matrix.job == ''lint'' || matrix.job == ''test'' || matrix.job == ''bench'')'
         name: Install Deno
-        uses: denoland/setup-deno@v1
+        uses: denoland/setup-deno@v2
        with:
          deno-version: v1.x
      - name: Install Python
@@ -252,22 +252,22 @@ jobs:
          # to complete.
          sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null
          # Remove older clang before we install
-          sudo apt-get -qq remove 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' > /dev/null 2> /dev/null
+          sudo apt-get -qq remove 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null
          # Install clang-XXX, lld-XXX, and debootstrap.
-          echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-18 main" |
-            sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-18.list
+          echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-19 main" |
+            sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-19.list
          curl https://apt.llvm.org/llvm-snapshot.gpg.key |
            gpg --dearmor |
            sudo dd of=/etc/apt/trusted.gpg.d/llvm-snapshot.gpg
          sudo apt-get update
          # this was unreliable sometimes, so try again if it fails
-          sudo apt-get install --no-install-recommends clang-18 lld-18 clang-tools-18 clang-format-18 clang-tidy-18 || echo 'Failed. Trying again.' && sudo apt-get clean && sudo apt-get update && sudo apt-get install --no-install-recommends clang-18 lld-18 clang-tools-18 clang-format-18 clang-tidy-18
+          sudo apt-get install --no-install-recommends clang-19 lld-19 clang-tools-19 clang-format-19 clang-tidy-19 || echo 'Failed. Trying again.' && sudo apt-get clean && sudo apt-get update && sudo apt-get install --no-install-recommends clang-19 lld-19 clang-tools-19 clang-format-19 clang-tidy-19
          # Fix alternatives
          (yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true
          echo "Decompressing sysroot..."
-          wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20240528/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz
+          wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20241030/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz
          cd /
          xzcat /tmp/sysroot.tar.xz | sudo tar -x
          sudo mount --rbind /dev /sysroot/dev
@@ -299,8 +299,8 @@ jobs:
          CARGO_PROFILE_RELEASE_LTO=false
          RUSTFLAGS<<__1
            -C linker-plugin-lto=true
-            -C linker=clang-18
-            -C link-arg=-fuse-ld=lld-18
+            -C linker=clang-19
+            -C link-arg=-fuse-ld=lld-19
            -C link-arg=-ldl
            -C link-arg=-Wl,--allow-shlib-undefined
            -C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache
@@ -310,8 +310,8 @@ jobs:
          __1
          RUSTDOCFLAGS<<__1
            -C linker-plugin-lto=true
-            -C linker=clang-18
-            -C link-arg=-fuse-ld=lld-18
+            -C linker=clang-19
+            -C link-arg=-fuse-ld=lld-19
            -C link-arg=-ldl
            -C link-arg=-Wl,--allow-shlib-undefined
            -C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache
@@ -319,7 +319,7 @@ jobs:
            --cfg tokio_unstable
            $RUSTFLAGS
          __1
-          CC=/usr/bin/clang-18
+          CC=/usr/bin/clang-19
          CFLAGS=-flto=thin $CFLAGS
          " > $GITHUB_ENV
      - name: Remove macOS cURL --ipv4 flag
@@ -361,8 +361,8 @@ jobs:
          path: |-
            ~/.cargo/registry/index
            ~/.cargo/registry/cache
-          key: '21-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-          restore-keys: '21-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
+          key: '25-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+          restore-keys: '25-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
        if: '!(matrix.skip)'
      - name: Restore cache build output (PR)
        uses: actions/cache/restore@v4
        with:
          path: |-
            ./target
            !./target/*/*.zip
            !./target/*/*.tar.gz
          key: never_saved
-          restore-keys: '21-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+          restore-keys: '25-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
      - name: Apply and update mtime cache
        if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
        uses: ./.github/mtime_cache
@@ -685,10 +685,10 @@ jobs:
            !./target/*/*.zip
            !./target/*/*.sha256sum
            !./target/*/*.tar.gz
-          key: '21-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+          key: '25-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
  publish-canary:
    name: publish canary
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
    needs:
      - build
    if: github.repository == 'denoland/deno' && github.ref == 'refs/heads/main'
diff --git a/.github/workflows/post_publish.yml b/.github/workflows/post_publish.yml
index c0db0906df..dd80b3637a 100644
--- a/.github/workflows/post_publish.yml
+++ b/.github/workflows/post_publish.yml
@@ -7,7 +7,7 @@ on:
 jobs:
   update-dl-version:
     name: update dl.deno.land version
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     if: github.repository == 'denoland/deno'
     steps:
       - name: Authenticate with Google Cloud
diff --git a/.github/workflows/promote_to_release.yml b/.github/workflows/promote_to_release.yml
index 3dc15dc730..79fefa6d6c 100644
--- a/.github/workflows/promote_to_release.yml
+++ b/.github/workflows/promote_to_release.yml
@@ -40,7 +40,7 @@ jobs:
           project_id: denoland

       - name: Install deno
-        uses: denoland/setup-deno@v1
+        uses: denoland/setup-deno@v2
         with:
           deno-version: v1.x
diff --git a/.github/workflows/start_release.yml b/.github/workflows/start_release.yml
index 392551afbe..40a44bb61a 100644
--- a/.github/workflows/start_release.yml
+++ b/.github/workflows/start_release.yml
@@ -16,7 +16,7 @@ on:
 jobs:
   build:
     name: start release
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     timeout-minutes: 30

     env:
@@ -34,7 +34,7 @@ jobs:
       uses: actions/checkout@v4

       - name: Install deno
-        uses: denoland/setup-deno@v1
+        uses: denoland/setup-deno@v2
         with:
           deno-version: v1.x
diff --git a/.github/workflows/version_bump.yml b/.github/workflows/version_bump.yml
index 733abbb024..9038fe0d22 100644
--- a/.github/workflows/version_bump.yml
+++ b/.github/workflows/version_bump.yml
@@ -16,7 +16,7 @@ on:
 jobs:
   build:
     name: version bump
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     timeout-minutes: 90

     env:
@@ -39,7 +39,7 @@ jobs:
       - uses: dsherret/rust-toolchain-file@v1

       - name: Install deno
-        uses: denoland/setup-deno@v1
+        uses: denoland/setup-deno@v2
         with:
           deno-version: v1.x
diff --git a/.github/workflows/wpt_epoch.yml b/.github/workflows/wpt_epoch.yml
index c3c6277b93..1d86ed5557 100644
--- a/.github/workflows/wpt_epoch.yml
+++ b/.github/workflows/wpt_epoch.yml
@@ -20,7 +20,7 @@ jobs:
       fail-fast: false
       matrix:
         deno-version: [v1.x, canary]
-        os: [ubuntu-22.04-xl]
+        os: [ubuntu-24.04-xl]

     steps:
       - name: Clone repository
@@ -30,7 +30,7 @@ jobs:
           persist-credentials: false

       - name: Setup Deno
-        uses: denoland/setup-deno@v1
+        uses: denoland/setup-deno@v2
         with:
           deno-version: ${{ matrix.deno-version }}
diff --git a/Cargo.lock b/Cargo.lock
index bdcb483e38..b86b8acd94 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -765,6 +765,8 @@ dependencies = [
  "fastwebsockets",
  "file_test_runner",
  "flaky_test",
+ "hickory-client",
+ "hickory-server",
  "http 1.1.0",
  "http-body-util",
  "hyper 1.4.1",
@@ -778,8 +780,6 @@ dependencies = [
  "serde",
  "test_server",
  "tokio",
- "trust-dns-client",
- "trust-dns-server",
  "url",
  "uuid",
  "zeromq",
@@ -1154,7 +1154,7 @@ dependencies = [

 [[package]]
 name = "deno"
-version = "2.0.2"
+version = "2.0.6"
 dependencies = [
  "anstream",
  "async-trait",
@@ -1196,7 +1196,6 @@ dependencies = [
  "dprint-plugin-markdown",
  "dprint-plugin-typescript",
  "env_logger",
- "eszip",
  "fancy-regex",
  "faster-hex",
  "flate2",
@@ -1214,7 +1213,6 @@ dependencies = [
  "lazy-regex",
  "libc",
  "libsui",
- "libuv-sys-lite",
  "libz-sys",
  "log",
  "lsp-types",
@@ -1222,7 +1220,6 @@ dependencies = [
  "markup_fmt",
  "memmem",
  "monch",
- "napi_sym",
  "nix",
  "node_resolver",
  "notify",
@@ -1263,9 +1260,7 @@ dependencies = [
  "walkdir",
  "which 4.4.2",
  "winapi",
- "windows-sys 0.52.0",
  "winres",
- "yoke",
  "zeromq",
  "zip",
  "zstd",
@@ -1284,9 +1279,9 @@ dependencies = [

 [[package]]
 name = "deno_ast"
-version = "0.42.2"
+version = "0.43.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b2b9d03b1bbeeecdac54367f075d572131736d06c5be3bc49037855bc5ab1bbb"
+checksum = "48d00b724e06d2081a141ec1155756a0b465d413d8e2a7515221f61d482eb2ee"
 dependencies = [
  "base64 0.21.7",
  "deno_media_type",
@@ -1328,7 +1323,7 @@ dependencies = [

 [[package]]
 name = "deno_bench_util"
-version = "0.167.0"
+version = "0.171.0"
 dependencies = [
  "bencher",
  "deno_core",
@@ -1337,7 +1332,7 @@ dependencies = [

 [[package]]
 name = "deno_broadcast_channel"
-version = "0.167.0"
+version = "0.171.0"
 dependencies = [
  "async-trait",
  "deno_core",
@@ -1348,7 +1343,7 @@ dependencies = [

 [[package]]
 name = "deno_cache"
-version = "0.105.0"
+version = "0.109.0"
 dependencies = [
  "async-trait",
  "deno_core",
@@ -1361,9 +1356,9 @@ dependencies = [

 [[package]]
 name = "deno_cache_dir"
-version = "0.13.0"
+version = "0.13.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "186a102b13b4512841f5f40784cd25822042d22954afe3b5b070d406d15eb4f2"
+checksum = "08c1f52170cd7715f8006da54cde1444863a0d6fbd9c11d037a737db2dec8e22"
 dependencies = [
  "base32",
  "deno_media_type",
@@ -1381,7 +1376,7 @@ dependencies = [

 [[package]]
 name = "deno_canvas"
-version = "0.42.0"
+version = "0.46.0"
 dependencies = [
  "deno_core",
  "deno_webgpu",
@@ -1392,9 +1387,9 @@ dependencies = [

 [[package]]
 name = "deno_config"
-version = "0.37.1"
+version = "0.38.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3cb7a1723676fba5964f8d7441d8b53748f9e74d6d4241be7de9730da021859a"
+checksum = "966825073480a6ac7e01977a3879d13edc8d6ea2d65ea164b37156a5fb206e9a"
 dependencies = [
  "anyhow",
  "deno_package_json",
@@ -1416,16 +1411,16 @@

 [[package]]
 name = "deno_console"
-version = "0.173.0"
+version = "0.177.0"
 dependencies = [
  "deno_core",
 ]

 [[package]]
 name = "deno_core"
-version = "0.314.2"
+version = "0.318.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "83138917579676069b423c3eb9be3c1e579f60dc022d85f6ded4c792456255ff"
+checksum = "10cae2393219ff9278123f7b24799cdfab37c7d6561b69ca06ced115cac92111"
 dependencies = [
  "anyhow",
  "bincode",
@@ -1461,7 +1456,7 @@ checksum = "a13951ea98c0a4c372f162d669193b4c9d991512de9f2381dd161027f34b26b1"

 [[package]]
 name = "deno_cron"
-version = "0.53.0"
+version = "0.57.0"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -1474,7 +1469,7 @@ dependencies = [

 [[package]]
 name = "deno_crypto"
-version = "0.187.0"
+version = "0.191.0"
 dependencies = [
  "aes",
  "aes-gcm",
@@ -1511,9 +1506,9 @@ dependencies = [

 [[package]]
 name = "deno_doc"
-version = "0.154.0"
+version = "0.156.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17e204e45b0d79750880114e37b34abe19ad0710d8435a8da8f23a528fe98de4"
+checksum = "2585b98d6ad76dae30bf2d7b6d71b8363cae041158b8780d14a2f4fe17590a61"
 dependencies = [
  "anyhow",
  "cfg-if",
@@ -1536,7 +1531,7 @@ dependencies = [

 [[package]]
 name = "deno_fetch"
-version = "0.197.0"
+version = "0.201.0"
 dependencies = [
  "base64 0.21.7",
  "bytes",
@@ -1569,7 +1564,7 @@ dependencies = [

 [[package]]
 name = "deno_ffi"
-version = "0.160.0"
+version = "0.164.0"
 dependencies = [
  "deno_core",
  "deno_permissions",
@@ -1578,6 +1573,7 @@ dependencies = [
  "libffi",
  "libffi-sys",
  "log",
+ "num-bigint",
  "serde",
  "serde-value",
  "serde_json",
@@ -1588,7 +1584,7 @@ dependencies = [

 [[package]]
 name = "deno_fs"
-version = "0.83.0"
+version = "0.87.0"
 dependencies = [
  "async-trait",
  "base32",
@@ -1610,9 +1606,9 @@ dependencies = [

 [[package]]
 name = "deno_graph"
-version = "0.83.3"
+version = "0.84.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77163c46755676d8f793fc19e365537ba660a8db173cd1e02d21eb010c0b3cef"
+checksum = "cd4f4a14aa069087be41c2998077b0453f0191747898f96e6343f700abfc2c18"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -1639,7 +1635,7 @@ dependencies = [

 [[package]]
 name = "deno_http"
-version = "0.171.0"
+version = "0.175.0"
 dependencies = [
  "async-compression",
  "async-trait",
@@ -1678,7 +1674,7 @@ dependencies = [

 [[package]]
 name = "deno_io"
-version = "0.83.0"
+version = "0.87.0"
 dependencies = [
  "async-trait",
  "deno_core",
@@ -1699,7 +1695,7 @@ dependencies = [

 [[package]]
 name = "deno_kv"
-version = "0.81.0"
+version = "0.85.0"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -1730,9 +1726,9 @@ dependencies = [

 [[package]]
 name = "deno_lint"
-version = "0.67.0"
+version = "0.68.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "871b60e32bfb6c110cbb9b0688dbf048f81e5d347fe4ce5a42239263de9dd938"
+checksum = "bb994e6d1b18223df0a756c7948143b35682941d615edffef60d5b38822f38ac"
 dependencies = [
  "anyhow",
  "deno_ast",
@@ -1760,9 +1756,9 @@ dependencies = [

 [[package]]
 name = "deno_media_type"
-version = "0.1.4"
+version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8978229b82552bf8457a0125aa20863f023619cfc21ebb007b1e571d68fd85b"
+checksum = "7fcf552fbdedbe81c89705349d7d2485c7051382b000dfddbdbf7fc25931cf83"
 dependencies = [
  "data-url",
  "serde",
@@ -1771,12 +1767,17 @@ dependencies = [

 [[package]]
 name = "deno_napi"
-version = "0.104.0"
+version = "0.108.0"
 dependencies = [
  "deno_core",
  "deno_permissions",
+ "libc",
  "libloading 0.7.4",
+ "libuv-sys-lite",
+ "log",
+ "napi_sym",
  "thiserror",
+ "windows-sys 0.52.0",
 ]

 [[package]]
@@ -1794,24 +1795,24 @@ dependencies = [

 [[package]]
 name = "deno_net"
-version = "0.165.0"
+version = "0.169.0"
 dependencies = [
  "deno_core",
  "deno_permissions",
  "deno_tls",
+ "hickory-proto",
+ "hickory-resolver",
  "pin-project",
  "rustls-tokio-stream",
  "serde",
  "socket2",
  "thiserror",
  "tokio",
- "trust-dns-proto",
- "trust-dns-resolver",
 ]

 [[package]]
 name = "deno_node"
-version = "0.110.0"
+version = "0.114.0"
 dependencies = [
  "aead-gcm-stream",
  "aes",
@@ -1920,9 +1921,9 @@ dependencies = [

 [[package]]
 name = "deno_ops"
-version = "0.190.1"
+version = "0.194.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "26f46d4e4f52f26c882b74a9b58810ea75252b807cf0966166ec333077cdfd85"
+checksum = "f760b492bd638c1dc3e992d11672c259fbe9a233162099a8347591c9e22d0391"
 dependencies = [
  "proc-macro-rules",
  "proc-macro2",
@@ -1960,7 +1961,7 @@ dependencies = [

 [[package]]
 name = "deno_permissions"
-version = "0.33.0"
+version = "0.37.0"
 dependencies = [
  "deno_core",
  "deno_path_util",
@@ -1971,13 +1972,14 @@ dependencies = [
  "once_cell",
  "percent-encoding",
  "serde",
+ "thiserror",
  "which 4.4.2",
  "winapi",
 ]

 [[package]]
 name = "deno_resolver"
-version = "0.5.0"
+version = "0.9.0"
 dependencies = [
  "anyhow",
  "base32",
@@ -1993,7 +1995,7 @@ dependencies = [

 [[package]]
 name = "deno_runtime"
-version = "0.182.0"
+version = "0.186.0"
 dependencies = [
  "color-print",
  "deno_ast",
@@ -2043,11 +2045,13 @@ dependencies = [
  "percent-encoding",
  "regex",
  "rustyline",
+ "same-file",
  "serde",
  "signal-hook",
  "signal-hook-registry",
  "tempfile",
  "test_server",
+ "thiserror",
  "tokio",
  "tokio-metrics",
  "twox-hash",
@@ -2059,9 +2063,9 @@ dependencies = [

 [[package]]
 name = "deno_semver"
-version = "0.5.14"
+version = "0.5.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "670fec7ef309384e23c2a90ac5d2d9d91a776d225306c75f5cdd28cf6cc8a59f"
+checksum = "c957c6a57c38b7dde2315df0da0ec228911e56a74f185b108a488d0401841a67"
 dependencies = [
  "monch",
  "once_cell",
@@ -2109,7 +2113,7 @@ dependencies = [

 [[package]]
 name = "deno_tls"
-version = "0.160.0"
+version = "0.164.0"
 dependencies = [
  "deno_core",
  "deno_native_certs",
@@ -2158,7 +2162,7 @@ dependencies = [

 [[package]]
 name = "deno_url"
-version = "0.173.0"
+version = "0.177.0"
 dependencies = [
  "deno_bench_util",
  "deno_console",
@@ -2170,7 +2174,7 @@ dependencies = [

 [[package]]
 name = "deno_web"
-version = "0.204.0"
+version = "0.208.0"
 dependencies = [
  "async-trait",
  "base64-simd 0.8.0",
@@ -2192,7 +2196,7 @@ dependencies = [

 [[package]]
 name = "deno_webgpu"
-version = "0.140.0"
+version = "0.144.0"
 dependencies = [
  "deno_core",
  "raw-window-handle",
@@ -2205,7 +2209,7 @@ dependencies = [

 [[package]]
 name = "deno_webidl"
-version = "0.173.0"
+version = "0.177.0"
 dependencies = [
  "deno_bench_util",
  "deno_core",
@@ -2213,7 +2217,7 @@ dependencies = [

 [[package]]
 name = "deno_websocket"
-version = "0.178.0"
+version = "0.182.0"
 dependencies = [
  "bytes",
  "deno_core",
@@ -2235,7 +2239,7 @@ dependencies = [

 [[package]]
 name = "deno_webstorage"
-version = "0.168.0"
+version = "0.172.0"
 dependencies = [
  "deno_core",
  "deno_web",
@@ -2563,9 +2567,9 @@ dependencies = [

 [[package]]
 name = "dprint-plugin-json"
-version = "0.19.3"
+version = "0.19.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a19f4a9f2f548b2098b8ec597d7bb40af133b6e9a3187c1d3c4caa101b8c93c3"
+checksum = "57f91e594559b450b7c5d6a0ba9f3f9fe951c1ea371168f7c95973da3fdbd85a"
 dependencies = [
  "anyhow",
  "dprint-core",
@@ -2577,9 +2581,9 @@ dependencies = [

 [[package]]
 name = "dprint-plugin-jupyter"
-version = "0.1.3"
+version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c67b0e54b552a4775c221b44ed33be918c400bd8041d1f044f947fbb01025cc0"
+checksum = "d0d20684e37b3824e2bc917cfcb14e2cdf88398eef507335d839cbd78172bfee"
 dependencies = [
  "anyhow",
  "dprint-core",
@@ -2605,9 +2609,9 @@ dependencies = [

 [[package]]
 name = "dprint-plugin-typescript"
-version = "0.93.0"
+version = "0.93.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e9308d98b923b7c0335c2ee1560199e3f2321b1be82803107b4ba4ed5dac46cc"
+checksum = "3ff29fd136541e59d51946f0d2d353fefc886776f61a799ebfb5838b06cef13b"
 dependencies = [
  "anyhow",
  "deno_ast",
@@ -2635,15 +2639,6 @@ dependencies = [
  "text_lines",
 ]

-[[package]]
-name = "drain"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d105028bd2b5dfcb33318fd79a445001ead36004dd8dffef1bdd7e493d8bc1e"
-dependencies = [
- "tokio",
-]
-
 [[package]]
 name = "dsa"
 version = "0.6.3"
@@ -2891,29 +2886,6 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "31ae425815400e5ed474178a7a22e275a9687086a12ca63ec793ff292d8fdae8"

-[[package]]
-name = "eszip"
-version = "0.79.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8eb55c89bdde75a3826a79d49c9d847623ae7fbdb2695b542982982da990d33e"
-dependencies = [
- "anyhow",
- "async-trait",
- "base64 0.21.7",
- "deno_ast",
- "deno_graph",
- "deno_npm",
- "deno_semver",
- "futures",
- "hashlink 0.8.4",
- "indexmap",
- "serde",
- "serde_json",
- "sha2",
- "thiserror",
- "url",
-]
-
 [[package]]
 name = "fallible-iterator"
 version = "0.3.0"
@@ -3525,15 +3497,6 @@ dependencies = [
  "allocator-api2",
 ]

-[[package]]
-name = "hashlink"
-version = "0.8.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
-dependencies = [
- "hashbrown",
-]
-
 [[package]]
 name = "hashlink"
 version = "0.9.1"
@@ -3573,6 +3536,92 @@ version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dfa686283ad6dd069f105e5ab091b04c62850d3e4cf5d67debad1933f55023df"

+[[package]]
+name = "hickory-client"
+version = "0.24.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bab9683b08d8f8957a857b0236455d80e1886eaa8c6178af556aa7871fb61b55"
+dependencies = [
+ "cfg-if",
+ "data-encoding",
+ "futures-channel",
+ "futures-util",
+ "hickory-proto",
+ "once_cell",
+ "radix_trie",
+ "rand",
+ "thiserror",
+ "tokio",
+ "tracing",
+]
+
+[[package]]
+name = "hickory-proto"
+version = "0.24.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07698b8420e2f0d6447a436ba999ec85d8fbf2a398bbd737b82cac4a2e96e512"
+dependencies = [
+ "async-trait",
+ "cfg-if",
+ "data-encoding",
+ "enum-as-inner",
+ "futures-channel",
+ "futures-io",
+ "futures-util",
+ "idna 0.4.0",
+ "ipnet",
+ "once_cell",
+ "rand",
+ "serde",
+ "thiserror",
+ "tinyvec",
+ "tokio",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "hickory-resolver"
+version = "0.24.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "28757f23aa75c98f254cf0405e6d8c25b831b32921b050a66692427679b1f243"
+dependencies = [
+ "cfg-if",
+ "futures-util",
+ "hickory-proto",
+ "ipconfig",
+ "lru-cache",
+ "once_cell",
+ "parking_lot",
+ "rand",
+ "resolv-conf",
+ "serde",
+ "smallvec",
+ "thiserror",
+ "tokio",
+ "tracing",
+]
+
+[[package]]
+name = "hickory-server"
+version = "0.24.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9be0e43c556b9b3fdb6c7c71a9a32153a2275d02419e3de809e520bfcfe40c37"
+dependencies = [
+ "async-trait",
+ "bytes",
+ "cfg-if",
+ "enum-as-inner",
+ "futures-util",
+ "hickory-proto",
+ "serde",
+ "thiserror",
+ "time",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
 [[package]]
 name = "hkdf"
 version = "0.12.4"
@@ -4034,9 +4083,9 @@ dependencies = [

 [[package]]
 name = "jsonc-parser"
-version = "0.23.0"
+version = "0.26.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7725c320caac8c21d8228c1d055af27a995d371f78cc763073d3e068323641b5"
+checksum = "b558af6b49fd918e970471374e7a798b2c9bbcda624a210ffa3901ee5614bc8e"
 dependencies = [
  "serde_json",
 ]
@@ -4223,9 +4272,9 @@ dependencies = [

 [[package]]
 name = "libsui"
-version = "0.4.0"
+version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "205eca4e7beaad637dcd38fe41292065894ee7f498077cf3c135d5f7252b9f27"
+checksum = "89795977654ad6250d6c0915411b622bac22f9efb4f852af94b2e00964cab832"
 dependencies = [
  "editpe",
  "libc",
@@ -4327,9 +4376,9 @@ dependencies = [

 [[package]]
 name = "malva"
-version = "0.10.1"
+version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "484beda6e5d775ed06a8ec0fce79e51d39f49d834ed2a29da3f437079321804f"
+checksum = "1c67b97ed99f56b86fa3c010843441f1fcdb71884bab96b8551bb3d1e7c6d529"
 dependencies = [
  "aho-corasick",
  "itertools 0.13.0",
@@ -4340,9 +4389,9 @@ dependencies = [

 [[package]]
 name = "markup_fmt"
-version = "0.13.1"
+version = "0.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9dab5ae899659fbe5c8835b2c8ca8d3e357974a3e454138925b404004973361f"
+checksum = "ebae65c91eab3d42231232bf48107f351e5a8d511454927218c53aeb68bbdb6f"
 dependencies = [
  "aho-corasick",
  "css_dataset",
@@ -4512,7 +4561,7 @@ dependencies = [

 [[package]]
 name = "napi_sym"
-version = "0.103.0"
+version = "0.107.0"
 dependencies = [
  "quote",
  "serde",
@@ -4567,7 +4616,7 @@ dependencies = [

 [[package]]
 name = "node_resolver"
-version = "0.12.0"
+version = "0.16.0"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -5811,7 +5860,7 @@ dependencies = [
  "bitflags 2.6.0",
  "fallible-iterator",
  "fallible-streaming-iterator",
- "hashlink 0.9.1",
+ "hashlink",
  "libsqlite3-sys",
  "smallvec",
 ]
@@ -6175,15 +6224,6 @@ dependencies = [
  "syn 2.0.72",
 ]

-[[package]]
-name = "serde_spanned"
-version = "0.6.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0"
-dependencies = [
- "serde",
-]
-
 [[package]]
 name = "serde_urlencoded"
 version = "0.7.1"
@@ -6198,9 +6238,9 @@ dependencies = [

 [[package]]
 name = "serde_v8"
-version = "0.223.1"
+version = "0.227.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9cf3d859dda87ee96423c01244f10af864fa6d6a9fcdc2b77e0595078ea0ea11"
+checksum = "0a8294c2223c53bed343be8b80564ece4dc0d03b643b06fa86c4ccc0e064eda0"
 dependencies = [
  "num-bigint",
  "serde",
@@ -7150,6 +7190,7 @@ dependencies = [
  "console_static_text",
  "deno_unsync",
  "denokv_proto",
+ "faster-hex",
  "fastwebsockets",
  "flate2",
  "futures",
@@ -7397,40 +7438,6 @@ dependencies = [
  "serde",
 ]

-[[package]]
-name = "toml"
-version = "0.7.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257"
-dependencies = [
- "serde",
- "serde_spanned",
- "toml_datetime",
- "toml_edit",
-]
-
-[[package]]
-name = "toml_datetime"
-version = "0.6.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf"
-dependencies = [
- "serde",
-]
-
-[[package]]
-name = "toml_edit"
-version = "0.19.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
-dependencies = [
- "indexmap",
- "serde",
- "serde_spanned",
- "toml_datetime",
- "winnow 0.5.40",
-]
-
 [[package]]
 name = "tower"
 version = "0.4.13"
@@ -7520,95 +7527,6 @@ dependencies = [
  "stable_deref_trait",
 ]

-[[package]]
-name = "trust-dns-client"
-version = "0.23.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "14135e72c7e6d4c9b6902d4437881a8598f0145dbb2e3f86f92dbad845b61e63"
-dependencies = [
- "cfg-if",
- "data-encoding",
- "futures-channel",
- "futures-util",
- "once_cell",
- "radix_trie",
- "rand",
- "thiserror",
- "tokio",
- "tracing",
- "trust-dns-proto",
-]
-
-[[package]]
-name = "trust-dns-proto"
-version = "0.23.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3119112651c157f4488931a01e586aa459736e9d6046d3bd9105ffb69352d374"
-dependencies = [
- "async-trait",
- "cfg-if",
- "data-encoding",
- "enum-as-inner",
- "futures-channel",
- "futures-io",
- "futures-util",
- "idna 0.4.0",
- "ipnet",
- "once_cell",
- "rand",
- "serde",
- "smallvec",
- "thiserror",
- "tinyvec",
- "tokio",
- "tracing",
- "url",
-]
-
-[[package]]
-name = "trust-dns-resolver"
-version = "0.23.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "10a3e6c3aff1718b3c73e395d1f35202ba2ffa847c6a62eea0db8fb4cfe30be6"
-dependencies = [
- "cfg-if",
- "futures-util",
- "ipconfig",
- "lru-cache",
- "once_cell",
- "parking_lot",
- "rand",
- "resolv-conf",
- "serde",
- "smallvec",
- "thiserror",
- "tokio",
- "tracing",
- "trust-dns-proto",
-]
-
-[[package]]
-name = "trust-dns-server"
-version = "0.23.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c540f73c2b2ec2f6c54eabd0900e7aafb747a820224b742f556e8faabb461bc7"
-dependencies = [
- "async-trait",
- "bytes",
- "cfg-if",
- "drain",
- "enum-as-inner",
- "futures-executor",
- "futures-util",
- "serde",
- "thiserror",
- "time",
- "tokio",
- "toml 0.7.8",
- "tracing",
- "trust-dns-proto",
-]
-
 [[package]]
 name = "try-lock"
 version = "0.2.5"
@@ -8358,15 +8276,6 @@ version = "0.52.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"

-[[package]]
-name = "winnow"
-version = "0.5.40"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876"
-dependencies = [
- "memchr",
-]
-
 [[package]]
 name = "winnow"
 version = "0.6.15"
@@ -8402,7 +8311,7 @@ version = "0.1.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b68db261ef59e9e52806f688020631e987592bd83619edccda9c47d42cde4f6c"
 dependencies = [
- "toml 0.5.11",
+ "toml",
 ]

@@ -8479,7 +8388,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2a6a39b6b5ba0d02c910d05d7fbc366a4befb8901ea107dcde9c1c97acb8a366"
 dependencies = [
  "rowan",
- "winnow 0.6.15",
+ "winnow",
 ]

diff --git a/Cargo.toml b/Cargo.toml
index 9f15d03a54..d83ca2e413 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -5,7 +5,6 @@ resolver = "2"
 members = [
   "bench_util",
   "cli",
-  "cli/napi/sym",
   "ext/broadcast_channel",
   "ext/cache",
   "ext/canvas",
@@ -19,6 +18,7 @@ members = [
   "ext/io",
   "ext/kv",
   "ext/napi",
+  "ext/napi/sym",
   "ext/net",
   "ext/node",
   "ext/url",
@@ -45,19 +45,19 @@ license = "MIT"
 repository = "https://github.com/denoland/deno"

 [workspace.dependencies]
-deno_ast = { version = "=0.42.2", features = ["transpiling"] }
-deno_core = { version = "0.314.2" }
+deno_ast = { version = "=0.43.3", features = ["transpiling"] }
+deno_core = { version = "0.318.0" }

-deno_bench_util = { version = "0.167.0", path = "./bench_util" }
+deno_bench_util = { version = "0.171.0", path = "./bench_util" }
 deno_lockfile = "=0.23.1"
-deno_media_type = { version = "0.1.4", features = ["module_specifier"] }
+deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
 deno_npm = "=0.25.4"
 deno_path_util = "=0.2.1"
-deno_permissions = { version = "0.33.0", path = "./runtime/permissions" }
-deno_runtime = { version = "0.182.0", path = "./runtime" }
-deno_semver = "=0.5.14"
+deno_permissions = { version = "0.37.0", path = "./runtime/permissions" }
+deno_runtime = { version = "0.186.0", path = "./runtime" }
+deno_semver = "=0.5.16"
 deno_terminal = "0.2.0"
-napi_sym = { version = "0.103.0", path = "./cli/napi/sym" }
+napi_sym = { version = "0.107.0", path = "./ext/napi/sym" }
 test_util = { package = "test_server", path = "./tests/util/server" }

 denokv_proto = "0.8.1"
@@ -66,32 +66,32 @@ denokv_remote = "0.8.1"
 denokv_sqlite = { default-features = false, version = "0.8.2" }

 # exts
-deno_broadcast_channel = { version = "0.167.0", path = "./ext/broadcast_channel" }
-deno_cache = { version = "0.105.0", path = "./ext/cache" }
-deno_canvas = { version = "0.42.0", path = "./ext/canvas" }
-deno_console = { version = "0.173.0", path = "./ext/console" }
-deno_cron = { version = "0.53.0", path = "./ext/cron" }
-deno_crypto = { version = "0.187.0", path = "./ext/crypto" }
-deno_fetch = { version = "0.197.0", path = "./ext/fetch" }
-deno_ffi = { version = "0.160.0", path = "./ext/ffi" }
-deno_fs = { version = "0.83.0", path = "./ext/fs" }
-deno_http = { version = "0.171.0", path = "./ext/http" }
-deno_io = { version = "0.83.0", path = "./ext/io" }
-deno_kv = { version = "0.81.0", path = "./ext/kv" }
-deno_napi = { version = "0.104.0", path = "./ext/napi" }
-deno_net = { version = "0.165.0", path = "./ext/net" }
-deno_node = { version = "0.110.0", path = "./ext/node" }
-deno_tls = { version = "0.160.0", path = "./ext/tls" }
-deno_url = { version = "0.173.0", path = "./ext/url" }
-deno_web = { version = "0.204.0", path = "./ext/web" }
-deno_webgpu = { version = "0.140.0", path = "./ext/webgpu" }
-deno_webidl = { version = "0.173.0", path = "./ext/webidl" }
-deno_websocket = { version = "0.178.0", path = "./ext/websocket" }
-deno_webstorage = { version = "0.168.0", path = "./ext/webstorage" }
+deno_broadcast_channel = { version = "0.171.0", path = "./ext/broadcast_channel" }
+deno_cache = { version = "0.109.0", path = "./ext/cache" }
+deno_canvas = { version = "0.46.0", path = "./ext/canvas" }
+deno_console = { version = "0.177.0", path = "./ext/console" }
+deno_cron = { version = "0.57.0", path = "./ext/cron" }
+deno_crypto = { version = "0.191.0", path = "./ext/crypto" }
+deno_fetch = { version = "0.201.0", path = "./ext/fetch" }
+deno_ffi = { version = "0.164.0", path = "./ext/ffi" }
+deno_fs = { version = "0.87.0", path = "./ext/fs" }
+deno_http = { version = "0.175.0", path = "./ext/http" }
+deno_io = { version = "0.87.0", path = "./ext/io" }
+deno_kv = { version = "0.85.0", path = "./ext/kv" }
+deno_napi = { version = "0.108.0", path = "./ext/napi" }
+deno_net = { version = "0.169.0", path = "./ext/net" }
+deno_node = { version = "0.114.0", path = "./ext/node" }
+deno_tls = { version = "0.164.0", path = "./ext/tls" }
+deno_url = { version = "0.177.0", path = "./ext/url" }
+deno_web = { version = "0.208.0", path = "./ext/web" }
+deno_webgpu = { version = "0.144.0", path = "./ext/webgpu" }
+deno_webidl = { version = "0.177.0", path = "./ext/webidl" }
+deno_websocket = { version = "0.182.0", path = "./ext/websocket" }
+deno_webstorage = { version = "0.172.0", path = "./ext/webstorage" }

 # resolvers
-deno_resolver = { version = "0.5.0", path = "./resolvers/deno" }
-node_resolver = { version = "0.12.0", path = "./resolvers/node" }
+deno_resolver = { version = "0.9.0", path = "./resolvers/deno" }
+node_resolver = { version = "0.16.0", path = "./resolvers/node" }

 aes = "=0.8.3"
 anyhow = "1.0.57"
@@ -111,7 +111,7 @@ console_static_text = "=0.8.1"
 dashmap = "5.5.3"
 data-encoding = "2.3.3"
 data-url = "=0.3.0"
-deno_cache_dir = "=0.13.0"
+deno_cache_dir = "=0.13.2"
 deno_package_json = { version = "0.1.2", default-features = false }
 dlopen2 = "0.6.1"
 ecb = "=0.1.2"
@@ -137,7 +137,7 @@ hyper-util = { version = "=0.1.7", features = ["tokio", "client", "client-legacy
 hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
 indexmap = { version = "2", features = ["serde"] }
 ipnet = "2.3"
-jsonc-parser = { version = "=0.23.0", features = ["serde"] }
+jsonc-parser = { version = "=0.26.2", features = ["serde"] }
 lazy-regex = "3"
 libc = "0.2.126"
 libz-sys = { version = "1.1.20", default-features = false }
@@ -187,7 +187,7 @@ tar = "=0.4.40"
 tempfile = "3.4.0"
 termcolor = "1.1.3"
 thiserror = "1.0.61"
-tokio = { version = "=1.36.0", features = ["full"] }
+tokio = { version = "1.36.0", features = ["full"] }
 tokio-metrics = { version = "0.3.0", features = ["rt"] }
 tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring", "tls12"] }
 tokio-socks = "0.5.1"
diff --git a/README.md b/README.md
index 102319f4f2..19d4fa8a12 100644
--- a/README.md
+++ b/README.md
@@ -46,6 +46,12 @@ brew install deno
 choco install deno
 ```

+[WinGet](https://winstall.app/apps/DenoLand.Deno) (Windows):
+
+```powershell
+winget install --id=DenoLand.Deno
+```
+
 ### Build and install from source

 Complete instructions for building Deno from source can be found in the manual
diff --git a/Releases.md b/Releases.md
index 08562134a1..5ce25815bd 100644
--- a/Releases.md
+++ b/Releases.md
@@ -6,6 +6,124 @@ https://github.com/denoland/deno/releases

 We also have one-line install commands at:
 https://github.com/denoland/deno_install

+### 2.0.6 / 2024.11.10
+
+- feat(ext/http): abort event when request is cancelled (#26781)
+- feat(ext/http): abort signal when request is cancelled (#26761)
+- feat(lsp): auto-import completions from byonm dependencies (#26680)
+- fix(ext/cache): don't panic when creating cache (#26780)
+- fix(ext/node): better inspector support (#26471)
+- fix(fmt): don't use self-closing tags in HTML (#26754)
+- fix(install): cache jsr deps from all workspace config files (#26779)
+- fix(node:zlib): gzip & gzipSync should accept ArrayBuffer (#26762)
+- fix: performance.timeOrigin (#26787)
+
+### 2.0.5 / 2024.11.05
+
+- fix(add): better error message when adding package that only has pre-release
+  versions (#26724)
+- fix(add): only add npm deps to package.json if it's at least as close as
+  deno.json (#26683)
+- fix(cli): set `npm_config_user_agent` when running npm packages or tasks
+  (#26639)
+- fix(coverage): exclude comment lines from coverage reports (#25939)
+- fix(ext/node): add `findSourceMap` to the default export of `node:module`
+  (#26720)
+- fix(ext/node): convert errors from `fs.readFile/fs.readFileSync` to node
+  format (#26632)
+- fix(ext/node): resolve exports even if parent module filename isn't present
+  (#26553)
+- fix(ext/node): return `this` from `http.Server.ref/unref()` (#26647)
+- fix(fmt): do not panic for jsx ignore container followed by jsx text (#26723)
+- fix(fmt): fix several HTML and components issues (#26654)
+- fix(fmt): ignore file directive for YAML files (#26717)
+- fix(install): handle invalid function error, and fallback to junctions
+  regardless of the error (#26730)
+- fix(lsp): include unstable features from editor settings (#26655)
+- fix(lsp): scope attribution for lazily loaded assets (#26699)
+- fix(node): Implement `os.userInfo` properly, add missing `toPrimitive`
+  (#24702)
+- fix(serve): support serve hmr (#26078)
+- fix(types): missing `import` permission on `PermissionOptionsObject` (#26627)
+- fix(workspace): support wildcard packages (#26568)
+- fix: clamp smi in fast calls by default (#26506)
+- fix: improved support for cjs and cts modules (#26558)
+- fix: op_run_microtasks crash (#26718)
+- fix: panic_hook hangs without procfs (#26732)
+- fix: remove permission check in op_require_node_module_paths (#26645)
+- fix: surface package.json location on dep parse failure (#26665)
+- perf(lsp): don't walk coverage directory (#26715)
+
+### 2.0.4 / 2024.10.29
+
+- Revert "fix(ext/node): fix dns.lookup result ordering (#26264)" (#26621)
+- Revert "fix(ext/node): use primordials in `ext/node/polyfills/https.ts`
+  (#26323)" (#26613)
+- feat(lsp): "typescript.preferences.preferTypeOnlyAutoImports" setting (#26546)
+- fix(check): expose more globals from @types/node (#26603)
+- fix(check): ignore resolving `jsxImportSource` when jsx is not used in graph
+  (#26548)
+- fix(cli): Make --watcher CLEAR_SCREEN clear scrollback buffer as well as
+  visible screen (#25997)
+- fix(compile): regression handling redirects (#26586)
+- fix(ext/napi): export dynamic symbols list for {Free,Open}BSD (#26605)
+- fix(ext/node): add path to `fs.stat` and `fs.statSync` error (#26037)
+- fix(ext/node): compatibility with {Free,Open}BSD (#26604)
+- fix(ext/node): use primordials in
+  ext\node\polyfills\internal\crypto\_randomInt.ts (#26534)
+- fix(install): cache json exports of JSR packages (#26552)
+- fix(install): regression - do not panic when config file contains \r\n
+  newlines (#26547)
+- fix(lsp): make missing import action fix infallible (#26539)
+- fix(npm): match npm bearer token generation (#26544)
+- fix(upgrade): stop running `deno lsp` processes on windows before attempting
+  to replace executable (#26542)
+- fix(watch): don't panic on invalid file specifiers (#26577)
+- fix: do not panic when failing to write to http cache (#26591)
+- fix: provide hints in terminal errors for Node.js globals (#26610)
+- fix: report exceptions from nextTick (#26579)
+- fix: support watch flag to enable watching other files than the main module on
+  serve subcommand (#26622)
+- perf: pass transpiled module to deno_core as known string (#26555)
+
+### 2.0.3 / 2024.10.25
+
+- feat(lsp): interactive inlay hints (#26382)
+- fix: support node-api in denort (#26389)
+- fix(check): support `--frozen` on deno check (#26479)
+- fix(cli): increase size of blocking task threadpool on windows (#26465)
+- fix(config): schemas for lint rule and tag autocompletion (#26515)
+- fix(ext/console): ignore casing for named colors in css parsing (#26466)
+- fix(ext/ffi): return u64/i64 as bigints from nonblocking ffi calls (#26486)
+- fix(ext/node): cancel pending ipc writes on channel close (#26504)
+- fix(ext/node): map `ERROR_INVALID_NAME` to `ENOENT` on windows (#26475)
+- fix(ext/node): only set our end of child process pipe to nonblocking mode
+  (#26495)
+- fix(ext/node): properly map reparse point error in readlink (#26375)
+- fix(ext/node): refactor http.ServerResponse into function class (#26210)
+- fix(ext/node): stub HTTPParser internal binding (#26401)
+- fix(ext/node): use primordials in `ext/node/polyfills/https.ts` (#26323)
+- fix(fmt): --ext flag requires to pass files (#26525)
+- fix(fmt): upgrade formatters (#26469)
+- fix(help): missing package specifier (#26380)
+- fix(info): resolve workspace member mappings (#26350)
+- fix(install): better json editing (#26450)
+- fix(install): cache all exports of JSR packages listed in `deno.json` (#26501)
+- fix(install): cache type only module deps in `deno install` (#26497)
+- fix(install): don't cache json exports of JSR packages (for now) (#26530)
+- fix(install): update lockfile when using package.json (#26458)
+- fix(lsp): import-map-remap quickfix for type imports (#26454)
+- fix(node/util): support array formats in `styleText` (#26507)
+- fix(node:tls): set TLSSocket.alpnProtocol for client connections (#26476)
+- fix(npm): ensure scoped package name is encoded in URLs (#26390)
+- fix(npm): support version ranges with && or comma (#26453)
+- fix: `.npmrc` settings not being passed to install/add command (#26473)
+- fix: add 'fmt-component' to unstable features in schema file (#26526)
+- fix: share inotify fd across watchers (#26200)
+- fix: unpin tokio version (#26457)
+- perf(compile): pass module source data from binary directly to v8 (#26494)
+- perf: avoid multiple calls to runMicrotask (#26378)
+
 ### 2.0.2 / 2024.10.17

 - fix(cli): set napi object property properly (#26344)
diff --git a/bench_util/Cargo.toml b/bench_util/Cargo.toml
index fbf30b9fcf..d6eefc3a5d 100644
--- a/bench_util/Cargo.toml
+++ b/bench_util/Cargo.toml
@@ -2,7 +2,7 @@

 [package]
 name = "deno_bench_util"
-version = "0.167.0"
+version = "0.171.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index 7d6fc71622..e98e0ac001 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -2,7 +2,7 @@

 [package]
 name = "deno"
-version = "2.0.2"
+version = "2.0.6"
 authors.workspace = true
 default-run = "deno"
 edition.workspace = true
@@ -70,11 +70,11 @@ winres.workspace = true

 [dependencies]
 deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
 deno_cache_dir = { workspace = true }
-deno_config = { version = "=0.37.1", features = ["workspace", "sync"] }
+deno_config = { version = "=0.38.2", features = ["workspace", "sync"] }
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-deno_doc = { version = "0.154.0", default-features = false, features = ["rust", "html", "syntect"] }
-deno_graph = { version = "=0.83.3" }
-deno_lint = { version = "=0.67.0", features = ["docs"] }
+deno_doc = { version = "0.156.0", default-features = false, features = ["rust", "html", "syntect"] }
+deno_graph = { version = "=0.84.1" }
+deno_lint = { version = "=0.68.0", features = ["docs"] }
 deno_lockfile.workspace = true
 deno_npm.workspace = true
 deno_package_json.workspace = true
@@ -84,9 +84,7 @@ deno_runtime = { workspace = true, features = ["include_js_files_for_snapshottin
 deno_semver.workspace = true
 deno_task_shell = "=0.18.1"
 deno_terminal.workspace = true
-eszip = "=0.79.1"
-libsui = "0.4.0"
-napi_sym.workspace = true
+libsui = "0.5.0"
 node_resolver.workspace = true

 anstream = "0.6.14"
@@ -106,10 +104,10 @@ data-encoding.workspace = true
 dhat = { version = "0.3.3", optional = true }
 dissimilar = "=1.0.4"
 dotenvy = "0.15.7"
-dprint-plugin-json = "=0.19.3"
-dprint-plugin-jupyter = "=0.1.3"
+dprint-plugin-json = "=0.19.4"
+dprint-plugin-jupyter = "=0.1.5"
 dprint-plugin-markdown = "=0.17.8"
-dprint-plugin-typescript = "=0.93.0"
+dprint-plugin-typescript = "=0.93.2"
 env_logger = "=0.10.0"
 fancy-regex = "=0.10.0"
 faster-hex.workspace = true
@@ -123,15 +121,15 @@ http-body-util.workspace = true
 hyper-util.workspace = true
 import_map = { version = "=0.20.1", features = ["ext"] }
 indexmap.workspace = true
-jsonc-parser.workspace = true
+jsonc-parser = { workspace = true, features = ["cst", "serde"] }
 jupyter_runtime = { package = "runtimelib", version = "=0.14.0" }
 lazy-regex.workspace = true
 libc.workspace = true
 libz-sys.workspace = true
 log = { workspace = true, features = ["serde"] }
 lsp-types.workspace = true
-malva = "=0.10.1"
-markup_fmt = "=0.13.1"
+malva = "=0.11.0"
+markup_fmt = "=0.15.0"
 memmem.workspace = true
 monch.workspace = true
 notify.workspace = true
@@ -168,7 +166,6 @@ typed-arena = "=2.0.2"
 uuid = { workspace = true, features = ["serde"] }
 walkdir = "=2.3.2"
 which.workspace = true
-yoke.workspace = true
 zeromq.workspace = true
 zip = { version = "2.1.6", default-features = false, features = ["deflate-flate2"] }
 zstd.workspace = true
@@ -176,14 +173,12 @@ zstd.workspace = true
 [target.'cfg(windows)'.dependencies]
 junction.workspace = true
 winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] }
-windows-sys.workspace = true

 [target.'cfg(unix)'.dependencies]
 nix.workspace = true

 [dev-dependencies]
 deno_bench_util.workspace = true
-libuv-sys-lite = "=1.48.2"
 pretty_assertions.workspace = true
 test_util.workspace = true
diff --git a/cli/args/flags.rs b/cli/args/flags.rs
index cb3d55e50d..eb77971748 100644
--- a/cli/args/flags.rs
+++ b/cli/args/flags.rs
@@ -1179,8 +1179,8 @@ static DENO_HELP: &str = cstr!(
 Dependency management:
   add          Add dependencies
                  deno add jsr:@std/assert  |  deno add npm:express
-  install      Install script as an executable
-  uninstall    Uninstall a script previously installed with deno install
+  install      Installs dependencies either in the local project or globally to a bin directory
+  uninstall    Uninstalls a dependency or an executable script in the installation root's bin directory
   remove       Remove dependencies from the configuration file

 Tooling:
@@ -1856,6 +1856,7 @@ Unless --reload is specified, this command will not re-download already cached d
           .required_unless_present("help")
           .value_hint(ValueHint::FilePath),
       )
+      .arg(frozen_lockfile_arg())
      .arg(allow_import_arg())
    }
  )
@@ -2273,7 +2274,7 @@
           "sass", "less", "html", "svelte", "vue", "astro", "yml", "yaml", "ipynb",
         ])
-        .help_heading(FMT_HEADING),
+        .help_heading(FMT_HEADING).requires("files"),
     )
     .arg(
       Arg::new("ignore")
@@ -3387,8 +3388,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
       .value_name("IP_OR_HOSTNAME")
       .help("Allow network access. Optionally specify allowed IP addresses and host names, with ports as necessary")
       .value_parser(flags_net::validator)
-      .hide(true)
-      ;
+      .hide(true);
     if let Some(requires) = requires {
       arg = arg.requires(requires)
     }
@@ -4373,6 +4373,7 @@ fn check_parse(
   flags.type_check_mode = TypeCheckMode::Local;
   compile_args_without_check_parse(flags, matches)?;
   unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime);
+  frozen_lockfile_arg_parse(flags, matches);
   let files = matches.remove_many::<String>("file").unwrap().collect();
   if matches.get_flag("all") || matches.get_flag("remote") {
     flags.type_check_mode = TypeCheckMode::All;
@@ -6800,6 +6801,32 @@ mod tests {
         ..Flags::default()
       }
     );
+
+    let r = flags_from_vec(svec!["deno", "fmt", "--ext", "html"]);
+    assert!(r.is_err());
+    let r = flags_from_vec(svec!["deno", "fmt", "--ext", "html", "./**"]);
+    assert_eq!(
+      r.unwrap(),
+      Flags {
+        subcommand: DenoSubcommand::Fmt(FmtFlags {
+          check: false,
+          files: FileFlags {
+            include: vec!["./**".to_string()],
+            ignore: vec![],
+          },
+          use_tabs: None,
+          line_width: None,
+          indent_width: None,
+          single_quote: None,
+          prose_wrap: None,
+          no_semicolons: None,
+          unstable_component: false,
+          watch: Default::default(),
+        }),
+        ext: Some("html".to_string()),
+        ..Flags::default()
+      }
+    );
   }

   #[test]
diff --git a/cli/args/flags_net.rs b/cli/args/flags_net.rs
index 88ffcf0e46..abfcf28382 100644
--- a/cli/args/flags_net.rs
+++ b/cli/args/flags_net.rs
@@ -51,7 +51,7 @@ pub fn parse(paths: Vec<String>) -> clap::error::Result<Vec<String>> {
       }
     } else {
       NetDescriptor::parse(&host_and_port).map_err(|e| {
-        clap::Error::raw(clap::error::ErrorKind::InvalidValue, format!("{e:?}"))
+        clap::Error::raw(clap::error::ErrorKind::InvalidValue, e.to_string())
       })?;
       out.push(host_and_port)
     }
diff --git a/cli/args/mod.rs b/cli/args/mod.rs
index f905e186ba..e19025f8b1 100644
--- a/cli/args/mod.rs
+++ b/cli/args/mod.rs
@@ -46,6 +46,7 @@ pub use flags::*;
 pub use lockfile::CliLockfile;
 pub use lockfile::CliLockfileReadFromPathOptions;
 pub use package_json::NpmInstallDepsProvider;
+pub use package_json::PackageJsonDepValueParseWithLocationError;

 use deno_ast::ModuleSpecifier;
 use deno_core::anyhow::bail;
@@ -200,6 +201,8 @@ pub fn ts_config_to_transpile_and_emit_options(
       precompile_jsx_dynamic_props: None,
       transform_jsx,
       var_decl_imports: false,
+      // todo(dsherret): support verbatim_module_syntax here properly
+      verbatim_module_syntax: false,
     },
     deno_ast::EmitOptions {
       inline_sources: options.inline_sources,
@@ -578,6 +581,7 @@ fn discover_npmrc(
   let resolved = npmrc
     .as_resolved(npm_registry_url())
     .context("Failed to resolve .npmrc options")?;
+  log::debug!(".npmrc found at: '{}'", path.display());
   Ok(Arc::new(resolved))
 }

@@ -963,6 +967,9 @@ impl CliOptions {
     match self.sub_command() {
       DenoSubcommand::Cache(_) => GraphKind::All,
       DenoSubcommand::Check(_) => GraphKind::TypesOnly,
+      DenoSubcommand::Install(InstallFlags {
+        kind: InstallKind::Local(_),
+      }) => GraphKind::All,
       _ => self.type_check_mode().as_graph_kind(),
     }
   }
@@ -1448,6 +1455,12 @@ impl CliOptions {
       watch: Some(WatchFlagsWithPaths { hmr, .. }),
       ..
     }) = &self.flags.subcommand
+    {
+      *hmr
+    } else if let DenoSubcommand::Serve(ServeFlags {
+      watch: Some(WatchFlagsWithPaths { hmr, .. }),
+      ..
+    }) = &self.flags.subcommand
     {
       *hmr
     } else {
@@ -1591,6 +1604,15 @@ impl CliOptions {
   }

   pub fn use_byonm(&self) -> bool {
+    if matches!(
+      self.sub_command(),
+      DenoSubcommand::Install(_)
+        | DenoSubcommand::Add(_)
+        | DenoSubcommand::Remove(_)
+    ) {
+      // For `deno install/add/remove` we want to force the managed resolver so it can set up `node_modules/` directory.
+      return false;
+    }
     if self.node_modules_dir().ok().flatten().is_none()
       && self.maybe_node_modules_folder.is_some()
       && self
@@ -1668,6 +1690,10 @@ impl CliOptions {
     if let DenoSubcommand::Run(RunFlags {
       watch: Some(WatchFlagsWithPaths { paths, .. }),
       ..
+    })
+    | DenoSubcommand::Serve(ServeFlags {
+      watch: Some(WatchFlagsWithPaths { paths, .. }),
+      ..
     }) = &self.flags.subcommand
     {
       full_paths.extend(paths.iter().map(|path| self.initial_cwd.join(path)));
diff --git a/cli/args/package_json.rs b/cli/args/package_json.rs
index 2ef39a30d2..7dc75550c3 100644
--- a/cli/args/package_json.rs
+++ b/cli/args/package_json.rs
@@ -5,10 +5,12 @@ use std::sync::Arc;

 use deno_config::workspace::Workspace;
 use deno_core::serde_json;
+use deno_core::url::Url;
 use deno_package_json::PackageJsonDepValue;
 use deno_package_json::PackageJsonDepValueParseError;
 use deno_semver::npm::NpmPackageReqReference;
 use deno_semver::package::PackageReq;
+use thiserror::Error;

 #[derive(Debug)]
 pub struct InstallNpmRemotePkg {
@@ -23,11 +25,20 @@ pub struct InstallNpmWorkspacePkg {
   pub target_dir: PathBuf,
 }

+#[derive(Debug, Error, Clone)]
+#[error("Failed to install '{}'\n at {}", alias, location)]
+pub struct PackageJsonDepValueParseWithLocationError {
+  pub location: Url,
+  pub alias: String,
+  #[source]
+  pub source: PackageJsonDepValueParseError,
+}
+
 #[derive(Debug, Default)]
 pub struct NpmInstallDepsProvider {
   remote_pkgs: Vec<InstallNpmRemotePkg>,
   workspace_pkgs: Vec<InstallNpmWorkspacePkg>,
-  pkg_json_dep_errors: Vec<PackageJsonDepValueParseError>,
+  pkg_json_dep_errors: Vec<PackageJsonDepValueParseWithLocationError>,
 }

 impl NpmInstallDepsProvider {
@@ -89,7 +100,13 @@ impl NpmInstallDepsProvider {
         let dep = match dep {
           Ok(dep) => dep,
           Err(err) => {
-            pkg_json_dep_errors.push(err);
+            pkg_json_dep_errors.push(
+              PackageJsonDepValueParseWithLocationError {
+                location: pkg_json.specifier(),
+                alias,
+                source: err,
+              },
+            );
             continue;
           }
         };
@@ -150,7 +167,9 @@ impl NpmInstallDepsProvider {
     &self.workspace_pkgs
   }

-  pub fn pkg_json_dep_errors(&self) -> &[PackageJsonDepValueParseError] {
+  pub fn pkg_json_dep_errors(
+    &self,
+  ) -> &[PackageJsonDepValueParseWithLocationError] {
     &self.pkg_json_dep_errors
   }
 }
diff --git a/cli/bench/encode_into.js b/cli/bench/encode_into.js
index 11f5a56d90..ab5e11b04d 100644
--- a/cli/bench/encode_into.js
+++ b/cli/bench/encode_into.js
@@ -1,6 +1,5 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
- -// deno-lint-ignore-file no-console +// deno-lint-ignore-file no-console no-process-globals let [total, count] = typeof Deno !== "undefined" ? Deno.args diff --git a/cli/bench/getrandom.js b/cli/bench/getrandom.js index 3c3ec4aa19..fe99bbcbdf 100644 --- a/cli/bench/getrandom.js +++ b/cli/bench/getrandom.js @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -// deno-lint-ignore-file no-console +// deno-lint-ignore-file no-console no-process-globals let [total, count] = typeof Deno !== "undefined" ? Deno.args diff --git a/cli/bench/http.rs b/cli/bench/http.rs deleted file mode 100644 index f739b76ba8..0000000000 --- a/cli/bench/http.rs +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -use std::collections::HashMap; -use std::net::TcpStream; -use std::path::Path; -use std::process::Command; -use std::sync::atomic::AtomicU16; -use std::sync::atomic::Ordering; -use std::time::Duration; -use std::time::Instant; - -use super::Result; - -pub use test_util::parse_wrk_output; -pub use test_util::WrkOutput as HttpBenchmarkResult; -// Some of the benchmarks in this file have been renamed. In case the history -// somehow gets messed up: -// "node_http" was once called "node" -// "deno_tcp" was once called "deno" -// "deno_http" was once called "deno_net_http" - -const DURATION: &str = "10s"; - -pub fn benchmark( - target_path: &Path, -) -> Result> { - let deno_exe = test_util::deno_exe_path(); - let deno_exe = deno_exe.to_string(); - - let hyper_hello_exe = target_path.join("test_server"); - let hyper_hello_exe = hyper_hello_exe.to_str().unwrap(); - - let mut res = HashMap::new(); - let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")); - let http_dir = manifest_dir.join("bench").join("http"); - for entry in std::fs::read_dir(&http_dir)? { - let entry = entry?; - let pathbuf = entry.path(); - let path = pathbuf.to_str().unwrap(); - if path.ends_with(".lua") { - continue; - } - let file_stem = pathbuf.file_stem().unwrap().to_str().unwrap(); - - let lua_script = http_dir.join(format!("{file_stem}.lua")); - let mut maybe_lua = None; - if lua_script.exists() { - maybe_lua = Some(lua_script.to_str().unwrap()); - } - - let port = get_port(); - // deno run -A --unstable-net - res.insert( - file_stem.to_string(), - run( - &[ - deno_exe.as_str(), - "run", - "--allow-all", - "--unstable-net", - "--enable-testing-features-do-not-use", - path, - &server_addr(port), - ], - port, - None, - None, - maybe_lua, - )?, - ); - } - - res.insert("hyper".to_string(), hyper_http(hyper_hello_exe)?); - - Ok(res) -} - -fn run( - server_cmd: &[&str], - port: u16, - env: Option>, - origin_cmd: Option<&[&str]>, - lua_script: Option<&str>, -) -> Result { - // Wait for port 4544 to become available. - // TODO Need to use SO_REUSEPORT with tokio::net::TcpListener. - std::thread::sleep(Duration::from_secs(5)); - - let mut origin = None; - if let Some(cmd) = origin_cmd { - let mut com = Command::new(cmd[0]); - com.args(&cmd[1..]); - if let Some(env) = env.clone() { - com.envs(env); - } - origin = Some(com.spawn()?); - }; - - println!("{}", server_cmd.join(" ")); - let mut server = { - let mut com = Command::new(server_cmd[0]); - com.args(&server_cmd[1..]); - if let Some(env) = env { - com.envs(env); - } - com.spawn()? - }; - - // Wait for server to wake up. 
- let now = Instant::now(); - let addr = format!("127.0.0.1:{port}"); - while now.elapsed().as_secs() < 30 { - if TcpStream::connect(&addr).is_ok() { - break; - } - std::thread::sleep(Duration::from_millis(10)); - } - TcpStream::connect(&addr).expect("Failed to connect to server in time"); - println!("Server took {} ms to start", now.elapsed().as_millis()); - - let wrk = test_util::prebuilt_tool_path("wrk"); - assert!(wrk.is_file()); - - let addr = format!("http://{addr}/"); - let wrk = wrk.to_string(); - let mut wrk_cmd = vec![wrk.as_str(), "-d", DURATION, "--latency", &addr]; - - if let Some(lua_script) = lua_script { - wrk_cmd.push("-s"); - wrk_cmd.push(lua_script); - } - - println!("{}", wrk_cmd.join(" ")); - let output = test_util::run_collect(&wrk_cmd, None, None, None, true).0; - - std::thread::sleep(Duration::from_secs(1)); // wait to capture failure. TODO racy. - - println!("{output}"); - assert!( - server.try_wait()?.map(|s| s.success()).unwrap_or(true), - "server ended with error" - ); - - server.kill()?; - if let Some(mut origin) = origin { - origin.kill()?; - } - - Ok(parse_wrk_output(&output)) -} - -static NEXT_PORT: AtomicU16 = AtomicU16::new(4544); -pub(crate) fn get_port() -> u16 { - let p = NEXT_PORT.load(Ordering::SeqCst); - NEXT_PORT.store(p.wrapping_add(1), Ordering::SeqCst); - p -} - -fn server_addr(port: u16) -> String { - format!("0.0.0.0:{port}") -} - -fn hyper_http(exe: &str) -> Result { - let port = get_port(); - println!("http_benchmark testing RUST hyper"); - run(&[exe, &port.to_string()], port, None, None, None) -} diff --git a/cli/bench/http/deno_flash_hono_router.js b/cli/bench/http/deno_flash_hono_router.js deleted file mode 100644 index baced0cece..0000000000 --- a/cli/bench/http/deno_flash_hono_router.js +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -import { Hono } from "https://deno.land/x/hono@v2.0.9/mod.ts"; - -const addr = Deno.args[0] || "127.0.0.1:4500"; -const [hostname, port] = addr.split(":"); - -const app = new Hono(); -app.get("/", (c) => c.text("Hello, World!")); - -Deno.serve({ port: Number(port), hostname }, app.fetch); diff --git a/cli/bench/http/deno_flash_send_file.js b/cli/bench/http/deno_flash_send_file.js deleted file mode 100644 index bf8541f8b0..0000000000 --- a/cli/bench/http/deno_flash_send_file.js +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
- -const addr = Deno.args[0] || "127.0.0.1:4500"; -const [hostname, port] = addr.split(":"); -const { serve } = Deno; - -const path = new URL("../testdata/128k.bin", import.meta.url).pathname; - -function handler() { - const file = Deno.openSync(path); - return new Response(file.readable); -} - -serve({ hostname, port: Number(port) }, handler); diff --git a/cli/bench/http/deno_http_read_headers.lua b/cli/bench/http/deno_http_read_headers.lua deleted file mode 100644 index 64f1923ff3..0000000000 --- a/cli/bench/http/deno_http_read_headers.lua +++ /dev/null @@ -1,5 +0,0 @@ -wrk.headers["foo"] = "bar" -wrk.headers["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36" -wrk.headers["Viewport-Width"] = "1920" -wrk.headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9" -wrk.headers["Accept-Language"] = "en,la;q=0.9" \ No newline at end of file diff --git a/cli/bench/http/deno_http_serve.js b/cli/bench/http/deno_http_serve.js deleted file mode 100644 index 639982ce60..0000000000 --- a/cli/bench/http/deno_http_serve.js +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -const addr = Deno.args[0] ?? "127.0.0.1:4500"; -const [hostname, port] = addr.split(":"); -const { serve } = Deno; - -function handler() { - return new Response("Hello World"); -} - -serve({ hostname, port: Number(port), reusePort: true }, handler); diff --git a/cli/bench/http/deno_post_bin.lua b/cli/bench/http/deno_post_bin.lua deleted file mode 100644 index c8f5d3e3f7..0000000000 --- a/cli/bench/http/deno_post_bin.lua +++ /dev/null @@ -1,5 +0,0 @@ -wrk.method = "POST" -wrk.headers["Content-Type"] = "application/octet-stream" - -file = io.open("./cli/bench/testdata/128k.bin", "rb") -wrk.body = file:read("*a") \ No newline at end of file diff --git a/cli/bench/http/deno_post_json.lua b/cli/bench/http/deno_post_json.lua deleted file mode 100644 index cc6c4e226d..0000000000 --- a/cli/bench/http/deno_post_json.lua +++ /dev/null @@ -1,3 +0,0 @@ -wrk.method = "POST" -wrk.headers["Content-Type"] = "application/json" -wrk.body = '{"hello":"deno"}' \ No newline at end of file diff --git a/cli/bench/http/deno_reactdom_ssr_flash.jsx b/cli/bench/http/deno_reactdom_ssr_flash.jsx deleted file mode 100644 index eaabf89121..0000000000 --- a/cli/bench/http/deno_reactdom_ssr_flash.jsx +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -import { renderToReadableStream } from "https://esm.run/react-dom/server"; -import * as React from "https://esm.run/react"; -const { serve } = Deno; -const addr = Deno.args[0] || "127.0.0.1:4500"; -const [hostname, port] = addr.split(":"); - -const App = () => ( - - -

-      <h1>Hello World</h1>
- - -); - -const headers = { - headers: { - "Content-Type": "text/html", - }, -}; - -serve({ hostname, port: Number(port) }, async () => { - return new Response(await renderToReadableStream(), headers); -}); diff --git a/cli/bench/http/deno_tcp.ts b/cli/bench/http/deno_tcp.ts deleted file mode 100644 index b795910737..0000000000 --- a/cli/bench/http/deno_tcp.ts +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -// Used for benchmarking Deno's networking. -// TODO(bartlomieju): Replace this with a real HTTP server once -// https://github.com/denoland/deno/issues/726 is completed. -// Note: this is a keep-alive server. -// deno-lint-ignore-file no-console -const addr = Deno.args[0] || "127.0.0.1:4500"; -const [hostname, port] = addr.split(":"); -const listener = Deno.listen({ hostname, port: Number(port) }); -const response = new TextEncoder().encode( - "HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World\n", -); -async function handle(conn: Deno.Conn): Promise { - const buffer = new Uint8Array(1024); - try { - while (true) { - await conn.read(buffer); - await conn.write(response); - } - } catch (e) { - if ( - !(e instanceof Deno.errors.BrokenPipe) && - !(e instanceof Deno.errors.ConnectionReset) - ) { - throw e; - } - } - conn.close(); -} - -console.log("Listening on", addr); -for await (const conn of listener) { - handle(conn); -} diff --git a/cli/bench/main.rs b/cli/bench/main.rs index 72fa7e9636..c3c42d2488 100644 --- a/cli/bench/main.rs +++ b/cli/bench/main.rs @@ -17,7 +17,6 @@ use std::process::Stdio; use std::time::SystemTime; use test_util::PathRef; -mod http; mod lsp; fn read_json(filename: &Path) -> Result { @@ -345,9 +344,11 @@ struct BenchResult { binary_size: HashMap, bundle_size: HashMap, cargo_deps: usize, + // TODO(bartlomieju): remove max_latency: HashMap, max_memory: HashMap, lsp_exec_time: HashMap, + // TODO(bartlomieju): remove req_per_sec: HashMap, syscall_count: HashMap, thread_count: HashMap, @@ -362,7 +363,6 @@ async fn main() -> Result<()> { "binary_size", "cargo_deps", "lsp", - "http", "strace", "mem_usage", ]; @@ -427,21 +427,6 @@ async fn main() -> Result<()> { new_data.lsp_exec_time = lsp_exec_times; } - if benchmarks.contains(&"http") && cfg!(not(target_os = "windows")) { - let stats = http::benchmark(target_dir.as_path())?; - let req_per_sec = stats - .iter() - .map(|(name, result)| (name.clone(), result.requests as i64)) - .collect(); - new_data.req_per_sec = req_per_sec; - let max_latency = stats - .iter() - .map(|(name, result)| (name.clone(), result.latency)) - .collect(); - - new_data.max_latency = max_latency; - } - if cfg!(target_os = "linux") && benchmarks.contains(&"strace") { use std::io::Read; diff --git a/cli/bench/op_now.js b/cli/bench/op_now.js index bcc3ea3c56..7c1427c809 100644 --- a/cli/bench/op_now.js +++ b/cli/bench/op_now.js @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -// deno-lint-ignore-file no-console +// deno-lint-ignore-file no-console no-process-globals const queueMicrotask = globalThis.queueMicrotask || process.nextTick; let [total, count] = typeof Deno !== "undefined" diff --git a/cli/bench/secure_curves.js b/cli/bench/secure_curves.js index 02d248b23f..912b75cccd 100644 --- a/cli/bench/secure_curves.js +++ b/cli/bench/secure_curves.js @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
- -// deno-lint-ignore-file no-console +// deno-lint-ignore-file no-console no-process-globals let [total, count] = typeof Deno !== "undefined" ? Deno.args diff --git a/cli/bench/tty.js b/cli/bench/tty.js index 248a901137..e494e76af7 100644 --- a/cli/bench/tty.js +++ b/cli/bench/tty.js @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -// deno-lint-ignore-file no-console +// deno-lint-ignore-file no-console no-process-globals const queueMicrotask = globalThis.queueMicrotask || process.nextTick; let [total, count] = typeof Deno !== "undefined" diff --git a/cli/bench/url_parse.js b/cli/bench/url_parse.js index 367cf73f46..9cb0045f64 100644 --- a/cli/bench/url_parse.js +++ b/cli/bench/url_parse.js @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -// deno-lint-ignore-file no-console +// deno-lint-ignore-file no-console no-process-globals const queueMicrotask = globalThis.queueMicrotask || process.nextTick; let [total, count] = typeof Deno !== "undefined" diff --git a/cli/bench/write_file.js b/cli/bench/write_file.js index 104a23a8db..747503ce2a 100644 --- a/cli/bench/write_file.js +++ b/cli/bench/write_file.js @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -// deno-lint-ignore-file no-console +// deno-lint-ignore-file no-console no-process-globals const queueMicrotask = globalThis.queueMicrotask || process.nextTick; let [total, count] = typeof Deno !== "undefined" diff --git a/cli/build.rs b/cli/build.rs index aa5d3d18c6..2678a8dbb0 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -365,6 +365,9 @@ fn main() { return; } + deno_napi::print_linker_flags("deno"); + deno_napi::print_linker_flags("denort"); + // Host snapshots won't work when cross compiling. let target = env::var("TARGET").unwrap(); let host = env::var("HOST").unwrap(); @@ -374,58 +377,6 @@ fn main() { panic!("Cross compiling with snapshot is not supported."); } - let symbols_file_name = match env::consts::OS { - "android" | "freebsd" | "openbsd" => { - "generated_symbol_exports_list_linux.def".to_string() - } - os => format!("generated_symbol_exports_list_{}.def", os), - }; - let symbols_path = std::path::Path::new("napi") - .join(symbols_file_name) - .canonicalize() - .expect( - "Missing symbols list! Generate using tools/napi/generate_symbols_lists.js", - ); - - println!("cargo:rustc-rerun-if-changed={}", symbols_path.display()); - - #[cfg(target_os = "windows")] - println!( - "cargo:rustc-link-arg-bin=deno=/DEF:{}", - symbols_path.display() - ); - - #[cfg(target_os = "macos")] - println!( - "cargo:rustc-link-arg-bin=deno=-Wl,-exported_symbols_list,{}", - symbols_path.display() - ); - - #[cfg(target_os = "linux")] - { - // If a custom compiler is set, the glibc version is not reliable. - // Here, we assume that if a custom compiler is used, that it will be modern enough to support a dynamic symbol list. - if env::var("CC").is_err() - && glibc_version::get_version() - .map(|ver| ver.major <= 2 && ver.minor < 35) - .unwrap_or(false) - { - println!("cargo:warning=Compiling with all symbols exported, this will result in a larger binary. 
Please use glibc 2.35 or later for an optimised build."); - println!("cargo:rustc-link-arg-bin=deno=-rdynamic"); - } else { - println!( - "cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}", - symbols_path.display() - ); - } - } - - #[cfg(target_os = "android")] - println!( - "cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}", - symbols_path.display() - ); - // To debug snapshot issues uncomment: // op_fetch_asset::trace_serializer(); diff --git a/cli/cache/cache_db.rs b/cli/cache/cache_db.rs index b24078f29b..329ed2d970 100644 --- a/cli/cache/cache_db.rs +++ b/cli/cache/cache_db.rs @@ -57,7 +57,7 @@ impl rusqlite::types::FromSql for CacheDBHash { } /// What should the cache should do on failure? -#[derive(Default)] +#[derive(Debug, Default)] pub enum CacheFailure { /// Return errors if failure mode otherwise unspecified. #[default] @@ -69,6 +69,7 @@ pub enum CacheFailure { } /// Configuration SQL and other parameters for a [`CacheDB`]. +#[derive(Debug)] pub struct CacheDBConfiguration { /// SQL to run for a new database. pub table_initializer: &'static str, @@ -98,6 +99,7 @@ impl CacheDBConfiguration { } } +#[derive(Debug)] enum ConnectionState { Connected(Connection), Blackhole, @@ -106,7 +108,7 @@ enum ConnectionState { /// A cache database that eagerly initializes itself off-thread, preventing initialization operations /// from blocking the main thread. -#[derive(Clone)] +#[derive(Debug, Clone)] pub struct CacheDB { // TODO(mmastrac): We can probably simplify our thread-safe implementation here conn: Arc>>, diff --git a/cli/cache/emit.rs b/cli/cache/emit.rs index 6807f06c10..3c9eecfcbd 100644 --- a/cli/cache/emit.rs +++ b/cli/cache/emit.rs @@ -10,6 +10,7 @@ use deno_core::unsync::sync::AtomicFlag; use super::DiskCache; /// The cache that stores previously emitted files. 
+#[derive(Debug)] pub struct EmitCache { disk_cache: DiskCache, emit_failed_flag: AtomicFlag, @@ -39,7 +40,7 @@ impl EmitCache { &self, specifier: &ModuleSpecifier, expected_source_hash: u64, - ) -> Option> { + ) -> Option { let emit_filename = self.get_emit_filename(specifier)?; let bytes = self.disk_cache.get(&emit_filename).ok()?; self @@ -91,6 +92,7 @@ impl EmitCache { const LAST_LINE_PREFIX: &str = "\n// denoCacheMetadata="; +#[derive(Debug)] struct EmitFileSerializer { cli_version: &'static str, } @@ -100,7 +102,7 @@ impl EmitFileSerializer { &self, mut bytes: Vec, expected_source_hash: u64, - ) -> Option> { + ) -> Option { let last_newline_index = bytes.iter().rposition(|&b| b == b'\n')?; let (content, last_line) = bytes.split_at(last_newline_index); let hashes = last_line.strip_prefix(LAST_LINE_PREFIX.as_bytes())?; @@ -120,7 +122,7 @@ impl EmitFileSerializer { // everything looks good, truncate and return it bytes.truncate(content.len()); - Some(bytes) + String::from_utf8(bytes).ok() } pub fn serialize(&self, code: &[u8], source_hash: u64) -> Vec { @@ -170,8 +172,6 @@ mod test { }, emit_failed_flag: Default::default(), }; - let to_string = - |bytes: Vec| -> String { String::from_utf8(bytes).unwrap() }; let specifier1 = ModuleSpecifier::from_file_path(temp_dir.path().join("file1.ts")) @@ -188,13 +188,10 @@ mod test { assert_eq!(cache.get_emit_code(&specifier1, 5), None); // providing the correct source hash assert_eq!( - cache.get_emit_code(&specifier1, 10).map(to_string), + cache.get_emit_code(&specifier1, 10), Some(emit_code1.clone()), ); - assert_eq!( - cache.get_emit_code(&specifier2, 2).map(to_string), - Some(emit_code2) - ); + assert_eq!(cache.get_emit_code(&specifier2, 2), Some(emit_code2)); // try changing the cli version (should not load previous ones) let cache = EmitCache { @@ -215,18 +212,12 @@ mod test { }, emit_failed_flag: Default::default(), }; - assert_eq!( - cache.get_emit_code(&specifier1, 5).map(to_string), - Some(emit_code1) - ); + assert_eq!(cache.get_emit_code(&specifier1, 5), Some(emit_code1)); // adding when already exists should not cause issue let emit_code3 = "asdf".to_string(); cache.set_emit_code(&specifier1, 20, emit_code3.as_bytes()); assert_eq!(cache.get_emit_code(&specifier1, 5), None); - assert_eq!( - cache.get_emit_code(&specifier1, 20).map(to_string), - Some(emit_code3) - ); + assert_eq!(cache.get_emit_code(&specifier1, 20), Some(emit_code3)); } } diff --git a/cli/cache/mod.rs b/cli/cache/mod.rs index ded163b4e4..50fc135ddf 100644 --- a/cli/cache/mod.rs +++ b/cli/cache/mod.rs @@ -8,14 +8,9 @@ use crate::file_fetcher::FetchOptions; use crate::file_fetcher::FetchPermissionsOptionRef; use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileOrRedirect; -use crate::npm::CliNpmResolver; -use crate::resolver::CliNodeResolver; use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::atomic_write_file_with_retries_and_fs; use crate::util::fs::AtomicWriteFileFsAdapter; -use crate::util::path::specifier_has_extension; -use crate::util::text_encoding::arc_str_to_bytes; -use crate::util::text_encoding::from_utf8_lossy_owned; use deno_ast::MediaType; use deno_core::futures; @@ -25,7 +20,9 @@ use deno_graph::source::CacheInfo; use deno_graph::source::LoadFuture; use deno_graph::source::LoadResponse; use deno_graph::source::Loader; +use deno_runtime::deno_fs; use deno_runtime::deno_permissions::PermissionsContainer; +use node_resolver::InNpmPackageChecker; use std::collections::HashMap; use std::path::Path; use std::path::PathBuf; @@ 
-60,7 +57,6 @@ pub use fast_check::FastCheckCache; pub use incremental::IncrementalCache; pub use module_info::ModuleInfoCache; pub use node::NodeAnalysisCache; -pub use parsed_source::EsmOrCjsChecker; pub use parsed_source::LazyGraphSourceParser; pub use parsed_source::ParsedSourceCache; @@ -181,46 +177,40 @@ pub struct FetchCacherOptions { pub permissions: PermissionsContainer, /// If we're publishing for `deno publish`. pub is_deno_publish: bool, - pub unstable_detect_cjs: bool, } /// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides /// a concise interface to the DENO_DIR when building module graphs. pub struct FetchCacher { pub file_header_overrides: HashMap>, - esm_or_cjs_checker: Arc, file_fetcher: Arc, + fs: Arc, global_http_cache: Arc, - node_resolver: Arc, - npm_resolver: Arc, + in_npm_pkg_checker: Arc, module_info_cache: Arc, permissions: PermissionsContainer, is_deno_publish: bool, - unstable_detect_cjs: bool, cache_info_enabled: bool, } impl FetchCacher { pub fn new( - esm_or_cjs_checker: Arc, file_fetcher: Arc, + fs: Arc, global_http_cache: Arc, - node_resolver: Arc, - npm_resolver: Arc, + in_npm_pkg_checker: Arc, module_info_cache: Arc, options: FetchCacherOptions, ) -> Self { Self { file_fetcher, - esm_or_cjs_checker, + fs, global_http_cache, - node_resolver, - npm_resolver, + in_npm_pkg_checker, module_info_cache, file_header_overrides: options.file_header_overrides, permissions: options.permissions, is_deno_publish: options.is_deno_publish, - unstable_detect_cjs: options.unstable_detect_cjs, cache_info_enabled: false, } } @@ -271,70 +261,23 @@ impl Loader for FetchCacher { ) -> LoadFuture { use deno_graph::source::CacheSetting as LoaderCacheSetting; - if specifier.scheme() == "file" { - if specifier.path().contains("/node_modules/") { - // The specifier might be in a completely different symlinked tree than - // what the node_modules url is in (ex. `/my-project-1/node_modules` - // symlinked to `/my-project-2/node_modules`), so first we checked if the path - // is in a node_modules dir to avoid needlessly canonicalizing, then now compare - // against the canonicalized specifier. - let specifier = - crate::node::resolve_specifier_into_node_modules(specifier); - if self.npm_resolver.in_npm_package(&specifier) { - return Box::pin(futures::future::ready(Ok(Some( - LoadResponse::External { specifier }, - )))); - } - } - - // make local CJS modules external to the graph - if specifier_has_extension(specifier, "cjs") { + if specifier.scheme() == "file" + && specifier.path().contains("/node_modules/") + { + // The specifier might be in a completely different symlinked tree than + // what the node_modules url is in (ex. `/my-project-1/node_modules` + // symlinked to `/my-project-2/node_modules`), so first we checked if the path + // is in a node_modules dir to avoid needlessly canonicalizing, then now compare + // against the canonicalized specifier. 
+ let specifier = crate::node::resolve_specifier_into_node_modules( + specifier, + self.fs.as_ref(), + ); + if self.in_npm_pkg_checker.in_npm_package(&specifier) { return Box::pin(futures::future::ready(Ok(Some( - LoadResponse::External { - specifier: specifier.clone(), - }, + LoadResponse::External { specifier }, )))); } - - if self.unstable_detect_cjs && specifier_has_extension(specifier, "js") { - if let Ok(Some(pkg_json)) = - self.node_resolver.get_closest_package_json(specifier) - { - if pkg_json.typ == "commonjs" { - if let Ok(path) = specifier.to_file_path() { - if let Ok(bytes) = std::fs::read(&path) { - let text: Arc = from_utf8_lossy_owned(bytes).into(); - let is_es_module = match self.esm_or_cjs_checker.is_esm( - specifier, - text.clone(), - MediaType::JavaScript, - ) { - Ok(value) => value, - Err(err) => { - return Box::pin(futures::future::ready(Err(err.into()))); - } - }; - if !is_es_module { - self.node_resolver.mark_cjs_resolution(specifier.clone()); - return Box::pin(futures::future::ready(Ok(Some( - LoadResponse::External { - specifier: specifier.clone(), - }, - )))); - } else { - return Box::pin(futures::future::ready(Ok(Some( - LoadResponse::Module { - specifier: specifier.clone(), - content: arc_str_to_bytes(text), - maybe_headers: None, - }, - )))); - } - } - } - } - } - } } if self.is_deno_publish @@ -378,6 +321,7 @@ impl Loader for FetchCacher { } else { FetchPermissionsOptionRef::DynamicContainer(&permissions) }, + maybe_auth: None, maybe_accept: None, maybe_cache_setting: maybe_cache_setting.as_ref(), }, diff --git a/cli/cache/module_info.rs b/cli/cache/module_info.rs index 4dbb01c37b..060a6f4f0c 100644 --- a/cli/cache/module_info.rs +++ b/cli/cache/module_info.rs @@ -44,18 +44,32 @@ pub static MODULE_INFO_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration { /// A cache of `deno_graph::ModuleInfo` objects. Using this leads to a considerable /// performance improvement because when it exists we can skip parsing a module for /// deno_graph. +#[derive(Debug)] pub struct ModuleInfoCache { conn: CacheDB, + parsed_source_cache: Arc, } impl ModuleInfoCache { #[cfg(test)] - pub fn new_in_memory(version: &'static str) -> Self { - Self::new(CacheDB::in_memory(&MODULE_INFO_CACHE_DB, version)) + pub fn new_in_memory( + version: &'static str, + parsed_source_cache: Arc, + ) -> Self { + Self::new( + CacheDB::in_memory(&MODULE_INFO_CACHE_DB, version), + parsed_source_cache, + ) } - pub fn new(conn: CacheDB) -> Self { - Self { conn } + pub fn new( + conn: CacheDB, + parsed_source_cache: Arc, + ) -> Self { + Self { + conn, + parsed_source_cache, + } } /// Useful for testing: re-create this cache DB with a different current version. 
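The module_info.rs hunks that follow add a synchronous analyze_sync path built on a hash-keyed cache-or-compute pattern: hash the source, return the cached ModuleInfo on a hit, otherwise parse and then best-effort save. Below is a minimal standalone sketch of that pattern, with stand-in types (InfoCache, expensive_parse) in place of the real CacheDB/deno_graph API, which keys on (specifier, media_type, source_hash) in SQLite:

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

// Stand-in cache keyed only by a source hash; purely illustrative.
struct InfoCache {
    entries: HashMap<u64, String>,
}

impl InfoCache {
    fn analyze(&mut self, source: &str) -> String {
        let mut hasher = DefaultHasher::new();
        source.hash(&mut hasher);
        let key = hasher.finish();
        if let Some(info) = self.entries.get(&key) {
            return info.clone(); // cache hit: skip parsing entirely
        }
        let info = expensive_parse(source); // cache miss: do the work
        self.entries.insert(key, info.clone()); // best-effort save for next run
        info
    }
}

// Placeholder for the real swc-based module analysis.
fn expensive_parse(source: &str) -> String {
    format!("module info for {} byte module", source.len())
}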
@@ -63,6 +77,7 @@ impl ModuleInfoCache { pub(crate) fn recreate_with_version(self, version: &'static str) -> Self { Self { conn: self.conn.recreate_with_version(version), + parsed_source_cache: self.parsed_source_cache, } } @@ -113,13 +128,10 @@ impl ModuleInfoCache { Ok(()) } - pub fn as_module_analyzer<'a>( - &'a self, - parsed_source_cache: &'a Arc, - ) -> ModuleInfoCacheModuleAnalyzer<'a> { + pub fn as_module_analyzer(&self) -> ModuleInfoCacheModuleAnalyzer { ModuleInfoCacheModuleAnalyzer { module_info_cache: self, - parsed_source_cache, + parsed_source_cache: &self.parsed_source_cache, } } } @@ -129,6 +141,84 @@ pub struct ModuleInfoCacheModuleAnalyzer<'a> { parsed_source_cache: &'a Arc, } +impl<'a> ModuleInfoCacheModuleAnalyzer<'a> { + fn load_cached_module_info( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + source_hash: CacheDBHash, + ) -> Option { + match self.module_info_cache.get_module_info( + specifier, + media_type, + source_hash, + ) { + Ok(Some(info)) => Some(info), + Ok(None) => None, + Err(err) => { + log::debug!( + "Error loading module cache info for {}. {:#}", + specifier, + err + ); + None + } + } + } + + fn save_module_info_to_cache( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + source_hash: CacheDBHash, + module_info: &ModuleInfo, + ) { + if let Err(err) = self.module_info_cache.set_module_info( + specifier, + media_type, + source_hash, + module_info, + ) { + log::debug!( + "Error saving module cache info for {}. {:#}", + specifier, + err + ); + } + } + + pub fn analyze_sync( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + source: &Arc, + ) -> Result { + // attempt to load from the cache + let source_hash = CacheDBHash::from_source(source); + if let Some(info) = + self.load_cached_module_info(specifier, media_type, source_hash) + { + return Ok(info); + } + + // otherwise, get the module info from the parsed source cache + let parser = self.parsed_source_cache.as_capturing_parser(); + let analyzer = ParserModuleAnalyzer::new(&parser); + let module_info = + analyzer.analyze_sync(specifier, source.clone(), media_type)?; + + // then attempt to cache it + self.save_module_info_to_cache( + specifier, + media_type, + source_hash, + &module_info, + ); + + Ok(module_info) + } +} + #[async_trait::async_trait(?Send)] impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> { async fn analyze( @@ -139,20 +229,10 @@ impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> { ) -> Result { // attempt to load from the cache let source_hash = CacheDBHash::from_source(&source); - match self.module_info_cache.get_module_info( - specifier, - media_type, - source_hash, - ) { - Ok(Some(info)) => return Ok(info), - Ok(None) => {} - Err(err) => { - log::debug!( - "Error loading module cache info for {}. {:#}", - specifier, - err - ); - } + if let Some(info) = + self.load_cached_module_info(specifier, media_type, source_hash) + { + return Ok(info); } // otherwise, get the module info from the parsed source cache @@ -169,18 +249,12 @@ impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> { .unwrap()?; // then attempt to cache it - if let Err(err) = self.module_info_cache.set_module_info( + self.save_module_info_to_cache( specifier, media_type, source_hash, &module_info, - ) { - log::debug!( - "Error saving module cache info for {}. 
{:#}", - specifier, - err - ); - } + ); Ok(module_info) } @@ -202,7 +276,7 @@ fn serialize_media_type(media_type: MediaType) -> i64 { Tsx => 11, Json => 12, Wasm => 13, - TsBuildInfo => 14, + Css => 14, SourceMap => 15, Unknown => 16, } @@ -217,7 +291,7 @@ mod test { #[test] pub fn module_info_cache_general_use() { - let cache = ModuleInfoCache::new_in_memory("1.0.0"); + let cache = ModuleInfoCache::new_in_memory("1.0.0", Default::default()); let specifier1 = ModuleSpecifier::parse("https://localhost/mod.ts").unwrap(); let specifier2 = diff --git a/cli/cache/parsed_source.rs b/cli/cache/parsed_source.rs index df6e45c35e..7e819ae998 100644 --- a/cli/cache/parsed_source.rs +++ b/cli/cache/parsed_source.rs @@ -5,12 +5,11 @@ use std::sync::Arc; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; -use deno_ast::ParseDiagnostic; use deno_ast::ParsedSource; use deno_core::parking_lot::Mutex; -use deno_graph::CapturingModuleParser; -use deno_graph::DefaultModuleParser; -use deno_graph::ModuleParser; +use deno_graph::CapturingEsParser; +use deno_graph::DefaultEsParser; +use deno_graph::EsParser; use deno_graph::ParseOptions; use deno_graph::ParsedSourceStore; @@ -47,7 +46,7 @@ impl<'a> LazyGraphSourceParser<'a> { } } -#[derive(Default)] +#[derive(Debug, Default)] pub struct ParsedSourceCache { sources: Mutex>, } @@ -58,12 +57,11 @@ impl ParsedSourceCache { module: &deno_graph::JsModule, ) -> Result { let parser = self.as_capturing_parser(); - // this will conditionally parse because it's using a CapturingModuleParser - parser.parse_module(ParseOptions { + // this will conditionally parse because it's using a CapturingEsParser + parser.parse_program(ParseOptions { specifier: &module.specifier, source: module.source.clone(), media_type: module.media_type, - // don't bother enabling because this method is currently only used for vendoring scope_analysis: false, }) } @@ -87,10 +85,9 @@ impl ParsedSourceCache { specifier, source, media_type, - // don't bother enabling because this method is currently only used for emitting scope_analysis: false, }; - DefaultModuleParser.parse_module(options) + DefaultEsParser.parse_program(options) } /// Frees the parsed source from memory. @@ -100,8 +97,8 @@ impl ParsedSourceCache { /// Creates a parser that will reuse a ParsedSource from the store /// if it exists, or else parse. 
- pub fn as_capturing_parser(&self) -> CapturingModuleParser { - CapturingModuleParser::new(None, self) + pub fn as_capturing_parser(&self) -> CapturingEsParser { + CapturingEsParser::new(None, self) } } @@ -150,42 +147,3 @@ impl deno_graph::ParsedSourceStore for ParsedSourceCache { } } } - -pub struct EsmOrCjsChecker { - parsed_source_cache: Arc, -} - -impl EsmOrCjsChecker { - pub fn new(parsed_source_cache: Arc) -> Self { - Self { - parsed_source_cache, - } - } - - pub fn is_esm( - &self, - specifier: &ModuleSpecifier, - source: Arc, - media_type: MediaType, - ) -> Result { - // todo(dsherret): add a file cache here to avoid parsing with swc on each run - let source = match self.parsed_source_cache.get_parsed_source(specifier) { - Some(source) => source.clone(), - None => { - let source = deno_ast::parse_program(deno_ast::ParseParams { - specifier: specifier.clone(), - text: source, - media_type, - capture_tokens: true, // capture because it's used for cjs export analysis - scope_analysis: false, - maybe_syntax: None, - })?; - self - .parsed_source_cache - .set_parsed_source(specifier.clone(), source.clone()); - source - } - }; - Ok(source.is_module()) - } -} diff --git a/cli/emit.rs b/cli/emit.rs index b3f4a4477a..8c4f2091cf 100644 --- a/cli/emit.rs +++ b/cli/emit.rs @@ -3,24 +3,28 @@ use crate::cache::EmitCache; use crate::cache::FastInsecureHasher; use crate::cache::ParsedSourceCache; +use crate::resolver::CjsTracker; +use deno_ast::ModuleKind; use deno_ast::SourceMapOption; use deno_ast::SourceRange; use deno_ast::SourceRanged; use deno_ast::SourceRangedForSpanned; +use deno_ast::TranspileModuleOptions; use deno_ast::TranspileResult; use deno_core::error::AnyError; use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::FutureExt; use deno_core::futures::StreamExt; -use deno_core::ModuleCodeBytes; use deno_core::ModuleSpecifier; use deno_graph::MediaType; use deno_graph::Module; use deno_graph::ModuleGraph; use std::sync::Arc; +#[derive(Debug)] pub struct Emitter { + cjs_tracker: Arc, emit_cache: Arc, parsed_source_cache: Arc, transpile_and_emit_options: @@ -31,6 +35,7 @@ pub struct Emitter { impl Emitter { pub fn new( + cjs_tracker: Arc, emit_cache: Arc, parsed_source_cache: Arc, transpile_options: deno_ast::TranspileOptions, @@ -43,6 +48,7 @@ impl Emitter { hasher.finish() }; Self { + cjs_tracker, emit_cache, parsed_source_cache, transpile_and_emit_options: Arc::new((transpile_options, emit_options)), @@ -60,20 +66,19 @@ impl Emitter { continue; }; - let is_emittable = matches!( - module.media_type, - MediaType::TypeScript - | MediaType::Mts - | MediaType::Cts - | MediaType::Jsx - | MediaType::Tsx - ); - if is_emittable { + if module.media_type.is_emittable() { futures.push( self .emit_parsed_source( &module.specifier, module.media_type, + ModuleKind::from_is_cjs( + self.cjs_tracker.is_cjs_with_known_is_script( + &module.specifier, + module.media_type, + module.is_script, + )?, + ), &module.source, ) .boxed_local(), @@ -92,9 +97,10 @@ impl Emitter { pub fn maybe_cached_emit( &self, specifier: &ModuleSpecifier, + module_kind: deno_ast::ModuleKind, source: &str, - ) -> Option> { - let source_hash = self.get_source_hash(source); + ) -> Option { + let source_hash = self.get_source_hash(module_kind, source); self.emit_cache.get_emit_code(specifier, source_hash) } @@ -102,25 +108,27 @@ impl Emitter { &self, specifier: &ModuleSpecifier, media_type: MediaType, + module_kind: deno_ast::ModuleKind, source: &Arc, - ) -> Result { + ) -> Result { // Note: keep this in sync 
with the sync version below let helper = EmitParsedSourceHelper(self); - match helper.pre_emit_parsed_source(specifier, source) { + match helper.pre_emit_parsed_source(specifier, module_kind, source) { PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::NotCached { source_hash } => { let parsed_source_cache = self.parsed_source_cache.clone(); let transpile_and_emit_options = self.transpile_and_emit_options.clone(); - let transpile_result = deno_core::unsync::spawn_blocking({ + let transpiled_source = deno_core::unsync::spawn_blocking({ let specifier = specifier.clone(); let source = source.clone(); move || -> Result<_, AnyError> { EmitParsedSourceHelper::transpile( &parsed_source_cache, &specifier, - source.clone(), media_type, + module_kind, + source.clone(), &transpile_and_emit_options.0, &transpile_and_emit_options.1, ) @@ -128,11 +136,12 @@ impl Emitter { }) .await .unwrap()?; - Ok(helper.post_emit_parsed_source( + helper.post_emit_parsed_source( specifier, - transpile_result, + &transpiled_source, source_hash, - )) + ); + Ok(transpiled_source) } } } @@ -141,26 +150,29 @@ impl Emitter { &self, specifier: &ModuleSpecifier, media_type: MediaType, + module_kind: deno_ast::ModuleKind, source: &Arc, - ) -> Result { + ) -> Result { // Note: keep this in sync with the async version above let helper = EmitParsedSourceHelper(self); - match helper.pre_emit_parsed_source(specifier, source) { + match helper.pre_emit_parsed_source(specifier, module_kind, source) { PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::NotCached { source_hash } => { - let transpile_result = EmitParsedSourceHelper::transpile( + let transpiled_source = EmitParsedSourceHelper::transpile( &self.parsed_source_cache, specifier, - source.clone(), media_type, + module_kind, + source.clone(), &self.transpile_and_emit_options.0, &self.transpile_and_emit_options.1, )?; - Ok(helper.post_emit_parsed_source( + helper.post_emit_parsed_source( specifier, - transpile_result, + &transpiled_source, source_hash, - )) + ); + Ok(transpiled_source) } } } @@ -169,6 +181,7 @@ impl Emitter { pub async fn load_and_emit_for_hmr( &self, specifier: &ModuleSpecifier, + module_kind: deno_ast::ModuleKind, ) -> Result { let media_type = MediaType::from_specifier(specifier); let source_code = tokio::fs::read_to_string( @@ -191,9 +204,14 @@ impl Emitter { let mut options = self.transpile_and_emit_options.1.clone(); options.source_map = SourceMapOption::None; let transpiled_source = parsed_source - .transpile(&self.transpile_and_emit_options.0, &options)? - .into_source() - .into_string()?; + .transpile( + &self.transpile_and_emit_options.0, + &deno_ast::TranspileModuleOptions { + module_kind: Some(module_kind), + }, + &options, + )? + .into_source(); Ok(transpiled_source.text) } MediaType::JavaScript @@ -204,7 +222,7 @@ impl Emitter { | MediaType::Dcts | MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap | MediaType::Unknown => { // clear this specifier from the parsed source cache as it's now out of date @@ -217,16 +235,17 @@ impl Emitter { /// A hashing function that takes the source code and uses the global emit /// options then generates a string hash which can be stored to /// determine if the cached emit is valid or not. 
- fn get_source_hash(&self, source_text: &str) -> u64 { + fn get_source_hash(&self, module_kind: ModuleKind, source_text: &str) -> u64 { FastInsecureHasher::new_without_deno_version() // stored in the transpile_and_emit_options_hash .write_str(source_text) .write_u64(self.transpile_and_emit_options_hash) + .write_hashable(module_kind) .finish() } } enum PreEmitResult { - Cached(ModuleCodeBytes), + Cached(String), NotCached { source_hash: u64 }, } @@ -237,14 +256,15 @@ impl<'a> EmitParsedSourceHelper<'a> { pub fn pre_emit_parsed_source( &self, specifier: &ModuleSpecifier, + module_kind: deno_ast::ModuleKind, source: &Arc, ) -> PreEmitResult { - let source_hash = self.0.get_source_hash(source); + let source_hash = self.0.get_source_hash(module_kind, source); if let Some(emit_code) = self.0.emit_cache.get_emit_code(specifier, source_hash) { - PreEmitResult::Cached(emit_code.into_boxed_slice().into()) + PreEmitResult::Cached(emit_code) } else { PreEmitResult::NotCached { source_hash } } @@ -253,25 +273,24 @@ impl<'a> EmitParsedSourceHelper<'a> { pub fn transpile( parsed_source_cache: &ParsedSourceCache, specifier: &ModuleSpecifier, - source: Arc, media_type: MediaType, + module_kind: deno_ast::ModuleKind, + source: Arc, transpile_options: &deno_ast::TranspileOptions, emit_options: &deno_ast::EmitOptions, - ) -> Result { + ) -> Result { // nothing else needs the parsed source at this point, so remove from // the cache in order to not transpile owned let parsed_source = parsed_source_cache .remove_or_parse_module(specifier, source, media_type)?; ensure_no_import_assertion(&parsed_source)?; - Ok(parsed_source.transpile(transpile_options, emit_options)?) - } - - pub fn post_emit_parsed_source( - &self, - specifier: &ModuleSpecifier, - transpile_result: TranspileResult, - source_hash: u64, - ) -> ModuleCodeBytes { + let transpile_result = parsed_source.transpile( + transpile_options, + &TranspileModuleOptions { + module_kind: Some(module_kind), + }, + emit_options, + )?; let transpiled_source = match transpile_result { TranspileResult::Owned(source) => source, TranspileResult::Cloned(source) => { @@ -280,12 +299,20 @@ impl<'a> EmitParsedSourceHelper<'a> { } }; debug_assert!(transpiled_source.source_map.is_none()); + Ok(transpiled_source.text) + } + + pub fn post_emit_parsed_source( + &self, + specifier: &ModuleSpecifier, + transpiled_source: &str, + source_hash: u64, + ) { self.0.emit_cache.set_emit_code( specifier, source_hash, - &transpiled_source.source, + transpiled_source.as_bytes(), ); - transpiled_source.source.into_boxed_slice().into() } } @@ -317,7 +344,7 @@ fn ensure_no_import_assertion( deno_core::anyhow::anyhow!("{}", msg) } - let Some(module) = parsed_source.program_ref().as_module() else { + let deno_ast::ProgramRef::Module(module) = parsed_source.program_ref() else { return Ok(()); }; diff --git a/cli/errors.rs b/cli/errors.rs index 25b3fc3324..b1808f7339 100644 --- a/cli/errors.rs +++ b/cli/errors.rs @@ -88,6 +88,10 @@ fn get_resolution_error_class(err: &ResolutionError) -> &'static str { } } +fn get_try_from_int_error_class(_: &std::num::TryFromIntError) -> &'static str { + "TypeError" +} + pub fn get_error_class_name(e: &AnyError) -> &'static str { deno_runtime::errors::get_error_class_name(e) .or_else(|| { @@ -106,5 +110,9 @@ pub fn get_error_class_name(e: &AnyError) -> &'static str { e.downcast_ref::() .map(get_resolution_error_class) }) + .or_else(|| { + e.downcast_ref::() + .map(get_try_from_int_error_class) + }) .unwrap_or("Error") } diff --git a/cli/factory.rs 
b/cli/factory.rs index 25f3551102..4a36c75ba2 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -11,10 +11,10 @@ use crate::args::StorageKeyResolver; use crate::args::TsConfigType; use crate::cache::Caches; use crate::cache::CodeCache; +use crate::cache::DenoCacheEnvFsAdapter; use crate::cache::DenoDir; use crate::cache::DenoDirProvider; use crate::cache::EmitCache; -use crate::cache::EsmOrCjsChecker; use crate::cache::GlobalHttpCache; use crate::cache::HttpCache; use crate::cache::LocalHttpCache; @@ -33,12 +33,16 @@ use crate::module_loader::ModuleLoadPreparer; use crate::node::CliCjsCodeAnalyzer; use crate::node::CliNodeCodeTranslator; use crate::npm::create_cli_npm_resolver; +use crate::npm::create_in_npm_pkg_checker; use crate::npm::CliByonmNpmResolverCreateOptions; +use crate::npm::CliManagedInNpmPkgCheckerCreateOptions; +use crate::npm::CliManagedNpmResolverCreateOptions; use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolverCreateOptions; -use crate::npm::CliNpmResolverManagedCreateOptions; use crate::npm::CliNpmResolverManagedSnapshotOption; -use crate::resolver::CjsResolutionStore; +use crate::npm::CreateInNpmPkgCheckerOptions; +use crate::resolver::CjsTracker; +use crate::resolver::CjsTrackerOptions; use crate::resolver::CliDenoResolverFs; use crate::resolver::CliGraphResolver; use crate::resolver::CliGraphResolverOptions; @@ -51,6 +55,7 @@ use crate::tools::check::TypeChecker; use crate::tools::coverage::CoverageCollector; use crate::tools::lint::LintRuleProvider; use crate::tools::run::hmr::HmrRunner; +use crate::tsc::TypeCheckingCjsTracker; use crate::util::file_watcher::WatcherCommunicator; use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::progress_bar::ProgressBar; @@ -59,6 +64,7 @@ use crate::worker::CliMainWorkerFactory; use crate::worker::CliMainWorkerOptions; use std::path::PathBuf; +use deno_cache_dir::npm::NpmCacheDir; use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::WorkspaceResolver; use deno_core::error::AnyError; @@ -68,6 +74,7 @@ use deno_core::FeatureChecker; use deno_runtime::deno_fs; use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_runtime::deno_node::NodeResolver; +use deno_runtime::deno_node::PackageJsonResolver; use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_tls::rustls::RootCertStore; @@ -77,6 +84,7 @@ use deno_runtime::inspector_server::InspectorServer; use deno_runtime::permissions::RuntimePermissionDescriptorParser; use log::warn; use node_resolver::analyze::NodeCodeTranslator; +use node_resolver::InNpmPackageChecker; use once_cell::sync::OnceCell; use std::future::Future; use std::sync::Arc; @@ -164,39 +172,41 @@ impl Deferred { #[derive(Default)] struct CliFactoryServices { - cli_options: Deferred>, + blob_store: Deferred>, caches: Deferred>, + cjs_tracker: Deferred>, + cli_node_resolver: Deferred>, + cli_options: Deferred>, + code_cache: Deferred>, + emit_cache: Deferred>, + emitter: Deferred>, + feature_checker: Deferred>, file_fetcher: Deferred>, + fs: Deferred>, global_http_cache: Deferred>, http_cache: Deferred>, http_client_provider: Deferred>, - emit_cache: Deferred>, - emitter: Deferred>, - esm_or_cjs_checker: Deferred>, - fs: Deferred>, + in_npm_pkg_checker: Deferred>, main_graph_container: Deferred>, - maybe_inspector_server: Deferred>>, - root_cert_store_provider: Deferred>, - blob_store: Deferred>, - module_info_cache: Deferred>, - parsed_source_cache: Deferred>, - resolver: 
Deferred>, maybe_file_watcher_reporter: Deferred>, + maybe_inspector_server: Deferred>>, module_graph_builder: Deferred>, module_graph_creator: Deferred>, + module_info_cache: Deferred>, module_load_preparer: Deferred>, node_code_translator: Deferred>, node_resolver: Deferred>, + npm_cache_dir: Deferred>, npm_resolver: Deferred>, + parsed_source_cache: Deferred>, permission_desc_parser: Deferred>, + pkg_json_resolver: Deferred>, + resolver: Deferred>, + root_cert_store_provider: Deferred>, root_permissions_container: Deferred, sloppy_imports_resolver: Deferred>>, text_only_progress_bar: Deferred, type_checker: Deferred>, - cjs_resolutions: Deferred>, - cli_node_resolver: Deferred>, - feature_checker: Deferred>, - code_cache: Deferred>, workspace_resolver: Deferred>, } @@ -300,12 +310,6 @@ impl CliFactory { .get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly)) } - pub fn esm_or_cjs_checker(&self) -> &Arc { - self.services.esm_or_cjs_checker.get_or_init(|| { - Arc::new(EsmOrCjsChecker::new(self.parsed_source_cache().clone())) - }) - } - pub fn global_http_cache(&self) -> Result<&Arc, AnyError> { self.services.global_http_cache.get_or_try_init(|| { Ok(Arc::new(GlobalHttpCache::new( @@ -359,56 +363,112 @@ impl CliFactory { self.services.fs.get_or_init(|| Arc::new(deno_fs::RealFs)) } + pub fn in_npm_pkg_checker( + &self, + ) -> Result<&Arc, AnyError> { + self.services.in_npm_pkg_checker.get_or_try_init(|| { + let cli_options = self.cli_options()?; + let options = if cli_options.use_byonm() { + CreateInNpmPkgCheckerOptions::Byonm + } else { + CreateInNpmPkgCheckerOptions::Managed( + CliManagedInNpmPkgCheckerCreateOptions { + root_cache_dir_url: self.npm_cache_dir()?.root_dir_url(), + maybe_node_modules_path: cli_options + .node_modules_dir_path() + .map(|p| p.as_path()), + }, + ) + }; + Ok(create_in_npm_pkg_checker(options)) + }) + } + + pub fn npm_cache_dir(&self) -> Result<&Arc, AnyError> { + self.services.npm_cache_dir.get_or_try_init(|| { + let fs = self.fs(); + let global_path = self.deno_dir()?.npm_folder_path(); + let cli_options = self.cli_options()?; + Ok(Arc::new(NpmCacheDir::new( + &DenoCacheEnvFsAdapter(fs.as_ref()), + global_path, + cli_options.npmrc().get_all_known_registries_urls(), + ))) + }) + } + pub async fn npm_resolver( &self, ) -> Result<&Arc, AnyError> { self .services .npm_resolver - .get_or_try_init_async(async { - let fs = self.fs(); - let cli_options = self.cli_options()?; - // For `deno install` we want to force the managed resolver so it can set up `node_modules/` directory. - create_cli_npm_resolver(if cli_options.use_byonm() && !matches!(cli_options.sub_command(), DenoSubcommand::Install(_) | DenoSubcommand::Add(_) | DenoSubcommand::Remove(_)) { - CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { - fs: CliDenoResolverFs(fs.clone()), - root_node_modules_dir: Some(match cli_options.node_modules_dir_path() { - Some(node_modules_path) => node_modules_path.to_path_buf(), - // path needs to be canonicalized for node resolution - // (node_modules_dir_path above is already canonicalized) - None => canonicalize_path_maybe_not_exists(cli_options.initial_cwd())? - .join("node_modules"), - }), - }) - } else { - CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions { - snapshot: match cli_options.resolve_npm_resolution_snapshot()? 
{ - Some(snapshot) => { - CliNpmResolverManagedSnapshotOption::Specified(Some(snapshot)) - } - None => match cli_options.maybe_lockfile() { - Some(lockfile) => { - CliNpmResolverManagedSnapshotOption::ResolveFromLockfile( - lockfile.clone(), - ) - } - None => CliNpmResolverManagedSnapshotOption::Specified(None), + .get_or_try_init_async( + async { + let fs = self.fs(); + let cli_options = self.cli_options()?; + create_cli_npm_resolver(if cli_options.use_byonm() { + CliNpmResolverCreateOptions::Byonm( + CliByonmNpmResolverCreateOptions { + fs: CliDenoResolverFs(fs.clone()), + pkg_json_resolver: self.pkg_json_resolver().clone(), + root_node_modules_dir: Some( + match cli_options.node_modules_dir_path() { + Some(node_modules_path) => node_modules_path.to_path_buf(), + // path needs to be canonicalized for node resolution + // (node_modules_dir_path above is already canonicalized) + None => canonicalize_path_maybe_not_exists( + cli_options.initial_cwd(), + )? + .join("node_modules"), + }, + ), }, - }, - maybe_lockfile: cli_options.maybe_lockfile().cloned(), - fs: fs.clone(), - http_client_provider: self.http_client_provider().clone(), - npm_global_cache_dir: self.deno_dir()?.npm_folder_path(), - cache_setting: cli_options.cache_setting(), - text_only_progress_bar: self.text_only_progress_bar().clone(), - maybe_node_modules_path: cli_options.node_modules_dir_path().cloned(), - npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::from_workspace(cli_options.workspace())), - npm_system_info: cli_options.npm_system_info(), - npmrc: cli_options.npmrc().clone(), - lifecycle_scripts: cli_options.lifecycle_scripts_config(), + ) + } else { + CliNpmResolverCreateOptions::Managed( + CliManagedNpmResolverCreateOptions { + snapshot: match cli_options.resolve_npm_resolution_snapshot()? 
{ + Some(snapshot) => { + CliNpmResolverManagedSnapshotOption::Specified(Some( + snapshot, + )) + } + None => match cli_options.maybe_lockfile() { + Some(lockfile) => { + CliNpmResolverManagedSnapshotOption::ResolveFromLockfile( + lockfile.clone(), + ) + } + None => { + CliNpmResolverManagedSnapshotOption::Specified(None) + } + }, + }, + maybe_lockfile: cli_options.maybe_lockfile().cloned(), + fs: fs.clone(), + http_client_provider: self.http_client_provider().clone(), + npm_cache_dir: self.npm_cache_dir()?.clone(), + cache_setting: cli_options.cache_setting(), + text_only_progress_bar: self.text_only_progress_bar().clone(), + maybe_node_modules_path: cli_options + .node_modules_dir_path() + .cloned(), + npm_install_deps_provider: Arc::new( + NpmInstallDepsProvider::from_workspace( + cli_options.workspace(), + ), + ), + npm_system_info: cli_options.npm_system_info(), + npmrc: cli_options.npmrc().clone(), + lifecycle_scripts: cli_options.lifecycle_scripts_config(), + }, + ) }) - }).await - }.boxed_local()) + .await + } + .boxed_local(), + ) .await } @@ -513,6 +573,7 @@ impl CliFactory { self.services.module_info_cache.get_or_try_init(|| { Ok(Arc::new(ModuleInfoCache::new( self.caches()?.dep_analysis_db(), + self.parsed_source_cache().clone(), ))) }) } @@ -541,6 +602,7 @@ impl CliFactory { ts_config_result.ts_config, )?; Ok(Arc::new(Emitter::new( + self.cjs_tracker()?.clone(), self.emit_cache()?.clone(), self.parsed_source_cache().clone(), transpile_options, @@ -564,7 +626,9 @@ impl CliFactory { async { Ok(Arc::new(NodeResolver::new( DenoFsNodeResolverEnv::new(self.fs().clone()), + self.in_npm_pkg_checker()?.clone(), self.npm_resolver().await?.clone().into_npm_resolver(), + self.pkg_json_resolver().clone(), ))) } .boxed_local(), @@ -582,24 +646,35 @@ impl CliFactory { let caches = self.caches()?; let node_analysis_cache = NodeAnalysisCache::new(caches.node_analysis_db()); - let node_resolver = self.cli_node_resolver().await?.clone(); + let node_resolver = self.node_resolver().await?.clone(); let cjs_esm_analyzer = CliCjsCodeAnalyzer::new( node_analysis_cache, + self.cjs_tracker()?.clone(), self.fs().clone(), - node_resolver, Some(self.parsed_source_cache().clone()), + self.cli_options()?.is_npm_main(), ); Ok(Arc::new(NodeCodeTranslator::new( cjs_esm_analyzer, DenoFsNodeResolverEnv::new(self.fs().clone()), - self.node_resolver().await?.clone(), + self.in_npm_pkg_checker()?.clone(), + node_resolver, self.npm_resolver().await?.clone().into_npm_resolver(), + self.pkg_json_resolver().clone(), ))) }) .await } + pub fn pkg_json_resolver(&self) -> &Arc { + self.services.pkg_json_resolver.get_or_init(|| { + Arc::new(PackageJsonResolver::new(DenoFsNodeResolverEnv::new( + self.fs().clone(), + ))) + }) + } + pub async fn type_checker(&self) -> Result<&Arc, AnyError> { self .services @@ -608,6 +683,10 @@ impl CliFactory { let cli_options = self.cli_options()?; Ok(Arc::new(TypeChecker::new( self.caches()?.clone(), + Arc::new(TypeCheckingCjsTracker::new( + self.cjs_tracker()?.clone(), + self.module_info_cache()?.clone(), + )), cli_options.clone(), self.module_graph_builder().await?.clone(), self.node_resolver().await?.clone(), @@ -626,19 +705,18 @@ impl CliFactory { .get_or_try_init_async(async { let cli_options = self.cli_options()?; Ok(Arc::new(ModuleGraphBuilder::new( - cli_options.clone(), self.caches()?.clone(), - self.esm_or_cjs_checker().clone(), + cli_options.clone(), + self.file_fetcher()?.clone(), self.fs().clone(), - self.resolver().await?.clone(), - self.cli_node_resolver().await?.clone(), - 
self.npm_resolver().await?.clone(), - self.module_info_cache()?.clone(), - self.parsed_source_cache().clone(), + self.global_http_cache()?.clone(), + self.in_npm_pkg_checker()?.clone(), cli_options.maybe_lockfile().cloned(), self.maybe_file_watcher_reporter().clone(), - self.file_fetcher()?.clone(), - self.global_http_cache()?.clone(), + self.module_info_cache()?.clone(), + self.npm_resolver().await?.clone(), + self.parsed_source_cache().clone(), + self.resolver().await?.clone(), self.root_permissions_container()?.clone(), ))) }) @@ -710,8 +788,17 @@ impl CliFactory { .await } - pub fn cjs_resolutions(&self) -> &Arc { - self.services.cjs_resolutions.get_or_init(Default::default) + pub fn cjs_tracker(&self) -> Result<&Arc, AnyError> { + self.services.cjs_tracker.get_or_try_init(|| { + let options = self.cli_options()?; + Ok(Arc::new(CjsTracker::new( + self.in_npm_pkg_checker()?.clone(), + self.pkg_json_resolver().clone(), + CjsTrackerOptions { + unstable_detect_cjs: options.unstable_detect_cjs(), + }, + ))) + }) } pub async fn cli_node_resolver( @@ -722,8 +809,9 @@ impl CliFactory { .cli_node_resolver .get_or_try_init_async(async { Ok(Arc::new(CliNodeResolver::new( - self.cjs_resolutions().clone(), + self.cjs_tracker()?.clone(), self.fs().clone(), + self.in_npm_pkg_checker()?.clone(), self.node_resolver().await?.clone(), self.npm_resolver().await?.clone(), ))) @@ -761,7 +849,9 @@ impl CliFactory { ) -> Result { let cli_options = self.cli_options()?; Ok(DenoCompileBinaryWriter::new( + self.cjs_tracker()?, self.deno_dir()?, + self.emitter()?, self.file_fetcher()?, self.http_client_provider(), self.npm_resolver().await?.as_ref(), @@ -790,53 +880,60 @@ impl CliFactory { &self, ) -> Result { let cli_options = self.cli_options()?; + let fs = self.fs(); let node_resolver = self.node_resolver().await?; let npm_resolver = self.npm_resolver().await?; - let fs = self.fs(); let cli_node_resolver = self.cli_node_resolver().await?; let cli_npm_resolver = self.npm_resolver().await?.clone(); + let in_npm_pkg_checker = self.in_npm_pkg_checker()?; let maybe_file_watcher_communicator = if cli_options.has_hmr() { Some(self.watcher_communicator.clone().unwrap()) } else { None }; + let node_code_translator = self.node_code_translator().await?; + let cjs_tracker = self.cjs_tracker()?.clone(); + let pkg_json_resolver = self.pkg_json_resolver().clone(); Ok(CliMainWorkerFactory::new( self.blob_store().clone(), - self.cjs_resolutions().clone(), if cli_options.code_cache_enabled() { Some(self.code_cache()?.clone()) } else { None }, self.feature_checker()?.clone(), - self.fs().clone(), + fs.clone(), maybe_file_watcher_communicator, self.maybe_inspector_server()?.clone(), cli_options.maybe_lockfile().cloned(), Box::new(CliModuleLoaderFactory::new( cli_options, + cjs_tracker, if cli_options.code_cache_enabled() { Some(self.code_cache()?.clone()) } else { None }, self.emitter()?.clone(), + fs.clone(), + in_npm_pkg_checker.clone(), self.main_module_graph_container().await?.clone(), self.module_load_preparer().await?.clone(), + node_code_translator.clone(), cli_node_resolver.clone(), cli_npm_resolver.clone(), NpmModuleLoader::new( - self.cjs_resolutions().clone(), - self.node_code_translator().await?.clone(), + self.cjs_tracker()?.clone(), fs.clone(), - cli_node_resolver.clone(), + node_code_translator.clone(), ), self.parsed_source_cache().clone(), self.resolver().await?.clone(), )), node_resolver.clone(), npm_resolver.clone(), + pkg_json_resolver, self.root_cert_store_provider().clone(), 
self.root_permissions_container()?.clone(), StorageKeyResolver::from_options(cli_options), @@ -852,8 +949,10 @@ impl CliFactory { let create_hmr_runner = if cli_options.has_hmr() { let watcher_communicator = self.watcher_communicator.clone().unwrap(); let emitter = self.emitter()?.clone(); + let cjs_tracker = self.cjs_tracker()?.clone(); let fn_: crate::worker::CreateHmrRunnerCb = Box::new(move |session| { Box::new(HmrRunner::new( + cjs_tracker.clone(), emitter.clone(), session, watcher_communicator.clone(), @@ -890,7 +989,6 @@ impl CliFactory { inspect_wait: cli_options.inspect_wait().is_some(), strace_ops: cli_options.strace_ops().clone(), is_inspecting: cli_options.is_inspecting(), - is_npm_main: cli_options.is_npm_main(), location: cli_options.location_flag().clone(), // if the user ran a binary command, we'll need to set process.argv[0] // to be the name of the binary command instead of deno @@ -908,7 +1006,6 @@ impl CliFactory { node_ipc: cli_options.node_ipc_fd(), serve_port: cli_options.serve_port(), serve_host: cli_options.serve_host(), - unstable_detect_cjs: cli_options.unstable_detect_cjs(), }) } } diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs index e92aca5420..95d778f0bb 100644 --- a/cli/file_fetcher.rs +++ b/cli/file_fetcher.rs @@ -24,6 +24,7 @@ use deno_graph::source::LoaderChecksum; use deno_path_util::url_to_file_path; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_web::BlobStore; +use http::header; use log::debug; use std::borrow::Cow; use std::collections::HashMap; @@ -181,6 +182,7 @@ pub enum FetchPermissionsOptionRef<'a> { pub struct FetchOptions<'a> { pub specifier: &'a ModuleSpecifier, pub permissions: FetchPermissionsOptionRef<'a>, + pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>, pub maybe_accept: Option<&'a str>, pub maybe_cache_setting: Option<&'a CacheSetting>, } @@ -350,6 +352,7 @@ impl FileFetcher { maybe_accept: Option<&str>, cache_setting: &CacheSetting, maybe_checksum: Option<&LoaderChecksum>, + maybe_auth: Option<(header::HeaderName, header::HeaderValue)>, ) -> Result<FileOrRedirect, AnyError> { debug!( "FileFetcher::fetch_remote_no_follow - specifier: {}", @@ -442,6 +445,7 @@ impl FileFetcher { .as_ref() .map(|(_, etag)| etag.clone()), maybe_auth_token: maybe_auth_token.clone(), + maybe_auth: maybe_auth.clone(), maybe_progress_guard: maybe_progress_guard.as_ref(), }) .await?
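Review note: the `maybe_auth` plumbing added above threads an explicit `(HeaderName, HeaderValue)` pair through `FetchOptions`, and the redirect loop in the hunks below stops forwarding that header as soon as a redirect crosses to a different origin. A minimal sketch of that same-origin rule, assuming the `url` crate as a stand-in for `ModuleSpecifier` (the helper name here is hypothetical, not part of this PR):

```rust
use url::Url; // assumes url = "2" in Cargo.toml

/// Hypothetical helper: decide whether an explicit auth header may still be
/// sent after a redirect from `current` to `next`.
fn auth_for_redirect<T>(current: &Url, next: &Url, auth: Option<T>) -> Option<T> {
    // `Url::origin()` covers scheme + host + port, so any cross-origin hop
    // (different host, different port, or an https -> http downgrade)
    // drops the credentials.
    if next.origin() == current.origin() {
        auth
    } else {
        None
    }
}

fn main() {
    let start = Url::parse("https://registry.example/pkg.ts").unwrap();
    let same = Url::parse("https://registry.example/v2/pkg.ts").unwrap();
    let other = Url::parse("https://cdn.example/pkg.ts").unwrap();
    assert!(auth_for_redirect(&start, &same, Some("token")).is_some());
    assert!(auth_for_redirect(&start, &other, Some("token")).is_none());
    println!("cross-origin redirects drop the auth header");
}
```

Comparing whole origins rather than just hosts is what keeps a scheme downgrade or a port change from leaking the header; it matches the `redirect_specifier.origin() != specifier.origin()` check in the diff below.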
@@ -538,7 +542,18 @@ impl FileFetcher { specifier: &ModuleSpecifier, ) -> Result<File, AnyError> { self - .fetch_inner(specifier, FetchPermissionsOptionRef::AllowAll) + .fetch_inner(specifier, None, FetchPermissionsOptionRef::AllowAll) .await + } + + #[inline(always)] + pub async fn fetch_bypass_permissions_with_maybe_auth( + &self, + specifier: &ModuleSpecifier, + maybe_auth: Option<(header::HeaderName, header::HeaderValue)>, + ) -> Result<File, AnyError> { + self + .fetch_inner(specifier, maybe_auth, FetchPermissionsOptionRef::AllowAll) .await } @@ -552,6 +567,7 @@ impl FileFetcher { self .fetch_inner( specifier, + None, FetchPermissionsOptionRef::StaticContainer(permissions), ) .await @@ -560,12 +576,14 @@ impl FileFetcher { async fn fetch_inner( &self, specifier: &ModuleSpecifier, + maybe_auth: Option<(header::HeaderName, header::HeaderValue)>, permissions: FetchPermissionsOptionRef<'_>, ) -> Result<File, AnyError> { self .fetch_with_options(FetchOptions { specifier, permissions, + maybe_auth, maybe_accept: None, maybe_cache_setting: None, }) @@ -585,12 +603,14 @@ impl FileFetcher { max_redirect: usize, ) -> Result<File, AnyError> { let mut specifier = Cow::Borrowed(options.specifier); + let mut maybe_auth = options.maybe_auth.clone(); for _ in 0..=max_redirect { match self .fetch_no_follow_with_options(FetchNoFollowOptions { fetch_options: FetchOptions { specifier: &specifier, permissions: options.permissions, + maybe_auth: maybe_auth.clone(), maybe_accept: options.maybe_accept, maybe_cache_setting: options.maybe_cache_setting, }, @@ -602,6 +622,10 @@ impl FileFetcher { return Ok(file); } FileOrRedirect::Redirect(redirect_specifier) => { + // If we were redirected to another origin, don't send the auth header anymore. + if redirect_specifier.origin() != specifier.origin() { + maybe_auth = None; + } specifier = Cow::Owned(redirect_specifier); } } @@ -666,6 +690,7 @@ impl FileFetcher { options.maybe_accept, options.maybe_cache_setting.unwrap_or(&self.cache_setting), maybe_checksum, + options.maybe_auth, ) .await } @@ -756,6 +781,7 @@ mod tests { FetchOptions { specifier, permissions: FetchPermissionsOptionRef::AllowAll, + maybe_auth: None, maybe_accept: None, maybe_cache_setting: Some(&file_fetcher.cache_setting), }, @@ -1255,6 +1281,7 @@ mod tests { FetchOptions { specifier: &specifier, permissions: FetchPermissionsOptionRef::AllowAll, + maybe_auth: None, maybe_accept: None, maybe_cache_setting: Some(&file_fetcher.cache_setting), }, @@ -1268,6 +1295,7 @@ mod tests { FetchOptions { specifier: &specifier, permissions: FetchPermissionsOptionRef::AllowAll, + maybe_auth: None, maybe_accept: None, maybe_cache_setting: Some(&file_fetcher.cache_setting), }, diff --git a/cli/graph_util.rs b/cli/graph_util.rs index e67ae7821b..46257cf785 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -6,7 +6,6 @@ use crate::args::CliLockfile; use crate::args::CliOptions; use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; use crate::cache; -use crate::cache::EsmOrCjsChecker; use crate::cache::GlobalHttpCache; use crate::cache::ModuleInfoCache; use crate::cache::ParsedSourceCache; @@ -15,7 +14,6 @@ use crate::errors::get_error_class_name; use crate::file_fetcher::FileFetcher; use crate::npm::CliNpmResolver; use crate::resolver::CliGraphResolver; -use crate::resolver::CliNodeResolver; use crate::resolver::CliSloppyImportsResolver; use crate::resolver::SloppyImportsCachedFs; use crate::tools::check; @@ -50,6 +48,7 @@ use deno_runtime::deno_permissions::PermissionsContainer; use deno_semver::jsr::JsrDepPackageReq; use deno_semver::package::PackageNv; use
import_map::ImportMapError; +use node_resolver::InNpmPackageChecker; use std::collections::HashSet; use std::error::Error; use std::ops::Deref; @@ -379,54 +378,51 @@ pub struct BuildFastCheckGraphOptions<'a> { } pub struct ModuleGraphBuilder { - options: Arc, caches: Arc, - esm_or_cjs_checker: Arc, + cli_options: Arc, + file_fetcher: Arc, fs: Arc, - resolver: Arc, - node_resolver: Arc, - npm_resolver: Arc, - module_info_cache: Arc, - parsed_source_cache: Arc, + global_http_cache: Arc, + in_npm_pkg_checker: Arc, lockfile: Option>, maybe_file_watcher_reporter: Option, - file_fetcher: Arc, - global_http_cache: Arc, + module_info_cache: Arc, + npm_resolver: Arc, + parsed_source_cache: Arc, + resolver: Arc, root_permissions_container: PermissionsContainer, } impl ModuleGraphBuilder { #[allow(clippy::too_many_arguments)] pub fn new( - options: Arc, caches: Arc, - esm_or_cjs_checker: Arc, + cli_options: Arc, + file_fetcher: Arc, fs: Arc, - resolver: Arc, - node_resolver: Arc, - npm_resolver: Arc, - module_info_cache: Arc, - parsed_source_cache: Arc, + global_http_cache: Arc, + in_npm_pkg_checker: Arc, lockfile: Option>, maybe_file_watcher_reporter: Option, - file_fetcher: Arc, - global_http_cache: Arc, + module_info_cache: Arc, + npm_resolver: Arc, + parsed_source_cache: Arc, + resolver: Arc, root_permissions_container: PermissionsContainer, ) -> Self { Self { - options, caches, - esm_or_cjs_checker, + cli_options, + file_fetcher, fs, - resolver, - node_resolver, - npm_resolver, - module_info_cache, - parsed_source_cache, + global_http_cache, + in_npm_pkg_checker, lockfile, maybe_file_watcher_reporter, - file_fetcher, - global_http_cache, + module_info_cache, + npm_resolver, + parsed_source_cache, + resolver, root_permissions_container, } } @@ -512,13 +508,11 @@ impl ModuleGraphBuilder { } let maybe_imports = if options.graph_kind.include_types() { - self.options.to_compiler_option_types()? + self.cli_options.to_compiler_option_types()? } else { Vec::new() }; - let analyzer = self - .module_info_cache - .as_module_analyzer(&self.parsed_source_cache); + let analyzer = self.module_info_cache.as_module_analyzer(); let mut loader = match options.loader { Some(loader) => MutLoaderRef::Borrowed(loader), None => MutLoaderRef::Owned(self.create_graph_loader()), @@ -566,7 +560,7 @@ impl ModuleGraphBuilder { // ensure an "npm install" is done if the user has explicitly // opted into using a node_modules directory if self - .options + .cli_options .node_modules_dir()? 
.map(|m| m.uses_node_modules_dir()) .unwrap_or(false) @@ -677,10 +671,10 @@ impl ModuleGraphBuilder { graph.build_fast_check_type_graph( deno_graph::BuildFastCheckTypeGraphOptions { - jsr_url_provider: &CliJsrUrlProvider, + es_parser: Some(&parser), fast_check_cache: fast_check_cache.as_ref().map(|c| c as _), fast_check_dts: false, - module_parser: Some(&parser), + jsr_url_provider: &CliJsrUrlProvider, resolver: Some(graph_resolver), npm_resolver: Some(&graph_npm_resolver), workspace_fast_check: options.workspace_fast_check, @@ -699,20 +693,18 @@ impl ModuleGraphBuilder { permissions: PermissionsContainer, ) -> cache::FetchCacher { cache::FetchCacher::new( - self.esm_or_cjs_checker.clone(), self.file_fetcher.clone(), + self.fs.clone(), self.global_http_cache.clone(), - self.node_resolver.clone(), - self.npm_resolver.clone(), + self.in_npm_pkg_checker.clone(), self.module_info_cache.clone(), cache::FetchCacherOptions { - file_header_overrides: self.options.resolve_file_header_overrides(), + file_header_overrides: self.cli_options.resolve_file_header_overrides(), permissions, is_deno_publish: matches!( - self.options.sub_command(), + self.cli_options.sub_command(), crate::args::DenoSubcommand::Publish { .. } ), - unstable_detect_cjs: self.options.unstable_detect_cjs(), }, ) } @@ -737,12 +729,12 @@ impl ModuleGraphBuilder { &self.fs, roots, GraphValidOptions { - kind: if self.options.type_check_mode().is_true() { + kind: if self.cli_options.type_check_mode().is_true() { GraphKind::All } else { GraphKind::CodeOnly }, - check_js: self.options.check_js(), + check_js: self.cli_options.check_js(), exit_integrity_errors: true, }, ) @@ -1009,7 +1001,11 @@ impl deno_graph::source::Reporter for FileWatcherReporter { ) { let mut file_paths = self.file_paths.lock(); if specifier.scheme() == "file" { - file_paths.push(specifier.to_file_path().unwrap()); + // Don't trust that the path is a valid path at this point: + // https://github.com/denoland/deno/issues/26209. 
+ if let Ok(file_path) = specifier.to_file_path() { + file_paths.push(file_path); + } } if modules_done == modules_total { diff --git a/cli/http_util.rs b/cli/http_util.rs index 9c9ae9e413..4b17936d68 100644 --- a/cli/http_util.rs +++ b/cli/http_util.rs @@ -19,6 +19,7 @@ use deno_runtime::deno_fetch; use deno_runtime::deno_fetch::create_http_client; use deno_runtime::deno_fetch::CreateHttpClientOptions; use deno_runtime::deno_tls::RootCertStoreProvider; +use http::header; use http::header::HeaderName; use http::header::HeaderValue; use http::header::ACCEPT; @@ -204,6 +205,7 @@ pub struct FetchOnceArgs<'a> { pub maybe_accept: Option, pub maybe_etag: Option, pub maybe_auth_token: Option, + pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>, pub maybe_progress_guard: Option<&'a UpdateGuard>, } @@ -382,6 +384,8 @@ impl HttpClient { request .headers_mut() .insert(AUTHORIZATION, authorization_val); + } else if let Some((header, value)) = args.maybe_auth { + request.headers_mut().insert(header, value); } if let Some(accept) = args.maybe_accept { let accepts_val = HeaderValue::from_str(&accept)?; @@ -792,6 +796,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -818,6 +823,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -845,6 +851,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -866,6 +873,7 @@ mod test { maybe_etag: Some("33a64df551425fcc55e".to_string()), maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; assert_eq!(res.unwrap(), FetchOnceResult::NotModified); @@ -885,6 +893,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -914,6 +923,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, _)) = result { @@ -939,6 +949,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Redirect(url, _)) = result { @@ -974,6 +985,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -1021,6 +1033,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; @@ -1083,6 +1096,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; @@ -1136,6 +1150,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -1177,6 +1192,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -1199,6 +1215,7 @@ mod test { maybe_etag: Some("33a64df551425fcc55e".to_string()), maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; assert_eq!(res.unwrap(), FetchOnceResult::NotModified); @@ 
-1233,6 +1250,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -1262,6 +1280,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; assert!(result.is_err()); @@ -1283,6 +1302,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; @@ -1306,6 +1326,7 @@ mod test { maybe_etag: None, maybe_auth_token: None, maybe_progress_guard: None, + maybe_auth: None, }) .await; diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index 8c2e8bb1dc..98215855c9 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -12,13 +12,14 @@ use super::urls::url_to_uri; use crate::args::jsr_url; use crate::lsp::search::PackageSearchApi; use crate::tools::lint::CliLinter; +use crate::util::path::relative_specifier; use deno_config::workspace::MappedResolution; +use deno_graph::source::ResolutionMode; use deno_lint::diagnostic::LintDiagnosticRange; use deno_ast::SourceRange; use deno_ast::SourceRangedForSpanned; use deno_ast::SourceTextInfo; -use deno_core::anyhow::anyhow; use deno_core::error::custom_error; use deno_core::error::AnyError; use deno_core::serde::Deserialize; @@ -37,9 +38,9 @@ use deno_semver::package::PackageReq; use deno_semver::package::PackageReqReference; use deno_semver::Version; use import_map::ImportMap; -use node_resolver::NpmResolver; use once_cell::sync::Lazy; use regex::Regex; +use std::borrow::Cow; use std::cmp::Ordering; use std::collections::HashMap; use std::collections::HashSet; @@ -229,6 +230,7 @@ pub struct TsResponseImportMapper<'a> { documents: &'a Documents, maybe_import_map: Option<&'a ImportMap>, resolver: &'a LspResolver, + tsc_specifier_map: &'a tsc::TscSpecifierMap, file_referrer: ModuleSpecifier, } @@ -237,12 +239,14 @@ impl<'a> TsResponseImportMapper<'a> { documents: &'a Documents, maybe_import_map: Option<&'a ImportMap>, resolver: &'a LspResolver, + tsc_specifier_map: &'a tsc::TscSpecifierMap, file_referrer: &ModuleSpecifier, ) -> Self { Self { documents, maybe_import_map, resolver, + tsc_specifier_map, file_referrer: file_referrer.clone(), } } @@ -336,7 +340,12 @@ impl<'a> TsResponseImportMapper<'a> { .resolver .maybe_managed_npm_resolver(Some(&self.file_referrer)) { - if npm_resolver.in_npm_package(specifier) { + let in_npm_pkg = self + .resolver + .maybe_node_resolver(Some(&self.file_referrer)) + .map(|n| n.in_npm_package(specifier)) + .unwrap_or(false); + if in_npm_pkg { if let Ok(Some(pkg_id)) = npm_resolver.resolve_pkg_id_from_specifier(specifier) { @@ -383,6 +392,11 @@ impl<'a> TsResponseImportMapper<'a> { } } } + } else if let Some(dep_name) = self + .resolver + .file_url_to_package_json_dep(specifier, Some(&self.file_referrer)) + { + return Some(dep_name); } // check if the import map has this specifier @@ -453,19 +467,36 @@ impl<'a> TsResponseImportMapper<'a> { specifier: &str, referrer: &ModuleSpecifier, ) -> Option { - if let Ok(specifier) = referrer.join(specifier) { - if let Some(specifier) = self.check_specifier(&specifier, referrer) { - return Some(specifier); - } - } - let specifier = specifier.strip_suffix(".js").unwrap_or(specifier); - for ext in SUPPORTED_EXTENSIONS { - let specifier_with_ext = format!("{specifier}{ext}"); - if self - .documents - .contains_import(&specifier_with_ext, referrer) + let specifier_stem = specifier.strip_suffix(".js").unwrap_or(specifier); + let specifiers = 
std::iter::once(Cow::Borrowed(specifier)).chain( + SUPPORTED_EXTENSIONS + .iter() + .map(|ext| Cow::Owned(format!("{specifier_stem}{ext}"))), + ); + for specifier in specifiers { + if let Some(specifier) = self + .resolver + .as_graph_resolver(Some(&self.file_referrer)) + .resolve( + &specifier, + &deno_graph::Range { + specifier: referrer.clone(), + start: deno_graph::Position::zeroed(), + end: deno_graph::Position::zeroed(), + }, + ResolutionMode::Types, + ) + .ok() + .and_then(|s| self.tsc_specifier_map.normalize(s.as_str()).ok()) + .filter(|s| self.documents.exists(s, Some(&self.file_referrer))) { - return Some(specifier_with_ext); + if let Some(specifier) = self + .check_specifier(&specifier, referrer) + .or_else(|| relative_specifier(referrer, &specifier)) + .filter(|s| !s.contains("/node_modules/")) + { + return Some(specifier); + } } } None @@ -555,8 +586,9 @@ fn try_reverse_map_package_json_exports( pub fn fix_ts_import_changes( referrer: &ModuleSpecifier, changes: &[tsc::FileTextChanges], - import_mapper: &TsResponseImportMapper, + language_server: &language_server::Inner, ) -> Result, AnyError> { + let import_mapper = language_server.get_ts_response_import_mapper(referrer); let mut r = Vec::new(); for change in changes { let mut text_changes = Vec::new(); @@ -598,68 +630,63 @@ pub fn fix_ts_import_changes( /// Fix tsc import code actions so that the module specifier is correct for /// resolution by Deno (includes the extension). -fn fix_ts_import_action( +fn fix_ts_import_action<'a>( referrer: &ModuleSpecifier, - action: &tsc::CodeFixAction, - import_mapper: &TsResponseImportMapper, -) -> Result, AnyError> { - if matches!( + action: &'a tsc::CodeFixAction, + language_server: &language_server::Inner, +) -> Option> { + if !matches!( action.fix_name.as_str(), "import" | "fixMissingFunctionDeclaration" ) { - let change = action + return Some(Cow::Borrowed(action)); + } + let specifier = (|| { + let text_change = action.changes.first()?.text_changes.first()?; + let captures = IMPORT_SPECIFIER_RE.captures(&text_change.new_text)?; + Some(captures.get(1)?.as_str()) + })(); + let Some(specifier) = specifier else { + return Some(Cow::Borrowed(action)); + }; + let import_mapper = language_server.get_ts_response_import_mapper(referrer); + if let Some(new_specifier) = + import_mapper.check_unresolved_specifier(specifier, referrer) + { + let description = action.description.replace(specifier, &new_specifier); + let changes = action .changes - .first() - .ok_or_else(|| anyhow!("Unexpected action changes."))?; - let text_change = change - .text_changes - .first() - .ok_or_else(|| anyhow!("Missing text change."))?; - if let Some(captures) = IMPORT_SPECIFIER_RE.captures(&text_change.new_text) - { - let specifier = captures - .get(1) - .ok_or_else(|| anyhow!("Missing capture."))? 
- .as_str(); - if let Some(new_specifier) = - import_mapper.check_unresolved_specifier(specifier, referrer) - { - let description = action.description.replace(specifier, &new_specifier); - let changes = action - .changes + .iter() + .map(|c| { + let text_changes = c + .text_changes .iter() - .map(|c| { - let text_changes = c - .text_changes - .iter() - .map(|tc| tsc::TextChange { - span: tc.span.clone(), - new_text: tc.new_text.replace(specifier, &new_specifier), - }) - .collect(); - tsc::FileTextChanges { - file_name: c.file_name.clone(), - text_changes, - is_new_file: c.is_new_file, - } + .map(|tc| tsc::TextChange { + span: tc.span.clone(), + new_text: tc.new_text.replace(specifier, &new_specifier), }) .collect(); + tsc::FileTextChanges { + file_name: c.file_name.clone(), + text_changes, + is_new_file: c.is_new_file, + } + }) + .collect(); - return Ok(Some(tsc::CodeFixAction { - description, - changes, - commands: None, - fix_name: action.fix_name.clone(), - fix_id: None, - fix_all_description: None, - })); - } else if !import_mapper.is_valid_import(specifier, referrer) { - return Ok(None); - } - } + Some(Cow::Owned(tsc::CodeFixAction { + description, + changes, + commands: None, + fix_name: action.fix_name.clone(), + fix_id: None, + fix_all_description: None, + })) + } else if !import_mapper.is_valid_import(specifier, referrer) { + None + } else { + Some(Cow::Borrowed(action)) } - - Ok(Some(action.clone())) } /// Determines if two TypeScript diagnostic codes are effectively equivalent. @@ -730,7 +757,7 @@ pub fn ts_changes_to_edit( })) } -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeActionData { pub specifier: ModuleSpecifier, @@ -1000,11 +1027,7 @@ impl CodeActionCollection { "The action returned from TypeScript is unsupported.", )); } - let Some(action) = fix_ts_import_action( - specifier, - action, - &language_server.get_ts_response_import_mapper(specifier), - )? 
+ let Some(action) = fix_ts_import_action(specifier, action, language_server) else { return Ok(()); }; @@ -1027,7 +1050,7 @@ impl CodeActionCollection { }); self .actions - .push(CodeActionKind::Tsc(code_action, action.clone())); + .push(CodeActionKind::Tsc(code_action, action.as_ref().clone())); if let Some(fix_id) = &action.fix_id { if let Some(CodeActionKind::Tsc(existing_fix_all, existing_action)) = @@ -1054,10 +1077,12 @@ impl CodeActionCollection { specifier: &ModuleSpecifier, diagnostic: &lsp::Diagnostic, ) { - let data = Some(json!({ - "specifier": specifier, - "fixId": action.fix_id, - })); + let data = action.fix_id.as_ref().map(|fix_id| { + json!(CodeActionData { + specifier: specifier.clone(), + fix_id: fix_id.clone(), + }) + }); let title = if let Some(description) = &action.fix_all_description { description.clone() } else { @@ -1206,14 +1231,11 @@ impl CodeActionCollection { }), ); - match parsed_source.program_ref() { - deno_ast::swc::ast::Program::Module(module) => module - .body - .iter() - .find(|i| i.range().contains(&specifier_range)) - .map(|i| text_info.line_and_column_index(i.range().start)), - deno_ast::swc::ast::Program::Script(_) => None, - } + parsed_source + .program_ref() + .body() + .find(|i| i.range().contains(&specifier_range)) + .map(|i| text_info.line_and_column_index(i.range().start)) } async fn deno_types_for_npm_action( diff --git a/cli/lsp/code_lens.rs b/cli/lsp/code_lens.rs index e117888fba..a57ca3ac9f 100644 --- a/cli/lsp/code_lens.rs +++ b/cli/lsp/code_lens.rs @@ -421,7 +421,7 @@ pub fn collect_test( ) -> Result, AnyError> { let mut collector = DenoTestCollector::new(specifier.clone(), parsed_source.clone()); - parsed_source.module().visit_with(&mut collector); + parsed_source.program().visit_with(&mut collector); Ok(collector.take()) } @@ -581,7 +581,7 @@ mod tests { .unwrap(); let mut collector = DenoTestCollector::new(specifier, parsed_module.clone()); - parsed_module.module().visit_with(&mut collector); + parsed_module.program().visit_with(&mut collector); assert_eq!( collector.take(), vec![ diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index 74f3583d68..34bf64446d 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -41,6 +41,7 @@ use deno_runtime::deno_node::PackageJson; use indexmap::IndexSet; use lsp_types::ClientCapabilities; use std::collections::BTreeMap; +use std::collections::BTreeSet; use std::collections::HashMap; use std::ops::Deref; use std::ops::DerefMut; @@ -439,6 +440,8 @@ pub struct LanguagePreferences { pub use_aliases_for_renames: bool, #[serde(default)] pub quote_style: QuoteStyle, + #[serde(default)] + pub prefer_type_only_auto_imports: bool, } impl Default for LanguagePreferences { @@ -449,6 +452,7 @@ impl Default for LanguagePreferences { auto_import_file_exclude_patterns: vec![], use_aliases_for_renames: true, quote_style: Default::default(), + prefer_type_only_auto_imports: false, } } } @@ -981,7 +985,7 @@ impl Config { | MediaType::Tsx => Some(&workspace_settings.typescript), MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap | MediaType::Unknown => None, } @@ -1187,6 +1191,7 @@ pub struct ConfigData { pub resolver: Arc, pub sloppy_imports_resolver: Option>, pub import_map_from_settings: Option, + pub unstable: BTreeSet, watched_files: HashMap, } @@ -1584,9 +1589,16 @@ impl ConfigData { .join("\n") ); } + let unstable = member_dir + .workspace + .unstable_features() + .iter() + .chain(settings.unstable.as_deref()) + .cloned() + .collect::>(); let 
unstable_sloppy_imports = std::env::var("DENO_UNSTABLE_SLOPPY_IMPORTS") .is_ok() - || member_dir.workspace.has_unstable("sloppy-imports"); + || unstable.contains("sloppy-imports"); let sloppy_imports_resolver = unstable_sloppy_imports.then(|| { Arc::new(CliSloppyImportsResolver::new( SloppyImportsCachedFs::new_without_stat_cache(Arc::new( @@ -1627,6 +1639,7 @@ impl ConfigData { lockfile, npmrc, import_map_from_settings, + unstable, watched_files, } } @@ -2251,6 +2264,7 @@ mod tests { auto_import_file_exclude_patterns: vec![], use_aliases_for_renames: true, quote_style: QuoteStyle::Auto, + prefer_type_only_auto_imports: false, }, suggest: CompletionSettings { complete_function_calls: false, @@ -2296,6 +2310,7 @@ mod tests { auto_import_file_exclude_patterns: vec![], use_aliases_for_renames: true, quote_style: QuoteStyle::Auto, + prefer_type_only_auto_imports: false, }, suggest: CompletionSettings { complete_function_calls: false, diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index caabd3f04e..83c00d27ed 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -1499,7 +1499,11 @@ fn diagnose_dependency( .data_for_specifier(referrer_doc.file_referrer().unwrap_or(referrer)) .and_then(|d| d.resolver.maybe_import_map()); if let Some(import_map) = import_map { - if let Resolution::Ok(resolved) = &dependency.maybe_code { + let resolved = dependency + .maybe_code + .ok() + .or_else(|| dependency.maybe_type.ok()); + if let Some(resolved) = resolved { if let Some(to) = import_map.lookup(&resolved.specifier, referrer) { if dependency_key != to { diagnostics.push( diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 7d1ca6810d..ce13c32157 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -272,7 +272,7 @@ fn get_maybe_test_module_fut( parsed_source.specifier().clone(), parsed_source.text_info_lazy().clone(), ); - parsed_source.module().visit_with(&mut collector); + parsed_source.program().visit_with(&mut collector); Arc::new(collector.take()) }) .map(Result::ok) @@ -332,12 +332,8 @@ impl Document { .filter(|s| cache.is_valid_file_referrer(s)) .cloned() .or(file_referrer); - let media_type = resolve_media_type( - &specifier, - maybe_headers.as_ref(), - maybe_language_id, - &resolver, - ); + let media_type = + resolve_media_type(&specifier, maybe_headers.as_ref(), maybe_language_id); let (maybe_parsed_source, maybe_module) = if media_type_is_diagnosable(media_type) { parse_and_analyze_module( @@ -399,7 +395,6 @@ impl Document { &self.specifier, self.maybe_headers.as_ref(), self.maybe_language_id, - &resolver, ); let dependencies; let maybe_types_dependency; @@ -764,14 +759,7 @@ fn resolve_media_type( specifier: &ModuleSpecifier, maybe_headers: Option<&HashMap>, maybe_language_id: Option, - resolver: &LspResolver, ) -> MediaType { - if resolver.in_node_modules(specifier) { - if let Some(media_type) = resolver.node_media_type(specifier) { - return media_type; - } - } - if let Some(language_id) = maybe_language_id { return MediaType::from_specifier_and_content_type( specifier, @@ -1071,34 +1059,6 @@ impl Documents { self.cache.is_valid_file_referrer(specifier) } - /// Return `true` if the provided specifier can be resolved to a document, - /// otherwise `false`. 
- pub fn contains_import( - &self, - specifier: &str, - referrer: &ModuleSpecifier, - ) -> bool { - let file_referrer = self.get_file_referrer(referrer); - let maybe_specifier = self - .resolver - .as_graph_resolver(file_referrer.as_deref()) - .resolve( - specifier, - &deno_graph::Range { - specifier: referrer.clone(), - start: deno_graph::Position::zeroed(), - end: deno_graph::Position::zeroed(), - }, - ResolutionMode::Types, - ) - .ok(); - if let Some(import_specifier) = maybe_specifier { - self.exists(&import_specifier, file_referrer.as_deref()) - } else { - false - } - } - pub fn resolve_document_specifier( &self, specifier: &ModuleSpecifier, @@ -1561,7 +1521,7 @@ fn parse_source( text: Arc, media_type: MediaType, ) -> ParsedSourceResult { - deno_ast::parse_module(deno_ast::ParseParams { + deno_ast::parse_program(deno_ast::ParseParams { specifier, text, media_type, diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 908afa1657..2554fa34b1 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -863,7 +863,10 @@ impl Inner { // We ignore these directories by default because there is a // high likelihood they aren't relevant. Someone can opt-into // them by specifying one of them as an enabled path. - if matches!(dir_name.as_str(), "vendor" | "node_modules" | ".git") { + if matches!( + dir_name.as_str(), + "vendor" | "coverage" | "node_modules" | ".git" + ) { continue; } // ignore cargo target directories for anyone using Deno with Rust @@ -904,7 +907,7 @@ impl Inner { | MediaType::Tsx => {} MediaType::Wasm | MediaType::SourceMap - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::Unknown => { if path.extension().and_then(|s| s.to_str()) != Some("jsonc") { continue; @@ -1384,14 +1387,10 @@ impl Inner { .clone(); fmt_options.use_tabs = Some(!params.options.insert_spaces); fmt_options.indent_width = Some(params.options.tab_size as u8); - let maybe_workspace = self - .config - .tree - .data_for_specifier(&specifier) - .map(|d| &d.member_dir.workspace); + let config_data = self.config.tree.data_for_specifier(&specifier); let unstable_options = UnstableFmtOptions { - component: maybe_workspace - .map(|w| w.has_unstable("fmt-component")) + component: config_data + .map(|d| d.unstable.contains("fmt-component")) .unwrap_or(false), }; let document = document.clone(); @@ -1838,7 +1837,7 @@ impl Inner { fix_ts_import_changes( &code_action_data.specifier, &combined_code_actions.changes, - &self.get_ts_response_import_mapper(&code_action_data.specifier), + self, ) .map_err(|err| { error!("Unable to remap changes: {:#}", err); @@ -1891,7 +1890,7 @@ impl Inner { refactor_edit_info.edits = fix_ts_import_changes( &action_data.specifier, &refactor_edit_info.edits, - &self.get_ts_response_import_mapper(&action_data.specifier), + self, ) .map_err(|err| { error!("Unable to remap changes: {:#}", err); @@ -1922,7 +1921,8 @@ impl Inner { // todo(dsherret): this should probably just take the resolver itself // as the import map is an implementation detail .and_then(|d| d.resolver.maybe_import_map()), - self.resolver.as_ref(), + &self.resolver, + &self.ts_server.specifier_map, file_referrer, ) } @@ -2285,7 +2285,11 @@ impl Inner { .into(), scope.cloned(), ) - .await; + .await + .unwrap_or_else(|err| { + error!("Unable to get completion info from TypeScript: {:#}", err); + None + }); if let Some(completions) = maybe_completion_info { response = Some( @@ -3812,7 +3816,7 @@ impl Inner { let maybe_inlay_hints = maybe_inlay_hints.map(|hints| { hints .iter() - 
.map(|hint| hint.to_lsp(line_index.clone())) + .map(|hint| hint.to_lsp(line_index.clone(), self)) .collect() }); self.performance.measure(mark); @@ -3948,7 +3952,9 @@ mod tests { fn test_walk_workspace() { let temp_dir = TempDir::new(); temp_dir.create_dir_all("root1/vendor/"); + temp_dir.create_dir_all("root1/coverage/"); temp_dir.write("root1/vendor/mod.ts", ""); // no, vendor + temp_dir.write("root1/coverage/mod.ts", ""); // no, coverage temp_dir.create_dir_all("root1/node_modules/"); temp_dir.write("root1/node_modules/mod.ts", ""); // no, node_modules diff --git a/cli/lsp/npm.rs b/cli/lsp/npm.rs index 8bdeb7e7d8..2decfc3429 100644 --- a/cli/lsp/npm.rs +++ b/cli/lsp/npm.rs @@ -4,6 +4,7 @@ use dashmap::DashMap; use deno_core::anyhow::anyhow; use deno_core::error::AnyError; use deno_core::serde_json; +use deno_npm::npm_rc::NpmRc; use deno_semver::package::PackageNv; use deno_semver::Version; use serde::Deserialize; @@ -25,7 +26,10 @@ pub struct CliNpmSearchApi { impl CliNpmSearchApi { pub fn new(file_fetcher: Arc) -> Self { - let resolver = NpmFetchResolver::new(file_fetcher.clone()); + let resolver = NpmFetchResolver::new( + file_fetcher.clone(), + Arc::new(NpmRc::default().as_resolved(npm_registry_url()).unwrap()), + ); Self { file_fetcher, resolver, diff --git a/cli/lsp/registries.rs b/cli/lsp/registries.rs index 5f7ce00823..ade353e683 100644 --- a/cli/lsp/registries.rs +++ b/cli/lsp/registries.rs @@ -482,6 +482,7 @@ impl ModuleRegistry { .fetch_with_options(FetchOptions { specifier: &specifier, permissions: FetchPermissionsOptionRef::AllowAll, + maybe_auth: None, maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"), maybe_cache_setting: None, }) diff --git a/cli/lsp/resolver.rs b/cli/lsp/resolver.rs index c89273147a..f5df24d575 100644 --- a/cli/lsp/resolver.rs +++ b/cli/lsp/resolver.rs @@ -2,6 +2,8 @@ use dashmap::DashMap; use deno_ast::MediaType; +use deno_ast::ParsedSource; +use deno_cache_dir::npm::NpmCacheDir; use deno_cache_dir::HttpCache; use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::WorkspaceResolver; @@ -14,15 +16,15 @@ use deno_path_util::url_to_file_path; use deno_runtime::deno_fs; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::PackageJson; +use deno_runtime::deno_node::PackageJsonResolver; use deno_semver::jsr::JsrPackageReqReference; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; use indexmap::IndexMap; use node_resolver::errors::ClosestPkgJsonError; -use node_resolver::NodeResolution; +use node_resolver::InNpmPackageChecker; use node_resolver::NodeResolutionMode; -use node_resolver::NpmResolver; use std::borrow::Cow; use std::collections::BTreeMap; use std::collections::BTreeSet; @@ -36,6 +38,7 @@ use crate::args::create_default_npmrc; use crate::args::CacheSetting; use crate::args::CliLockfile; use crate::args::NpmInstallDepsProvider; +use crate::cache::DenoCacheEnvFsAdapter; use crate::graph_util::CliJsrUrlProvider; use crate::http_util::HttpClientProvider; use crate::lsp::config::Config; @@ -43,40 +46,50 @@ use crate::lsp::config::ConfigData; use crate::lsp::logging::lsp_warn; use crate::npm::create_cli_npm_resolver_for_lsp; use crate::npm::CliByonmNpmResolverCreateOptions; +use crate::npm::CliManagedInNpmPkgCheckerCreateOptions; +use crate::npm::CliManagedNpmResolverCreateOptions; use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolverCreateOptions; -use 
crate::npm::CliNpmResolverManagedCreateOptions; use crate::npm::CliNpmResolverManagedSnapshotOption; +use crate::npm::CreateInNpmPkgCheckerOptions; use crate::npm::ManagedCliNpmResolver; -use crate::resolver::CjsResolutionStore; +use crate::resolver::CjsTracker; +use crate::resolver::CjsTrackerOptions; use crate::resolver::CliDenoResolverFs; use crate::resolver::CliGraphResolver; use crate::resolver::CliGraphResolverOptions; use crate::resolver::CliNodeResolver; use crate::resolver::WorkerCliNpmGraphResolver; +use crate::tsc::into_specifier_and_media_type; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; #[derive(Debug, Clone)] struct LspScopeResolver { + cjs_tracker: Option>, graph_resolver: Arc, jsr_resolver: Option>, npm_resolver: Option>, node_resolver: Option>, + pkg_json_resolver: Option>, redirect_resolver: Option>, graph_imports: Arc>, + package_json_deps_by_resolution: Arc>, config_data: Option>, } impl Default for LspScopeResolver { fn default() -> Self { Self { + cjs_tracker: None, graph_resolver: create_graph_resolver(None, None, None), jsr_resolver: None, npm_resolver: None, node_resolver: None, + pkg_json_resolver: None, redirect_resolver: None, graph_imports: Default::default(), + package_json_deps_by_resolution: Default::default(), config_data: None, } } @@ -90,14 +103,35 @@ impl LspScopeResolver { ) -> Self { let mut npm_resolver = None; let mut node_resolver = None; + let mut lsp_cjs_tracker = None; + let fs = Arc::new(deno_fs::RealFs); + let pkg_json_resolver = Arc::new(PackageJsonResolver::new( + deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + )); if let Some(http_client) = http_client_provider { npm_resolver = create_npm_resolver( config_data.map(|d| d.as_ref()), cache, http_client, + &pkg_json_resolver, ) .await; - node_resolver = create_node_resolver(npm_resolver.as_ref()); + if let Some(npm_resolver) = &npm_resolver { + let in_npm_pkg_checker = create_in_npm_pkg_checker(npm_resolver); + let cjs_tracker = create_cjs_tracker( + in_npm_pkg_checker.clone(), + pkg_json_resolver.clone(), + ); + lsp_cjs_tracker = + Some(Arc::new(LspCjsTracker::new(cjs_tracker.clone()))); + node_resolver = Some(create_node_resolver( + cjs_tracker, + fs.clone(), + in_npm_pkg_checker, + npm_resolver, + pkg_json_resolver.clone(), + )); + } } let graph_resolver = create_graph_resolver( config_data.map(|d| d.as_ref()), @@ -133,13 +167,43 @@ impl LspScopeResolver { ) }) .unwrap_or_default(); + let package_json_deps_by_resolution = (|| { + let node_resolver = node_resolver.as_ref()?; + let package_json = config_data?.maybe_pkg_json()?; + let referrer = package_json.specifier(); + let dependencies = package_json.dependencies.as_ref()?; + let result = dependencies + .iter() + .flat_map(|(name, _)| { + let req_ref = + NpmPackageReqReference::from_str(&format!("npm:{name}")).ok()?; + let specifier = into_specifier_and_media_type(Some( + node_resolver + .resolve_req_reference( + &req_ref, + &referrer, + NodeResolutionMode::Types, + ) + .ok()?, + )) + .0; + Some((specifier, name.clone())) + }) + .collect(); + Some(result) + })(); + let package_json_deps_by_resolution = + Arc::new(package_json_deps_by_resolution.unwrap_or_default()); Self { + cjs_tracker: lsp_cjs_tracker, graph_resolver, jsr_resolver, npm_resolver, node_resolver, + pkg_json_resolver: Some(pkg_json_resolver), redirect_resolver, graph_imports, + package_json_deps_by_resolution, config_data: config_data.cloned(), } } @@ -147,19 +211,44 @@ impl LspScopeResolver { fn 
snapshot(&self) -> Arc { let npm_resolver = self.npm_resolver.as_ref().map(|r| r.clone_snapshotted()); - let node_resolver = create_node_resolver(npm_resolver.as_ref()); + let fs = Arc::new(deno_fs::RealFs); + let pkg_json_resolver = Arc::new(PackageJsonResolver::new( + deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + )); + let mut node_resolver = None; + let mut lsp_cjs_tracker = None; + if let Some(npm_resolver) = &npm_resolver { + let in_npm_pkg_checker = create_in_npm_pkg_checker(npm_resolver); + let cjs_tracker = create_cjs_tracker( + in_npm_pkg_checker.clone(), + pkg_json_resolver.clone(), + ); + lsp_cjs_tracker = Some(Arc::new(LspCjsTracker::new(cjs_tracker.clone()))); + node_resolver = Some(create_node_resolver( + cjs_tracker, + fs, + in_npm_pkg_checker, + npm_resolver, + pkg_json_resolver.clone(), + )); + } let graph_resolver = create_graph_resolver( self.config_data.as_deref(), npm_resolver.as_ref(), node_resolver.as_ref(), ); Arc::new(Self { + cjs_tracker: lsp_cjs_tracker, graph_resolver, jsr_resolver: self.jsr_resolver.clone(), npm_resolver, node_resolver, redirect_resolver: self.redirect_resolver.clone(), + pkg_json_resolver: Some(pkg_json_resolver), graph_imports: self.graph_imports.clone(), + package_json_deps_by_resolution: self + .package_json_deps_by_resolution + .clone(), config_data: self.config_data.clone(), }) } @@ -261,6 +350,22 @@ impl LspResolver { resolver.graph_resolver.create_graph_npm_resolver() } + pub fn maybe_cjs_tracker( + &self, + file_referrer: Option<&ModuleSpecifier>, + ) -> Option<&Arc> { + let resolver = self.get_scope_resolver(file_referrer); + resolver.cjs_tracker.as_ref() + } + + pub fn maybe_node_resolver( + &self, + file_referrer: Option<&ModuleSpecifier>, + ) -> Option<&Arc> { + let resolver = self.get_scope_resolver(file_referrer); + resolver.node_resolver.as_ref() + } + pub fn maybe_managed_npm_resolver( &self, file_referrer: Option<&ModuleSpecifier>, @@ -328,13 +433,25 @@ impl LspResolver { ) -> Option<(ModuleSpecifier, MediaType)> { let resolver = self.get_scope_resolver(file_referrer); let node_resolver = resolver.node_resolver.as_ref()?; - Some(NodeResolution::into_specifier_and_media_type(Some( + Some(into_specifier_and_media_type(Some( node_resolver .resolve_req_reference(req_ref, referrer, NodeResolutionMode::Types) .ok()?, ))) } + pub fn file_url_to_package_json_dep( + &self, + specifier: &ModuleSpecifier, + file_referrer: Option<&ModuleSpecifier>, + ) -> Option { + let resolver = self.get_scope_resolver(file_referrer); + resolver + .package_json_deps_by_resolution + .get(specifier) + .cloned() + } + pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool { fn has_node_modules_dir(specifier: &ModuleSpecifier) -> bool { // consider any /node_modules/ directory as being in the node_modules @@ -346,14 +463,10 @@ impl LspResolver { .contains("/node_modules/") } - let global_npm_resolver = self - .get_scope_resolver(Some(specifier)) - .npm_resolver - .as_ref() - .and_then(|npm_resolver| npm_resolver.as_managed()) - .filter(|r| r.root_node_modules_path().is_none()); - if let Some(npm_resolver) = &global_npm_resolver { - if npm_resolver.in_npm_package(specifier) { + if let Some(node_resolver) = + &self.get_scope_resolver(Some(specifier)).node_resolver + { + if node_resolver.in_npm_package(specifier) { return true; } } @@ -361,18 +474,6 @@ impl LspResolver { has_node_modules_dir(specifier) } - pub fn node_media_type( - &self, - specifier: &ModuleSpecifier, - ) -> Option { - let resolver = 
self.get_scope_resolver(Some(specifier)); - let node_resolver = resolver.node_resolver.as_ref()?; - let resolution = node_resolver - .url_to_node_resolution(specifier.clone()) - .ok()?; - Some(NodeResolution::into_specifier_and_media_type(Some(resolution)).1) - } - pub fn is_bare_package_json_dep( &self, specifier_text: &str, @@ -398,10 +499,10 @@ impl LspResolver { referrer: &ModuleSpecifier, ) -> Result>, ClosestPkgJsonError> { let resolver = self.get_scope_resolver(Some(referrer)); - let Some(node_resolver) = resolver.node_resolver.as_ref() else { + let Some(pkg_json_resolver) = resolver.pkg_json_resolver.as_ref() else { return Ok(None); }; - node_resolver.get_closest_package_json(referrer) + pkg_json_resolver.get_closest_package_json(referrer) } pub fn resolve_redirects( @@ -457,11 +558,13 @@ async fn create_npm_resolver( config_data: Option<&ConfigData>, cache: &LspCache, http_client_provider: &Arc, + pkg_json_resolver: &Arc, ) -> Option> { let enable_byonm = config_data.map(|d| d.byonm).unwrap_or(false); let options = if enable_byonm { CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)), + pkg_json_resolver: pkg_json_resolver.clone(), root_node_modules_dir: config_data.and_then(|config_data| { config_data.node_modules_dir.clone().or_else(|| { url_to_file_path(&config_data.scope) @@ -471,7 +574,15 @@ async fn create_npm_resolver( }), }) } else { - CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions { + let npmrc = config_data + .and_then(|d| d.npmrc.clone()) + .unwrap_or_else(create_default_npmrc); + let npm_cache_dir = Arc::new(NpmCacheDir::new( + &DenoCacheEnvFsAdapter(&deno_fs::RealFs), + cache.deno_dir().npm_folder_path(), + npmrc.get_all_known_registries_urls(), + )); + CliNpmResolverCreateOptions::Managed(CliManagedNpmResolverCreateOptions { http_client_provider: http_client_provider.clone(), snapshot: match config_data.and_then(|d| d.lockfile.as_ref()) { Some(lockfile) => { @@ -485,7 +596,7 @@ async fn create_npm_resolver( // updating it. Only the cache request should update the lockfile. 
maybe_lockfile: None, fs: Arc::new(deno_fs::RealFs), - npm_global_cache_dir: cache.deno_dir().npm_folder_path(), + npm_cache_dir, // Use an "only" cache setting in order to make the // user do an explicit "cache" command and prevent // the cache from being filled with lots of packages while @@ -496,9 +607,7 @@ async fn create_npm_resolver( .and_then(|d| d.node_modules_dir.clone()), // only used for top level install, so we can ignore this npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()), - npmrc: config_data - .and_then(|d| d.npmrc.clone()) - .unwrap_or_else(create_default_npmrc), + npmrc, npm_system_info: NpmSystemInfo::default(), lifecycle_scripts: Default::default(), }) @@ -506,28 +615,59 @@ async fn create_npm_resolver( Some(create_cli_npm_resolver_for_lsp(options).await) } +fn create_cjs_tracker( + in_npm_pkg_checker: Arc, + pkg_json_resolver: Arc, +) -> Arc { + Arc::new(CjsTracker::new( + in_npm_pkg_checker, + pkg_json_resolver, + CjsTrackerOptions { + // todo(dsherret): support in the lsp by stabilizing the feature + // so that we don't have to pipe the config in here + unstable_detect_cjs: false, + }, + )) +} + +fn create_in_npm_pkg_checker( + npm_resolver: &Arc, +) -> Arc { + crate::npm::create_in_npm_pkg_checker(match npm_resolver.as_inner() { + crate::npm::InnerCliNpmResolverRef::Byonm(_) => { + CreateInNpmPkgCheckerOptions::Byonm + } + crate::npm::InnerCliNpmResolverRef::Managed(m) => { + CreateInNpmPkgCheckerOptions::Managed( + CliManagedInNpmPkgCheckerCreateOptions { + root_cache_dir_url: m.global_cache_root_url(), + maybe_node_modules_path: m.maybe_node_modules_path(), + }, + ) + } + }) +} + fn create_node_resolver( - npm_resolver: Option<&Arc>, -) -> Option> { - use once_cell::sync::Lazy; - - // it's not ideal to share this across all scopes and to - // never clear it, but it's fine for the time being - static CJS_RESOLUTIONS: Lazy> = - Lazy::new(Default::default); - - let npm_resolver = npm_resolver?; - let fs = Arc::new(deno_fs::RealFs); + cjs_tracker: Arc, + fs: Arc, + in_npm_pkg_checker: Arc, + npm_resolver: &Arc, + pkg_json_resolver: Arc, +) -> Arc { let node_resolver_inner = Arc::new(NodeResolver::new( deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + in_npm_pkg_checker.clone(), npm_resolver.clone().into_npm_resolver(), + pkg_json_resolver.clone(), )); - Some(Arc::new(CliNodeResolver::new( - CJS_RESOLUTIONS.clone(), + Arc::new(CliNodeResolver::new( + cjs_tracker.clone(), fs, + in_npm_pkg_checker, node_resolver_inner, npm_resolver.clone(), - ))) + )) } fn create_graph_resolver( @@ -555,8 +695,8 @@ fn create_graph_resolver( workspace.to_maybe_jsx_import_source_config().ok().flatten() }), maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()), - bare_node_builtins_enabled: workspace - .is_some_and(|workspace| workspace.has_unstable("bare-node-builtins")), + bare_node_builtins_enabled: config_data + .is_some_and(|d| d.unstable.contains("bare-node-builtins")), sloppy_imports_resolver: config_data .and_then(|d| d.sloppy_imports_resolver.clone()), })) @@ -702,6 +842,45 @@ impl RedirectResolver { } } +#[derive(Debug)] +pub struct LspCjsTracker { + cjs_tracker: Arc, +} + +impl LspCjsTracker { + pub fn new(cjs_tracker: Arc) -> Self { + Self { cjs_tracker } + } + + pub fn is_cjs( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + maybe_parsed_source: Option<&ParsedSource>, + ) -> bool { + if let Some(module_kind) = + self.cjs_tracker.get_known_kind(specifier, media_type) + { + module_kind.is_cjs() + } else { + let 
maybe_is_script = maybe_parsed_source.map(|p| p.compute_is_script()); + maybe_is_script + .and_then(|is_script| { + self + .cjs_tracker + .is_cjs_with_known_is_script(specifier, media_type, is_script) + .ok() + }) + .unwrap_or_else(|| { + self + .cjs_tracker + .is_maybe_cjs(specifier, media_type) + .unwrap_or(false) + }) + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/cli/lsp/testing/collectors.rs b/cli/lsp/testing/collectors.rs index 2f2ddb8773..2dd7ec0d96 100644 --- a/cli/lsp/testing/collectors.rs +++ b/cli/lsp/testing/collectors.rs @@ -650,7 +650,7 @@ pub mod tests { .unwrap(); let text_info = parsed_module.text_info_lazy().clone(); let mut collector = TestCollector::new(specifier, text_info); - parsed_module.module().visit_with(&mut collector); + parsed_module.program().visit_with(&mut collector); collector.take() } diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index cfab39b20b..6f63ced5be 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -236,7 +236,7 @@ pub struct TsServer { performance: Arc, sender: mpsc::UnboundedSender, receiver: Mutex>>, - specifier_map: Arc, + pub specifier_map: Arc, inspector_server: Mutex>>, pending_change: Mutex>, } @@ -882,20 +882,22 @@ impl TsServer { options: GetCompletionsAtPositionOptions, format_code_settings: FormatCodeSettings, scope: Option, - ) -> Option { + ) -> Result, AnyError> { let req = TscRequest::GetCompletionsAtPosition(Box::new(( self.specifier_map.denormalize(&specifier), position, options, format_code_settings, ))); - match self.request(snapshot, req, scope).await { - Ok(maybe_info) => maybe_info, - Err(err) => { - log::error!("Unable to get completion info from TypeScript: {:#}", err); - None - } - } + self + .request::>(snapshot, req, scope) + .await + .map(|mut info| { + if let Some(info) = &mut info { + info.normalize(&self.specifier_map); + } + info + }) } pub async fn get_completion_details( @@ -2182,6 +2184,50 @@ impl NavigateToItem { } } +#[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct InlayHintDisplayPart { + pub text: String, + pub span: Option, + pub file: Option, +} + +impl InlayHintDisplayPart { + pub fn to_lsp( + &self, + language_server: &language_server::Inner, + ) -> lsp::InlayHintLabelPart { + let location = self.file.as_ref().map(|f| { + let specifier = + resolve_url(f).unwrap_or_else(|_| INVALID_SPECIFIER.clone()); + let file_referrer = + language_server.documents.get_file_referrer(&specifier); + let uri = language_server + .url_map + .specifier_to_uri(&specifier, file_referrer.as_deref()) + .unwrap_or_else(|_| INVALID_URI.clone()); + let range = self + .span + .as_ref() + .and_then(|s| { + let asset_or_doc = + language_server.get_asset_or_document(&specifier).ok()?; + Some(s.to_range(asset_or_doc.line_index())) + }) + .unwrap_or_else(|| { + lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)) + }); + lsp::Location { uri, range } + }); + lsp::InlayHintLabelPart { + value: self.text.clone(), + tooltip: None, + location, + command: None, + } + } +} + #[derive(Debug, Clone, Deserialize)] pub enum InlayHintKind { Type, @@ -2203,6 +2249,7 @@ impl InlayHintKind { #[serde(rename_all = "camelCase")] pub struct InlayHint { pub text: String, + pub display_parts: Option>, pub position: u32, pub kind: InlayHintKind, pub whitespace_before: Option, @@ -2210,10 +2257,23 @@ pub struct InlayHint { } impl InlayHint { - pub fn to_lsp(&self, line_index: Arc) -> lsp::InlayHint { + pub fn to_lsp( + &self, + line_index: Arc, + language_server: &language_server::Inner, + ) 
-> lsp::InlayHint { lsp::InlayHint { position: line_index.position_tsc(self.position.into()), - label: lsp::InlayHintLabel::String(self.text.clone()), + label: if let Some(display_parts) = &self.display_parts { + lsp::InlayHintLabel::LabelParts( + display_parts + .iter() + .map(|p| p.to_lsp(language_server)) + .collect(), + ) + } else { + lsp::InlayHintLabel::String(self.text.clone()) + }, kind: self.kind.to_lsp(), padding_left: self.whitespace_before, padding_right: self.whitespace_after, @@ -3584,6 +3644,12 @@ pub struct CompletionInfo { } impl CompletionInfo { + fn normalize(&mut self, specifier_map: &TscSpecifierMap) { + for entry in &mut self.entries { + entry.normalize(specifier_map); + } + } + pub fn as_completion_response( &self, line_index: Arc, @@ -3645,11 +3711,17 @@ pub struct CompletionItemData { #[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] -struct CompletionEntryDataImport { +struct CompletionEntryDataAutoImport { module_specifier: String, file_name: String, } +#[derive(Debug)] +pub struct CompletionNormalizedAutoImportData { + raw: CompletionEntryDataAutoImport, + normalized: ModuleSpecifier, +} + #[derive(Debug, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionEntry { @@ -3682,9 +3754,28 @@ pub struct CompletionEntry { is_import_statement_completion: Option, #[serde(skip_serializing_if = "Option::is_none")] data: Option, + /// This is not from tsc, we add it for convenience during normalization. + /// Represents `self.data.file_name`, but normalized. + #[serde(skip)] + auto_import_data: Option, } impl CompletionEntry { + fn normalize(&mut self, specifier_map: &TscSpecifierMap) { + let Some(data) = &self.data else { + return; + }; + let Ok(raw) = + serde_json::from_value::(data.clone()) + else { + return; + }; + if let Ok(normalized) = specifier_map.normalize(&raw.file_name) { + self.auto_import_data = + Some(CompletionNormalizedAutoImportData { raw, normalized }); + } + } + fn get_commit_characters( &self, info: &CompletionInfo, @@ -3833,25 +3924,24 @@ impl CompletionEntry { if let Some(source) = &self.source { let mut display_source = source.clone(); - if let Some(data) = &self.data { - if let Ok(import_data) = - serde_json::from_value::(data.clone()) + if let Some(import_data) = &self.auto_import_data { + if let Some(new_module_specifier) = language_server + .get_ts_response_import_mapper(specifier) + .check_specifier(&import_data.normalized, specifier) + .or_else(|| relative_specifier(specifier, &import_data.normalized)) { - if let Ok(import_specifier) = resolve_url(&import_data.file_name) { - if let Some(new_module_specifier) = language_server - .get_ts_response_import_mapper(specifier) - .check_specifier(&import_specifier, specifier) - .or_else(|| relative_specifier(specifier, &import_specifier)) - { - display_source.clone_from(&new_module_specifier); - if new_module_specifier != import_data.module_specifier { - specifier_rewrite = - Some((import_data.module_specifier, new_module_specifier)); - } - } else if source.starts_with(jsr_url().as_str()) { - return None; - } + if new_module_specifier.contains("/node_modules/") { + return None; } + display_source.clone_from(&new_module_specifier); + if new_module_specifier != import_data.raw.module_specifier { + specifier_rewrite = Some(( + import_data.raw.module_specifier.clone(), + new_module_specifier, + )); + } + } else if source.starts_with(jsr_url().as_str()) { + return None; } } // We want relative or bare (import-mapped or otherwise) specifiers to 
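Review note: the completion changes above parse the tsc `data` payload once per entry (`CompletionEntry::normalize` caching it as `auto_import_data`) instead of re-running `serde_json::from_value` every time an entry is displayed or its specifier rewritten. A minimal sketch of that normalize-once pattern with simplified, hypothetical names (assumes `serde` with the `derive` feature plus `serde_json`; this is not the actual `tsc.rs` code):

```rust
use serde::Deserialize;
use serde_json::{json, Value};

// Shape of the auto-import payload tsc attaches to a completion entry;
// field names mirror the camelCase JSON, as in the diff above.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct AutoImportData {
    module_specifier: String,
    file_name: String,
}

#[derive(Debug, Default)]
struct Entry {
    data: Option<Value>,
    // Cached by `normalize()`; later display/rewrite passes read this
    // instead of deserializing `data` again.
    auto_import_data: Option<AutoImportData>,
}

impl Entry {
    fn normalize(&mut self) {
        let Some(data) = &self.data else { return };
        // Entries whose payload doesn't match the expected shape are
        // simply left without cached auto-import data.
        if let Ok(parsed) = serde_json::from_value::<AutoImportData>(data.clone()) {
            self.auto_import_data = Some(parsed);
        }
    }
}

fn main() {
    let mut entry = Entry {
        data: Some(json!({
            "moduleSpecifier": "./b.ts",
            "fileName": "file:///project/b.ts",
        })),
        ..Default::default()
    };
    entry.normalize();
    println!("{:?}", entry.auto_import_data);
}
```

Doing the parse in one place also gives the specifier-based filtering (the `/node_modules/` checks above and the `$node_modules` denormalization workaround in the next hunk) a single normalized value to work from.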
@@ -4154,6 +4244,13 @@ impl TscSpecifierMap { return specifier.to_string(); } let mut specifier = original.to_string(); + if specifier.contains("/node_modules/.deno/") + && !specifier.contains("/node_modules/@types/node/") + { + // The ts server doesn't give completions from files in + // `node_modules/.deno/`. We work around it like this. + specifier = specifier.replace("/node_modules/", "/$node_modules/"); + } let media_type = MediaType::from_specifier(original); // If the URL-inferred media type doesn't correspond to tsc's path-inferred // media type, force it to be the same by appending an extension. @@ -4271,7 +4368,7 @@ fn op_is_cancelled(state: &mut OpState) -> bool { fn op_is_node_file(state: &mut OpState, #[string] path: String) -> bool { let state = state.borrow::(); let mark = state.performance.mark("tsc.op.op_is_node_file"); - let r = match ModuleSpecifier::parse(&path) { + let r = match state.specifier_map.normalize(path) { Ok(specifier) => state.state_snapshot.resolver.in_node_modules(&specifier), Err(_) => false, }; @@ -4304,14 +4401,25 @@ fn op_load<'s>( None } else { let asset_or_document = state.get_asset_or_document(&specifier); - asset_or_document.map(|doc| LoadResponse { - data: doc.text(), - script_kind: crate::tsc::as_ts_script_kind(doc.media_type()), - version: state.script_version(&specifier), - is_cjs: matches!( - doc.media_type(), - MediaType::Cjs | MediaType::Cts | MediaType::Dcts - ), + asset_or_document.map(|doc| { + let maybe_cjs_tracker = state + .state_snapshot + .resolver + .maybe_cjs_tracker(Some(&specifier)); + LoadResponse { + data: doc.text(), + script_kind: crate::tsc::as_ts_script_kind(doc.media_type()), + version: state.script_version(&specifier), + is_cjs: maybe_cjs_tracker + .map(|t| { + t.is_cjs( + &specifier, + doc.media_type(), + doc.maybe_parsed_source().and_then(|p| p.as_ref().ok()), + ) + }) + .unwrap_or(false), + } }) }; @@ -4540,7 +4648,10 @@ fn op_script_names(state: &mut OpState) -> ScriptNames { for doc in &docs { let specifier = doc.specifier(); let is_open = doc.is_open(); - if is_open || specifier.scheme() == "file" { + if is_open + || (specifier.scheme() == "file" + && !state.state_snapshot.resolver.in_node_modules(specifier)) + { let script_names = doc .scope() .and_then(|s| result.by_scope.get_mut(s)) @@ -4892,6 +5003,10 @@ pub struct UserPreferences { pub allow_rename_of_import_path: Option, #[serde(skip_serializing_if = "Option::is_none")] pub auto_import_file_exclude_patterns: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub interactive_inlay_hints: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub prefer_type_only_auto_imports: Option, } impl UserPreferences { @@ -4909,6 +5024,7 @@ impl UserPreferences { include_completions_with_snippet_text: Some( config.snippet_support_capable(), ), + interactive_inlay_hints: Some(true), provide_refactor_not_applicable_reason: Some(true), quote_preference: Some(fmt_config.into()), use_label_details_in_completion_entries: Some(true), @@ -5013,6 +5129,9 @@ impl UserPreferences { } else { Some(language_settings.preferences.quote_style) }, + prefer_type_only_auto_imports: Some( + language_settings.preferences.prefer_type_only_auto_imports, + ), ..base_preferences } } @@ -5958,6 +6077,7 @@ mod tests { Some(temp_dir.url()), ) .await + .unwrap() .unwrap(); assert_eq!(info.entries.len(), 22); let details = ts_server @@ -6117,6 +6237,7 @@ mod tests { Some(temp_dir.url()), ) .await + .unwrap() .unwrap(); let entry = info .entries @@ -6154,7 +6275,7 @@ mod tests { let 
change = changes.text_changes.first().unwrap(); assert_eq!( change.new_text, - "import type { someLongVariable } from './b.ts'\n" + "import { someLongVariable } from './b.ts'\n" ); } diff --git a/cli/main.rs b/cli/main.rs index d99a4c402a..04daff6700 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -15,7 +15,6 @@ mod js; mod jsr; mod lsp; mod module_loader; -mod napi; mod node; mod npm; mod ops; @@ -136,7 +135,7 @@ async fn run_subcommand(flags: Arc) -> Result { tools::compile::compile(flags, compile_flags).await }), DenoSubcommand::Coverage(coverage_flags) => spawn_subcommand(async { - tools::coverage::cover_files(flags, coverage_flags).await + tools::coverage::cover_files(flags, coverage_flags) }), DenoSubcommand::Fmt(fmt_flags) => { spawn_subcommand( @@ -169,10 +168,10 @@ async fn run_subcommand(flags: Arc) -> Result { if std::io::stderr().is_terminal() { log::warn!( "{} command is intended to be run by text editors and IDEs and shouldn't be run manually. - + Visit https://docs.deno.com/runtime/getting_started/setup_your_environment/ for instruction how to setup your favorite text editor. - + Press Ctrl+C to exit. ", colors::cyan("deno lsp")); } diff --git a/cli/mainrt.rs b/cli/mainrt.rs index 02d58fcee1..f5b798f817 100644 --- a/cli/mainrt.rs +++ b/cli/mainrt.rs @@ -88,11 +88,10 @@ fn main() { let standalone = standalone::extract_standalone(Cow::Owned(args)); let future = async move { match standalone { - Ok(Some(future)) => { - let (metadata, eszip) = future.await?; - util::logger::init(metadata.log_level); - load_env_vars(&metadata.env_vars_from_env_file); - let exit_code = standalone::run(eszip, metadata).await?; + Ok(Some(data)) => { + util::logger::init(data.metadata.log_level); + load_env_vars(&data.metadata.env_vars_from_env_file); + let exit_code = standalone::run(data).await?; std::process::exit(exit_code); } Ok(None) => Ok(()), diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 37d42f78e5..43c9e1aa07 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -2,6 +2,7 @@ use std::borrow::Cow; use std::cell::RefCell; +use std::path::Path; use std::path::PathBuf; use std::pin::Pin; use std::rc::Rc; @@ -23,19 +24,23 @@ use crate::graph_container::ModuleGraphUpdatePermit; use crate::graph_util::CreateGraphOptions; use crate::graph_util::ModuleGraphBuilder; use crate::node; +use crate::node::CliNodeCodeTranslator; use crate::npm::CliNpmResolver; +use crate::resolver::CjsTracker; use crate::resolver::CliGraphResolver; use crate::resolver::CliNodeResolver; use crate::resolver::ModuleCodeStringSource; +use crate::resolver::NotSupportedKindInNpmError; use crate::resolver::NpmModuleLoader; use crate::tools::check; use crate::tools::check::TypeChecker; use crate::util::progress_bar::ProgressBar; use crate::util::text_encoding::code_without_source_map; use crate::util::text_encoding::source_map_from_code; -use crate::worker::ModuleLoaderAndSourceMapGetter; +use crate::worker::CreateModuleLoaderResult; use crate::worker::ModuleLoaderFactory; use deno_ast::MediaType; +use deno_ast::ModuleKind; use deno_core::anyhow::anyhow; use deno_core::anyhow::bail; use deno_core::anyhow::Context; @@ -63,9 +68,12 @@ use deno_graph::Module; use deno_graph::ModuleGraph; use deno_graph::Resolution; use deno_runtime::code_cache; +use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::create_host_defined_options; +use deno_runtime::deno_node::NodeRequireLoader; use deno_runtime::deno_permissions::PermissionsContainer; use deno_semver::npm::NpmPackageReqReference; +use 
node_resolver::InNpmPackageChecker; use node_resolver::NodeResolutionMode; pub struct ModuleLoadPreparer { @@ -198,11 +206,16 @@ struct SharedCliModuleLoaderState { lib_worker: TsTypeLib, initial_cwd: PathBuf, is_inspecting: bool, + is_npm_main: bool, is_repl: bool, + cjs_tracker: Arc, code_cache: Option>, emitter: Arc, + fs: Arc, + in_npm_pkg_checker: Arc, main_module_graph_container: Arc, module_load_preparer: Arc, + node_code_translator: Arc, node_resolver: Arc, npm_resolver: Arc, npm_module_loader: NpmModuleLoader, @@ -218,10 +231,14 @@ impl CliModuleLoaderFactory { #[allow(clippy::too_many_arguments)] pub fn new( options: &CliOptions, + cjs_tracker: Arc, code_cache: Option>, emitter: Arc, + fs: Arc, + in_npm_pkg_checker: Arc, main_module_graph_container: Arc, module_load_preparer: Arc, + node_code_translator: Arc, node_resolver: Arc, npm_resolver: Arc, npm_module_loader: NpmModuleLoader, @@ -235,14 +252,19 @@ impl CliModuleLoaderFactory { lib_worker: options.ts_type_lib_worker(), initial_cwd: options.initial_cwd().to_path_buf(), is_inspecting: options.is_inspecting(), + is_npm_main: options.is_npm_main(), is_repl: matches!( options.sub_command(), DenoSubcommand::Repl(_) | DenoSubcommand::Jupyter(_) ), + cjs_tracker, code_cache, emitter, + fs, + in_npm_pkg_checker, main_module_graph_container, module_load_preparer, + node_code_translator, node_resolver, npm_resolver, npm_module_loader, @@ -259,19 +281,30 @@ impl CliModuleLoaderFactory { is_worker: bool, parent_permissions: PermissionsContainer, permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter { - let loader = Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner { - lib, - is_worker, - parent_permissions, - permissions, + ) -> CreateModuleLoaderResult { + let module_loader = + Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner { + lib, + is_worker, + is_npm_main: self.shared.is_npm_main, + parent_permissions, + permissions, + graph_container: graph_container.clone(), + node_code_translator: self.shared.node_code_translator.clone(), + emitter: self.shared.emitter.clone(), + parsed_source_cache: self.shared.parsed_source_cache.clone(), + shared: self.shared.clone(), + }))); + let node_require_loader = Rc::new(CliNodeRequireLoader::new( + self.shared.emitter.clone(), + self.shared.fs.clone(), graph_container, - emitter: self.shared.emitter.clone(), - parsed_source_cache: self.shared.parsed_source_cache.clone(), - shared: self.shared.clone(), - }))); - ModuleLoaderAndSourceMapGetter { - module_loader: loader, + self.shared.in_npm_pkg_checker.clone(), + self.shared.npm_resolver.clone(), + )); + CreateModuleLoaderResult { + module_loader, + node_require_loader, } } } @@ -280,7 +313,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory { fn create_for_main( &self, root_permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter { + ) -> CreateModuleLoaderResult { self.create_with_lib( (*self.shared.main_module_graph_container).clone(), self.shared.lib_window, @@ -294,7 +327,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory { &self, parent_permissions: PermissionsContainer, permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter { + ) -> CreateModuleLoaderResult { self.create_with_lib( // create a fresh module graph for the worker WorkerModuleGraphContainer::new(Arc::new(ModuleGraph::new( @@ -310,6 +343,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory { struct CliModuleLoaderInner { lib: TsTypeLib, + is_npm_main: bool, is_worker: bool, /// The initial set of permissions 
used to resolve the static imports in the /// worker. These are "allow all" for main worker, and parent thread @@ -318,6 +352,7 @@ struct CliModuleLoaderInner { permissions: PermissionsContainer, shared: Arc, emitter: Arc, + node_code_translator: Arc, parsed_source_cache: Arc, graph_container: TGraphContainer, } @@ -331,24 +366,7 @@ impl maybe_referrer: Option<&ModuleSpecifier>, requested_module_type: RequestedModuleType, ) -> Result { - let code_source = match self.load_prepared_module(specifier).await? { - Some(code_source) => code_source, - None => { - if self.shared.npm_module_loader.if_in_npm_package(specifier) { - self - .shared - .npm_module_loader - .load(specifier, maybe_referrer) - .await? - } else { - let mut msg = format!("Loading unprepared module: {specifier}"); - if let Some(referrer) = maybe_referrer { - msg = format!("{}, imported from: {}", msg, referrer.as_str()); - } - return Err(anyhow!(msg)); - } - } - }; + let code_source = self.load_code_source(specifier, maybe_referrer).await?; let code = if self.shared.is_inspecting { // we need the code with the source map in order for // it to work with --inspect or --inspect-brk @@ -402,6 +420,29 @@ impl )) } + async fn load_code_source( + &self, + specifier: &ModuleSpecifier, + maybe_referrer: Option<&ModuleSpecifier>, + ) -> Result { + if let Some(code_source) = self.load_prepared_module(specifier).await? { + return Ok(code_source); + } + if self.shared.node_resolver.in_npm_package(specifier) { + return self + .shared + .npm_module_loader + .load(specifier, maybe_referrer) + .await; + } + + let mut msg = format!("Loading unprepared module: {specifier}"); + if let Some(referrer) = maybe_referrer { + msg = format!("{}, imported from: {}", msg, referrer.as_str()); + } + Err(anyhow!(msg)) + } + fn resolve_referrer( &self, referrer: &str, @@ -474,15 +515,11 @@ impl if self.shared.is_repl { if let Ok(reference) = NpmPackageReqReference::from_specifier(&specifier) { - return self - .shared - .node_resolver - .resolve_req_reference( - &reference, - referrer, - NodeResolutionMode::Execution, - ) - .map(|res| res.into_url()); + return self.shared.node_resolver.resolve_req_reference( + &reference, + referrer, + NodeResolutionMode::Execution, + ); } } @@ -506,13 +543,15 @@ impl .with_context(|| { format!("Could not resolve '{}'.", module.nv_reference) })? 
- .into_url() } Some(Module::Node(module)) => module.specifier.clone(), Some(Module::Js(module)) => module.specifier.clone(), Some(Module::Json(module)) => module.specifier.clone(), Some(Module::External(module)) => { - node::resolve_specifier_into_node_modules(&module.specifier) + node::resolve_specifier_into_node_modules( + &module.specifier, + self.shared.fs.as_ref(), + ) } None => specifier.into_owned(), }; @@ -534,23 +573,32 @@ impl }) => { let transpile_result = self .emitter - .emit_parsed_source(specifier, media_type, source) + .emit_parsed_source(specifier, media_type, ModuleKind::Esm, source) .await?; // at this point, we no longer need the parsed source in memory, so free it self.parsed_source_cache.free(specifier); Ok(Some(ModuleCodeStringSource { - code: ModuleSourceCode::Bytes(transpile_result), + // note: it's faster to provide a string if we know it's a string + code: ModuleSourceCode::String(transpile_result.into()), found_url: specifier.clone(), media_type, })) } + Some(CodeOrDeferredEmit::Cjs { + specifier, + media_type, + source, + }) => self + .load_maybe_cjs(specifier, media_type, source) + .await + .map(Some), None => Ok(None), } } - fn load_prepared_module_sync( + fn load_prepared_module_for_source_map_sync( &self, specifier: &ModuleSpecifier, ) -> Result, AnyError> { @@ -563,19 +611,31 @@ impl media_type, source, }) => { - let transpile_result = self - .emitter - .emit_parsed_source_sync(specifier, media_type, source)?; + let transpile_result = self.emitter.emit_parsed_source_sync( + specifier, + media_type, + ModuleKind::Esm, + source, + )?; // at this point, we no longer need the parsed source in memory, so free it self.parsed_source_cache.free(specifier); Ok(Some(ModuleCodeStringSource { - code: ModuleSourceCode::Bytes(transpile_result), + // note: it's faster to provide a string if we know it's a string + code: ModuleSourceCode::String(transpile_result.into()), found_url: specifier.clone(), media_type, })) } + Some(CodeOrDeferredEmit::Cjs { .. }) => { + self.parsed_source_cache.free(specifier); + + // todo(dsherret): to make this work, we should probably just + // rely on the CJS export cache. At the moment this is hard because + // cjs export analysis is only async + Ok(None) + } None => Ok(None), } } @@ -605,20 +665,40 @@ impl source, media_type, specifier, + is_script, .. })) => { + // todo(dsherret): revert in https://github.com/denoland/deno/pull/26439 + if self.is_npm_main && *is_script + || self.shared.cjs_tracker.is_cjs_with_known_is_script( + specifier, + *media_type, + *is_script, + )? 
+ { + return Ok(Some(CodeOrDeferredEmit::Cjs { + specifier, + media_type: *media_type, + source, + })); + } let code: ModuleCodeString = match media_type { MediaType::JavaScript | MediaType::Unknown - | MediaType::Cjs | MediaType::Mjs | MediaType::Json => source.clone().into(), MediaType::Dts | MediaType::Dcts | MediaType::Dmts => { Default::default() } + MediaType::Cjs | MediaType::Cts => { + return Ok(Some(CodeOrDeferredEmit::Cjs { + specifier, + media_type: *media_type, + source, + })); + } MediaType::TypeScript | MediaType::Mts - | MediaType::Cts | MediaType::Jsx | MediaType::Tsx => { return Ok(Some(CodeOrDeferredEmit::DeferredEmit { @@ -627,7 +707,7 @@ impl source, })); } - MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => { + MediaType::Css | MediaType::Wasm | MediaType::SourceMap => { panic!("Unexpected media type {media_type} for {specifier}") } }; @@ -649,6 +729,48 @@ impl | None => Ok(None), } } + + async fn load_maybe_cjs( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + original_source: &Arc, + ) -> Result { + let js_source = if media_type.is_emittable() { + Cow::Owned( + self + .emitter + .emit_parsed_source( + specifier, + media_type, + ModuleKind::Cjs, + original_source, + ) + .await?, + ) + } else { + Cow::Borrowed(original_source.as_ref()) + }; + let text = self + .node_code_translator + .translate_cjs_to_esm(specifier, Some(js_source)) + .await?; + // at this point, we no longer need the parsed source in memory, so free it + self.parsed_source_cache.free(specifier); + Ok(ModuleCodeStringSource { + code: match text { + // perf: if the text is borrowed, that means it didn't make any changes + // to the original source, so we can just provide that instead of cloning + // the borrowed text + Cow::Borrowed(_) => { + ModuleSourceCode::String(original_source.clone().into()) + } + Cow::Owned(text) => ModuleSourceCode::String(text.into()), + }, + found_url: specifier.clone(), + media_type, + }) + } } enum CodeOrDeferredEmit<'a> { @@ -658,6 +780,11 @@ enum CodeOrDeferredEmit<'a> { media_type: MediaType, source: &'a Arc, }, + Cjs { + specifier: &'a ModuleSpecifier, + media_type: MediaType, + source: &'a Arc, + }, } // todo(dsherret): this double Rc boxing is not ideal @@ -819,7 +946,10 @@ impl ModuleLoader "wasm" | "file" | "http" | "https" | "data" | "blob" => (), _ => return None, } - let source = self.0.load_prepared_module_sync(&specifier).ok()??; + let source = self + .0 + .load_prepared_module_for_source_map_sync(&specifier) + .ok()??; source_map_from_code(source.code.as_bytes()) } @@ -898,3 +1028,79 @@ impl ModuleGraphUpdatePermit for WorkerModuleGraphUpdatePermit { drop(self.permit); // explicit drop for clarity } } + +#[derive(Debug)] +struct CliNodeRequireLoader { + emitter: Arc, + fs: Arc, + graph_container: TGraphContainer, + in_npm_pkg_checker: Arc, + npm_resolver: Arc, +} + +impl + CliNodeRequireLoader +{ + pub fn new( + emitter: Arc, + fs: Arc, + graph_container: TGraphContainer, + in_npm_pkg_checker: Arc, + npm_resolver: Arc, + ) -> Self { + Self { + emitter, + fs, + graph_container, + in_npm_pkg_checker, + npm_resolver, + } + } +} + +impl NodeRequireLoader + for CliNodeRequireLoader +{ + fn ensure_read_permission<'a>( + &self, + permissions: &mut dyn deno_runtime::deno_node::NodePermissions, + path: &'a Path, + ) -> Result, AnyError> { + if let Ok(url) = deno_path_util::url_from_file_path(path) { + // allow reading if it's in the module graph + if self.graph_container.graph().get(&url).is_some() { + return 
Ok(std::borrow::Cow::Borrowed(path)); + } + } + self.npm_resolver.ensure_read_permission(permissions, path) + } + + fn load_text_file_lossy(&self, path: &Path) -> Result { + // todo(dsherret): use the preloaded module from the graph if available? + let media_type = MediaType::from_path(path); + let text = self.fs.read_text_file_lossy_sync(path, None)?; + if media_type.is_emittable() { + let specifier = deno_path_util::url_from_file_path(path)?; + if self.in_npm_pkg_checker.in_npm_package(&specifier) { + return Err( + NotSupportedKindInNpmError { + media_type, + specifier, + } + .into(), + ); + } + self.emitter.emit_parsed_source_sync( + &specifier, + media_type, + // this is probably not super accurate due to require esm, but probably ok. + // If we find this causes a lot of churn in the emit cache then we should + // investigate how we can make this better + ModuleKind::Cjs, + &text.into(), + ) + } else { + Ok(text) + } + } +} diff --git a/cli/napi/README.md b/cli/napi/README.md deleted file mode 100644 index 7b359ac6ec..0000000000 --- a/cli/napi/README.md +++ /dev/null @@ -1,114 +0,0 @@ -# napi - -This directory contains source for Deno's Node-API implementation. It depends on -`napi_sym` and `deno_napi`. - -Files are generally organized the same as in Node.js's implementation to ease in -ensuring compatibility. - -## Adding a new function - -Add the symbol name to -[`cli/napi_sym/symbol_exports.json`](../napi_sym/symbol_exports.json). - -```diff -{ - "symbols": [ - ... - "napi_get_undefined", -- "napi_get_null" -+ "napi_get_null", -+ "napi_get_boolean" - ] -} -``` - -Determine where to place the implementation. `napi_get_boolean` is related to JS -values so we will place it in `js_native_api.rs`. If something is not clear, -just create a new file module. - -See [`napi_sym`](../napi_sym/) for writing the implementation: - -```rust -#[napi_sym::napi_sym] -pub fn napi_get_boolean( - env: *mut Env, - value: bool, - result: *mut napi_value, -) -> Result { - // ... - Ok(()) -} -``` - -Update the generated symbol lists using the script: - -``` -deno run --allow-write tools/napi/generate_symbols_lists.js -``` - -Add a test in [`/tests/napi`](../../tests/napi/). You can also refer to Node.js -test suite for Node-API. - -```js -// tests/napi/boolean_test.js -import { assertEquals, loadTestLibrary } from "./common.js"; -const lib = loadTestLibrary(); -Deno.test("napi get boolean", function () { - assertEquals(lib.test_get_boolean(true), true); - assertEquals(lib.test_get_boolean(false), false); -}); -``` - -```rust -// tests/napi/src/boolean.rs - -use napi_sys::Status::napi_ok; -use napi_sys::ValueType::napi_boolean; -use napi_sys::*; - -extern "C" fn test_boolean( - env: napi_env, - info: napi_callback_info, -) -> napi_value { - let (args, argc, _) = crate::get_callback_info!(env, info, 1); - assert_eq!(argc, 1); - - let mut ty = -1; - assert!(unsafe { napi_typeof(env, args[0], &mut ty) } == napi_ok); - assert_eq!(ty, napi_boolean); - - // Use napi_get_boolean here... - - value -} - -pub fn init(env: napi_env, exports: napi_value) { - let properties = &[crate::new_property!(env, "test_boolean\0", test_boolean)]; - - unsafe { - napi_define_properties(env, exports, properties.len(), properties.as_ptr()) - }; -} -``` - -```diff -// tests/napi/src/lib.rs - -+ mod boolean; - -... - -#[no_mangle] -unsafe extern "C" fn napi_register_module_v1( - env: napi_env, - exports: napi_value, -) -> napi_value { - ... 
-+ boolean::init(env, exports); - - exports -} -``` - -Run the test using `cargo test -p tests/napi`. diff --git a/cli/napi/mod.rs b/cli/napi/mod.rs deleted file mode 100644 index 811efb1ecc..0000000000 --- a/cli/napi/mod.rs +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. - -#![allow(unused_mut)] -#![allow(non_camel_case_types)] -#![allow(clippy::undocumented_unsafe_blocks)] - -//! Symbols to be exported are now defined in this JSON file. -//! The `#[napi_sym]` macro checks for missing entries and panics. -//! -//! `./tools/napi/generate_symbols_list.js` is used to generate the LINK `cli/exports.def` on Windows, -//! which is also checked into git. -//! -//! To add a new napi function: -//! 1. Place `#[napi_sym]` on top of your implementation. -//! 2. Add the function's identifier to this JSON list. -//! 3. Finally, run `tools/napi/generate_symbols_list.js` to update `cli/napi/generated_symbol_exports_list_*.def`. - -pub mod js_native_api; -pub mod node_api; -pub mod util; -pub mod uv; diff --git a/cli/node.rs b/cli/node.rs index 733d5f8717..1d410a726a 100644 --- a/cli/node.rs +++ b/cli/node.rs @@ -1,11 +1,14 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +use std::borrow::Cow; use std::sync::Arc; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; use deno_graph::ParsedSourceStore; +use deno_path_util::url_from_file_path; +use deno_path_util::url_to_file_path; use deno_runtime::deno_fs; use deno_runtime::deno_node::DenoFsNodeResolverEnv; use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis; @@ -18,8 +21,8 @@ use serde::Serialize; use crate::cache::CacheDBHash; use crate::cache::NodeAnalysisCache; use crate::cache::ParsedSourceCache; -use crate::resolver::CliNodeResolver; -use crate::util::fs::canonicalize_path_maybe_not_exists; +use crate::resolver::CjsTracker; +use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs; pub type CliNodeCodeTranslator = NodeCodeTranslator; @@ -32,14 +35,14 @@ pub type CliNodeCodeTranslator = /// because the node_modules folder might not exist at that time. pub fn resolve_specifier_into_node_modules( specifier: &ModuleSpecifier, + fs: &dyn deno_fs::FileSystem, ) -> ModuleSpecifier { - specifier - .to_file_path() + url_to_file_path(specifier) .ok() // this path might not exist at the time the graph is being created // because the node_modules folder might not yet exist - .and_then(|path| canonicalize_path_maybe_not_exists(&path).ok()) - .and_then(|path| ModuleSpecifier::from_file_path(path).ok()) + .and_then(|path| canonicalize_path_maybe_not_exists_with_fs(&path, fs).ok()) + .and_then(|path| url_from_file_path(&path).ok()) .unwrap_or_else(|| specifier.clone()) } @@ -56,23 +59,29 @@ pub enum CliCjsAnalysis { pub struct CliCjsCodeAnalyzer { cache: NodeAnalysisCache, + cjs_tracker: Arc, fs: deno_fs::FileSystemRc, - node_resolver: Arc, parsed_source_cache: Option>, + // todo(dsherret): hack, remove in https://github.com/denoland/deno/pull/26439 + // For example, this does not properly handle if cjs analysis was already done + // and has been cached. 
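An aside on the `cli/node.rs` hunk above: `resolve_specifier_into_node_modules` now goes through `canonicalize_path_maybe_not_exists_with_fs`, which, per the surrounding comments, has to tolerate a `node_modules` folder that does not exist yet. A minimal std-only sketch of that pattern follows; it is an assumption about the helper's behavior, not code from this PR:

```rust
use std::io;
use std::path::{Path, PathBuf};

/// Canonicalize `path`; if it does not exist yet, canonicalize the
/// nearest existing ancestor and re-append the missing components.
fn canonicalize_maybe_not_exists(path: &Path) -> io::Result<PathBuf> {
    let mut current = path.to_path_buf();
    let mut missing = Vec::new();
    loop {
        match current.canonicalize() {
            Ok(base) => {
                // Re-append the peeled-off components, shallowest first.
                return Ok(missing.iter().rev().fold(base, |p, c| p.join(c)));
            }
            Err(err) if err.kind() == io::ErrorKind::NotFound => {
                let Some(name) = current.file_name().map(|n| n.to_owned()) else {
                    return Err(err); // ran out of ancestors
                };
                missing.push(name);
                current.pop();
            }
            Err(err) => return Err(err),
        }
    }
}
```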
+ is_npm_main: bool, } impl CliCjsCodeAnalyzer { pub fn new( cache: NodeAnalysisCache, + cjs_tracker: Arc, fs: deno_fs::FileSystemRc, - node_resolver: Arc, parsed_source_cache: Option>, + is_npm_main: bool, ) -> Self { Self { cache, + cjs_tracker, fs, - node_resolver, parsed_source_cache, + is_npm_main, } } @@ -88,7 +97,7 @@ impl CliCjsCodeAnalyzer { return Ok(analysis); } - let mut media_type = MediaType::from_specifier(specifier); + let media_type = MediaType::from_specifier(specifier); if media_type == MediaType::Json { return Ok(CliCjsAnalysis::Cjs { exports: vec![], @@ -96,62 +105,53 @@ impl CliCjsCodeAnalyzer { }); } - if media_type == MediaType::JavaScript { - if let Some(package_json) = - self.node_resolver.get_closest_package_json(specifier)? - { - match package_json.typ.as_str() { - "commonjs" => { - media_type = MediaType::Cjs; - } - "module" => { - media_type = MediaType::Mjs; - } - _ => {} - } - } - } + let cjs_tracker = self.cjs_tracker.clone(); + let is_npm_main = self.is_npm_main; + let is_maybe_cjs = + cjs_tracker.is_maybe_cjs(specifier, media_type)? || is_npm_main; + let analysis = if is_maybe_cjs { + let maybe_parsed_source = self + .parsed_source_cache + .as_ref() + .and_then(|c| c.remove_parsed_source(specifier)); - let maybe_parsed_source = self - .parsed_source_cache - .as_ref() - .and_then(|c| c.remove_parsed_source(specifier)); - - let analysis = deno_core::unsync::spawn_blocking({ - let specifier = specifier.clone(); - let source: Arc = source.into(); - move || -> Result<_, deno_ast::ParseDiagnostic> { - let parsed_source = - maybe_parsed_source.map(Ok).unwrap_or_else(|| { - deno_ast::parse_program(deno_ast::ParseParams { - specifier, - text: source, - media_type, - capture_tokens: true, - scope_analysis: false, - maybe_syntax: None, + deno_core::unsync::spawn_blocking({ + let specifier = specifier.clone(); + let source: Arc = source.into(); + move || -> Result<_, AnyError> { + let parsed_source = + maybe_parsed_source.map(Ok).unwrap_or_else(|| { + deno_ast::parse_program(deno_ast::ParseParams { + specifier, + text: source, + media_type, + capture_tokens: true, + scope_analysis: false, + maybe_syntax: None, + }) + })?; + let is_script = parsed_source.compute_is_script(); + let is_cjs = cjs_tracker.is_cjs_with_known_is_script( + parsed_source.specifier(), + media_type, + is_script, + )? || is_script && is_npm_main; + if is_cjs { + let analysis = parsed_source.analyze_cjs(); + Ok(CliCjsAnalysis::Cjs { + exports: analysis.exports, + reexports: analysis.reexports, }) - })?; - if parsed_source.is_script() { - let analysis = parsed_source.analyze_cjs(); - Ok(CliCjsAnalysis::Cjs { - exports: analysis.exports, - reexports: analysis.reexports, - }) - } else if media_type == MediaType::Cjs { - // FIXME: `deno_ast` should internally handle MediaType::Cjs implying that - // the result must never be Esm - Ok(CliCjsAnalysis::Cjs { - exports: vec![], - reexports: vec![], - }) - } else { - Ok(CliCjsAnalysis::Esm) + } else { + Ok(CliCjsAnalysis::Esm) + } } - } - }) - .await - .unwrap()?; + }) + .await + .unwrap()? 
+ } else { + CliCjsAnalysis::Esm + }; self .cache @@ -163,11 +163,11 @@ impl CliCjsCodeAnalyzer { #[async_trait::async_trait(?Send)] impl CjsCodeAnalyzer for CliCjsCodeAnalyzer { - async fn analyze_cjs( + async fn analyze_cjs<'a>( &self, specifier: &ModuleSpecifier, - source: Option, - ) -> Result { + source: Option>, + ) -> Result, AnyError> { let source = match source { Some(source) => source, None => { @@ -175,7 +175,7 @@ impl CjsCodeAnalyzer for CliCjsCodeAnalyzer { if let Ok(source_from_file) = self.fs.read_text_file_lossy_async(path, None).await { - source_from_file + Cow::Owned(source_from_file) } else { return Ok(ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports { exports: vec![], diff --git a/cli/npm/byonm.rs b/cli/npm/byonm.rs index fc095ab16f..45fa4cfd1f 100644 --- a/cli/npm/byonm.rs +++ b/cli/npm/byonm.rs @@ -10,8 +10,8 @@ use deno_core::serde_json; use deno_core::url::Url; use deno_resolver::npm::ByonmNpmResolver; use deno_resolver::npm::ByonmNpmResolverCreateOptions; +use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_runtime::deno_node::NodePermissions; -use deno_runtime::deno_node::NodeRequireResolver; use deno_runtime::ops::process::NpmProcessStateProvider; use deno_semver::package::PackageReq; use node_resolver::NpmResolver; @@ -25,30 +25,14 @@ use super::InnerCliNpmResolverRef; use super::ResolvePkgFolderFromDenoReqError; pub type CliByonmNpmResolverCreateOptions = - ByonmNpmResolverCreateOptions; -pub type CliByonmNpmResolver = ByonmNpmResolver; + ByonmNpmResolverCreateOptions; +pub type CliByonmNpmResolver = + ByonmNpmResolver; // todo(dsherret): the services hanging off `CliNpmResolver` doesn't seem ideal. We should probably decouple. #[derive(Debug)] struct CliByonmWrapper(Arc); -impl NodeRequireResolver for CliByonmWrapper { - fn ensure_read_permission<'a>( - &self, - permissions: &mut dyn NodePermissions, - path: &'a Path, - ) -> Result, AnyError> { - if !path - .components() - .any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules") - { - permissions.check_read_path(path) - } else { - Ok(Cow::Borrowed(path)) - } - } -} - impl NpmProcessStateProvider for CliByonmWrapper { fn get_npm_process_state(&self) -> String { serde_json::to_string(&NpmProcessState { @@ -67,10 +51,6 @@ impl CliNpmResolver for CliByonmNpmResolver { self } - fn into_require_resolver(self: Arc) -> Arc { - Arc::new(CliByonmWrapper(self)) - } - fn into_process_state_provider( self: Arc, ) -> Arc { @@ -100,6 +80,21 @@ impl CliNpmResolver for CliByonmNpmResolver { .map_err(ResolvePkgFolderFromDenoReqError::Byonm) } + fn ensure_read_permission<'a>( + &self, + permissions: &mut dyn NodePermissions, + path: &'a Path, + ) -> Result, AnyError> { + if !path + .components() + .any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules") + { + permissions.check_read_path(path).map_err(Into::into) + } else { + Ok(Cow::Borrowed(path)) + } + } + fn check_state_hash(&self) -> Option { // it is very difficult to determine the check state hash for byonm // so we just return None to signify check caching is not supported diff --git a/cli/npm/common.rs b/cli/npm/common.rs index de282310a1..55f1bc086d 100644 --- a/cli/npm/common.rs +++ b/cli/npm/common.rs @@ -3,6 +3,7 @@ use base64::prelude::BASE64_STANDARD; use base64::Engine; use deno_core::anyhow::bail; +use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_npm::npm_rc::RegistryConfig; use http::header; @@ -36,17 +37,21 @@ pub fn maybe_auth_header_for_npm_registry( } if username.is_some() && password.is_some() { + // The npm client 
does some double encoding when generating the + // bearer token value, see + // https://github.com/npm/cli/blob/780afc50e3a345feb1871a28e33fa48235bc3bd5/workspaces/config/lib/index.js#L846-L851 + let pw_base64 = BASE64_STANDARD + .decode(password.unwrap()) + .with_context(|| "The password in npmrc is an invalid base64 string")?; + let bearer = BASE64_STANDARD.encode(format!( + "{}:{}", + username.unwrap(), + String::from_utf8_lossy(&pw_base64) + )); + return Ok(Some(( header::AUTHORIZATION, - header::HeaderValue::from_str(&format!( - "Basic {}", - BASE64_STANDARD.encode(format!( - "{}:{}", - username.unwrap(), - password.unwrap() - )) - )) - .unwrap(), + header::HeaderValue::from_str(&format!("Basic {}", bearer)).unwrap(), ))); } diff --git a/cli/npm/managed/cache/mod.rs b/cli/npm/managed/cache/mod.rs index fa0e8c8a59..8ae99f41e0 100644 --- a/cli/npm/managed/cache/mod.rs +++ b/cli/npm/managed/cache/mod.rs @@ -26,7 +26,7 @@ use crate::cache::CACHE_PERM; use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::hard_link_dir_recursive; -mod registry_info; +pub mod registry_info; mod tarball; mod tarball_extract; @@ -36,7 +36,7 @@ pub use tarball::TarballCache; /// Stores a single copy of npm packages in a cache. #[derive(Debug)] pub struct NpmCache { - cache_dir: NpmCacheDir, + cache_dir: Arc, cache_setting: CacheSetting, npmrc: Arc, /// ensures a package is only downloaded once per run @@ -45,7 +45,7 @@ pub struct NpmCache { impl NpmCache { pub fn new( - cache_dir: NpmCacheDir, + cache_dir: Arc, cache_setting: CacheSetting, npmrc: Arc, ) -> Self { @@ -61,6 +61,10 @@ impl NpmCache { &self.cache_setting } + pub fn root_dir_path(&self) -> &Path { + self.cache_dir.root_dir() + } + pub fn root_dir_url(&self) -> &Url { self.cache_dir.root_dir_url() } @@ -152,10 +156,6 @@ impl NpmCache { self.cache_dir.package_name_folder(name, registry_url) } - pub fn root_folder(&self) -> PathBuf { - self.cache_dir.root_dir().to_owned() - } - pub fn resolve_package_folder_id_from_specifier( &self, specifier: &ModuleSpecifier, diff --git a/cli/npm/managed/cache/registry_info.rs b/cli/npm/managed/cache/registry_info.rs index 6c4a7503b5..6d39d3c13f 100644 --- a/cli/npm/managed/cache/registry_info.rs +++ b/cli/npm/managed/cache/registry_info.rs @@ -84,7 +84,7 @@ impl RegistryInfoDownloader { self.load_package_info_inner(name).await.with_context(|| { format!( "Error getting response at {} for package \"{}\"", - self.get_package_url(name), + get_package_url(&self.npmrc, name), name ) }) @@ -190,7 +190,7 @@ impl RegistryInfoDownloader { fn create_load_future(self: &Arc, name: &str) -> LoadFuture { let downloader = self.clone(); - let package_url = self.get_package_url(name); + let package_url = get_package_url(&self.npmrc, name); let registry_config = self.npmrc.get_registry_config(name); let maybe_auth_header = match maybe_auth_header_for_npm_registry(registry_config) { @@ -239,36 +239,36 @@ impl RegistryInfoDownloader { .map(|r| r.map_err(Arc::new)) .boxed_local() } - - fn get_package_url(&self, name: &str) -> Url { - let registry_url = self.npmrc.get_registry_url(name); - // The '/' character in scoped package names "@scope/name" must be - // encoded for older third party registries. Newer registries and - // npm itself support both ways - // - encoded: https://registry.npmjs.org/@rollup%2fplugin-json - // - non-ecoded: https://registry.npmjs.org/@rollup/plugin-json - // To support as many third party registries as possible we'll - // always encode the '/' character. 
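To make the `maybe_auth_header_for_npm_registry` fix at the top of this chunk concrete: the `_password` stored in `.npmrc` is itself base64 (the double encoding the linked npm cli source describes), so it must be decoded before the `user:pass` pair is re-encoded for the `Basic` scheme. A standalone sketch using the same `base64` and `http` crates the file imports:

```rust
use base64::prelude::BASE64_STANDARD;
use base64::Engine;
use http::header::HeaderValue;

/// Build the Authorization value from an .npmrc username and
/// base64-encoded password.
fn basic_auth_value(
    username: &str,
    password_b64: &str,
) -> Result<HeaderValue, Box<dyn std::error::Error>> {
    // Undo npm's extra layer of encoding on the stored password.
    let password = BASE64_STANDARD.decode(password_b64)?;
    // Re-encode the canonical `user:pass` pair.
    let token = BASE64_STANDARD.encode(format!(
        "{}:{}",
        username,
        String::from_utf8_lossy(&password)
    ));
    Ok(HeaderValue::from_str(&format!("Basic {token}"))?)
}
```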
- - // list of all characters used in npm packages: - // !, ', (, ), *, -, ., /, [0-9], @, [A-Za-z], _, ~ - const ASCII_SET: percent_encoding::AsciiSet = - percent_encoding::NON_ALPHANUMERIC - .remove(b'!') - .remove(b'\'') - .remove(b'(') - .remove(b')') - .remove(b'*') - .remove(b'-') - .remove(b'.') - .remove(b'@') - .remove(b'_') - .remove(b'~'); - let name = percent_encoding::utf8_percent_encode(name, &ASCII_SET); - registry_url - // Ensure that scoped package name percent encoding is lower cased - // to match npm. - .join(&name.to_string().replace("%2F", "%2f")) - .unwrap() - } +} + +pub fn get_package_url(npmrc: &ResolvedNpmRc, name: &str) -> Url { + let registry_url = npmrc.get_registry_url(name); + // The '/' character in scoped package names "@scope/name" must be + // encoded for older third party registries. Newer registries and + // npm itself support both ways + // - encoded: https://registry.npmjs.org/@rollup%2fplugin-json + // - non-encoded: https://registry.npmjs.org/@rollup/plugin-json + // To support as many third party registries as possible we'll + // always encode the '/' character. + + // list of all characters used in npm packages: + // !, ', (, ), *, -, ., /, [0-9], @, [A-Za-z], _, ~ + const ASCII_SET: percent_encoding::AsciiSet = + percent_encoding::NON_ALPHANUMERIC + .remove(b'!') + .remove(b'\'') + .remove(b'(') + .remove(b')') + .remove(b'*') + .remove(b'-') + .remove(b'.') + .remove(b'@') + .remove(b'_') + .remove(b'~'); + let name = percent_encoding::utf8_percent_encode(name, &ASCII_SET); + registry_url + // Ensure that scoped package name percent encoding is lower cased + // to match npm. + .join(&name.to_string().replace("%2F", "%2f")) + .unwrap() } diff --git a/cli/npm/managed/mod.rs b/cli/npm/managed/mod.rs index ec50a9c65a..4a91bc3474 100644 --- a/cli/npm/managed/mod.rs +++ b/cli/npm/managed/mod.rs @@ -12,6 +12,7 @@ use deno_cache_dir::npm::NpmCacheDir; use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::serde_json; +use deno_core::url::Url; use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::registry::NpmPackageInfo; use deno_npm::registry::NpmRegistryApi; @@ -24,12 +25,12 @@ use deno_npm::NpmSystemInfo; use deno_runtime::colors; use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::NodePermissions; -use deno_runtime::deno_node::NodeRequireResolver; use deno_runtime::ops::process::NpmProcessStateProvider; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; use node_resolver::errors::PackageFolderResolveError; use node_resolver::errors::PackageFolderResolveIoError; +use node_resolver::InNpmPackageChecker; use node_resolver::NpmResolver; use resolution::AddPkgReqsResult; @@ -38,7 +39,7 @@ use crate::args::LifecycleScriptsConfig; use crate::args::NpmInstallDepsProvider; use crate::args::NpmProcessState; use crate::args::NpmProcessStateKind; -use crate::cache::DenoCacheEnvFsAdapter; +use crate::args::PackageJsonDepValueParseWithLocationError; use crate::cache::FastInsecureHasher; use crate::http_util::HttpClientProvider; use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs; @@ -55,7 +56,7 @@ use super::CliNpmResolver; use super::InnerCliNpmResolverRef; use super::ResolvePkgFolderFromDenoReqError; -mod cache; +pub mod cache; mod registry; mod resolution; mod resolvers; @@ -65,12 +66,12 @@ pub enum CliNpmResolverManagedSnapshotOption { Specified(Option), } -pub struct CliNpmResolverManagedCreateOptions { +pub struct CliManagedNpmResolverCreateOptions { pub snapshot:
CliNpmResolverManagedSnapshotOption, pub maybe_lockfile: Option>, pub fs: Arc, pub http_client_provider: Arc, - pub npm_global_cache_dir: PathBuf, + pub npm_cache_dir: Arc, pub cache_setting: crate::args::CacheSetting, pub text_only_progress_bar: crate::util::progress_bar::ProgressBar, pub maybe_node_modules_path: Option, @@ -81,7 +82,7 @@ pub struct CliNpmResolverManagedCreateOptions { } pub async fn create_managed_npm_resolver_for_lsp( - options: CliNpmResolverManagedCreateOptions, + options: CliManagedNpmResolverCreateOptions, ) -> Arc { let npm_cache = create_cache(&options); let npm_api = create_api(&options, npm_cache.clone()); @@ -114,7 +115,7 @@ pub async fn create_managed_npm_resolver_for_lsp( } pub async fn create_managed_npm_resolver( - options: CliNpmResolverManagedCreateOptions, + options: CliManagedNpmResolverCreateOptions, ) -> Result, AnyError> { let npm_cache = create_cache(&options); let npm_api = create_api(&options, npm_cache.clone()); @@ -188,20 +189,16 @@ fn create_inner( )) } -fn create_cache(options: &CliNpmResolverManagedCreateOptions) -> Arc { +fn create_cache(options: &CliManagedNpmResolverCreateOptions) -> Arc { Arc::new(NpmCache::new( - NpmCacheDir::new( - &DenoCacheEnvFsAdapter(options.fs.as_ref()), - options.npm_global_cache_dir.clone(), - options.npmrc.get_all_known_registries_urls(), - ), + options.npm_cache_dir.clone(), options.cache_setting.clone(), options.npmrc.clone(), )) } fn create_api( - options: &CliNpmResolverManagedCreateOptions, + options: &CliManagedNpmResolverCreateOptions, npm_cache: Arc, ) -> Arc { Arc::new(CliNpmRegistryApi::new( @@ -258,6 +255,35 @@ async fn snapshot_from_lockfile( Ok(snapshot) } +#[derive(Debug)] +struct ManagedInNpmPackageChecker { + root_dir: Url, +} + +impl InNpmPackageChecker for ManagedInNpmPackageChecker { + fn in_npm_package(&self, specifier: &Url) -> bool { + specifier.as_ref().starts_with(self.root_dir.as_str()) + } +} + +pub struct CliManagedInNpmPkgCheckerCreateOptions<'a> { + pub root_cache_dir_url: &'a Url, + pub maybe_node_modules_path: Option<&'a Path>, +} + +pub fn create_managed_in_npm_pkg_checker( + options: CliManagedInNpmPkgCheckerCreateOptions, +) -> Arc { + let root_dir = match options.maybe_node_modules_path { + Some(node_modules_folder) => { + deno_path_util::url_from_directory_path(node_modules_folder).unwrap() + } + None => options.root_cache_dir_url.clone(), + }; + debug_assert!(root_dir.as_str().ends_with('/')); + Arc::new(ManagedInNpmPackageChecker { root_dir }) +} + /// An npm resolver where the resolution is managed by Deno rather than /// the user bringing their own node_modules (BYONM) on the file system. pub struct ManagedCliNpmResolver { @@ -480,19 +506,24 @@ impl ManagedCliNpmResolver { self.resolution.resolve_pkg_id_from_pkg_req(req) } - pub fn ensure_no_pkg_json_dep_errors(&self) -> Result<(), AnyError> { + pub fn ensure_no_pkg_json_dep_errors( + &self, + ) -> Result<(), Box> { for err in self.npm_install_deps_provider.pkg_json_dep_errors() { - match err { + match &err.source { deno_package_json::PackageJsonDepValueParseError::VersionReq(_) => { - return Err( - AnyError::from(err.clone()) - .context("Failed to install from package.json"), - ); + return Err(Box::new(err.clone())); } deno_package_json::PackageJsonDepValueParseError::Unsupported { .. 
} => { - log::warn!("{} {} in package.json", colors::yellow("Warning"), err) + // only warn for this one + log::warn!( + "{} {}\n at {}", + colors::yellow("Warning"), + err.source, + err.location, + ) } } } @@ -549,8 +580,16 @@ impl ManagedCliNpmResolver { .map_err(|err| err.into()) } - pub fn global_cache_root_folder(&self) -> PathBuf { - self.npm_cache.root_folder() + pub fn maybe_node_modules_path(&self) -> Option<&Path> { + self.fs_resolver.node_modules_path() + } + + pub fn global_cache_root_path(&self) -> &Path { + self.npm_cache.root_dir_path() + } + + pub fn global_cache_root_url(&self) -> &Url { + self.npm_cache.root_dir_url() } } @@ -585,22 +624,6 @@ impl NpmResolver for ManagedCliNpmResolver { log::debug!("Resolved {} from {} to {}", name, referrer, path.display()); Ok(path) } - - fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { - let root_dir_url = self.fs_resolver.root_dir_url(); - debug_assert!(root_dir_url.as_str().ends_with('/')); - specifier.as_ref().starts_with(root_dir_url.as_str()) - } -} - -impl NodeRequireResolver for ManagedCliNpmResolver { - fn ensure_read_permission<'a>( - &self, - permissions: &mut dyn NodePermissions, - path: &'a Path, - ) -> Result, AnyError> { - self.fs_resolver.ensure_read_permission(permissions, path) - } } impl NpmProcessStateProvider for ManagedCliNpmResolver { @@ -617,10 +640,6 @@ impl CliNpmResolver for ManagedCliNpmResolver { self } - fn into_require_resolver(self: Arc) -> Arc { - self - } - fn into_process_state_provider( self: Arc, ) -> Arc { @@ -681,6 +700,14 @@ impl CliNpmResolver for ManagedCliNpmResolver { .map_err(ResolvePkgFolderFromDenoReqError::Managed) } + fn ensure_read_permission<'a>( + &self, + permissions: &mut dyn NodePermissions, + path: &'a Path, + ) -> Result, AnyError> { + self.fs_resolver.ensure_read_permission(permissions, path) + } + fn check_state_hash(&self) -> Option { // We could go further and check all the individual // npm packages, but that's probably overkill. diff --git a/cli/npm/managed/resolvers/common.rs b/cli/npm/managed/resolvers/common.rs index 867bb4168a..eee11c7604 100644 --- a/cli/npm/managed/resolvers/common.rs +++ b/cli/npm/managed/resolvers/common.rs @@ -17,7 +17,6 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures; use deno_core::futures::StreamExt; -use deno_core::url::Url; use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; @@ -30,9 +29,6 @@ use crate::npm::managed::cache::TarballCache; /// Part of the resolution that interacts with the file system. #[async_trait(?Send)] pub trait NpmPackageFsResolver: Send + Sync { - /// Specifier for the root directory. - fn root_dir_url(&self) -> &Url; - /// The local node_modules folder if it is applicable to the implementation. 
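Back to `get_package_url`, which the `registry_info.rs` hunk above turned into a free function: its effect is easiest to see with a worked example. This sketch reuses the same `percent-encoding` and `url` calls as the moved code:

```rust
use percent_encoding::{utf8_percent_encode, AsciiSet, NON_ALPHANUMERIC};
use url::Url;

// Same set as in registry_info.rs: every character a package name can
// contain is exempted, so '/' in scoped names gets percent-encoded.
const ASCII_SET: AsciiSet = NON_ALPHANUMERIC
    .remove(b'!')
    .remove(b'\'')
    .remove(b'(')
    .remove(b')')
    .remove(b'*')
    .remove(b'-')
    .remove(b'.')
    .remove(b'@')
    .remove(b'_')
    .remove(b'~');

fn package_url(registry_url: &Url, name: &str) -> Url {
    let name = utf8_percent_encode(name, &ASCII_SET).to_string();
    // Lower-case the escape to match npm's own output.
    registry_url.join(&name.replace("%2F", "%2f")).unwrap()
}

fn main() {
    let registry = Url::parse("https://registry.npmjs.org/").unwrap();
    assert_eq!(
        package_url(&registry, "@rollup/plugin-json").as_str(),
        "https://registry.npmjs.org/@rollup%2fplugin-json"
    );
}
```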
fn node_modules_path(&self) -> Option<&Path>; @@ -137,7 +133,7 @@ impl RegistryReadPermissionChecker { } } - permissions.check_read_path(path) + permissions.check_read_path(path).map_err(Into::into) } } diff --git a/cli/npm/managed/resolvers/global.rs b/cli/npm/managed/resolvers/global.rs index 5be315e992..f0193e78e9 100644 --- a/cli/npm/managed/resolvers/global.rs +++ b/cli/npm/managed/resolvers/global.rs @@ -11,7 +11,6 @@ use crate::colors; use async_trait::async_trait; use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; -use deno_core::url::Url; use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; @@ -56,7 +55,7 @@ impl GlobalNpmPackageResolver { Self { registry_read_permission_checker: RegistryReadPermissionChecker::new( fs, - cache.root_folder(), + cache.root_dir_path().to_path_buf(), ), cache, tarball_cache, @@ -69,10 +68,6 @@ impl GlobalNpmPackageResolver { #[async_trait(?Send)] impl NpmPackageFsResolver for GlobalNpmPackageResolver { - fn root_dir_url(&self) -> &Url { - self.cache.root_dir_url() - } - fn node_modules_path(&self) -> Option<&Path> { None } diff --git a/cli/npm/managed/resolvers/local.rs b/cli/npm/managed/resolvers/local.rs index 54f7576ade..eddb0dc9b6 100644 --- a/cli/npm/managed/resolvers/local.rs +++ b/cli/npm/managed/resolvers/local.rs @@ -155,10 +155,6 @@ impl LocalNpmPackageResolver { #[async_trait(?Send)] impl NpmPackageFsResolver for LocalNpmPackageResolver { - fn root_dir_url(&self) -> &Url { - &self.root_node_modules_url - } - fn node_modules_path(&self) -> Option<&Path> { Some(self.root_node_modules_path.as_ref()) } @@ -1039,12 +1035,18 @@ fn junction_or_symlink_dir( if symlink_err.kind() == std::io::ErrorKind::PermissionDenied => { USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed); - junction::create(old_path, new_path).map_err(Into::into) + junction::create(old_path, new_path) + .context("Failed creating junction in node_modules folder") + } + Err(symlink_err) => { + log::warn!( + "{} Unexpected error symlinking node_modules: {symlink_err}", + colors::yellow("Warning") + ); + USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed); + junction::create(old_path, new_path) + .context("Failed creating junction in node_modules folder") } - Err(symlink_err) => Err( - AnyError::from(symlink_err) - .context("Failed creating symlink in node_modules folder"), - ), } } diff --git a/cli/npm/mod.rs b/cli/npm/mod.rs index 53baaf77b4..0d434ca27f 100644 --- a/cli/npm/mod.rs +++ b/cli/npm/mod.rs @@ -4,30 +4,37 @@ mod byonm; mod common; mod managed; +use std::borrow::Cow; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; +use common::maybe_auth_header_for_npm_registry; use dashmap::DashMap; use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; use deno_core::serde_json; +use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::registry::NpmPackageInfo; +use deno_resolver::npm::ByonmInNpmPackageChecker; use deno_resolver::npm::ByonmNpmResolver; use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError; -use deno_runtime::deno_node::NodeRequireResolver; +use deno_runtime::deno_node::NodePermissions; use deno_runtime::ops::process::NpmProcessStateProvider; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; +use managed::cache::registry_info::get_package_url; +use managed::create_managed_in_npm_pkg_checker; +use node_resolver::InNpmPackageChecker; use node_resolver::NpmResolver; use thiserror::Error; -use crate::args::npm_registry_url; use 
crate::file_fetcher::FileFetcher; pub use self::byonm::CliByonmNpmResolver; pub use self::byonm::CliByonmNpmResolverCreateOptions; -pub use self::managed::CliNpmResolverManagedCreateOptions; +pub use self::managed::CliManagedInNpmPkgCheckerCreateOptions; +pub use self::managed::CliManagedNpmResolverCreateOptions; pub use self::managed::CliNpmResolverManagedSnapshotOption; pub use self::managed::ManagedCliNpmResolver; @@ -40,7 +47,7 @@ pub enum ResolvePkgFolderFromDenoReqError { } pub enum CliNpmResolverCreateOptions { - Managed(CliNpmResolverManagedCreateOptions), + Managed(CliManagedNpmResolverCreateOptions), Byonm(CliByonmNpmResolverCreateOptions), } @@ -66,6 +73,22 @@ pub async fn create_cli_npm_resolver( } } +pub enum CreateInNpmPkgCheckerOptions<'a> { + Managed(CliManagedInNpmPkgCheckerCreateOptions<'a>), + Byonm, +} + +pub fn create_in_npm_pkg_checker( + options: CreateInNpmPkgCheckerOptions, +) -> Arc { + match options { + CreateInNpmPkgCheckerOptions::Managed(options) => { + create_managed_in_npm_pkg_checker(options) + } + CreateInNpmPkgCheckerOptions::Byonm => Arc::new(ByonmInNpmPackageChecker), + } +} + pub enum InnerCliNpmResolverRef<'a> { Managed(&'a ManagedCliNpmResolver), #[allow(dead_code)] @@ -74,7 +97,6 @@ pub enum InnerCliNpmResolverRef<'a> { pub trait CliNpmResolver: NpmResolver { fn into_npm_resolver(self: Arc) -> Arc; - fn into_require_resolver(self: Arc) -> Arc; fn into_process_state_provider( self: Arc, ) -> Arc; @@ -105,6 +127,12 @@ pub trait CliNpmResolver: NpmResolver { referrer: &ModuleSpecifier, ) -> Result; + fn ensure_read_permission<'a>( + &self, + permissions: &mut dyn NodePermissions, + path: &'a Path, + ) -> Result, AnyError>; + /// Returns a hash returning the state of the npm resolver /// or `None` if the state currently can't be determined. fn check_state_hash(&self) -> Option; @@ -115,14 +143,19 @@ pub struct NpmFetchResolver { nv_by_req: DashMap>, info_by_name: DashMap>>, file_fetcher: Arc, + npmrc: Arc, } impl NpmFetchResolver { - pub fn new(file_fetcher: Arc) -> Self { + pub fn new( + file_fetcher: Arc, + npmrc: Arc, + ) -> Self { Self { nv_by_req: Default::default(), info_by_name: Default::default(), file_fetcher, + npmrc, } } @@ -157,11 +190,21 @@ impl NpmFetchResolver { return info.value().clone(); } let fetch_package_info = || async { - let info_url = npm_registry_url().join(name).ok()?; + let info_url = get_package_url(&self.npmrc, name); let file_fetcher = self.file_fetcher.clone(); + let registry_config = self.npmrc.get_registry_config(name); + // TODO(bartlomieju): this should error out, not use `.ok()`. + let maybe_auth_header = + maybe_auth_header_for_npm_registry(registry_config).ok()?; // spawn due to the lsp's `Send` requirement let file = deno_core::unsync::spawn(async move { - file_fetcher.fetch_bypass_permissions(&info_url).await.ok() + file_fetcher + .fetch_bypass_permissions_with_maybe_auth( + &info_url, + maybe_auth_header, + ) + .await + .ok() }) .await .ok()??; @@ -172,3 +215,15 @@ impl NpmFetchResolver { info } } + +pub const NPM_CONFIG_USER_AGENT_ENV_VAR: &str = "npm_config_user_agent"; + +pub fn get_npm_config_user_agent() -> String { + format!( + "deno/{} npm/? 
deno/{} {} {}", + env!("CARGO_PKG_VERSION"), + env!("CARGO_PKG_VERSION"), + std::env::consts::OS, + std::env::consts::ARCH + ) +} diff --git a/cli/ops/bench.rs b/cli/ops/bench.rs index 5d1e6e746d..1f4a4bd9b5 100644 --- a/cli/ops/bench.rs +++ b/cli/ops/bench.rs @@ -2,7 +2,6 @@ use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering; -use std::time; use deno_core::error::generic_error; use deno_core::error::type_error; @@ -13,6 +12,7 @@ use deno_core::ModuleSpecifier; use deno_core::OpState; use deno_runtime::deno_permissions::ChildPermissionsArg; use deno_runtime::deno_permissions::PermissionsContainer; +use deno_runtime::deno_web::StartTime; use tokio::sync::mpsc::UnboundedSender; use uuid::Uuid; @@ -56,7 +56,7 @@ struct PermissionsHolder(Uuid, PermissionsContainer); pub fn op_pledge_test_permissions( state: &mut OpState, #[serde] args: ChildPermissionsArg, -) -> Result { +) -> Result { let token = Uuid::new_v4(); let parent_permissions = state.borrow_mut::(); let worker_permissions = parent_permissions.create_child_permissions(args)?; @@ -147,8 +147,8 @@ fn op_dispatch_bench_event(state: &mut OpState, #[serde] event: BenchEvent) { #[op2(fast)] #[number] -fn op_bench_now(state: &mut OpState) -> Result { - let ns = state.borrow::().elapsed().as_nanos(); +fn op_bench_now(state: &mut OpState) -> Result { + let ns = state.borrow::().elapsed().as_nanos(); let ns_u64 = u64::try_from(ns)?; Ok(ns_u64) } diff --git a/cli/ops/jupyter.rs b/cli/ops/jupyter.rs index f7f006d9bd..5bdf97e60f 100644 --- a/cli/ops/jupyter.rs +++ b/cli/ops/jupyter.rs @@ -46,7 +46,7 @@ pub fn op_jupyter_input( state: &mut OpState, #[string] prompt: String, is_password: bool, -) -> Result, AnyError> { +) -> Option { let (last_execution_request, stdin_connection_proxy) = { ( state.borrow::>>>().clone(), @@ -58,11 +58,11 @@ pub fn op_jupyter_input( if let Some(last_request) = maybe_last_request { let JupyterMessageContent::ExecuteRequest(msg) = &last_request.content else { - return Ok(None); + return None; }; if !msg.allow_stdin { - return Ok(None); + return None; } let content = InputRequest { @@ -73,7 +73,7 @@ pub fn op_jupyter_input( let msg = JupyterMessage::new(content, Some(&last_request)); let Ok(()) = stdin_connection_proxy.lock().tx.send(msg) else { - return Ok(None); + return None; }; // Need to spawn a separate thread here, because `blocking_recv()` can't @@ -82,17 +82,25 @@ pub fn op_jupyter_input( stdin_connection_proxy.lock().rx.blocking_recv() }); let Ok(Some(response)) = join_handle.join() else { - return Ok(None); + return None; }; let JupyterMessageContent::InputReply(msg) = response.content else { - return Ok(None); + return None; }; - return Ok(Some(msg.value)); + return Some(msg.value); } - Ok(None) + None +} + +#[derive(Debug, thiserror::Error)] +pub enum JupyterBroadcastError { + #[error(transparent)] + SerdeJson(serde_json::Error), + #[error(transparent)] + ZeroMq(AnyError), } #[op2(async)] @@ -102,7 +110,7 @@ pub async fn op_jupyter_broadcast( #[serde] content: serde_json::Value, #[serde] metadata: serde_json::Value, #[serde] buffers: Vec, -) -> Result<(), AnyError> { +) -> Result<(), JupyterBroadcastError> { let (iopub_connection, last_execution_request) = { let s = state.borrow(); @@ -125,36 +133,35 @@ pub async fn op_jupyter_broadcast( content, err ); - err + JupyterBroadcastError::SerdeJson(err) })?; let jupyter_message = JupyterMessage::new(content, Some(&last_request)) .with_metadata(metadata) .with_buffers(buffers.into_iter().map(|b| b.to_vec().into()).collect()); - 
iopub_connection.lock().send(jupyter_message).await?; + iopub_connection + .lock() + .send(jupyter_message) + .await + .map_err(JupyterBroadcastError::ZeroMq)?; } Ok(()) } #[op2(fast)] -pub fn op_print( - state: &mut OpState, - #[string] msg: &str, - is_err: bool, -) -> Result<(), AnyError> { +pub fn op_print(state: &mut OpState, #[string] msg: &str, is_err: bool) { let sender = state.borrow_mut::>(); if is_err { if let Err(err) = sender.send(StreamContent::stderr(msg)) { log::error!("Failed to send stderr message: {}", err); } - return Ok(()); + return; } if let Err(err) = sender.send(StreamContent::stdout(msg)) { log::error!("Failed to send stdout message: {}", err); } - Ok(()) } diff --git a/cli/ops/testing.rs b/cli/ops/testing.rs index c3f469656c..00aafb8286 100644 --- a/cli/ops/testing.rs +++ b/cli/ops/testing.rs @@ -51,7 +51,7 @@ struct PermissionsHolder(Uuid, PermissionsContainer); pub fn op_pledge_test_permissions( state: &mut OpState, #[serde] args: ChildPermissionsArg, -) -> Result { +) -> Result { let token = Uuid::new_v4(); let parent_permissions = state.borrow_mut::(); let worker_permissions = parent_permissions.create_child_permissions(args)?; @@ -150,7 +150,7 @@ fn op_register_test_step( #[smi] parent_id: usize, #[smi] root_id: usize, #[string] root_name: String, -) -> Result { +) -> usize { let id = NEXT_ID.fetch_add(1, Ordering::SeqCst); let origin = state.borrow::().to_string(); let description = TestStepDescription { @@ -169,7 +169,7 @@ fn op_register_test_step( }; let sender = state.borrow_mut::(); sender.send(TestEvent::StepRegister(description)).ok(); - Ok(id) + id } #[op2(fast)] diff --git a/cli/resolver.rs b/cli/resolver.rs index 84c671268a..710b975093 100644 --- a/cli/resolver.rs +++ b/cli/resolver.rs @@ -4,6 +4,7 @@ use async_trait::async_trait; use dashmap::DashMap; use dashmap::DashSet; use deno_ast::MediaType; +use deno_ast::ModuleKind; use deno_config::workspace::MappedResolution; use deno_config::workspace::MappedResolutionDiagnostic; use deno_config::workspace::MappedResolutionError; @@ -11,6 +12,7 @@ use deno_config::workspace::WorkspaceResolver; use deno_core::anyhow::anyhow; use deno_core::anyhow::Context; use deno_core::error::AnyError; +use deno_core::url::Url; use deno_core::ModuleSourceCode; use deno_core::ModuleSpecifier; use deno_graph::source::ResolutionMode; @@ -29,6 +31,7 @@ use deno_runtime::deno_fs; use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_node::is_builtin_node_module; use deno_runtime::deno_node::NodeResolver; +use deno_runtime::deno_node::PackageJsonResolver; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageReq; use node_resolver::errors::ClosestPkgJsonError; @@ -38,21 +41,22 @@ use node_resolver::errors::PackageFolderResolveErrorKind; use node_resolver::errors::PackageFolderResolveIoError; use node_resolver::errors::PackageNotFoundError; use node_resolver::errors::PackageResolveErrorKind; -use node_resolver::errors::UrlToNodeResolutionError; +use node_resolver::errors::PackageSubpathResolveError; +use node_resolver::InNpmPackageChecker; use node_resolver::NodeModuleKind; use node_resolver::NodeResolution; use node_resolver::NodeResolutionMode; -use node_resolver::PackageJson; +use std::borrow::Cow; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; +use thiserror::Error; use crate::args::JsxImportSourceConfig; use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; use crate::node::CliNodeCodeTranslator; use crate::npm::CliNpmResolver; use crate::npm::InnerCliNpmResolverRef; -use 
crate::util::path::specifier_has_extension; use crate::util::sync::AtomicFlag; use crate::util::text_encoding::from_utf8_lossy_owned; @@ -104,36 +108,32 @@ impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs { #[derive(Debug)] pub struct CliNodeResolver { - cjs_resolutions: Arc, + cjs_tracker: Arc, fs: Arc, + in_npm_pkg_checker: Arc, node_resolver: Arc, npm_resolver: Arc, } impl CliNodeResolver { pub fn new( - cjs_resolutions: Arc, + cjs_tracker: Arc, fs: Arc, + in_npm_pkg_checker: Arc, node_resolver: Arc, npm_resolver: Arc, ) -> Self { Self { - cjs_resolutions, + cjs_tracker, fs, + in_npm_pkg_checker, node_resolver, npm_resolver, } } pub fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { - self.npm_resolver.in_npm_package(specifier) - } - - pub fn get_closest_package_json( - &self, - referrer: &ModuleSpecifier, - ) -> Result>, ClosestPkgJsonError> { - self.node_resolver.get_closest_package_json(referrer) + self.in_npm_pkg_checker.in_npm_package(specifier) } pub fn resolve_if_for_npm_pkg( @@ -153,8 +153,7 @@ impl CliNodeResolver { | NodeResolveErrorKind::UnsupportedEsmUrlScheme(_) | NodeResolveErrorKind::DataUrlReferrer(_) | NodeResolveErrorKind::TypesNotFound(_) - | NodeResolveErrorKind::FinalizeResolution(_) - | NodeResolveErrorKind::UrlToNodeResolution(_) => Err(err.into()), + | NodeResolveErrorKind::FinalizeResolution(_) => Err(err.into()), NodeResolveErrorKind::PackageResolve(err) => { let err = err.into_kind(); match err { @@ -216,7 +215,11 @@ impl CliNodeResolver { referrer: &ModuleSpecifier, mode: NodeResolutionMode, ) -> Result { - let referrer_kind = if self.cjs_resolutions.is_known_cjs(referrer) { + let referrer_kind = if self + .cjs_tracker + .is_maybe_cjs(referrer, MediaType::from_specifier(referrer)) + .map_err(|err| NodeResolveErrorKind::PackageResolve(err.into()))? + { NodeModuleKind::Cjs } else { NodeModuleKind::Esm @@ -226,7 +229,7 @@ impl CliNodeResolver { self .node_resolver .resolve(specifier, referrer, referrer_kind, mode)?; - Ok(self.handle_node_resolution(res)) + Ok(res) } pub fn resolve_req_reference( @@ -234,7 +237,7 @@ impl CliNodeResolver { req_ref: &NpmPackageReqReference, referrer: &ModuleSpecifier, mode: NodeResolutionMode, - ) -> Result { + ) -> Result { self.resolve_req_with_sub_path( req_ref.req(), req_ref.sub_path(), @@ -249,7 +252,7 @@ impl CliNodeResolver { sub_path: Option<&str>, referrer: &ModuleSpecifier, mode: NodeResolutionMode, - ) -> Result { + ) -> Result { let package_folder = self .npm_resolver .resolve_pkg_folder_from_deno_module_req(req, referrer)?; @@ -260,7 +263,7 @@ impl CliNodeResolver { mode, ); match resolution_result { - Ok(resolution) => Ok(resolution), + Ok(url) => Ok(url), Err(err) => { if self.npm_resolver.as_byonm().is_some() { let package_json_path = package_folder.join("package.json"); @@ -271,7 +274,7 @@ impl CliNodeResolver { )); } } - Err(err) + Err(err.into()) } } } @@ -282,16 +285,13 @@ impl CliNodeResolver { sub_path: Option<&str>, maybe_referrer: Option<&ModuleSpecifier>, mode: NodeResolutionMode, - ) -> Result { - let res = self - .node_resolver - .resolve_package_subpath_from_deno_module( - package_folder, - sub_path, - maybe_referrer, - mode, - )?; - Ok(self.handle_node_resolution(res)) + ) -> Result { + self.node_resolver.resolve_package_subpath_from_deno_module( + package_folder, + sub_path, + maybe_referrer, + mode, + ) } pub fn handle_if_in_node_modules( @@ -306,71 +306,45 @@ impl CliNodeResolver { // so canoncalize then check if it's in the node_modules directory. 
// If so, check if we need to store this specifier as being a CJS // resolution. - let specifier = - crate::node::resolve_specifier_into_node_modules(specifier); - if self.in_npm_package(&specifier) { - let resolution = - self.node_resolver.url_to_node_resolution(specifier)?; - let resolution = self.handle_node_resolution(resolution); - return Ok(Some(resolution.into_url())); - } + let specifier = crate::node::resolve_specifier_into_node_modules( + specifier, + self.fs.as_ref(), + ); + return Ok(Some(specifier)); } Ok(None) } - - pub fn url_to_node_resolution( - &self, - specifier: ModuleSpecifier, - ) -> Result { - self.node_resolver.url_to_node_resolution(specifier) - } - - fn handle_node_resolution( - &self, - resolution: NodeResolution, - ) -> NodeResolution { - if let NodeResolution::CommonJs(specifier) = &resolution { - // remember that this was a common js resolution - self.mark_cjs_resolution(specifier.clone()); - } - resolution - } - - pub fn mark_cjs_resolution(&self, specifier: ModuleSpecifier) { - self.cjs_resolutions.insert(specifier); - } } -// todo(dsherret): move to module_loader.rs +#[derive(Debug, Error)] +#[error("{media_type} files are not supported in npm packages: {specifier}")] +pub struct NotSupportedKindInNpmError { + pub media_type: MediaType, + pub specifier: Url, +} + +// todo(dsherret): move to module_loader.rs (it seems to be here due to use in standalone) #[derive(Clone)] pub struct NpmModuleLoader { - cjs_resolutions: Arc, - node_code_translator: Arc, + cjs_tracker: Arc, fs: Arc, - node_resolver: Arc, + node_code_translator: Arc, } impl NpmModuleLoader { pub fn new( - cjs_resolutions: Arc, - node_code_translator: Arc, + cjs_tracker: Arc, fs: Arc, - node_resolver: Arc, + node_code_translator: Arc, ) -> Self { Self { - cjs_resolutions, + cjs_tracker, node_code_translator, fs, - node_resolver, } } - pub fn if_in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { - self.node_resolver.in_npm_package(specifier) - || self.cjs_resolutions.is_known_cjs(specifier) - } - pub async fn load( &self, specifier: &ModuleSpecifier, @@ -413,20 +387,30 @@ impl NpmModuleLoader { } })?; - let code = if self.cjs_resolutions.is_known_cjs(specifier) { + let media_type = MediaType::from_specifier(specifier); + if media_type.is_emittable() { + return Err(AnyError::from(NotSupportedKindInNpmError { + media_type, + specifier: specifier.clone(), + })); + } + + let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? { // translate cjs to esm if it's cjs and inject node globals let code = from_utf8_lossy_owned(code); ModuleSourceCode::String( self .node_code_translator - .translate_cjs_to_esm(specifier, Some(code)) + .translate_cjs_to_esm(specifier, Some(Cow::Owned(code))) .await? + .into_owned() .into(), ) } else { // esm and json code is untouched ModuleSourceCode::Bytes(code.into_boxed_slice().into()) }; + Ok(ModuleCodeStringSource { code, found_url: specifier.clone(), @@ -435,21 +419,165 @@ impl NpmModuleLoader { } } +pub struct CjsTrackerOptions { + pub unstable_detect_cjs: bool, +} + /// Keeps track of what module specifiers were resolved as CJS. -#[derive(Debug, Default)] -pub struct CjsResolutionStore(DashSet); +/// +/// Modules that are `.js` or `.ts` are only known to be CJS or +/// ESM after they're loaded based on their contents. So these files +/// will be "maybe CJS" until they're loaded. 
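For illustration, a minimal usage sketch of the tracker defined below; the caller is hypothetical, and the use of `ClosestPkgJsonError` follows the signatures reconstructed in this file:

// Hypothetical caller, for illustration only.
fn classify_module(
  cjs_tracker: &CjsTracker,
  specifier: &ModuleSpecifier,
) -> Result<(), ClosestPkgJsonError> {
  let media_type = MediaType::from_specifier(specifier);
  // Before the source is loaded, a .js/.ts file can only be "maybe CJS"...
  let _maybe_cjs = cjs_tracker.is_maybe_cjs(specifier, media_type)?;
  // ...once the contents have been parsed, passing `is_script` lets the
  // tracker give a definite answer (a file with imports/exports is ESM).
  let _known_cjs =
    cjs_tracker.is_cjs_with_known_is_script(specifier, media_type, true)?;
  Ok(())
}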
+#[derive(Debug)]
+pub struct CjsTracker {
+  in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
+  pkg_json_resolver: Arc<PackageJsonResolver>,
+  unstable_detect_cjs: bool,
+  known: DashMap<ModuleSpecifier, ModuleKind>,
+}
 
-impl CjsResolutionStore {
-  pub fn is_known_cjs(&self, specifier: &ModuleSpecifier) -> bool {
-    if specifier.scheme() != "file" {
-      return false;
+impl CjsTracker {
+  pub fn new(
+    in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
+    pkg_json_resolver: Arc<PackageJsonResolver>,
+    options: CjsTrackerOptions,
+  ) -> Self {
+    Self {
+      in_npm_pkg_checker,
+      pkg_json_resolver,
+      unstable_detect_cjs: options.unstable_detect_cjs,
+      known: Default::default(),
     }
-
-    specifier_has_extension(specifier, "cjs") || self.0.contains(specifier)
   }
 
-  pub fn insert(&self, specifier: ModuleSpecifier) {
-    self.0.insert(specifier);
+  /// Checks whether the file might be treated as CJS, but it's not for sure
+  /// yet because the source hasn't been loaded to see whether it contains
+  /// imports or exports.
+  pub fn is_maybe_cjs(
+    &self,
+    specifier: &ModuleSpecifier,
+    media_type: MediaType,
+  ) -> Result<bool, ClosestPkgJsonError> {
+    self.treat_as_cjs_with_is_script(specifier, media_type, None)
+  }
+
+  /// Gets whether the file is CJS. If true, this is for sure
+  /// cjs because `is_script` is provided.
+  ///
+  /// `is_script` should be `true` when the contents of the file at the
+  /// provided specifier are known to be a script and not an ES module.
+  pub fn is_cjs_with_known_is_script(
+    &self,
+    specifier: &ModuleSpecifier,
+    media_type: MediaType,
+    is_script: bool,
+  ) -> Result<bool, ClosestPkgJsonError> {
+    self.treat_as_cjs_with_is_script(specifier, media_type, Some(is_script))
+  }
+
+  fn treat_as_cjs_with_is_script(
+    &self,
+    specifier: &ModuleSpecifier,
+    media_type: MediaType,
+    is_script: Option<bool>,
+  ) -> Result<bool, ClosestPkgJsonError> {
+    let kind = match self
+      .get_known_kind_with_is_script(specifier, media_type, is_script)
+    {
+      Some(kind) => kind,
+      None => self.check_based_on_pkg_json(specifier)?,
+    };
+    Ok(kind.is_cjs())
+  }
+
+  pub fn get_known_kind(
+    &self,
+    specifier: &ModuleSpecifier,
+    media_type: MediaType,
+  ) -> Option<ModuleKind> {
+    self.get_known_kind_with_is_script(specifier, media_type, None)
+  }
+
+  fn get_known_kind_with_is_script(
+    &self,
+    specifier: &ModuleSpecifier,
+    media_type: MediaType,
+    is_script: Option<bool>,
+  ) -> Option<ModuleKind> {
+    if specifier.scheme() != "file" {
+      return Some(ModuleKind::Esm);
+    }
+
+    match media_type {
+      MediaType::Mts | MediaType::Mjs | MediaType::Dmts => Some(ModuleKind::Esm),
+      MediaType::Cjs | MediaType::Cts | MediaType::Dcts => Some(ModuleKind::Cjs),
+      MediaType::Dts => {
+        // dts files are always determined based on the package.json because
+        // they contain imports/exports even when considered CJS
+        if let Some(value) = self.known.get(specifier).map(|v| *v) {
+          Some(value)
+        } else {
+          let value = self.check_based_on_pkg_json(specifier).ok();
+          if let Some(value) = value {
+            self.known.insert(specifier.clone(), value);
+          }
+          Some(value.unwrap_or(ModuleKind::Esm))
+        }
+      }
+      MediaType::Wasm |
+      MediaType::Json => Some(ModuleKind::Esm),
+      MediaType::JavaScript
+      | MediaType::Jsx
+      | MediaType::TypeScript
+      | MediaType::Tsx
+      // treat these as unknown
+      | MediaType::Css
+      | MediaType::SourceMap
+      | MediaType::Unknown => {
+        if let Some(value) = self.known.get(specifier).map(|v| *v) {
+          if value.is_cjs() && is_script == Some(false) {
+            // we now know this is actually esm
+            self.known.insert(specifier.clone(), ModuleKind::Esm);
+            Some(ModuleKind::Esm)
+          } else {
+            Some(value)
+          }
+        } else if is_script == Some(false) {
+          // we know this is esm
+          self.known.insert(specifier.clone(), ModuleKind::Esm);
+          Some(ModuleKind::Esm)
+        } else {
+          None
+ 
} + } + } + } + + fn check_based_on_pkg_json( + &self, + specifier: &ModuleSpecifier, + ) -> Result { + if self.in_npm_pkg_checker.in_npm_package(specifier) { + if let Some(pkg_json) = + self.pkg_json_resolver.get_closest_package_json(specifier)? + { + let is_file_location_cjs = pkg_json.typ != "module"; + Ok(ModuleKind::from_is_cjs(is_file_location_cjs)) + } else { + Ok(ModuleKind::Cjs) + } + } else if self.unstable_detect_cjs { + if let Some(pkg_json) = + self.pkg_json_resolver.get_closest_package_json(specifier)? + { + let is_cjs_type = pkg_json.typ == "commonjs"; + Ok(ModuleKind::from_is_cjs(is_cjs_type)) + } else { + Ok(ModuleKind::Esm) + } + } else { + Ok(ModuleKind::Esm) + } } } @@ -633,8 +761,7 @@ impl Resolver for CliGraphResolver { Some(referrer), to_node_mode(mode), ) - .map_err(ResolveError::Other) - .map(|res| res.into_url()), + .map_err(|e| ResolveError::Other(e.into())), MappedResolution::PackageJson { dep_result, alias, @@ -665,19 +792,17 @@ impl Resolver for CliGraphResolver { ) .map_err(|e| ResolveError::Other(e.into())) .and_then(|pkg_folder| { - Ok( - self - .node_resolver - .as_ref() - .unwrap() - .resolve_package_sub_path_from_deno_module( - pkg_folder, - sub_path.as_deref(), - Some(referrer), - to_node_mode(mode), - )? - .into_url(), - ) + self + .node_resolver + .as_ref() + .unwrap() + .resolve_package_sub_path_from_deno_module( + pkg_folder, + sub_path.as_deref(), + Some(referrer), + to_node_mode(mode), + ) + .map_err(|e| ResolveError::Other(e.into())) }), }) } @@ -717,23 +842,20 @@ impl Resolver for CliGraphResolver { npm_req_ref.req(), ) { - return Ok( - node_resolver - .resolve_package_sub_path_from_deno_module( - pkg_folder, - npm_req_ref.sub_path(), - Some(referrer), - to_node_mode(mode), - )? - .into_url(), - ); + return node_resolver + .resolve_package_sub_path_from_deno_module( + pkg_folder, + npm_req_ref.sub_path(), + Some(referrer), + to_node_mode(mode), + ) + .map_err(|e| ResolveError::Other(e.into())); } // do npm resolution for byonm if is_byonm { return node_resolver .resolve_req_reference(&npm_req_ref, referrer, to_node_mode(mode)) - .map(|res| res.into_url()) .map_err(|err| err.into()); } } @@ -751,9 +873,7 @@ impl Resolver for CliGraphResolver { .map_err(ResolveError::Other)?; if let Some(res) = maybe_resolution { match res { - NodeResolution::Esm(url) | NodeResolution::CommonJs(url) => { - return Ok(url) - } + NodeResolution::Module(url) => return Ok(url), NodeResolution::BuiltIn(_) => { // don't resolve bare specifiers for built-in modules via node resolution } diff --git a/cli/schemas/config-file.v1.json b/cli/schemas/config-file.v1.json index 4a6239f0d4..27c8499ea2 100644 --- a/cli/schemas/config-file.v1.json +++ b/cli/schemas/config-file.v1.json @@ -291,7 +291,7 @@ "type": "array", "description": "List of tag names that will be run. Empty list disables all tags and will only use rules from `include`.", "items": { - "type": "string" + "$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/tags.v1.json" }, "minItems": 0, "uniqueItems": true @@ -300,7 +300,7 @@ "type": "array", "description": "List of rule names that will be excluded from configured tag sets. If the same rule is in `include` it will be run.", "items": { - "type": "string" + "$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json" }, "minItems": 0, "uniqueItems": true @@ -309,7 +309,7 @@ "type": "array", "description": "List of rule names that will be run. 
Even if the same rule is in `exclude` it will be run.", "items": { - "type": "string" + "$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json" }, "minItems": 0, "uniqueItems": true @@ -531,6 +531,7 @@ "detect-cjs", "ffi", "fs", + "fmt-component", "http", "kv", "net", diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs index 52ee4eeb28..9e26512268 100644 --- a/cli/standalone/binary.rs +++ b/cli/standalone/binary.rs @@ -9,14 +9,19 @@ use std::ffi::OsString; use std::fs; use std::fs::File; use std::future::Future; +use std::io::ErrorKind; use std::io::Read; use std::io::Seek; use std::io::SeekFrom; use std::io::Write; +use std::ops::Range; use std::path::Path; use std::path::PathBuf; use std::process::Command; +use std::sync::Arc; +use deno_ast::MediaType; +use deno_ast::ModuleKind; use deno_ast::ModuleSpecifier; use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::ResolverWorkspaceJsrPackage; @@ -30,13 +35,22 @@ use deno_core::futures::AsyncReadExt; use deno_core::futures::AsyncSeekExt; use deno_core::serde_json; use deno_core::url::Url; +use deno_graph::source::RealFileSystem; +use deno_graph::ModuleGraph; +use deno_npm::resolution::SerializedNpmResolutionSnapshot; +use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage; +use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; +use deno_npm::NpmPackageId; use deno_npm::NpmSystemInfo; +use deno_runtime::deno_fs; +use deno_runtime::deno_fs::FileSystem; +use deno_runtime::deno_fs::RealFs; +use deno_runtime::deno_io::fs::FsError; use deno_runtime::deno_node::PackageJson; use deno_semver::npm::NpmVersionReqParseError; use deno_semver::package::PackageReq; use deno_semver::Version; use deno_semver::VersionReqSpecifierParseError; -use eszip::EszipRelativeFileBaseUrl; use indexmap::IndexMap; use log::Level; use serde::Deserialize; @@ -49,10 +63,12 @@ use crate::args::NpmInstallDepsProvider; use crate::args::PermissionFlags; use crate::args::UnstableConfig; use crate::cache::DenoDir; +use crate::emit::Emitter; use crate::file_fetcher::FileFetcher; use crate::http_util::HttpClientProvider; use crate::npm::CliNpmResolver; use crate::npm::InnerCliNpmResolverRef; +use crate::resolver::CjsTracker; use crate::shared::ReleaseChannel; use crate::standalone::virtual_fs::VfsEntry; use crate::util::archive; @@ -60,12 +76,63 @@ use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; +use super::file_system::DenoCompileFileSystem; +use super::serialization::deserialize_binary_data_section; +use super::serialization::serialize_binary_data_section; +use super::serialization::DenoCompileModuleData; +use super::serialization::DeserializedDataSection; +use super::serialization::RemoteModulesStore; +use super::serialization::RemoteModulesStoreBuilder; use super::virtual_fs::FileBackedVfs; use super::virtual_fs::VfsBuilder; use super::virtual_fs::VfsRoot; use super::virtual_fs::VirtualDirectory; -const MAGIC_TRAILER: &[u8; 8] = b"d3n0l4nd"; +/// A URL that can be designated as the base for relative URLs. +/// +/// After creation, this URL may be used to get the key for a +/// module in the binary. 
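For illustration, a minimal sketch (assumed base URL and paths) of the key mapping this type provides, following the three rules documented on `specifier_key` below:

// Illustrative sketch only; mirrors the rules documented below.
fn specifier_key_examples() {
  let base_url = Url::parse("file:///project/").unwrap();
  let base = StandaloneRelativeFileBaseUrl::from(&base_url);
  // descendant file specifier -> relative key
  let inside = Url::parse("file:///project/src/main.ts").unwrap();
  assert_eq!(base.specifier_key(&inside), "src/main.ts");
  // non-descendant file specifier -> stays absolute
  let outside = Url::parse("file:///elsewhere/mod.ts").unwrap();
  assert_eq!(base.specifier_key(&outside), outside.as_str());
  // non-file specifier -> stays as-is
  let remote = Url::parse("https://deno.land/std/path/mod.ts").unwrap();
  assert_eq!(base.specifier_key(&remote), remote.as_str());
}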
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct StandaloneRelativeFileBaseUrl<'a>(&'a Url); + +impl<'a> From<&'a Url> for StandaloneRelativeFileBaseUrl<'a> { + fn from(url: &'a Url) -> Self { + Self(url) + } +} + +impl<'a> StandaloneRelativeFileBaseUrl<'a> { + pub fn new(url: &'a Url) -> Self { + debug_assert_eq!(url.scheme(), "file"); + Self(url) + } + + /// Gets the module map key of the provided specifier. + /// + /// * Descendant file specifiers will be made relative to the base. + /// * Non-descendant file specifiers will stay as-is (absolute). + /// * Non-file specifiers will stay as-is. + pub fn specifier_key<'b>(&self, target: &'b Url) -> Cow<'b, str> { + if target.scheme() != "file" { + return Cow::Borrowed(target.as_str()); + } + + match self.0.make_relative(target) { + Some(relative) => { + if relative.starts_with("../") { + Cow::Borrowed(target.as_str()) + } else { + Cow::Owned(relative) + } + } + None => Cow::Borrowed(target.as_str()), + } + } + + pub fn inner(&self) -> &Url { + self.0 + } +} #[derive(Deserialize, Serialize)] pub enum NodeModules { @@ -120,78 +187,23 @@ pub struct Metadata { pub unstable_config: UnstableConfig, } -pub fn load_npm_vfs(root_dir_path: PathBuf) -> Result { - let data = libsui::find_section("d3n0l4nd").unwrap(); - - // We do the first part sync so it can complete quickly - let trailer: [u8; TRAILER_SIZE] = data[0..TRAILER_SIZE].try_into().unwrap(); - let trailer = match Trailer::parse(&trailer)? { - None => panic!("Could not find trailer"), - Some(trailer) => trailer, - }; - let data = &data[TRAILER_SIZE..]; - - let vfs_data = - &data[trailer.npm_vfs_pos as usize..trailer.npm_files_pos as usize]; - let mut dir: VirtualDirectory = serde_json::from_slice(vfs_data)?; - - // align the name of the directory with the root dir - dir.name = root_dir_path - .file_name() - .unwrap() - .to_string_lossy() - .to_string(); - - let fs_root = VfsRoot { - dir, - root_path: root_dir_path, - start_file_offset: trailer.npm_files_pos, - }; - Ok(FileBackedVfs::new(data.to_vec(), fs_root)) -} - fn write_binary_bytes( mut file_writer: File, original_bin: Vec, metadata: &Metadata, - eszip: eszip::EszipV2, - npm_vfs: Option<&VirtualDirectory>, - npm_files: &Vec>, + npm_snapshot: Option, + remote_modules: &RemoteModulesStoreBuilder, + vfs: VfsBuilder, compile_flags: &CompileFlags, ) -> Result<(), AnyError> { - let metadata = serde_json::to_string(metadata)?.as_bytes().to_vec(); - let npm_vfs = serde_json::to_string(&npm_vfs)?.as_bytes().to_vec(); - let eszip_archive = eszip.into_bytes(); - - let mut writer = Vec::new(); - - // write the trailer, which includes the positions - // of the data blocks in the file - writer.write_all(&{ - let metadata_pos = eszip_archive.len() as u64; - let npm_vfs_pos = metadata_pos + (metadata.len() as u64); - let npm_files_pos = npm_vfs_pos + (npm_vfs.len() as u64); - Trailer { - eszip_pos: 0, - metadata_pos, - npm_vfs_pos, - npm_files_pos, - } - .as_bytes() - })?; - - writer.write_all(&eszip_archive)?; - writer.write_all(&metadata)?; - writer.write_all(&npm_vfs)?; - for file in npm_files { - writer.write_all(file)?; - } + let data_section_bytes = + serialize_binary_data_section(metadata, npm_snapshot, remote_modules, vfs)?; let target = compile_flags.resolve_target(); if target.contains("linux") { libsui::Elf::new(&original_bin).append( "d3n0l4nd", - &writer, + &data_section_bytes, &mut file_writer, )?; } else if target.contains("windows") { @@ -201,11 +213,11 @@ fn write_binary_bytes( pe = pe.set_icon(&icon)?; } - 
pe.write_resource("d3n0l4nd", writer)? + pe.write_resource("d3n0l4nd", data_section_bytes)? .build(&mut file_writer)?; } else if target.contains("darwin") { libsui::Macho::from(original_bin)? - .write_section("d3n0l4nd", writer)? + .write_section("d3n0l4nd", data_section_bytes)? .build_and_sign(&mut file_writer)?; } Ok(()) @@ -221,6 +233,67 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool { || libsui::utils::is_macho(&data) } +pub struct StandaloneData { + pub fs: Arc, + pub metadata: Metadata, + pub modules: StandaloneModules, + pub npm_snapshot: Option, + pub root_path: PathBuf, + pub vfs: Arc, +} + +pub struct StandaloneModules { + remote_modules: RemoteModulesStore, + vfs: Arc, +} + +impl StandaloneModules { + pub fn resolve_specifier<'a>( + &'a self, + specifier: &'a ModuleSpecifier, + ) -> Result, AnyError> { + if specifier.scheme() == "file" { + Ok(Some(specifier)) + } else { + self.remote_modules.resolve_specifier(specifier) + } + } + + pub fn has_file(&self, path: &Path) -> bool { + self.vfs.file_entry(path).is_ok() + } + + pub fn read<'a>( + &'a self, + specifier: &'a ModuleSpecifier, + ) -> Result>, AnyError> { + if specifier.scheme() == "file" { + let path = deno_path_util::url_to_file_path(specifier)?; + let bytes = match self.vfs.file_entry(&path) { + Ok(entry) => self.vfs.read_file_all(entry)?, + Err(err) if err.kind() == ErrorKind::NotFound => { + let bytes = match RealFs.read_file_sync(&path, None) { + Ok(bytes) => bytes, + Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => { + return Ok(None) + } + Err(err) => return Err(err.into()), + }; + Cow::Owned(bytes) + } + Err(err) => return Err(err.into()), + }; + Ok(Some(DenoCompileModuleData { + media_type: MediaType::from_specifier(specifier), + specifier, + data: bytes, + })) + } else { + self.remote_modules.read(specifier) + } + } +} + /// This function will try to run this binary as a standalone binary /// produced by `deno compile`. It determines if this is a standalone /// binary by skipping over the trailer width at the end of the file, @@ -228,110 +301,67 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool { /// the bundle is executed. If not, this function exits with `Ok(None)`. pub fn extract_standalone( cli_args: Cow>, -) -> Result< - Option>>, - AnyError, -> { +) -> Result, AnyError> { let Some(data) = libsui::find_section("d3n0l4nd") else { return Ok(None); }; - // We do the first part sync so it can complete quickly - let trailer = match Trailer::parse(&data[0..TRAILER_SIZE])? { + let DeserializedDataSection { + mut metadata, + npm_snapshot, + remote_modules, + mut vfs_dir, + vfs_files_data, + } = match deserialize_binary_data_section(data)? 
{ + Some(data_section) => data_section, None => return Ok(None), - Some(trailer) => trailer, }; + let root_path = { + let maybe_current_exe = std::env::current_exe().ok(); + let current_exe_name = maybe_current_exe + .as_ref() + .and_then(|p| p.file_name()) + .map(|p| p.to_string_lossy()) + // should never happen + .unwrap_or_else(|| Cow::Borrowed("binary")); + std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name)) + }; let cli_args = cli_args.into_owned(); - // If we have an eszip, read it out - Ok(Some(async move { - let bufreader = - deno_core::futures::io::BufReader::new(&data[TRAILER_SIZE..]); + metadata.argv.reserve(cli_args.len() - 1); + for arg in cli_args.into_iter().skip(1) { + metadata.argv.push(arg.into_string().unwrap()); + } + let vfs = { + // align the name of the directory with the root dir + vfs_dir.name = root_path.file_name().unwrap().to_string_lossy().to_string(); - let (eszip, loader) = eszip::EszipV2::parse(bufreader) - .await - .context("Failed to parse eszip header")?; - - let bufreader = loader.await.context("Failed to parse eszip archive")?; - - let mut metadata = String::new(); - - bufreader - .take(trailer.metadata_len()) - .read_to_string(&mut metadata) - .await - .context("Failed to read metadata from the current executable")?; - - let mut metadata: Metadata = serde_json::from_str(&metadata).unwrap(); - metadata.argv.reserve(cli_args.len() - 1); - for arg in cli_args.into_iter().skip(1) { - metadata.argv.push(arg.into_string().unwrap()); - } - - Ok((metadata, eszip)) + let fs_root = VfsRoot { + dir: vfs_dir, + root_path: root_path.clone(), + start_file_offset: 0, + }; + Arc::new(FileBackedVfs::new(Cow::Borrowed(vfs_files_data), fs_root)) + }; + let fs: Arc = + Arc::new(DenoCompileFileSystem::new(vfs.clone())); + Ok(Some(StandaloneData { + fs, + metadata, + modules: StandaloneModules { + remote_modules, + vfs: vfs.clone(), + }, + npm_snapshot, + root_path, + vfs, })) } -const TRAILER_SIZE: usize = std::mem::size_of::() + 8; // 8 bytes for the magic trailer string - -struct Trailer { - eszip_pos: u64, - metadata_pos: u64, - npm_vfs_pos: u64, - npm_files_pos: u64, -} - -impl Trailer { - pub fn parse(trailer: &[u8]) -> Result, AnyError> { - let (magic_trailer, rest) = trailer.split_at(8); - if magic_trailer != MAGIC_TRAILER { - return Ok(None); - } - - let (eszip_archive_pos, rest) = rest.split_at(8); - let (metadata_pos, rest) = rest.split_at(8); - let (npm_vfs_pos, npm_files_pos) = rest.split_at(8); - let eszip_archive_pos = u64_from_bytes(eszip_archive_pos)?; - let metadata_pos = u64_from_bytes(metadata_pos)?; - let npm_vfs_pos = u64_from_bytes(npm_vfs_pos)?; - let npm_files_pos = u64_from_bytes(npm_files_pos)?; - Ok(Some(Trailer { - eszip_pos: eszip_archive_pos, - metadata_pos, - npm_vfs_pos, - npm_files_pos, - })) - } - - pub fn metadata_len(&self) -> u64 { - self.npm_vfs_pos - self.metadata_pos - } - - pub fn npm_vfs_len(&self) -> u64 { - self.npm_files_pos - self.npm_vfs_pos - } - - pub fn as_bytes(&self) -> Vec { - let mut trailer = MAGIC_TRAILER.to_vec(); - trailer.write_all(&self.eszip_pos.to_be_bytes()).unwrap(); - trailer.write_all(&self.metadata_pos.to_be_bytes()).unwrap(); - trailer.write_all(&self.npm_vfs_pos.to_be_bytes()).unwrap(); - trailer - .write_all(&self.npm_files_pos.to_be_bytes()) - .unwrap(); - trailer - } -} - -fn u64_from_bytes(arr: &[u8]) -> Result { - let fixed_arr: &[u8; 8] = arr - .try_into() - .context("Failed to convert the buffer into a fixed-size array")?; - Ok(u64::from_be_bytes(*fixed_arr)) -} - pub struct 
DenoCompileBinaryWriter<'a> { + cjs_tracker: &'a CjsTracker, deno_dir: &'a DenoDir, + emitter: &'a Emitter, file_fetcher: &'a FileFetcher, http_client_provider: &'a HttpClientProvider, npm_resolver: &'a dyn CliNpmResolver, @@ -342,7 +372,9 @@ pub struct DenoCompileBinaryWriter<'a> { impl<'a> DenoCompileBinaryWriter<'a> { #[allow(clippy::too_many_arguments)] pub fn new( + cjs_tracker: &'a CjsTracker, deno_dir: &'a DenoDir, + emitter: &'a Emitter, file_fetcher: &'a FileFetcher, http_client_provider: &'a HttpClientProvider, npm_resolver: &'a dyn CliNpmResolver, @@ -350,7 +382,9 @@ impl<'a> DenoCompileBinaryWriter<'a> { npm_system_info: NpmSystemInfo, ) -> Self { Self { + cjs_tracker, deno_dir, + emitter, file_fetcher, http_client_provider, npm_resolver, @@ -362,8 +396,8 @@ impl<'a> DenoCompileBinaryWriter<'a> { pub async fn write_bin( &self, writer: File, - eszip: eszip::EszipV2, - root_dir_url: EszipRelativeFileBaseUrl<'_>, + graph: &ModuleGraph, + root_dir_url: StandaloneRelativeFileBaseUrl<'_>, entrypoint: &ModuleSpecifier, compile_flags: &CompileFlags, cli_options: &CliOptions, @@ -390,15 +424,17 @@ impl<'a> DenoCompileBinaryWriter<'a> { ) } } - self.write_standalone_binary( - writer, - original_binary, - eszip, - root_dir_url, - entrypoint, - cli_options, - compile_flags, - ) + self + .write_standalone_binary( + writer, + original_binary, + graph, + root_dir_url, + entrypoint, + cli_options, + compile_flags, + ) + .await } async fn get_base_binary( @@ -493,12 +529,12 @@ impl<'a> DenoCompileBinaryWriter<'a> { /// This functions creates a standalone deno binary by appending a bundle /// and magic trailer to the currently executing binary. #[allow(clippy::too_many_arguments)] - fn write_standalone_binary( + async fn write_standalone_binary( &self, writer: File, original_bin: Vec, - mut eszip: eszip::EszipV2, - root_dir_url: EszipRelativeFileBaseUrl<'_>, + graph: &ModuleGraph, + root_dir_url: StandaloneRelativeFileBaseUrl<'_>, entrypoint: &ModuleSpecifier, cli_options: &CliOptions, compile_flags: &CompileFlags, @@ -512,19 +548,17 @@ impl<'a> DenoCompileBinaryWriter<'a> { None => None, }; let root_path = root_dir_url.inner().to_file_path().unwrap(); - let (npm_vfs, npm_files, node_modules) = match self.npm_resolver.as_inner() + let (maybe_npm_vfs, node_modules, npm_snapshot) = match self + .npm_resolver + .as_inner() { InnerCliNpmResolverRef::Managed(managed) => { let snapshot = managed.serialized_valid_snapshot_for_system(&self.npm_system_info); if !snapshot.as_serialized().packages.is_empty() { - let (root_dir, files) = self - .build_vfs(&root_path, cli_options)? - .into_dir_and_files(); - eszip.add_npm_snapshot(snapshot); + let npm_vfs_builder = self.build_npm_vfs(&root_path, cli_options)?; ( - Some(root_dir), - files, + Some(npm_vfs_builder), Some(NodeModules::Managed { node_modules_dir: self.npm_resolver.root_node_modules_path().map( |path| { @@ -536,18 +570,16 @@ impl<'a> DenoCompileBinaryWriter<'a> { }, ), }), + Some(snapshot), ) } else { - (None, Vec::new(), None) + (None, None, None) } } InnerCliNpmResolverRef::Byonm(resolver) => { - let (root_dir, files) = self - .build_vfs(&root_path, cli_options)? 
- .into_dir_and_files(); + let npm_vfs_builder = self.build_npm_vfs(&root_path, cli_options)?; ( - Some(root_dir), - files, + Some(npm_vfs_builder), Some(NodeModules::Byonm { root_node_modules_dir: resolver.root_node_modules_path().map( |node_modules_dir| { @@ -560,9 +592,69 @@ impl<'a> DenoCompileBinaryWriter<'a> { }, ), }), + None, ) } }; + let mut vfs = if let Some(npm_vfs) = maybe_npm_vfs { + npm_vfs + } else { + VfsBuilder::new(root_path.clone())? + }; + let mut remote_modules_store = RemoteModulesStoreBuilder::default(); + for module in graph.modules() { + if module.specifier().scheme() == "data" { + continue; // don't store data urls as an entry as they're in the code + } + let (maybe_source, media_type) = match module { + deno_graph::Module::Js(m) => { + let source = if m.media_type.is_emittable() { + let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script( + &m.specifier, + m.media_type, + m.is_script, + )?; + let module_kind = ModuleKind::from_is_cjs(is_cjs); + let source = self + .emitter + .emit_parsed_source( + &m.specifier, + m.media_type, + module_kind, + &m.source, + ) + .await?; + source.into_bytes() + } else { + m.source.as_bytes().to_vec() + }; + (Some(source), m.media_type) + } + deno_graph::Module::Json(m) => { + (Some(m.source.as_bytes().to_vec()), m.media_type) + } + deno_graph::Module::Npm(_) + | deno_graph::Module::Node(_) + | deno_graph::Module::External(_) => (None, MediaType::Unknown), + }; + if module.specifier().scheme() == "file" { + let file_path = deno_path_util::url_to_file_path(module.specifier())?; + vfs + .add_file_with_data( + &file_path, + match maybe_source { + Some(source) => source, + None => RealFs.read_file_sync(&file_path, None)?, + }, + ) + .with_context(|| { + format!("Failed adding '{}'", file_path.display()) + })?; + } else if let Some(source) = maybe_source { + remote_modules_store.add(module.specifier(), media_type, source); + } + } + remote_modules_store.add_redirects(&graph.redirects); let env_vars_from_env_file = match cli_options.env_file_name() { Some(env_filename) => { @@ -636,14 +728,14 @@ impl<'a> DenoCompileBinaryWriter<'a> { writer, original_bin, &metadata, - eszip, - npm_vfs.as_ref(), - &npm_files, + npm_snapshot.map(|s| s.into_serialized()), + &remote_modules_store, + vfs, compile_flags, ) } - fn build_vfs( + fn build_npm_vfs( &self, root_path: &Path, cli_options: &CliOptions, @@ -664,8 +756,9 @@ impl<'a> DenoCompileBinaryWriter<'a> { } else { // DO NOT include the user's registry url as it may contain credentials, // but also don't make this dependent on the registry url - let root_path = npm_resolver.global_cache_root_folder(); - let mut builder = VfsBuilder::new(root_path)?; + let global_cache_root_path = npm_resolver.global_cache_root_path(); + let mut builder = + VfsBuilder::new(global_cache_root_path.to_path_buf())?; let mut packages = npm_resolver.all_system_packages(&self.npm_system_info); packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism @@ -675,12 +768,12 @@ impl<'a> DenoCompileBinaryWriter<'a> { builder.add_dir_recursive(&folder)?; } - // Flatten all the registries folders into a single "node_modules/localhost" folder + // Flatten all the registries folders into a single ".deno_compile_node_modules/localhost" folder // that will be used by denort when loading the npm cache. This avoids us exposing // the user's private registry information and means we don't have to bother // serializing all the different registry config into the binary. 
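// For orientation, the reshaping performed by the closure below looks
// roughly like this (package names and versions are made-up examples):
//
//   registry.npmjs.org/chalk/5.3.0/*      ->  localhost/chalk/5.3.0/*
//   my-registry.example.com/ms/2.1.3/*    ->  localhost/ms/2.1.3/*
//
// with everything living under the renamed ".deno_compile_node_modules"
// root directory.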
builder.with_root_dir(|root_dir| { - root_dir.name = "node_modules".to_string(); + root_dir.name = ".deno_compile_node_modules".to_string(); let mut new_entries = Vec::with_capacity(root_dir.entries.len()); let mut localhost_entries = IndexMap::new(); for entry in std::mem::take(&mut root_dir.entries) { @@ -715,6 +808,8 @@ impl<'a> DenoCompileBinaryWriter<'a> { root_dir.entries = new_entries; }); + builder.set_new_root_path(root_path.to_path_buf())?; + Ok(builder) } } diff --git a/cli/standalone/file_system.rs b/cli/standalone/file_system.rs index 314444630b..712c6ee918 100644 --- a/cli/standalone/file_system.rs +++ b/cli/standalone/file_system.rs @@ -22,8 +22,8 @@ use super::virtual_fs::FileBackedVfs; pub struct DenoCompileFileSystem(Arc); impl DenoCompileFileSystem { - pub fn new(vfs: FileBackedVfs) -> Self { - Self(Arc::new(vfs)) + pub fn new(vfs: Arc) -> Self { + Self(vfs) } fn error_if_in_vfs(&self, path: &Path) -> FsResult<()> { diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index 60018228b7..85610f4c20 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -5,6 +5,8 @@ #![allow(dead_code)] #![allow(unused_imports)] +use binary::StandaloneData; +use binary::StandaloneModules; use deno_ast::MediaType; use deno_cache_dir::npm::NpmCacheDir; use deno_config::workspace::MappedResolution; @@ -17,6 +19,7 @@ use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::futures::FutureExt; use deno_core::v8_set_flags; +use deno_core::FastString; use deno_core::FeatureChecker; use deno_core::ModuleLoader; use deno_core::ModuleSourceCode; @@ -28,7 +31,9 @@ use deno_npm::npm_rc::ResolvedNpmRc; use deno_package_json::PackageJsonDepValue; use deno_runtime::deno_fs; use deno_runtime::deno_node::create_host_defined_options; +use deno_runtime::deno_node::NodeRequireLoader; use deno_runtime::deno_node::NodeResolver; +use deno_runtime::deno_node::PackageJsonResolver; use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_tls::rustls::RootCertStore; @@ -38,10 +43,10 @@ use deno_runtime::permissions::RuntimePermissionDescriptorParser; use deno_runtime::WorkerExecutionMode; use deno_runtime::WorkerLogLevel; use deno_semver::npm::NpmPackageReqReference; -use eszip::EszipRelativeFileBaseUrl; use import_map::parse_from_json; use node_resolver::analyze::NodeCodeTranslator; use node_resolver::NodeResolutionMode; +use serialization::DenoCompileModuleSource; use std::borrow::Cow; use std::rc::Rc; use std::sync::Arc; @@ -54,17 +59,24 @@ use crate::args::CacheSetting; use crate::args::NpmInstallDepsProvider; use crate::args::StorageKeyResolver; use crate::cache::Caches; +use crate::cache::DenoCacheEnvFsAdapter; use crate::cache::DenoDirProvider; use crate::cache::NodeAnalysisCache; use crate::cache::RealDenoCacheEnv; use crate::http_util::HttpClientProvider; use crate::node::CliCjsCodeAnalyzer; +use crate::node::CliNodeCodeTranslator; use crate::npm::create_cli_npm_resolver; +use crate::npm::create_in_npm_pkg_checker; use crate::npm::CliByonmNpmResolverCreateOptions; +use crate::npm::CliManagedInNpmPkgCheckerCreateOptions; +use crate::npm::CliManagedNpmResolverCreateOptions; +use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolverCreateOptions; -use crate::npm::CliNpmResolverManagedCreateOptions; use crate::npm::CliNpmResolverManagedSnapshotOption; -use crate::resolver::CjsResolutionStore; +use crate::npm::CreateInNpmPkgCheckerOptions; +use crate::resolver::CjsTracker; +use 
crate::resolver::CjsTrackerOptions; use crate::resolver::CliDenoResolverFs; use crate::resolver::CliNodeResolver; use crate::resolver::NpmModuleLoader; @@ -73,60 +85,30 @@ use crate::util::progress_bar::ProgressBarStyle; use crate::util::v8::construct_v8_flags; use crate::worker::CliMainWorkerFactory; use crate::worker::CliMainWorkerOptions; -use crate::worker::ModuleLoaderAndSourceMapGetter; +use crate::worker::CreateModuleLoaderResult; use crate::worker::ModuleLoaderFactory; pub mod binary; mod file_system; +mod serialization; mod virtual_fs; pub use binary::extract_standalone; pub use binary::is_standalone_binary; pub use binary::DenoCompileBinaryWriter; -use self::binary::load_npm_vfs; use self::binary::Metadata; use self::file_system::DenoCompileFileSystem; -struct WorkspaceEszipModule { - specifier: ModuleSpecifier, - inner: eszip::Module, -} - -struct WorkspaceEszip { - eszip: eszip::EszipV2, - root_dir_url: Arc, -} - -impl WorkspaceEszip { - pub fn get_module( - &self, - specifier: &ModuleSpecifier, - ) -> Option { - if specifier.scheme() == "file" { - let specifier_key = EszipRelativeFileBaseUrl::new(&self.root_dir_url) - .specifier_key(specifier); - let module = self.eszip.get_module(&specifier_key)?; - let specifier = self.root_dir_url.join(&module.specifier).unwrap(); - Some(WorkspaceEszipModule { - specifier, - inner: module, - }) - } else { - let module = self.eszip.get_module(specifier.as_str())?; - Some(WorkspaceEszipModule { - specifier: ModuleSpecifier::parse(&module.specifier).unwrap(), - inner: module, - }) - } - } -} - struct SharedModuleLoaderState { - eszip: WorkspaceEszip, - workspace_resolver: WorkspaceResolver, + cjs_tracker: Arc, + fs: Arc, + modules: StandaloneModules, + node_code_translator: Arc, node_resolver: Arc, npm_module_loader: Arc, + npm_resolver: Arc, + workspace_resolver: WorkspaceResolver, } #[derive(Clone)] @@ -134,6 +116,12 @@ struct EmbeddedModuleLoader { shared: Arc, } +impl std::fmt::Debug for EmbeddedModuleLoader { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("EmbeddedModuleLoader").finish() + } +} + pub const MODULE_NOT_FOUND: &str = "Module not found"; pub const UNSUPPORTED_SCHEME: &str = "Unsupported scheme"; @@ -191,8 +179,7 @@ impl ModuleLoader for EmbeddedModuleLoader { sub_path.as_deref(), Some(&referrer), NodeResolutionMode::Execution, - )? - .into_url(), + )?, ), Ok(MappedResolution::PackageJson { dep_result, @@ -200,16 +187,14 @@ impl ModuleLoader for EmbeddedModuleLoader { alias, .. }) => match dep_result.as_ref().map_err(|e| AnyError::from(e.clone()))? { - PackageJsonDepValue::Req(req) => self - .shared - .node_resolver - .resolve_req_with_sub_path( + PackageJsonDepValue::Req(req) => { + self.shared.node_resolver.resolve_req_with_sub_path( req, sub_path.as_deref(), &referrer, NodeResolutionMode::Execution, ) - .map(|res| res.into_url()), + } PackageJsonDepValue::Workspace(version_req) => { let pkg_folder = self .shared @@ -227,8 +212,7 @@ impl ModuleLoader for EmbeddedModuleLoader { sub_path.as_deref(), Some(&referrer), NodeResolutionMode::Execution, - )? 
- .into_url(), + )?, ) } }, @@ -237,20 +221,18 @@ impl ModuleLoader for EmbeddedModuleLoader { if let Ok(reference) = NpmPackageReqReference::from_specifier(&specifier) { - return self - .shared - .node_resolver - .resolve_req_reference( - &reference, - &referrer, - NodeResolutionMode::Execution, - ) - .map(|res| res.into_url()); + return self.shared.node_resolver.resolve_req_reference( + &reference, + &referrer, + NodeResolutionMode::Execution, + ); } if specifier.scheme() == "jsr" { - if let Some(module) = self.shared.eszip.get_module(&specifier) { - return Ok(module.specifier); + if let Some(specifier) = + self.shared.modules.resolve_specifier(&specifier)? + { + return Ok(specifier.clone()); } } @@ -345,82 +327,140 @@ impl ModuleLoader for EmbeddedModuleLoader { ); } - let Some(module) = self.shared.eszip.get_module(original_specifier) else { - return deno_core::ModuleLoadResponse::Sync(Err(type_error(format!( - "{MODULE_NOT_FOUND}: {}", - original_specifier - )))); - }; - let original_specifier = original_specifier.clone(); - - deno_core::ModuleLoadResponse::Async( - async move { - let code = module.inner.source().await.ok_or_else(|| { - type_error(format!("Module not found: {}", original_specifier)) - })?; - let code = arc_u8_to_arc_str(code) - .map_err(|_| type_error("Module source is not utf-8"))?; - Ok(deno_core::ModuleSource::new_with_redirect( - match module.inner.kind { - eszip::ModuleKind::JavaScript => ModuleType::JavaScript, - eszip::ModuleKind::Json => ModuleType::Json, - eszip::ModuleKind::Jsonc => { - return Err(type_error("jsonc modules not supported")) + match self.shared.modules.read(original_specifier) { + Ok(Some(module)) => { + let media_type = module.media_type; + let (module_specifier, module_type, module_source) = + module.into_parts(); + let is_maybe_cjs = match self + .shared + .cjs_tracker + .is_maybe_cjs(original_specifier, media_type) + { + Ok(is_maybe_cjs) => is_maybe_cjs, + Err(err) => { + return deno_core::ModuleLoadResponse::Sync(Err(type_error( + format!("{:?}", err), + ))); + } + }; + if is_maybe_cjs { + let original_specifier = original_specifier.clone(); + let module_specifier = module_specifier.clone(); + let shared = self.shared.clone(); + deno_core::ModuleLoadResponse::Async( + async move { + let source = match module_source { + DenoCompileModuleSource::String(string) => { + Cow::Borrowed(string) + } + DenoCompileModuleSource::Bytes(module_code_bytes) => { + match module_code_bytes { + Cow::Owned(bytes) => Cow::Owned( + crate::util::text_encoding::from_utf8_lossy_owned(bytes), + ), + Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes), + } + } + }; + let source = shared + .node_code_translator + .translate_cjs_to_esm(&module_specifier, Some(source)) + .await?; + let module_source = match source { + Cow::Owned(source) => ModuleSourceCode::String(source.into()), + Cow::Borrowed(source) => { + ModuleSourceCode::String(FastString::from_static(source)) + } + }; + Ok(deno_core::ModuleSource::new_with_redirect( + module_type, + module_source, + &original_specifier, + &module_specifier, + None, + )) } - eszip::ModuleKind::OpaqueData => { - unreachable!(); - } - }, - ModuleSourceCode::String(code.into()), - &original_specifier, - &module.specifier, - None, - )) + .boxed_local(), + ) + } else { + let module_source = module_source.into_for_v8(); + deno_core::ModuleLoadResponse::Sync(Ok( + deno_core::ModuleSource::new_with_redirect( + module_type, + module_source, + original_specifier, + module_specifier, + None, + ), + )) + } } - .boxed_local(), - ) + 
Ok(None) => deno_core::ModuleLoadResponse::Sync(Err(type_error( + format!("{MODULE_NOT_FOUND}: {}", original_specifier), + ))), + Err(err) => deno_core::ModuleLoadResponse::Sync(Err(type_error( + format!("{:?}", err), + ))), + } } } -fn arc_u8_to_arc_str( - arc_u8: Arc<[u8]>, -) -> Result, std::str::Utf8Error> { - // Check that the string is valid UTF-8. - std::str::from_utf8(&arc_u8)?; - // SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as - // Arc. This is proven by the From> impl for Arc<[u8]> from the - // standard library. - Ok(unsafe { - std::mem::transmute::, std::sync::Arc>(arc_u8) - }) +impl NodeRequireLoader for EmbeddedModuleLoader { + fn ensure_read_permission<'a>( + &self, + permissions: &mut dyn deno_runtime::deno_node::NodePermissions, + path: &'a std::path::Path, + ) -> Result, AnyError> { + if self.shared.modules.has_file(path) { + // allow reading if the file is in the snapshot + return Ok(Cow::Borrowed(path)); + } + + self + .shared + .npm_resolver + .ensure_read_permission(permissions, path) + } + + fn load_text_file_lossy( + &self, + path: &std::path::Path, + ) -> Result { + Ok(self.shared.fs.read_text_file_lossy_sync(path, None)?) + } } struct StandaloneModuleLoaderFactory { shared: Arc, } +impl StandaloneModuleLoaderFactory { + pub fn create_result(&self) -> CreateModuleLoaderResult { + let loader = Rc::new(EmbeddedModuleLoader { + shared: self.shared.clone(), + }); + CreateModuleLoaderResult { + module_loader: loader.clone(), + node_require_loader: loader, + } + } +} + impl ModuleLoaderFactory for StandaloneModuleLoaderFactory { fn create_for_main( &self, _root_permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter { - ModuleLoaderAndSourceMapGetter { - module_loader: Rc::new(EmbeddedModuleLoader { - shared: self.shared.clone(), - }), - } + ) -> CreateModuleLoaderResult { + self.create_result() } fn create_for_worker( &self, _parent_permissions: PermissionsContainer, _permissions: PermissionsContainer, - ) -> ModuleLoaderAndSourceMapGetter { - ModuleLoaderAndSourceMapGetter { - module_loader: Rc::new(EmbeddedModuleLoader { - shared: self.shared.clone(), - }), - } + ) -> CreateModuleLoaderResult { + self.create_result() } } @@ -439,13 +479,15 @@ impl RootCertStoreProvider for StandaloneRootCertStoreProvider { } } -pub async fn run( - mut eszip: eszip::EszipV2, - metadata: Metadata, -) -> Result { - let current_exe_path = std::env::current_exe().unwrap(); - let current_exe_name = - current_exe_path.file_name().unwrap().to_string_lossy(); +pub async fn run(data: StandaloneData) -> Result { + let StandaloneData { + fs, + metadata, + modules, + npm_snapshot, + root_path, + vfs, + } = data; let deno_dir_provider = Arc::new(DenoDirProvider::new(None)); let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider { ca_stores: metadata.ca_stores, @@ -459,44 +501,50 @@ pub async fn run( )); // use a dummy npm registry url let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap(); - let root_path = - std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name)); let root_dir_url = Arc::new(ModuleSpecifier::from_directory_path(&root_path).unwrap()); let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap(); - let root_node_modules_path = root_path.join("node_modules"); - let npm_cache_dir = NpmCacheDir::new( - &RealDenoCacheEnv, - root_node_modules_path.clone(), - vec![npm_registry_url.clone()], - ); - let npm_global_cache_dir = npm_cache_dir.get_cache_location(); + let 
npm_global_cache_dir = root_path.join(".deno_compile_node_modules"); let cache_setting = CacheSetting::Only; - let (fs, npm_resolver, maybe_vfs_root) = match metadata.node_modules { + let pkg_json_resolver = Arc::new(PackageJsonResolver::new( + deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + )); + let (in_npm_pkg_checker, npm_resolver) = match metadata.node_modules { Some(binary::NodeModules::Managed { node_modules_dir }) => { - // this will always have a snapshot - let snapshot = eszip.take_npm_snapshot().unwrap(); - let vfs_root_dir_path = if node_modules_dir.is_some() { - root_path.clone() - } else { - npm_cache_dir.root_dir().to_owned() - }; - let vfs = load_npm_vfs(vfs_root_dir_path.clone()) - .context("Failed to load npm vfs.")?; + // create an npmrc that uses the fake npm_registry_url to resolve packages + let npmrc = Arc::new(ResolvedNpmRc { + default_config: deno_npm::npm_rc::RegistryConfigWithUrl { + registry_url: npm_registry_url.clone(), + config: Default::default(), + }, + scopes: Default::default(), + registry_configs: Default::default(), + }); + let npm_cache_dir = Arc::new(NpmCacheDir::new( + &DenoCacheEnvFsAdapter(fs.as_ref()), + npm_global_cache_dir, + npmrc.get_all_known_registries_urls(), + )); + let snapshot = npm_snapshot.unwrap(); let maybe_node_modules_path = node_modules_dir - .map(|node_modules_dir| vfs_root_dir_path.join(node_modules_dir)); - let fs = Arc::new(DenoCompileFileSystem::new(vfs)) - as Arc; + .map(|node_modules_dir| root_path.join(node_modules_dir)); + let in_npm_pkg_checker = + create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Managed( + CliManagedInNpmPkgCheckerCreateOptions { + root_cache_dir_url: npm_cache_dir.root_dir_url(), + maybe_node_modules_path: maybe_node_modules_path.as_deref(), + }, + )); let npm_resolver = create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( - CliNpmResolverManagedCreateOptions { + CliManagedNpmResolverCreateOptions { snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some( snapshot, )), maybe_lockfile: None, fs: fs.clone(), http_client_provider: http_client_provider.clone(), - npm_global_cache_dir, + npm_cache_dir, cache_setting, text_only_progress_bar: progress_bar, maybe_node_modules_path, @@ -505,50 +553,54 @@ pub async fn run( // this is only used for installing packages, which isn't necessary with deno compile NpmInstallDepsProvider::empty(), ), - // create an npmrc that uses the fake npm_registry_url to resolve packages - npmrc: Arc::new(ResolvedNpmRc { - default_config: deno_npm::npm_rc::RegistryConfigWithUrl { - registry_url: npm_registry_url.clone(), - config: Default::default(), - }, - scopes: Default::default(), - registry_configs: Default::default(), - }), + npmrc, lifecycle_scripts: Default::default(), }, )) .await?; - (fs, npm_resolver, Some(vfs_root_dir_path)) + (in_npm_pkg_checker, npm_resolver) } Some(binary::NodeModules::Byonm { root_node_modules_dir, }) => { - let vfs_root_dir_path = root_path.clone(); - let vfs = load_npm_vfs(vfs_root_dir_path.clone()) - .context("Failed to load vfs.")?; let root_node_modules_dir = root_node_modules_dir.map(|p| vfs.root().join(p)); - let fs = Arc::new(DenoCompileFileSystem::new(vfs)) - as Arc; + let in_npm_pkg_checker = + create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Byonm); let npm_resolver = create_cli_npm_resolver( CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { fs: CliDenoResolverFs(fs.clone()), + pkg_json_resolver: pkg_json_resolver.clone(), root_node_modules_dir, }), ) .await?; - (fs, 
npm_resolver, Some(vfs_root_dir_path)) + (in_npm_pkg_checker, npm_resolver) } None => { - let fs = Arc::new(deno_fs::RealFs) as Arc; + // Packages from different registries are already inlined in the binary, + // so no need to create actual `.npmrc` configuration. + let npmrc = create_default_npmrc(); + let npm_cache_dir = Arc::new(NpmCacheDir::new( + &DenoCacheEnvFsAdapter(fs.as_ref()), + npm_global_cache_dir, + npmrc.get_all_known_registries_urls(), + )); + let in_npm_pkg_checker = + create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Managed( + CliManagedInNpmPkgCheckerCreateOptions { + root_cache_dir_url: npm_cache_dir.root_dir_url(), + maybe_node_modules_path: None, + }, + )); let npm_resolver = create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( - CliNpmResolverManagedCreateOptions { + CliManagedNpmResolverCreateOptions { snapshot: CliNpmResolverManagedSnapshotOption::Specified(None), maybe_lockfile: None, fs: fs.clone(), http_client_provider: http_client_provider.clone(), - npm_global_cache_dir, + npm_cache_dir, cache_setting, text_only_progress_bar: progress_bar, maybe_node_modules_path: None, @@ -557,42 +609,52 @@ pub async fn run( // this is only used for installing packages, which isn't necessary with deno compile NpmInstallDepsProvider::empty(), ), - // Packages from different registries are already inlined in the ESZip, - // so no need to create actual `.npmrc` configuration. npmrc: create_default_npmrc(), lifecycle_scripts: Default::default(), }, )) .await?; - (fs, npm_resolver, None) + (in_npm_pkg_checker, npm_resolver) } }; let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some(); let node_resolver = Arc::new(NodeResolver::new( deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + in_npm_pkg_checker.clone(), npm_resolver.clone().into_npm_resolver(), + pkg_json_resolver.clone(), + )); + let cjs_tracker = Arc::new(CjsTracker::new( + in_npm_pkg_checker.clone(), + pkg_json_resolver.clone(), + CjsTrackerOptions { + unstable_detect_cjs: metadata.unstable_config.detect_cjs, + }, )); - let cjs_resolutions = Arc::new(CjsResolutionStore::default()); let cache_db = Caches::new(deno_dir_provider.clone()); let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db()); let cli_node_resolver = Arc::new(CliNodeResolver::new( - cjs_resolutions.clone(), + cjs_tracker.clone(), fs.clone(), + in_npm_pkg_checker.clone(), node_resolver.clone(), npm_resolver.clone(), )); let cjs_esm_code_analyzer = CliCjsCodeAnalyzer::new( node_analysis_cache, + cjs_tracker.clone(), fs.clone(), - cli_node_resolver.clone(), None, + false, ); let node_code_translator = Arc::new(NodeCodeTranslator::new( cjs_esm_code_analyzer, deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), + in_npm_pkg_checker, node_resolver.clone(), npm_resolver.clone().into_npm_resolver(), + pkg_json_resolver.clone(), )); let workspace_resolver = { let import_map = match metadata.workspace_resolver.import_map { @@ -645,37 +707,35 @@ pub async fn run( }; let module_loader_factory = StandaloneModuleLoaderFactory { shared: Arc::new(SharedModuleLoaderState { - eszip: WorkspaceEszip { - eszip, - root_dir_url, - }, - workspace_resolver, + cjs_tracker: cjs_tracker.clone(), + fs: fs.clone(), + modules, + node_code_translator: node_code_translator.clone(), node_resolver: cli_node_resolver.clone(), npm_module_loader: Arc::new(NpmModuleLoader::new( - cjs_resolutions.clone(), - node_code_translator, + cjs_tracker.clone(), fs.clone(), - cli_node_resolver, + node_code_translator, )), + 
npm_resolver: npm_resolver.clone(),
+      workspace_resolver,
     }),
   };
   let permissions = {
     let mut permissions =
       metadata.permissions.to_options(/* cli_arg_urls */ &[]);
-    // if running with an npm vfs, grant read access to it
-    if let Some(vfs_root) = maybe_vfs_root {
-      match &mut permissions.allow_read {
-        Some(vec) if vec.is_empty() => {
-          // do nothing, already granted
-        }
-        Some(vec) => {
-          vec.push(vfs_root.to_string_lossy().to_string());
-        }
-        None => {
-          permissions.allow_read =
-            Some(vec![vfs_root.to_string_lossy().to_string()]);
-        }
+    // grant read access to the vfs
+    match &mut permissions.allow_read {
+      Some(vec) if vec.is_empty() => {
+        // do nothing, already granted
+      }
+      Some(vec) => {
+        vec.push(root_path.to_string_lossy().to_string());
+      }
+      None => {
+        permissions.allow_read =
+          Some(vec![root_path.to_string_lossy().to_string()]);
       }
     }
 
@@ -697,7 +757,6 @@ pub async fn run(
   });
   let worker_factory = CliMainWorkerFactory::new(
     Arc::new(BlobStore::default()),
-    cjs_resolutions,
     // Code cache is not supported for standalone binary yet.
     None,
     feature_checker,
@@ -708,6 +767,7 @@ pub async fn run(
     Box::new(module_loader_factory),
     node_resolver,
     npm_resolver,
+    pkg_json_resolver,
     root_cert_store_provider,
     permissions,
     StorageKeyResolver::empty(),
@@ -723,7 +783,6 @@ pub async fn run(
       inspect_wait: false,
       strace_ops: None,
       is_inspecting: false,
-      is_npm_main: main_module.scheme() == "npm",
       skip_op_registration: true,
       location: metadata.location,
       argv0: NpmPackageReqReference::from_specifier(&main_module)
@@ -740,7 +799,6 @@ pub async fn run(
       node_ipc: None,
       serve_port: None,
       serve_host: None,
-      unstable_detect_cjs: metadata.unstable_config.detect_cjs,
     },
   );
 
diff --git a/cli/standalone/serialization.rs b/cli/standalone/serialization.rs
new file mode 100644
index 0000000000..a5eb649bfd
--- /dev/null
+++ b/cli/standalone/serialization.rs
@@ -0,0 +1,661 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+use std::borrow::Cow;
+use std::collections::BTreeMap;
+use std::collections::HashMap;
+use std::io::Write;
+
+use deno_ast::MediaType;
+use deno_core::anyhow::bail;
+use deno_core::anyhow::Context;
+use deno_core::error::AnyError;
+use deno_core::serde_json;
+use deno_core::url::Url;
+use deno_core::FastString;
+use deno_core::ModuleSourceCode;
+use deno_core::ModuleType;
+use deno_npm::resolution::SerializedNpmResolutionSnapshot;
+use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
+use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
+use deno_npm::NpmPackageId;
+use deno_semver::package::PackageReq;
+
+use crate::standalone::virtual_fs::VirtualDirectory;
+
+use super::binary::Metadata;
+use super::virtual_fs::VfsBuilder;
+
+const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
+
+/// Binary format:
+/// * d3n0l4nd
+/// * <metadata>
+/// * <npm snapshot>
+/// * <remote modules>
+/// * <vfs headers>
+/// * <vfs file data>
+/// * d3n0l4nd
+pub fn serialize_binary_data_section(
+  metadata: &Metadata,
+  npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
+  remote_modules: &RemoteModulesStoreBuilder,
+  vfs: VfsBuilder,
+) -> Result<Vec<u8>, AnyError> {
+  fn write_bytes_with_len(bytes: &mut Vec<u8>, data: &[u8]) {
+    bytes.extend_from_slice(&(data.len() as u64).to_le_bytes());
+    bytes.extend_from_slice(data);
+  }
+
+  let mut bytes = Vec::new();
+  bytes.extend_from_slice(MAGIC_BYTES);
+
+  // 1. Metadata
+  {
+    let metadata = serde_json::to_string(metadata)?;
+    write_bytes_with_len(&mut bytes, metadata.as_bytes());
+  }
+  // 2. 
Npm snapshot + { + let npm_snapshot = + npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default(); + write_bytes_with_len(&mut bytes, &npm_snapshot); + } + // 3. Remote modules + { + let update_index = bytes.len(); + bytes.extend_from_slice(&(0_u64).to_le_bytes()); + let start_index = bytes.len(); + remote_modules.write(&mut bytes)?; + let length = bytes.len() - start_index; + let length_bytes = (length as u64).to_le_bytes(); + bytes[update_index..update_index + length_bytes.len()] + .copy_from_slice(&length_bytes); + } + // 4. VFS + { + let (vfs, vfs_files) = vfs.into_dir_and_files(); + let vfs = serde_json::to_string(&vfs)?; + write_bytes_with_len(&mut bytes, vfs.as_bytes()); + let vfs_bytes_len = vfs_files.iter().map(|f| f.len() as u64).sum::(); + bytes.extend_from_slice(&vfs_bytes_len.to_le_bytes()); + for file in &vfs_files { + bytes.extend_from_slice(file); + } + } + + // write the magic bytes at the end so we can use it + // to make sure we've deserialized correctly + bytes.extend_from_slice(MAGIC_BYTES); + + Ok(bytes) +} + +pub struct DeserializedDataSection { + pub metadata: Metadata, + pub npm_snapshot: Option, + pub remote_modules: RemoteModulesStore, + pub vfs_dir: VirtualDirectory, + pub vfs_files_data: &'static [u8], +} + +pub fn deserialize_binary_data_section( + data: &'static [u8], +) -> Result, AnyError> { + fn read_bytes_with_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> { + let (input, len) = read_u64(input)?; + let (input, data) = read_bytes(input, len as usize)?; + Ok((input, data)) + } + + fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> { + if input.len() < MAGIC_BYTES.len() { + bail!("Unexpected end of data. Could not find magic bytes."); + } + let (magic_bytes, input) = input.split_at(MAGIC_BYTES.len()); + if magic_bytes != MAGIC_BYTES { + return Ok((input, false)); + } + Ok((input, true)) + } + + let (input, found) = read_magic_bytes(data)?; + if !found { + return Ok(None); + } + + // 1. Metadata + let (input, data) = read_bytes_with_len(input).context("reading metadata")?; + let metadata: Metadata = + serde_json::from_slice(data).context("deserializing metadata")?; + // 2. Npm snapshot + let (input, data) = + read_bytes_with_len(input).context("reading npm snapshot")?; + let npm_snapshot = if data.is_empty() { + None + } else { + Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?) + }; + // 3. Remote modules + let (input, data) = + read_bytes_with_len(input).context("reading remote modules data")?; + let remote_modules = + RemoteModulesStore::build(data).context("deserializing remote modules")?; + // 4. 
+
+#[derive(Default)]
+pub struct RemoteModulesStoreBuilder {
+  specifiers: Vec<(String, u64)>,
+  data: Vec<(MediaType, Vec<u8>)>,
+  data_byte_len: u64,
+  redirects: Vec<(String, String)>,
+  redirects_len: u64,
+}
+
+impl RemoteModulesStoreBuilder {
+  pub fn add(&mut self, specifier: &Url, media_type: MediaType, data: Vec<u8>) {
+    log::debug!("Adding '{}' ({})", specifier, media_type);
+    let specifier = specifier.to_string();
+    self.specifiers.push((specifier, self.data_byte_len));
+    self.data_byte_len += 1 + 8 + data.len() as u64; // media type (1 byte), data length (8 bytes), data
+    self.data.push((media_type, data));
+  }
+
+  pub fn add_redirects(&mut self, redirects: &BTreeMap<Url, Url>) {
+    self.redirects.reserve(redirects.len());
+    for (from, to) in redirects {
+      log::debug!("Adding redirect '{}' -> '{}'", from, to);
+      let from = from.to_string();
+      let to = to.to_string();
+      self.redirects_len += (4 + from.len() + 4 + to.len()) as u64;
+      self.redirects.push((from, to));
+    }
+  }
+
+  fn write(&self, writer: &mut dyn Write) -> Result<(), AnyError> {
+    writer.write_all(&(self.specifiers.len() as u32).to_le_bytes())?;
+    writer.write_all(&(self.redirects.len() as u32).to_le_bytes())?;
+    for (specifier, offset) in &self.specifiers {
+      writer.write_all(&(specifier.len() as u32).to_le_bytes())?;
+      writer.write_all(specifier.as_bytes())?;
+      writer.write_all(&offset.to_le_bytes())?;
+    }
+    for (from, to) in &self.redirects {
+      writer.write_all(&(from.len() as u32).to_le_bytes())?;
+      writer.write_all(from.as_bytes())?;
+      writer.write_all(&(to.len() as u32).to_le_bytes())?;
+      writer.write_all(to.as_bytes())?;
+    }
+    for (media_type, data) in &self.data {
+      writer.write_all(&[serialize_media_type(*media_type)])?;
+      writer.write_all(&(data.len() as u64).to_le_bytes())?;
+      writer.write_all(data)?;
+    }
+    Ok(())
+  }
+}
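// Illustrative sketch (editor's example, not part of this patch): `add`
// records the offset of each record before bumping the running length by
// 1 byte (media type tag) + 8 bytes (length prefix) + the payload itself,
// so the specifier table can point straight into the contiguous data blob:
fn demo_offsets() {
  let modules: [&[u8]; 2] = [b"export {}", b"console.log(1);"];
  let mut next_offset = 0u64;
  let mut offsets = Vec::new();
  for data in modules {
    offsets.push(next_offset);
    next_offset += 1 + 8 + data.len() as u64;
  }
  // the second record starts right after the first (1 + 8 + 9 bytes)
  assert_eq!(offsets, [0u64, 18]);
}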
+
+pub enum DenoCompileModuleSource {
+  String(&'static str),
+  Bytes(Cow<'static, [u8]>),
+}
+
+impl DenoCompileModuleSource {
+  pub fn into_for_v8(self) -> ModuleSourceCode {
+    fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode {
+      ModuleSourceCode::Bytes(match data {
+        Cow::Borrowed(d) => d.into(),
+        Cow::Owned(d) => d.into_boxed_slice().into(),
+      })
+    }
+
+    match self {
+      // todo(https://github.com/denoland/deno_core/pull/943): store whether
+      // the string is ascii or not ahead of time so we can avoid the is_ascii()
+      // check in FastString::from_static
+      Self::String(s) => ModuleSourceCode::String(FastString::from_static(s)),
+      Self::Bytes(b) => into_bytes(b),
+    }
+  }
+}
+
+pub struct DenoCompileModuleData<'a> {
+  pub specifier: &'a Url,
+  pub media_type: MediaType,
+  pub data: Cow<'static, [u8]>,
+}
+
+impl<'a> DenoCompileModuleData<'a> {
+  pub fn into_parts(self) -> (&'a Url, ModuleType, DenoCompileModuleSource) {
+    fn into_string_unsafe(data: Cow<'static, [u8]>) -> DenoCompileModuleSource {
+      match data {
+        Cow::Borrowed(d) => DenoCompileModuleSource::String(
+          // SAFETY: we know this is a valid utf8 string
+          unsafe { std::str::from_utf8_unchecked(d) },
+        ),
+        Cow::Owned(d) => DenoCompileModuleSource::Bytes(Cow::Owned(d)),
+      }
+    }
+
+    let (media_type, source) = match self.media_type {
+      MediaType::JavaScript
+      | MediaType::Jsx
+      | MediaType::Mjs
+      | MediaType::Cjs
+      | MediaType::TypeScript
+      | MediaType::Mts
+      | MediaType::Cts
+      | MediaType::Dts
+      | MediaType::Dmts
+      | MediaType::Dcts
+      | MediaType::Tsx => {
+        (ModuleType::JavaScript, into_string_unsafe(self.data))
+      }
+      MediaType::Json => (ModuleType::Json, into_string_unsafe(self.data)),
+      MediaType::Wasm => {
+        (ModuleType::Wasm, DenoCompileModuleSource::Bytes(self.data))
+      }
+      // just assume javascript if we made it here
+      MediaType::Css | MediaType::SourceMap | MediaType::Unknown => (
+        ModuleType::JavaScript,
+        DenoCompileModuleSource::Bytes(self.data),
+      ),
+    };
+    (self.specifier, media_type, source)
+  }
+}
+
+enum RemoteModulesStoreSpecifierValue {
+  Data(usize),
+  Redirect(Url),
+}
+
+pub struct RemoteModulesStore {
+  specifiers: HashMap<Url, RemoteModulesStoreSpecifierValue>,
+  files_data: &'static [u8],
+}
+
+impl RemoteModulesStore {
+  fn build(data: &'static [u8]) -> Result<Self, AnyError> {
+    fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> {
+      let (input, specifier) = read_string_lossy(input)?;
+      let specifier = Url::parse(&specifier)?;
+      let (input, offset) = read_u64(input)?;
+      Ok((input, (specifier, offset)))
+    }
+
+    fn read_redirect(input: &[u8]) -> Result<(&[u8], (Url, Url)), AnyError> {
+      let (input, from) = read_string_lossy(input)?;
+      let from = Url::parse(&from)?;
+      let (input, to) = read_string_lossy(input)?;
+      let to = Url::parse(&to)?;
+      Ok((input, (from, to)))
+    }
+
+    fn read_headers(
+      input: &[u8],
+    ) -> Result<(&[u8], HashMap<Url, RemoteModulesStoreSpecifierValue>), AnyError>
+    {
+      let (input, specifiers_len) = read_u32_as_usize(input)?;
+      let (mut input, redirects_len) = read_u32_as_usize(input)?;
+      let mut specifiers =
+        HashMap::with_capacity(specifiers_len + redirects_len);
+      for _ in 0..specifiers_len {
+        let (current_input, (specifier, offset)) =
+          read_specifier(input).context("reading specifier")?;
+        input = current_input;
+        specifiers.insert(
+          specifier,
+          RemoteModulesStoreSpecifierValue::Data(offset as usize),
+        );
+      }
+
+      for _ in 0..redirects_len {
+        let (current_input, (from, to)) = read_redirect(input)?;
+        input = current_input;
+        specifiers.insert(from, RemoteModulesStoreSpecifierValue::Redirect(to));
+      }
+
+      Ok((input, specifiers))
+    }
+
+    let (files_data, specifiers) = read_headers(data)?;
+
+    Ok(Self {
+      specifiers,
+      files_data,
+    })
+  }
+
+  pub fn resolve_specifier<'a>(
+    &'a self,
+    specifier: &'a Url,
+  ) -> Result<Option<&'a Url>, AnyError> {
+    let mut count = 0;
+    let mut current = specifier;
+    loop {
+      if count > 10 {
+        bail!("Too many redirects resolving '{}'", specifier);
+      }
+      match self.specifiers.get(current) {
+        Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
+          current = to;
+          count += 1;
+        }
+        Some(RemoteModulesStoreSpecifierValue::Data(_)) => {
+          return Ok(Some(current));
+        }
+        None => {
+          return Ok(None);
+        }
+      }
+    }
+  }
+
+  pub fn read<'a>(
+    &'a self,
+    original_specifier: &'a Url,
+  ) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
+    let mut count = 0;
+    let mut specifier = original_specifier;
+    loop {
+      if count > 10 {
+        bail!("Too many redirects resolving '{}'", original_specifier);
+      }
+      match self.specifiers.get(specifier) {
+        Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
+          specifier = to;
+          count += 1;
+        }
+        Some(RemoteModulesStoreSpecifierValue::Data(offset)) => {
+          let input = &self.files_data[*offset..];
+          let (input, media_type_byte) = read_bytes(input, 1)?;
+          let media_type = deserialize_media_type(media_type_byte[0])?;
+          let (input, len) = read_u64(input)?;
+          let (_input, data) = read_bytes(input, len as usize)?;
+          return Ok(Some(DenoCompileModuleData {
+            specifier,
+            media_type,
+            data: Cow::Borrowed(data),
+          }));
+        }
+        None => {
+          return Ok(None);
+        }
+      }
+    }
+  }
+}
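// Illustrative sketch (editor's example, not part of this patch): both
// `resolve_specifier` and `read` walk redirects with a hard cap of 10 hops,
// so a cyclic redirect in the store cannot loop forever. The shape of that
// walk, over a plain map standing in for the specifier table:
fn demo_resolve<'a>(
  map: &std::collections::HashMap<&str, DemoEntry>,
  mut current: &'a str,
) -> Option<&'a str> {
  for _ in 0..10 {
    match map.get(current)? {
      DemoEntry::Data(_) => return Some(current), // found module bytes
      DemoEntry::Redirect(to) => current = *to,   // follow the redirect
    }
  }
  None // more than 10 hops: give up, mirroring the bail!() above
}

enum DemoEntry {
  Data(&'static [u8]),
  Redirect(&'static str),
}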
+
+fn serialize_npm_snapshot(
+  mut snapshot: SerializedNpmResolutionSnapshot,
+) -> Vec<u8> {
+  fn append_string(bytes: &mut Vec<u8>, string: &str) {
+    let len = string.len() as u32;
+    bytes.extend_from_slice(&len.to_le_bytes());
+    bytes.extend_from_slice(string.as_bytes());
+  }
+
+  snapshot.packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
+  let ids_to_stored_ids = snapshot
+    .packages
+    .iter()
+    .enumerate()
+    .map(|(i, pkg)| (&pkg.id, i as u32))
+    .collect::<HashMap<_, _>>();
+
+  let mut root_packages: Vec<_> = snapshot.root_packages.iter().collect();
+  root_packages.sort();
+  let mut bytes = Vec::new();
+
+  bytes.extend_from_slice(&(snapshot.packages.len() as u32).to_le_bytes());
+  for pkg in &snapshot.packages {
+    append_string(&mut bytes, &pkg.id.as_serialized());
+  }
+
+  bytes.extend_from_slice(&(root_packages.len() as u32).to_le_bytes());
+  for (req, id) in root_packages {
+    append_string(&mut bytes, &req.to_string());
+    let id = ids_to_stored_ids.get(&id).unwrap();
+    bytes.extend_from_slice(&id.to_le_bytes());
+  }
+
+  for pkg in &snapshot.packages {
+    let deps_len = pkg.dependencies.len() as u32;
+    bytes.extend_from_slice(&deps_len.to_le_bytes());
+    let mut deps: Vec<_> = pkg.dependencies.iter().collect();
+    deps.sort();
+    for (req, id) in deps {
+      append_string(&mut bytes, req);
+      let id = ids_to_stored_ids.get(&id).unwrap();
+      bytes.extend_from_slice(&id.to_le_bytes());
+    }
+  }
+
+  bytes
+}
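// Illustrative sketch (editor's example, not part of this patch): package
// ids are serialized once, in sorted order, and every later reference (root
// packages, dependency edges) is just the u32 index of that table entry,
// which keeps repeated ids cheap and the output deterministic:
fn demo_id_table() {
  let ids = ["package-a@1.0.0", "package-b@2.0.0"]; // already sorted
  let index_of =
    |id: &str| ids.iter().position(|i| *i == id).unwrap() as u32;
  // a root package entry is: <req string> <u32 index into the id table>
  assert_eq!(index_of("package-b@2.0.0"), 1);
}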
+
+fn deserialize_npm_snapshot(
+  input: &[u8],
+) -> Result<ValidSerializedNpmResolutionSnapshot, AnyError> {
+  fn parse_id(input: &[u8]) -> Result<(&[u8], NpmPackageId), AnyError> {
+    let (input, id) = read_string_lossy(input)?;
+    let id = NpmPackageId::from_serialized(&id)?;
+    Ok((input, id))
+  }
+
+  #[allow(clippy::needless_lifetimes)] // clippy bug
+  fn parse_root_package<'a>(
+    id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
+  ) -> impl Fn(&[u8]) -> Result<(&[u8], (PackageReq, NpmPackageId)), AnyError> + 'a
+  {
+    |input| {
+      let (input, req) = read_string_lossy(input)?;
+      let req = PackageReq::from_str(&req)?;
+      let (input, id) = read_u32_as_usize(input)?;
+      Ok((input, (req, id_to_npm_id(id)?)))
+    }
+  }
+
+  #[allow(clippy::needless_lifetimes)] // clippy bug
+  fn parse_package_dep<'a>(
+    id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
+  ) -> impl Fn(&[u8]) -> Result<(&[u8], (String, NpmPackageId)), AnyError> + 'a
+  {
+    |input| {
+      let (input, req) = read_string_lossy(input)?;
+      let (input, id) = read_u32_as_usize(input)?;
+      Ok((input, (req.into_owned(), id_to_npm_id(id)?)))
+    }
+  }
+
+  fn parse_package<'a>(
+    input: &'a [u8],
+    id: NpmPackageId,
+    id_to_npm_id: &impl Fn(usize) -> Result<NpmPackageId, AnyError>,
+  ) -> Result<(&'a [u8], SerializedNpmResolutionSnapshotPackage), AnyError> {
+    let (input, deps_len) = read_u32_as_usize(input)?;
+    let (input, dependencies) =
+      parse_hashmap_n_times(input, deps_len, parse_package_dep(id_to_npm_id))?;
+    Ok((
+      input,
+      SerializedNpmResolutionSnapshotPackage {
+        id,
+        system: Default::default(),
+        dist: Default::default(),
+        dependencies,
+        optional_dependencies: Default::default(),
+        bin: None,
+        scripts: Default::default(),
+        deprecated: Default::default(),
+      },
+    ))
+  }
+
+  let (input, packages_len) = read_u32_as_usize(input)?;
+
+  // get a hashmap of all the npm package ids to their serialized ids
+  let (input, data_ids_to_npm_ids) =
+    parse_vec_n_times(input, packages_len, parse_id)
+      .context("deserializing id")?;
+  let data_id_to_npm_id = |id: usize| {
+    data_ids_to_npm_ids
+      .get(id)
+      .cloned()
+      .ok_or_else(|| deno_core::anyhow::anyhow!("Invalid npm package id"))
+  };
+
+  let (input, root_packages_len) = read_u32_as_usize(input)?;
+  let (input, root_packages) = parse_hashmap_n_times(
+    input,
+    root_packages_len,
+    parse_root_package(&data_id_to_npm_id),
+  )
+  .context("deserializing root package")?;
+  let (input, packages) =
+    parse_vec_n_times_with_index(input, packages_len, |input, index| {
+      parse_package(input, data_id_to_npm_id(index)?, &data_id_to_npm_id)
+    })
+    .context("deserializing package")?;
+
+  if !input.is_empty() {
+    bail!("Unexpected data left over");
+  }
+
+  Ok(
+    SerializedNpmResolutionSnapshot {
+      packages,
+      root_packages,
+    }
+    // this is ok because we have already verified that all the
+    // identifiers found in the snapshot are valid via the
+    // npm package id -> npm package id mapping
+    .into_valid_unsafe(),
+  )
+}
+
+fn serialize_media_type(media_type: MediaType) -> u8 {
+  match media_type {
+    MediaType::JavaScript => 0,
+    MediaType::Jsx => 1,
+    MediaType::Mjs => 2,
+    MediaType::Cjs => 3,
+    MediaType::TypeScript => 4,
+    MediaType::Mts => 5,
+    MediaType::Cts => 6,
+    MediaType::Dts => 7,
+    MediaType::Dmts => 8,
+    MediaType::Dcts => 9,
+    MediaType::Tsx => 10,
+    MediaType::Json => 11,
+    MediaType::Wasm => 12,
+    MediaType::Css => 13,
+    MediaType::SourceMap => 14,
+    MediaType::Unknown => 15,
+  }
+}
+
+fn deserialize_media_type(value: u8) -> Result<MediaType, AnyError> {
+  match value {
+    0 => Ok(MediaType::JavaScript),
+    1 => Ok(MediaType::Jsx),
+    2 => Ok(MediaType::Mjs),
+    3 => Ok(MediaType::Cjs),
+    4 => Ok(MediaType::TypeScript),
+    5 => Ok(MediaType::Mts),
+    6 => Ok(MediaType::Cts),
+    7 => Ok(MediaType::Dts),
+    8 => Ok(MediaType::Dmts),
+    9 => Ok(MediaType::Dcts),
+    10 => Ok(MediaType::Tsx),
+    11 => Ok(MediaType::Json),
+    12 => Ok(MediaType::Wasm),
+    13 => Ok(MediaType::Css),
+    14 => Ok(MediaType::SourceMap),
+    15 => Ok(MediaType::Unknown),
+    _ => bail!("Unknown media type value: {}", value),
+  }
+}
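// Illustrative sketch (editor's example, not part of this patch): the media
// type tag is a single byte and the two functions above are inverses of each
// other; a quick property check over the 16 known tags, assuming both
// functions are in scope, would be:
#[test]
fn media_type_tag_round_trips() {
  for tag in 0u8..=15 {
    let media_type = deserialize_media_type(tag).unwrap();
    assert_eq!(serialize_media_type(media_type), tag);
  }
}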
+
+fn parse_hashmap_n_times<TKey: std::cmp::Eq + std::hash::Hash, TValue>(
+  mut input: &[u8],
+  times: usize,
+  parse: impl Fn(&[u8]) -> Result<(&[u8], (TKey, TValue)), AnyError>,
+) -> Result<(&[u8], HashMap<TKey, TValue>), AnyError> {
+  let mut results = HashMap::with_capacity(times);
+  for _ in 0..times {
+    let result = parse(input);
+    let (new_input, (key, value)) = result?;
+    results.insert(key, value);
+    input = new_input;
+  }
+  Ok((input, results))
+}
+
+fn parse_vec_n_times<TResult>(
+  input: &[u8],
+  times: usize,
+  parse: impl Fn(&[u8]) -> Result<(&[u8], TResult), AnyError>,
+) -> Result<(&[u8], Vec<TResult>), AnyError> {
+  parse_vec_n_times_with_index(input, times, |input, _index| parse(input))
+}
+
+fn parse_vec_n_times_with_index<TResult>(
+  mut input: &[u8],
+  times: usize,
+  parse: impl Fn(&[u8], usize) -> Result<(&[u8], TResult), AnyError>,
+) -> Result<(&[u8], Vec<TResult>), AnyError> {
+  let mut results = Vec::with_capacity(times);
+  for i in 0..times {
+    let result = parse(input, i);
+    let (new_input, result) = result?;
+    results.push(result);
+    input = new_input;
+  }
+  Ok((input, results))
+}
+
+fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> {
+  if input.len() < len {
+    bail!("Unexpected end of data.");
+  }
+  let (len_bytes, input) = input.split_at(len);
+  Ok((input, len_bytes))
+}
+
+fn read_string_lossy(input: &[u8]) -> Result<(&[u8], Cow<str>), AnyError> {
+  let (input, str_len) = read_u32_as_usize(input)?;
+  let (input, data_bytes) = read_bytes(input, str_len)?;
+  Ok((input, String::from_utf8_lossy(data_bytes)))
+}
+
+fn read_u32_as_usize(input: &[u8]) -> Result<(&[u8], usize), AnyError> {
+  let (input, len_bytes) = read_bytes(input, 4)?;
+  let len = u32::from_le_bytes(len_bytes.try_into()?);
+  Ok((input, len as usize))
+}
+
+fn read_u64(input: &[u8]) -> Result<(&[u8], u64), AnyError> {
+  let (input, len_bytes) = read_bytes(input, 8)?;
+  let len = u64::from_le_bytes(len_bytes.try_into()?);
+  Ok((input, len))
+}
diff --git a/cli/standalone/virtual_fs.rs b/cli/standalone/virtual_fs.rs
index 53d045b625..0ae00accbf 100644
--- a/cli/standalone/virtual_fs.rs
+++ b/cli/standalone/virtual_fs.rs
@@ -7,6 +7,7 @@ use std::fs::File;
 use std::io::Read;
 use std::io::Seek;
 use std::io::SeekFrom;
+use std::ops::Range;
 use std::path::Path;
 use std::path::PathBuf;
 use std::rc::Rc;
@@ -67,6 +68,26 @@ impl VfsBuilder {
     })
   }
 
+  pub fn set_new_root_path(
+    &mut self,
+    root_path: PathBuf,
+  ) -> Result<(), AnyError> {
+    let root_path = canonicalize_path(&root_path)?;
+    self.root_path = root_path;
+    self.root_dir = VirtualDirectory {
+      name: self
+        .root_path
+        .file_stem()
+        .map(|s| s.to_string_lossy().into_owned())
+        .unwrap_or("root".to_string()),
+      entries: vec![VfsEntry::Dir(VirtualDirectory {
+        name: std::mem::take(&mut self.root_dir.name),
+        entries: std::mem::take(&mut self.root_dir.entries),
+      })],
+    };
+    Ok(())
+  }
+
   pub fn with_root_dir<R>(
     &mut self,
     with_root: impl FnOnce(&mut VirtualDirectory) -> R,
@@ -119,7 +140,7 @@ impl VfsBuilder {
           // inline the symlink and make the target file
           let file_bytes = std::fs::read(&target)
             .with_context(|| format!("Reading {}", path.display()))?;
-          self.add_file(&path, file_bytes)?;
+          self.add_file_with_data_inner(&path, file_bytes)?;
         } else {
           log::warn!(
             "{} Symlink target is outside '{}'. Excluding symlink at '{}' with target '{}'.",
@@ -191,16 +212,32 @@ impl VfsBuilder {
     self.add_file_at_path_not_symlink(&target_path)
   }
 
-  pub fn add_file_at_path_not_symlink(
+  fn add_file_at_path_not_symlink(
     &mut self,
     path: &Path,
   ) -> Result<(), AnyError> {
     let file_bytes = std::fs::read(path)
       .with_context(|| format!("Reading {}", path.display()))?;
-    self.add_file(path, file_bytes)
+    self.add_file_with_data_inner(path, file_bytes)
   }
 
-  fn add_file(&mut self, path: &Path, data: Vec<u8>) -> Result<(), AnyError> {
+  pub fn add_file_with_data(
+    &mut self,
+    path: &Path,
+    data: Vec<u8>,
+  ) -> Result<(), AnyError> {
+    let target_path = canonicalize_path(path)?;
+    if target_path != path {
+      self.add_symlink(path, &target_path)?;
+    }
+    self.add_file_with_data_inner(&target_path, data)
+  }
+
+  fn add_file_with_data_inner(
+    &mut self,
+    path: &Path,
+    data: Vec<u8>,
+  ) -> Result<(), AnyError> {
     log::debug!("Adding file '{}'", path.display());
     let checksum = util::checksum::gen(&[&data]);
     let offset = if let Some(offset) = self.file_offsets.get(&checksum) {
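// Illustrative sketch (editor's example, not part of this patch):
// `set_new_root_path` keeps every existing entry by nesting the old root
// directory as the single child of the new root, so old paths stay
// resolvable one level deeper. With plain strings in place of
// VirtualDirectory:
fn demo_reroot() {
  struct Dir {
    name: String,
    entries: Vec<Dir>,
  }
  let mut root = Dir { name: "project".to_string(), entries: Vec::new() };
  root = Dir {
    name: "root".to_string(),
    entries: vec![Dir {
      name: std::mem::take(&mut root.name),
      entries: std::mem::take(&mut root.entries),
    }],
  };
  // the old tree is still reachable, one level deeper
  assert_eq!(root.entries[0].name, "project");
}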
@@ -249,8 +286,15 @@ impl VfsBuilder {
       path.display(),
       target.display()
     );
-    let dest = self.path_relative_root(target)?;
-    if dest == self.path_relative_root(path)? {
+    let relative_target = self.path_relative_root(target)?;
+    let relative_path = match self.path_relative_root(path) {
+      Ok(path) => path,
+      Err(StripRootError { .. }) => {
+        // ignore if the original path is outside the root directory
+        return Ok(());
+      }
+    };
+    if relative_target == relative_path {
       // it's the same, ignore
       return Ok(());
     }
@@ -263,7 +307,7 @@ impl VfsBuilder {
       insert_index,
       VfsEntry::Symlink(VirtualSymlink {
         name: name.to_string(),
-        dest_parts: dest
+        dest_parts: relative_target
           .components()
           .map(|c| c.as_os_str().to_string_lossy().to_string())
           .collect::<Vec<_>>(),
@@ -751,14 +795,14 @@ impl deno_io::fs::File for FileBackedVfsFile {
 
 #[derive(Debug)]
 pub struct FileBackedVfs {
-  file: Mutex<Vec<u8>>,
+  vfs_data: Cow<'static, [u8]>,
   fs_root: VfsRoot,
 }
 
 impl FileBackedVfs {
-  pub fn new(file: Vec<u8>, fs_root: VfsRoot) -> Self {
+  pub fn new(data: Cow<'static, [u8]>, fs_root: VfsRoot) -> Self {
     Self {
-      file: Mutex::new(file),
+      vfs_data: data,
       fs_root,
     }
   }
@@ -827,10 +871,15 @@ impl FileBackedVfs {
     Ok(path)
   }
 
-  pub fn read_file_all(&self, file: &VirtualFile) -> std::io::Result<Vec<u8>> {
-    let mut buf = vec![0; file.len as usize];
-    self.read_file(file, 0, &mut buf)?;
-    Ok(buf)
+  pub fn read_file_all(
+    &self,
+    file: &VirtualFile,
+  ) -> std::io::Result<Cow<'static, [u8]>> {
+    let read_range = self.get_read_range(file, 0, file.len)?;
+    match &self.vfs_data {
+      Cow::Borrowed(data) => Ok(Cow::Borrowed(&data[read_range])),
+      Cow::Owned(data) => Ok(Cow::Owned(data[read_range].to_vec())),
+    }
   }
 
   pub fn read_file(
@@ -839,18 +888,27 @@ impl FileBackedVfs {
     pos: u64,
     buf: &mut [u8],
   ) -> std::io::Result<usize> {
-    let data = self.file.lock();
+    let read_range = self.get_read_range(file, pos, buf.len() as u64)?;
+    buf.copy_from_slice(&self.vfs_data[read_range]);
+    Ok(buf.len())
+  }
+
+  fn get_read_range(
+    &self,
+    file: &VirtualFile,
+    pos: u64,
+    len: u64,
+  ) -> std::io::Result<Range<usize>> {
+    let data = &self.vfs_data;
     let start = self.fs_root.start_file_offset + file.offset + pos;
-    let end = start + buf.len() as u64;
+    let end = start + len;
     if end > data.len() as u64 {
       return Err(std::io::Error::new(
         std::io::ErrorKind::UnexpectedEof,
         "unexpected EOF",
       ));
     }
-
-    buf.copy_from_slice(&data[start as usize..end as usize]);
-    Ok(buf.len())
+    Ok(start as usize..end as usize)
   }
 
   pub fn dir_entry(&self, path: &Path) -> std::io::Result<&VirtualDirectory> {
@@ -888,7 +946,7 @@ mod test {
   #[track_caller]
   fn read_file(vfs: &FileBackedVfs, path: &Path) -> String {
     let file = vfs.file_entry(path).unwrap();
-    String::from_utf8(vfs.read_file_all(file).unwrap()).unwrap()
+    String::from_utf8(vfs.read_file_all(file).unwrap().into_owned()).unwrap()
   }
 
   #[test]
@@ -901,20 +959,23 @@ mod test {
     let src_path = src_path.to_path_buf();
     let mut builder = VfsBuilder::new(src_path.clone()).unwrap();
     builder
-      .add_file(&src_path.join("a.txt"), "data".into())
+      .add_file_with_data_inner(&src_path.join("a.txt"), "data".into())
       .unwrap();
     builder
-      .add_file(&src_path.join("b.txt"), "data".into())
+      .add_file_with_data_inner(&src_path.join("b.txt"), "data".into())
      .unwrap();
     assert_eq!(builder.files.len(), 1); // because duplicate data
     builder
-      .add_file(&src_path.join("c.txt"), "c".into())
+      .add_file_with_data_inner(&src_path.join("c.txt"), "c".into())
       .unwrap();
     builder
-      .add_file(&src_path.join("sub_dir").join("d.txt"), "d".into())
+      .add_file_with_data_inner(
+        &src_path.join("sub_dir").join("d.txt"),
+        "d".into(),
+      )
       .unwrap();
     builder
-      .add_file(&src_path.join("e.txt"), "e".into())
+      .add_file_with_data_inner(&src_path.join("e.txt"), "e".into())
       .unwrap();
     builder
       .add_symlink(
@@ -1031,7 +1092,7 @@ mod test {
     (
       dest_path.to_path_buf(),
       FileBackedVfs::new(
-        data,
+        Cow::Owned(data),
         VfsRoot {
           dir: root_dir,
root_path: dest_path.to_path_buf(), @@ -1082,7 +1143,7 @@ mod test { let temp_path = temp_dir.path().canonicalize(); let mut builder = VfsBuilder::new(temp_path.to_path_buf()).unwrap(); builder - .add_file( + .add_file_with_data_inner( temp_path.join("a.txt").as_path(), "0123456789".to_string().into_bytes(), ) diff --git a/cli/task_runner.rs b/cli/task_runner.rs index 418043b23f..43840e868d 100644 --- a/cli/task_runner.rs +++ b/cli/task_runner.rs @@ -155,6 +155,12 @@ fn prepare_env_vars( initial_cwd.to_string_lossy().to_string(), ); } + if !env_vars.contains_key(crate::npm::NPM_CONFIG_USER_AGENT_ENV_VAR) { + env_vars.insert( + crate::npm::NPM_CONFIG_USER_AGENT_ENV_VAR.into(), + crate::npm::get_npm_config_user_agent(), + ); + } if let Some(node_modules_dir) = node_modules_dir { prepend_to_path( &mut env_vars, @@ -204,7 +210,7 @@ impl ShellCommand for NpmCommand { mut context: ShellCommandContext, ) -> LocalBoxFuture<'static, ExecuteResult> { if context.args.first().map(|s| s.as_str()) == Some("run") - && context.args.len() > 2 + && context.args.len() >= 2 // for now, don't run any npm scripts that have a flag because // we don't handle stuff like `--workspaces` properly && !context.args.iter().any(|s| s.starts_with('-')) @@ -267,10 +273,12 @@ impl ShellCommand for NodeCommand { ) .execute(context); } + args.extend(["run", "-A"].into_iter().map(|s| s.to_string())); args.extend(context.args.iter().cloned()); let mut state = context.state; + state.apply_env_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME, "1"); ExecutableCommand::new("deno".to_string(), std::env::current_exe().unwrap()) .execute(ShellCommandContext { diff --git a/cli/tools/bench/mod.rs b/cli/tools/bench/mod.rs index be5d0ad0e1..272d063355 100644 --- a/cli/tools/bench/mod.rs +++ b/cli/tools/bench/mod.rs @@ -193,7 +193,7 @@ async fn bench_specifier_inner( .await?; // We execute the main module as a side module so that import.meta.main is not set. 
- worker.execute_side_module_possibly_with_npm().await?; + worker.execute_side_module().await?; let mut worker = worker.into_main_worker(); diff --git a/cli/tools/check.rs b/cli/tools/check.rs index 7edb392d48..d880278884 100644 --- a/cli/tools/check.rs +++ b/cli/tools/check.rs @@ -32,6 +32,7 @@ use crate::graph_util::ModuleGraphBuilder; use crate::npm::CliNpmResolver; use crate::tsc; use crate::tsc::Diagnostics; +use crate::tsc::TypeCheckingCjsTracker; use crate::util::extract; use crate::util::path::to_percent_decoded_str; @@ -99,6 +100,7 @@ pub struct CheckOptions { pub struct TypeChecker { caches: Arc, + cjs_tracker: Arc, cli_options: Arc, module_graph_builder: Arc, node_resolver: Arc, @@ -108,6 +110,7 @@ pub struct TypeChecker { impl TypeChecker { pub fn new( caches: Arc, + cjs_tracker: Arc, cli_options: Arc, module_graph_builder: Arc, node_resolver: Arc, @@ -115,6 +118,7 @@ impl TypeChecker { ) -> Self { Self { caches, + cjs_tracker, cli_options, module_graph_builder, node_resolver, @@ -244,6 +248,7 @@ impl TypeChecker { graph: graph.clone(), hash_data, maybe_npm: Some(tsc::RequestNpmState { + cjs_tracker: self.cjs_tracker.clone(), node_resolver: self.node_resolver.clone(), npm_resolver: self.npm_resolver.clone(), }), @@ -346,7 +351,7 @@ fn get_check_hash( } } MediaType::Json - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap | MediaType::Wasm | MediaType::Unknown => continue, @@ -428,7 +433,7 @@ fn get_tsc_roots( } MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap | MediaType::Unknown => None, }, @@ -536,7 +541,7 @@ fn has_ts_check(media_type: MediaType, file_text: &str) -> bool { | MediaType::Tsx | MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::SourceMap | MediaType::Unknown => false, } diff --git a/cli/tools/compile.rs b/cli/tools/compile.rs index 3cc4414fcb..b3e9993379 100644 --- a/cli/tools/compile.rs +++ b/cli/tools/compile.rs @@ -5,6 +5,7 @@ use crate::args::CompileFlags; use crate::args::Flags; use crate::factory::CliFactory; use crate::http_util::HttpClientProvider; +use crate::standalone::binary::StandaloneRelativeFileBaseUrl; use crate::standalone::is_standalone_binary; use deno_ast::ModuleSpecifier; use deno_core::anyhow::bail; @@ -14,7 +15,6 @@ use deno_core::error::AnyError; use deno_core::resolve_url_or_path; use deno_graph::GraphKind; use deno_terminal::colors; -use eszip::EszipRelativeFileBaseUrl; use rand::Rng; use std::path::Path; use std::path::PathBuf; @@ -29,7 +29,6 @@ pub async fn compile( let factory = CliFactory::from_flags(flags); let cli_options = factory.cli_options()?; let module_graph_creator = factory.module_graph_creator().await?; - let parsed_source_cache = factory.parsed_source_cache(); let binary_writer = factory.create_compile_binary_writer().await?; let http_client = factory.http_client_provider(); let module_specifier = cli_options.resolve_main_module()?; @@ -54,16 +53,6 @@ pub async fn compile( ); } - if cli_options.unstable_detect_cjs() { - log::warn!( - concat!( - "{} --unstable-detect-cjs is not properly supported in deno compile. 
", - "The compiled executable may encounter runtime errors.", - ), - crate::colors::yellow("Warning"), - ); - } - let output_path = resolve_compile_executable_output_path( http_client, &compile_flags, @@ -80,7 +69,7 @@ pub async fn compile( let graph = if cli_options.type_check_mode().is_true() { // In this case, the previous graph creation did type checking, which will // create a module graph with types information in it. We don't want to - // store that in the eszip so create a code only module graph from scratch. + // store that in the binary so create a code only module graph from scratch. module_graph_creator .create_graph(GraphKind::CodeOnly, module_roots) .await? @@ -91,11 +80,6 @@ pub async fn compile( let ts_config_for_emit = cli_options .resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?; check_warn_tsconfig(&ts_config_for_emit); - let (transpile_options, emit_options) = - crate::args::ts_config_to_transpile_and_emit_options( - ts_config_for_emit.ts_config, - )?; - let parser = parsed_source_cache.as_capturing_parser(); let root_dir_url = resolve_root_dir_from_specifiers( cli_options.workspace().root_dir(), graph.specifiers().map(|(s, _)| s).chain( @@ -106,17 +90,6 @@ pub async fn compile( ), ); log::debug!("Binary root dir: {}", root_dir_url); - let root_dir_url = EszipRelativeFileBaseUrl::new(&root_dir_url); - let eszip = eszip::EszipV2::from_graph(eszip::FromGraphOptions { - graph, - parser, - transpile_options, - emit_options, - // make all the modules relative to the root folder - relative_file_base: Some(root_dir_url), - npm_packages: None, - })?; - log::info!( "{} {} to {}", colors::green("Compile"), @@ -143,15 +116,18 @@ pub async fn compile( let write_result = binary_writer .write_bin( file, - eszip, - root_dir_url, + &graph, + StandaloneRelativeFileBaseUrl::from(&root_dir_url), module_specifier, &compile_flags, cli_options, ) .await .with_context(|| { - format!("Writing temporary file '{}'", temp_path.display()) + format!( + "Writing deno compile executable to temporary file '{}'", + temp_path.display() + ) }); // set it as executable diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index 260c0c8424..f593332475 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -6,12 +6,12 @@ use crate::args::FileFlags; use crate::args::Flags; use crate::cdp; use crate::factory::CliFactory; -use crate::npm::CliNpmResolver; use crate::tools::fmt::format_json; use crate::tools::test::is_supported_test_path; use crate::util::text_encoding::source_map_from_code; use deno_ast::MediaType; +use deno_ast::ModuleKind; use deno_ast::ModuleSpecifier; use deno_config::glob::FileCollector; use deno_config::glob::FilePatterns; @@ -25,6 +25,7 @@ use deno_core::serde_json; use deno_core::sourcemap::SourceMap; use deno_core::url::Url; use deno_core::LocalInspectorSession; +use node_resolver::InNpmPackageChecker; use regex::Regex; use std::fs; use std::fs::File; @@ -327,6 +328,7 @@ fn generate_coverage_report( coverage_report.found_lines = if let Some(source_map) = maybe_source_map.as_ref() { + let script_source_lines = script_source.lines().collect::>(); let mut found_lines = line_counts .iter() .enumerate() @@ -334,7 +336,23 @@ fn generate_coverage_report( // get all the mappings from this destination line to a different src line let mut results = source_map .tokens() - .filter(move |token| token.get_dst_line() as usize == index) + .filter(|token| { + let dst_line = token.get_dst_line() as usize; + dst_line == index && { + let dst_col = 
token.get_dst_col() as usize; + let content = script_source_lines + .get(dst_line) + .and_then(|line| { + line.get(dst_col..std::cmp::min(dst_col + 2, line.len())) + }) + .unwrap_or(""); + + !content.is_empty() + && content != "/*" + && content != "*/" + && content != "//" + } + }) .map(move |token| (token.get_src_line() as usize, *count)) .collect::>(); // only keep the results that point at different src lines @@ -444,7 +462,7 @@ fn filter_coverages( coverages: Vec, include: Vec, exclude: Vec, - npm_resolver: &dyn CliNpmResolver, + in_npm_pkg_checker: &dyn InNpmPackageChecker, ) -> Vec { let include: Vec = include.iter().map(|e| Regex::new(e).unwrap()).collect(); @@ -468,7 +486,7 @@ fn filter_coverages( || doc_test_re.is_match(e.url.as_str()) || Url::parse(&e.url) .ok() - .map(|url| npm_resolver.in_npm_package(&url)) + .map(|url| in_npm_pkg_checker.in_npm_package(&url)) .unwrap_or(false); let is_included = include.iter().any(|p| p.is_match(&e.url)); @@ -479,7 +497,7 @@ fn filter_coverages( .collect::>() } -pub async fn cover_files( +pub fn cover_files( flags: Arc, coverage_flags: CoverageFlags, ) -> Result<(), AnyError> { @@ -489,9 +507,10 @@ pub async fn cover_files( let factory = CliFactory::from_flags(flags); let cli_options = factory.cli_options()?; - let npm_resolver = factory.npm_resolver().await?; + let in_npm_pkg_checker = factory.in_npm_pkg_checker()?; let file_fetcher = factory.file_fetcher()?; let emitter = factory.emitter()?; + let cjs_tracker = factory.cjs_tracker()?; assert!(!coverage_flags.files.include.is_empty()); @@ -511,7 +530,7 @@ pub async fn cover_files( script_coverages, coverage_flags.include, coverage_flags.exclude, - npm_resolver.as_ref(), + in_npm_pkg_checker.as_ref(), ); if script_coverages.is_empty() { return Err(generic_error("No covered files included in the report")); @@ -568,16 +587,21 @@ pub async fn cover_files( let transpiled_code = match file.media_type { MediaType::JavaScript | MediaType::Unknown + | MediaType::Css + | MediaType::Wasm | MediaType::Cjs | MediaType::Mjs | MediaType::Json => None, - MediaType::Dts | MediaType::Dmts | MediaType::Dcts => Some(Vec::new()), + MediaType::Dts | MediaType::Dmts | MediaType::Dcts => Some(String::new()), MediaType::TypeScript | MediaType::Jsx | MediaType::Mts | MediaType::Cts | MediaType::Tsx => { - Some(match emitter.maybe_cached_emit(&file.specifier, &file.source) { + let module_kind = ModuleKind::from_is_cjs( + cjs_tracker.is_maybe_cjs(&file.specifier, file.media_type)?, + ); + Some(match emitter.maybe_cached_emit(&file.specifier, module_kind, &file.source) { Some(code) => code, None => { return Err(anyhow!( @@ -588,13 +612,12 @@ pub async fn cover_files( } }) } - MediaType::Wasm | MediaType::TsBuildInfo | MediaType::SourceMap => { + MediaType::SourceMap => { unreachable!() } }; let runtime_code: String = match transpiled_code { - Some(code) => String::from_utf8(code) - .with_context(|| format!("Failed decoding {}", file.specifier))?, + Some(code) => code, None => original_source.to_string(), }; diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs index 5e18546a28..e33da4efb2 100644 --- a/cli/tools/doc.rs +++ b/cli/tools/doc.rs @@ -22,9 +22,9 @@ use deno_core::serde_json; use deno_doc as doc; use deno_doc::html::UrlResolveKind; use deno_graph::source::NullFileSystem; +use deno_graph::EsParser; use deno_graph::GraphKind; use deno_graph::ModuleAnalyzer; -use deno_graph::ModuleParser; use deno_graph::ModuleSpecifier; use doc::html::ShortPath; use doc::DocDiagnostic; @@ -37,7 +37,7 @@ const JSON_SCHEMA_VERSION: u8 = 
1; async fn generate_doc_nodes_for_builtin_types( doc_flags: DocFlags, - parser: &dyn ModuleParser, + parser: &dyn EsParser, analyzer: &dyn ModuleAnalyzer, ) -> Result>, AnyError> { let source_file_specifier = @@ -96,7 +96,7 @@ pub async fn doc( let module_info_cache = factory.module_info_cache()?; let parsed_source_cache = factory.parsed_source_cache(); let capturing_parser = parsed_source_cache.as_capturing_parser(); - let analyzer = module_info_cache.as_module_analyzer(parsed_source_cache); + let analyzer = module_info_cache.as_module_analyzer(); let doc_nodes_by_url = match doc_flags.source_files { DocSourceFileFlag::Builtin => { diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index 8a4bc4e6c8..f7f8dabc6f 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -353,6 +353,21 @@ fn format_yaml( file_text: &str, fmt_options: &FmtOptionsConfig, ) -> Result, AnyError> { + let ignore_file = file_text + .lines() + .take_while(|line| line.starts_with('#')) + .any(|line| { + line + .strip_prefix('#') + .unwrap() + .trim() + .starts_with("deno-fmt-ignore-file") + }); + + if ignore_file { + return Ok(None); + } + let formatted_str = pretty_yaml::format_text(file_text, &get_resolved_yaml_config(fmt_options)) .map_err(AnyError::from)?; @@ -978,6 +993,7 @@ fn get_resolved_malva_config( single_line_top_level_declarations: false, selector_override_comment_directive: "deno-fmt-selector-override".into(), ignore_comment_directive: "deno-fmt-ignore".into(), + ignore_file_comment_directive: "deno-fmt-ignore-file".into(), }; FormatOptions { @@ -1016,7 +1032,7 @@ fn get_resolved_markup_fmt_config( max_attrs_per_line: None, prefer_attrs_single_line: false, html_normal_self_closing: None, - html_void_self_closing: Some(true), + html_void_self_closing: None, component_self_closing: None, svg_self_closing: None, mathml_self_closing: None, @@ -1036,6 +1052,7 @@ fn get_resolved_markup_fmt_config( svelte_directive_shorthand: Some(true), astro_attr_shorthand: Some(true), ignore_comment_directive: "deno-fmt-ignore".into(), + ignore_file_comment_directive: "deno-fmt-ignore-file".into(), }; FormatOptions { diff --git a/cli/tools/info.rs b/cli/tools/info.rs index 3febaff579..c2f5a8cb8d 100644 --- a/cli/tools/info.rs +++ b/cli/tools/info.rs @@ -530,7 +530,7 @@ impl<'a> GraphDisplayContext<'a> { fn build_module_info(&mut self, module: &Module, type_dep: bool) -> TreeNode { enum PackageOrSpecifier { - Package(NpmResolutionPackage), + Package(Box), Specifier(ModuleSpecifier), } @@ -538,7 +538,7 @@ impl<'a> GraphDisplayContext<'a> { let package_or_specifier = match module.npm() { Some(npm) => match self.npm_info.resolve_package(npm.nv_reference.nv()) { - Some(package) => Package(package.clone()), + Some(package) => Package(Box::new(package.clone())), None => Specifier(module.specifier().clone()), // should never happen }, None => Specifier(module.specifier().clone()), @@ -645,10 +645,12 @@ impl<'a> GraphDisplayContext<'a> { let message = match err { HttpsChecksumIntegrity(_) => "(checksum integrity error)", Decode(_) => "(loading decode error)", - Loader(err) => match deno_core::error::get_custom_error_class(err) { - Some("NotCapable") => "(not capable, requires --allow-import)", - _ => "(loading error)", - }, + Loader(err) => { + match deno_runtime::errors::get_error_class_name(err) { + Some("NotCapable") => "(not capable, requires --allow-import)", + _ => "(loading error)", + } + } Jsr(_) => "(loading error)", NodeUnknownBuiltinModule(_) => "(unknown node built-in error)", Npm(_) => "(npm loading error)", diff --git 
a/cli/tools/init/mod.rs b/cli/tools/init/mod.rs index 2d6a894e13..4e4a686c5f 100644 --- a/cli/tools/init/mod.rs +++ b/cli/tools/init/mod.rs @@ -24,32 +24,29 @@ pub fn init_project(init_flags: InitFlags) -> Result<(), AnyError> { create_file( &dir, "main.ts", - r#"import { type Route, route, serveDir } from "@std/http"; + r#"import { serveDir } from "@std/http"; -const routes: Route[] = [ - { - pattern: new URLPattern({ pathname: "/" }), - handler: () => new Response("Home page"), - }, - { - pattern: new URLPattern({ pathname: "/users/:id" }), - handler: (_req, _info, params) => new Response(params?.pathname.groups.id), - }, - { - pattern: new URLPattern({ pathname: "/static/*" }), - handler: (req) => serveDir(req), - }, -]; - -function defaultHandler(_req: Request) { - return new Response("Not found", { status: 404 }); -} - -const handler = route(routes, defaultHandler); +const userPagePattern = new URLPattern({ pathname: "/users/:id" }); +const staticPathPattern = new URLPattern({ pathname: "/static/*" }); export default { fetch(req) { - return handler(req); + const url = new URL(req.url); + + if (url.pathname === "/") { + return new Response("Home page"); + } + + const userPageMatch = userPagePattern.exec(url); + if (userPageMatch) { + return new Response(userPageMatch.pathname.groups.id); + } + + if (staticPathPattern.test(url)) { + return serveDir(req); + } + + return new Response("Not found", { status: 404 }); }, } satisfies Deno.ServeDefaultExport; "#, diff --git a/cli/tools/registry/pm.rs b/cli/tools/registry/pm.rs index 02731d3039..68913e2591 100644 --- a/cli/tools/registry/pm.rs +++ b/cli/tools/registry/pm.rs @@ -1,32 +1,25 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -mod cache_deps; - -pub use cache_deps::cache_top_level_deps; -use deno_semver::jsr::JsrPackageReqReference; -use deno_semver::npm::NpmPackageReqReference; -use deno_semver::VersionReq; - -use std::borrow::Cow; +use std::path::Path; use std::path::PathBuf; use std::sync::Arc; -use deno_ast::TextChange; -use deno_config::deno_json::FmtOptionsConfig; -use deno_core::anyhow::anyhow; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures::FutureExt; use deno_core::futures::StreamExt; -use deno_core::serde_json; -use deno_core::ModuleSpecifier; -use deno_runtime::deno_node; +use deno_path_util::url_to_file_path; +use deno_semver::jsr::JsrPackageReqReference; +use deno_semver::npm::NpmPackageReqReference; +use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; -use indexmap::IndexMap; -use jsonc_parser::ast::ObjectProp; -use jsonc_parser::ast::Value; -use yoke::Yoke; +use deno_semver::Version; +use deno_semver::VersionReq; +use jsonc_parser::cst::CstObject; +use jsonc_parser::cst::CstObjectProp; +use jsonc_parser::cst::CstRootNode; +use jsonc_parser::json; use crate::args::AddFlags; use crate::args::CacheSetting; @@ -38,236 +31,181 @@ use crate::file_fetcher::FileFetcher; use crate::jsr::JsrFetchResolver; use crate::npm::NpmFetchResolver; -enum DenoConfigFormat { - Json, - Jsonc, +mod cache_deps; + +pub use cache_deps::cache_top_level_deps; + +#[derive(Debug, Copy, Clone)] +enum ConfigKind { + DenoJson, + PackageJson, } -impl DenoConfigFormat { - fn from_specifier(spec: &ModuleSpecifier) -> Result { - let file_name = spec - .path_segments() - .ok_or_else(|| anyhow!("Empty path in deno config specifier: {spec}"))? 
- .last() - .unwrap(); - match file_name { - "deno.json" => Ok(Self::Json), - "deno.jsonc" => Ok(Self::Jsonc), - _ => bail!("Unsupported deno config file: {file_name}"), - } - } -} - -struct DenoConfig { - config: Arc, - format: DenoConfigFormat, - imports: IndexMap, -} - -fn deno_json_imports( - config: &deno_config::deno_json::ConfigFile, -) -> Result, AnyError> { - Ok( - config - .json - .imports - .clone() - .map(|imports| { - serde_json::from_value(imports) - .map_err(|err| anyhow!("Malformed \"imports\" configuration: {err}")) - }) - .transpose()? - .unwrap_or_default(), - ) -} -impl DenoConfig { - fn from_options(options: &CliOptions) -> Result, AnyError> { - let start_dir = &options.start_dir; - if let Some(config) = start_dir.maybe_deno_json() { - Ok(Some(Self { - imports: deno_json_imports(config)?, - config: config.clone(), - format: DenoConfigFormat::from_specifier(&config.specifier)?, - })) - } else { - Ok(None) - } - } - - fn add(&mut self, selected: SelectedPackage) { - self.imports.insert( - selected.import_name, - format!("{}@{}", selected.package_name, selected.version_req), - ); - } - - fn remove(&mut self, package: &str) -> bool { - self.imports.shift_remove(package).is_some() - } - - fn take_import_fields( - &mut self, - ) -> Vec<(&'static str, IndexMap)> { - vec![("imports", std::mem::take(&mut self.imports))] - } -} - -impl NpmConfig { - fn from_options(options: &CliOptions) -> Result, AnyError> { - let start_dir = &options.start_dir; - if let Some(pkg_json) = start_dir.maybe_pkg_json() { - Ok(Some(Self { - dependencies: pkg_json.dependencies.clone().unwrap_or_default(), - dev_dependencies: pkg_json.dev_dependencies.clone().unwrap_or_default(), - config: pkg_json.clone(), - fmt_options: None, - })) - } else { - Ok(None) - } - } - - fn add(&mut self, selected: SelectedPackage, dev: bool) { - let (name, version) = package_json_dependency_entry(selected); - if dev { - self.dependencies.swap_remove(&name); - self.dev_dependencies.insert(name, version); - } else { - self.dev_dependencies.swap_remove(&name); - self.dependencies.insert(name, version); - } - } - - fn remove(&mut self, package: &str) -> bool { - let in_deps = self.dependencies.shift_remove(package).is_some(); - let in_dev_deps = self.dev_dependencies.shift_remove(package).is_some(); - in_deps || in_dev_deps - } - - fn take_import_fields( - &mut self, - ) -> Vec<(&'static str, IndexMap)> { - vec![ - ("dependencies", std::mem::take(&mut self.dependencies)), - ( - "devDependencies", - std::mem::take(&mut self.dev_dependencies), - ), - ] - } -} - -struct NpmConfig { - config: Arc, - fmt_options: Option, - dependencies: IndexMap, - dev_dependencies: IndexMap, -} - -enum DenoOrPackageJson { - Deno(DenoConfig), - Npm(NpmConfig), -} - -impl From for DenoOrPackageJson { - fn from(config: DenoConfig) -> Self { - Self::Deno(config) - } -} - -impl From for DenoOrPackageJson { - fn from(config: NpmConfig) -> Self { - Self::Npm(config) - } -} - -/// Wrapper around `jsonc_parser::ast::Object` that can be stored in a `Yoke` -#[derive(yoke::Yokeable)] -struct JsoncObjectView<'a>(jsonc_parser::ast::Object<'a>); - struct ConfigUpdater { - config: DenoOrPackageJson, - // the `Yoke` is so we can carry the parsed object (which borrows from - // the source) along with the source itself - ast: Yoke, String>, + kind: ConfigKind, + cst: CstRootNode, + root_object: CstObject, path: PathBuf, modified: bool, } impl ConfigUpdater { - fn obj(&self) -> &jsonc_parser::ast::Object<'_> { - &self.ast.get().0 - } - fn contents(&self) -> &str { - 
self.ast.backing_cart() - } - async fn maybe_new( - config: Option>, - ) -> Result, AnyError> { - if let Some(config) = config { - Ok(Some(Self::new(config.into()).await?)) - } else { - Ok(None) - } - } - async fn new(config: DenoOrPackageJson) -> Result { - let specifier = config.specifier(); - if specifier.scheme() != "file" { - bail!("Can't update a remote configuration file"); - } - let config_file_path = specifier.to_file_path().map_err(|_| { - anyhow!("Specifier {specifier:?} is an invalid file path") - })?; - let config_file_contents = { - let contents = tokio::fs::read_to_string(&config_file_path) - .await - .with_context(|| { - format!("Reading config file at: {}", config_file_path.display()) - })?; - if contents.trim().is_empty() { - "{}\n".into() - } else { - contents - } - }; - let ast = Yoke::try_attach_to_cart(config_file_contents, |contents| { - let ast = jsonc_parser::parse_to_ast( - contents, - &Default::default(), - &Default::default(), - ) + fn new( + kind: ConfigKind, + config_file_path: PathBuf, + ) -> Result { + let config_file_contents = std::fs::read_to_string(&config_file_path) .with_context(|| { - format!("Failed to parse config file at {}", specifier) + format!("Reading config file '{}'", config_file_path.display()) })?; - let obj = match ast.value { - Some(Value::Object(obj)) => obj, - _ => bail!( - "Failed to update config file at {}, expected an object", - specifier - ), - }; - Ok(JsoncObjectView(obj)) - })?; + let cst = CstRootNode::parse(&config_file_contents, &Default::default()) + .with_context(|| { + format!("Parsing config file '{}'", config_file_path.display()) + })?; + let root_object = cst.object_value_or_set(); Ok(Self { - config, - ast, + kind, + cst, + root_object, path: config_file_path, modified: false, }) } + fn display_path(&self) -> String { + deno_path_util::url_from_file_path(&self.path) + .map(|u| u.to_string()) + .unwrap_or_else(|_| self.path.display().to_string()) + } + + fn obj(&self) -> &CstObject { + &self.root_object + } + + fn contents(&self) -> String { + self.cst.to_string() + } + fn add(&mut self, selected: SelectedPackage, dev: bool) { - match &mut self.config { - DenoOrPackageJson::Deno(deno) => deno.add(selected), - DenoOrPackageJson::Npm(npm) => npm.add(selected, dev), + fn insert_index(object: &CstObject, searching_name: &str) -> usize { + object + .properties() + .into_iter() + .take_while(|prop| { + let prop_name = + prop.name().and_then(|name| name.decoded_value().ok()); + match prop_name { + Some(current_name) => { + searching_name.cmp(¤t_name) == std::cmp::Ordering::Greater + } + None => true, + } + }) + .count() } + + match self.kind { + ConfigKind::DenoJson => { + let imports = self.root_object.object_value_or_set("imports"); + let value = + format!("{}@{}", selected.package_name, selected.version_req); + if let Some(prop) = imports.get(&selected.import_name) { + prop.set_value(json!(value)); + } else { + let index = insert_index(&imports, &selected.import_name); + imports.insert(index, &selected.import_name, json!(value)); + } + } + ConfigKind::PackageJson => { + let deps_prop = self.root_object.get("dependencies"); + let dev_deps_prop = self.root_object.get("devDependencies"); + + let dependencies = if dev { + self + .root_object + .object_value("devDependencies") + .unwrap_or_else(|| { + let index = deps_prop + .as_ref() + .map(|p| p.property_index() + 1) + .unwrap_or_else(|| self.root_object.properties().len()); + self + .root_object + .insert(index, "devDependencies", json!({})) + .object_value_or_set() + }) + } 
else { + self + .root_object + .object_value("dependencies") + .unwrap_or_else(|| { + let index = dev_deps_prop + .as_ref() + .map(|p| p.property_index()) + .unwrap_or_else(|| self.root_object.properties().len()); + self + .root_object + .insert(index, "dependencies", json!({})) + .object_value_or_set() + }) + }; + let other_dependencies = if dev { + deps_prop.and_then(|p| p.value().and_then(|v| v.as_object())) + } else { + dev_deps_prop.and_then(|p| p.value().and_then(|v| v.as_object())) + }; + + let (alias, value) = package_json_dependency_entry(selected); + + if let Some(other) = other_dependencies { + if let Some(prop) = other.get(&alias) { + remove_prop_and_maybe_parent_prop(prop); + } + } + + if let Some(prop) = dependencies.get(&alias) { + prop.set_value(json!(value)); + } else { + let index = insert_index(&dependencies, &alias); + dependencies.insert(index, &alias, json!(value)); + } + } + } + self.modified = true; } fn remove(&mut self, package: &str) -> bool { - let removed = match &mut self.config { - DenoOrPackageJson::Deno(deno) => deno.remove(package), - DenoOrPackageJson::Npm(npm) => npm.remove(package), + let removed = match self.kind { + ConfigKind::DenoJson => { + if let Some(prop) = self + .root_object + .object_value("imports") + .and_then(|i| i.get(package)) + { + remove_prop_and_maybe_parent_prop(prop); + true + } else { + false + } + } + ConfigKind::PackageJson => { + let deps = [ + self + .root_object + .object_value("dependencies") + .and_then(|deps| deps.get(package)), + self + .root_object + .object_value("devDependencies") + .and_then(|deps| deps.get(package)), + ]; + let removed = deps.iter().any(|d| d.is_some()); + for dep in deps.into_iter().flatten() { + remove_prop_and_maybe_parent_prop(dep); + } + removed + } }; if removed { self.modified = true; @@ -275,76 +213,28 @@ impl ConfigUpdater { removed } - async fn commit(mut self) -> Result<(), AnyError> { + fn commit(&self) -> Result<(), AnyError> { if !self.modified { return Ok(()); } - let import_fields = self.config.take_import_fields(); - - let fmt_config_options = self.config.fmt_options(); - - let new_text = update_config_file_content( - self.obj(), - self.contents(), - fmt_config_options, - import_fields.into_iter().map(|(k, v)| { - ( - k, - if v.is_empty() { - None - } else { - Some(generate_imports(v.into_iter().collect())) - }, - ) - }), - self.config.file_name(), - ); - - tokio::fs::write(&self.path, new_text).await?; + let new_text = self.contents(); + std::fs::write(&self.path, new_text).with_context(|| { + format!("failed writing to '{}'", self.path.display()) + })?; Ok(()) } } -impl DenoOrPackageJson { - fn specifier(&self) -> Cow { - match self { - Self::Deno(d, ..) => Cow::Borrowed(&d.config.specifier), - Self::Npm(n, ..) => Cow::Owned(n.config.specifier()), - } - } - - fn fmt_options(&self) -> FmtOptionsConfig { - match self { - DenoOrPackageJson::Deno(deno, ..) => deno - .config - .to_fmt_config() - .ok() - .map(|f| f.options) - .unwrap_or_default(), - DenoOrPackageJson::Npm(config) => { - config.fmt_options.clone().unwrap_or_default() - } - } - } - - fn take_import_fields( - &mut self, - ) -> Vec<(&'static str, IndexMap)> { - match self { - Self::Deno(d) => d.take_import_fields(), - Self::Npm(n) => n.take_import_fields(), - } - } - - fn file_name(&self) -> &'static str { - match self { - DenoOrPackageJson::Deno(config) => match config.format { - DenoConfigFormat::Json => "deno.json", - DenoConfigFormat::Jsonc => "deno.jsonc", - }, - DenoOrPackageJson::Npm(..) 
=> "package.json", - } +fn remove_prop_and_maybe_parent_prop(prop: CstObjectProp) { + let parent = prop.parent().unwrap().as_object().unwrap(); + prop.remove(); + if parent.properties().is_empty() { + let parent_property = parent.parent().unwrap(); + let root_object = parent_property.parent().unwrap().as_object().unwrap(); + // remove the property + parent_property.remove(); + root_object.ensure_multiline(); } } @@ -401,11 +291,27 @@ impl std::fmt::Display for AddCommandName { fn load_configs( flags: &Arc, has_jsr_specifiers: impl FnOnce() -> bool, -) -> Result<(CliFactory, Option, Option), AnyError> { +) -> Result<(CliFactory, Option, Option), AnyError> +{ let cli_factory = CliFactory::from_flags(flags.clone()); let options = cli_factory.cli_options()?; - let npm_config = NpmConfig::from_options(options)?; - let (cli_factory, deno_config) = match DenoConfig::from_options(options)? { + let start_dir = &options.start_dir; + let npm_config = match start_dir.maybe_pkg_json() { + Some(pkg_json) => Some(ConfigUpdater::new( + ConfigKind::PackageJson, + pkg_json.path.clone(), + )?), + None => None, + }; + let deno_config = match start_dir.maybe_deno_json() { + Some(deno_json) => Some(ConfigUpdater::new( + ConfigKind::DenoJson, + url_to_file_path(&deno_json.specifier)?, + )?), + None => None, + }; + + let (cli_factory, deno_config) = match deno_config { Some(config) => (cli_factory, Some(config)), None if npm_config.is_some() && !has_jsr_specifiers() => { (cli_factory, None) @@ -413,11 +319,16 @@ fn load_configs( _ => { let factory = create_deno_json(flags, options)?; let options = factory.cli_options()?.clone(); + let deno_json = options + .start_dir + .maybe_deno_json() + .expect("Just created deno.json"); ( factory, - Some( - DenoConfig::from_options(&options)?.expect("Just created deno.json"), - ), + Some(ConfigUpdater::new( + ConfigKind::DenoJson, + url_to_file_path(&deno_json.specifier)?, + )?), ) } }; @@ -425,20 +336,26 @@ fn load_configs( Ok((cli_factory, npm_config, deno_config)) } +fn path_distance(a: &Path, b: &Path) -> usize { + let diff = pathdiff::diff_paths(a, b); + let Some(diff) = diff else { + return usize::MAX; + }; + diff.components().count() +} + pub async fn add( flags: Arc, add_flags: AddFlags, cmd_name: AddCommandName, ) -> Result<(), AnyError> { - let (cli_factory, npm_config, deno_config) = load_configs(&flags, || { - add_flags.packages.iter().any(|s| s.starts_with("jsr:")) - })?; - let mut npm_config = ConfigUpdater::maybe_new(npm_config).await?; - let mut deno_config = ConfigUpdater::maybe_new(deno_config).await?; + let (cli_factory, mut npm_config, mut deno_config) = + load_configs(&flags, || { + add_flags.packages.iter().any(|s| s.starts_with("jsr:")) + })?; if let Some(deno) = &deno_config { - let specifier = deno.config.specifier(); - if deno.obj().get_string("importMap").is_some() { + if deno.obj().get("importMap").is_some() { bail!( concat!( "`deno {}` is not supported when configuration file contains an \"importMap\" field. ", @@ -446,11 +363,26 @@ pub async fn add( " at {}", ), cmd_name, - specifier + deno.display_path(), ); } } + let start_dir = cli_factory.cli_options()?.start_dir.dir_path(); + + // only prefer to add npm deps to `package.json` if there isn't a closer deno.json. 
+ // example: if deno.json is in the CWD and package.json is in the parent, we should add + // npm deps to deno.json, since it's closer + let prefer_npm_config = match (npm_config.as_ref(), deno_config.as_ref()) { + (Some(npm), Some(deno)) => { + let npm_distance = path_distance(&npm.path, &start_dir); + let deno_distance = path_distance(&deno.path, &start_dir); + npm_distance <= deno_distance + } + (Some(_), None) => true, + (None, _) => false, + }; + let http_client = cli_factory.http_client_provider(); let deps_http_cache = cli_factory.global_http_cache()?; let mut deps_file_fetcher = FileFetcher::new( @@ -461,10 +393,14 @@ pub async fn add( Default::default(), None, ); + + let npmrc = cli_factory.cli_options().unwrap().npmrc(); + deps_file_fetcher.set_download_log_level(log::Level::Trace); let deps_file_fetcher = Arc::new(deps_file_fetcher); let jsr_resolver = Arc::new(JsrFetchResolver::new(deps_file_fetcher.clone())); - let npm_resolver = Arc::new(NpmFetchResolver::new(deps_file_fetcher)); + let npm_resolver = + Arc::new(NpmFetchResolver::new(deps_file_fetcher, npmrc.clone())); let mut selected_packages = Vec::with_capacity(add_flags.packages.len()); let mut package_reqs = Vec::with_capacity(add_flags.packages.len()); @@ -521,15 +457,32 @@ pub async fn add( match package_and_version { PackageAndVersion::NotFound { package: package_name, - found_npm_package, + help, package_req, - } => { - if found_npm_package { - bail!("{} was not found, but a matching npm package exists. Did you mean `{}`?", crate::colors::red(package_name), crate::colors::yellow(format!("deno {cmd_name} npm:{package_req}"))); - } else { - bail!("{} was not found.", crate::colors::red(package_name)); + } => match help { + Some(NotFoundHelp::NpmPackage) => { + bail!( + "{} was not found, but a matching npm package exists. Did you mean `{}`?", + crate::colors::red(package_name), + crate::colors::yellow(format!("deno {cmd_name} npm:{package_req}")) + ); } - } + Some(NotFoundHelp::JsrPackage) => { + bail!( + "{} was not found, but a matching jsr package exists. Did you mean `{}`?", + crate::colors::red(package_name), + crate::colors::yellow(format!("deno {cmd_name} jsr:{package_req}")) + ) + } + Some(NotFoundHelp::PreReleaseVersion(version)) => { + bail!( + "{} has only pre-release versions available. 
   Try specifying a version: `{}`",
+        crate::colors::red(&package_name),
+        crate::colors::yellow(format!("deno {cmd_name} {package_name}@^{version}"))
+      )
+    }
+    None => bail!("{} was not found.", crate::colors::red(package_name)),
+  },
     PackageAndVersion::Selected(selected) => {
       selected_packages.push(selected);
     }
@@ -545,7 +498,7 @@ pub async fn add(
         selected_package.selected_version
       );

-      if selected_package.package_name.starts_with("npm:") {
+      if selected_package.package_name.starts_with("npm:") && prefer_npm_config {
         if let Some(npm) = &mut npm_config {
           npm.add(selected_package, dev);
         } else {
@@ -558,18 +511,11 @@ pub async fn add(
     }
   }

-  let mut commit_futures = vec![];
   if let Some(npm) = npm_config {
-    commit_futures.push(npm.commit());
+    npm.commit()?;
   }
   if let Some(deno) = deno_config {
-    commit_futures.push(deno.commit());
-  }
-  let commit_futures =
-    deno_core::futures::future::join_all(commit_futures).await;
-
-  for result in commit_futures {
-    result.context("Failed to update configuration file")?;
+    deno.commit()?;
   }

   npm_install_after_modification(flags, Some(jsr_resolver)).await?;
@@ -584,76 +530,144 @@ struct SelectedPackage {
   selected_version: String,
 }

+enum NotFoundHelp {
+  NpmPackage,
+  JsrPackage,
+  PreReleaseVersion(Version),
+}
+
 enum PackageAndVersion {
   NotFound {
     package: String,
-    found_npm_package: bool,
     package_req: PackageReq,
+    help: Option<NotFoundHelp>,
   },
   Selected(SelectedPackage),
 }

+fn best_version<'a>(
+  versions: impl Iterator<Item = &'a Version>,
+) -> Option<&'a Version> {
+  let mut maybe_best_version: Option<&Version> = None;
+  for version in versions {
+    let is_best_version = maybe_best_version
+      .as_ref()
+      .map(|best_version| (*best_version).cmp(version).is_lt())
+      .unwrap_or(true);
+    if is_best_version {
+      maybe_best_version = Some(version);
+    }
+  }
+  maybe_best_version
+}
+
+trait PackageInfoProvider {
+  const SPECIFIER_PREFIX: &str;
+  /// The help to return if a package is found by this provider
+  const HELP: NotFoundHelp;
+  async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv>;
+  async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version>;
+}
+
+impl PackageInfoProvider for Arc<JsrFetchResolver> {
+  const HELP: NotFoundHelp = NotFoundHelp::JsrPackage;
+  const SPECIFIER_PREFIX: &str = "jsr";
+  async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> {
+    (**self).req_to_nv(req).await
+  }
+
+  async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version> {
+    let info = self.package_info(&req.name).await?;
+    best_version(
+      info
+        .versions
+        .iter()
+        .filter(|(_, version_info)| !version_info.yanked)
+        .map(|(version, _)| version),
+    )
+    .cloned()
+  }
+}
+
+impl PackageInfoProvider for Arc<NpmFetchResolver> {
+  const HELP: NotFoundHelp = NotFoundHelp::NpmPackage;
+  const SPECIFIER_PREFIX: &str = "npm";
+  async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> {
+    (**self).req_to_nv(req).await
+  }
+
+  async fn latest_version<'a>(&self, req: &PackageReq) -> Option<Version> {
+    let info = self.package_info(&req.name).await?;
+    best_version(info.versions.keys()).cloned()
+  }
+}
+
 async fn find_package_and_select_version_for_req(
   jsr_resolver: Arc<JsrFetchResolver>,
   npm_resolver: Arc<NpmFetchResolver>,
   add_package_req: AddRmPackageReq,
 ) -> Result<PackageAndVersion, AnyError> {
-  match add_package_req.value {
-    AddRmPackageReqValue::Jsr(req) => {
-      let jsr_prefixed_name = format!("jsr:{}", &req.name);
-      let Some(nv) = jsr_resolver.req_to_nv(&req).await else {
-        if npm_resolver.req_to_nv(&req).await.is_some() {
+  async fn select<T: PackageInfoProvider, S: PackageInfoProvider>(
+    main_resolver: T,
+    fallback_resolver: S,
+    add_package_req: AddRmPackageReq,
+  ) -> Result<PackageAndVersion, AnyError> {
+    let req = match &add_package_req.value {
+      AddRmPackageReqValue::Jsr(req) => req,
+      AddRmPackageReqValue::Npm(req) => req,
+    };
+    let prefixed_name = format!("{}:{}", T::SPECIFIER_PREFIX, req.name);
+    let help_if_found_in_fallback = S::HELP;
+    let Some(nv) = main_resolver.req_to_nv(req).await else {
+      if fallback_resolver.req_to_nv(req).await.is_some() {
+        // it's in the other registry
+        return Ok(PackageAndVersion::NotFound {
+          package: prefixed_name,
+          help: Some(help_if_found_in_fallback),
+          package_req: req.clone(),
+        });
+      }
+      if req.version_req.version_text() == "*" {
+        if let Some(pre_release_version) =
+          main_resolver.latest_version(req).await
+        {
           return Ok(PackageAndVersion::NotFound {
-            package: jsr_prefixed_name,
-            found_npm_package: true,
-            package_req: req,
+            package: prefixed_name,
+            package_req: req.clone(),
+            help: Some(NotFoundHelp::PreReleaseVersion(
+              pre_release_version.clone(),
+            )),
           });
         }
+      }

-        return Ok(PackageAndVersion::NotFound {
-          package: jsr_prefixed_name,
-          found_npm_package: false,
-          package_req: req,
-        });
-      };
-      let range_symbol = if req.version_req.version_text().starts_with('~') {
-        "~"
-      } else if req.version_req.version_text() == nv.version.to_string() {
-        ""
-      } else {
-        "^"
-      };
-      Ok(PackageAndVersion::Selected(SelectedPackage {
-        import_name: add_package_req.alias,
-        package_name: jsr_prefixed_name,
-        version_req: format!("{}{}", range_symbol, &nv.version),
-        selected_version: nv.version.to_string(),
-      }))
+      return Ok(PackageAndVersion::NotFound {
+        package: prefixed_name,
+        help: None,
+        package_req: req.clone(),
+      });
+    };
+    let range_symbol = if req.version_req.version_text().starts_with('~') {
+      "~"
+    } else if req.version_req.version_text() == nv.version.to_string() {
+      ""
+    } else {
+      "^"
+    };
+    Ok(PackageAndVersion::Selected(SelectedPackage {
+      import_name: add_package_req.alias,
+      package_name: prefixed_name,
+      version_req: format!("{}{}", range_symbol, &nv.version),
+      selected_version: nv.version.to_string(),
+    }))
+  }
+
+  match &add_package_req.value {
+    AddRmPackageReqValue::Jsr(_) => {
+      select(jsr_resolver, npm_resolver, add_package_req).await
     }
-    AddRmPackageReqValue::Npm(req) => {
-      let npm_prefixed_name = format!("npm:{}", &req.name);
-      let Some(nv) = npm_resolver.req_to_nv(&req).await else {
-        return Ok(PackageAndVersion::NotFound {
-          package: npm_prefixed_name,
-          found_npm_package: false,
-          package_req: req,
-        });
-      };
-
-      let range_symbol = if req.version_req.version_text().starts_with('~') {
-        "~"
-      } else if req.version_req.version_text() == nv.version.to_string() {
-        ""
-      } else {
-        "^"
-      };
-
-      Ok(PackageAndVersion::Selected(SelectedPackage {
-        import_name: add_package_req.alias,
-        package_name: npm_prefixed_name,
-        version_req: format!("{}{}", range_symbol, &nv.version),
-        selected_version: nv.version.to_string(),
-      }))
+    AddRmPackageReqValue::Npm(_) => {
+      select(npm_resolver, jsr_resolver, add_package_req).await
     }
   }
 }
@@ -754,33 +768,13 @@ impl AddRmPackageReq {
   }
 }

-fn generate_imports(mut packages_to_version: Vec<(String, String)>) -> String {
-  packages_to_version.sort_by(|(k1, _), (k2, _)| k1.cmp(k2));
-  let mut contents = vec![];
-  let len = packages_to_version.len();
-  for (index, (package, version)) in packages_to_version.iter().enumerate() {
-    if index == 0 {
-      contents.push(String::new()); // force a newline at the start
-    }
-    // TODO(bartlomieju): fix it, once we start support specifying version on the cli
-    contents.push(format!("\"{}\": \"{}\"", package, version));
-    if index != len - 1 {
-      contents.push(",".to_string());
-    }
-  }
-  contents.join("\n")
-}
-
 pub async fn remove(
   flags: Arc<Flags>,
remove_flags: RemoveFlags, ) -> Result<(), AnyError> { let (_, npm_config, deno_config) = load_configs(&flags, || false)?; - let mut configs = [ - ConfigUpdater::maybe_new(npm_config).await?, - ConfigUpdater::maybe_new(deno_config).await?, - ]; + let mut configs = [npm_config, deno_config]; let mut removed_packages = vec![]; @@ -817,7 +811,7 @@ pub async fn remove( log::info!("Removed {}", crate::colors::green(package)); } for config in configs.into_iter().flatten() { - config.commit().await?; + config.commit()?; } npm_install_after_modification(flags, None).await?; @@ -844,88 +838,11 @@ async fn npm_install_after_modification( // npm install cache_deps::cache_top_level_deps(&cli_factory, jsr_resolver).await?; - Ok(()) -} - -fn update_config_file_content< - I: IntoIterator)>, ->( - obj: &jsonc_parser::ast::Object, - config_file_contents: &str, - fmt_options: FmtOptionsConfig, - entries: I, - file_name: &str, -) -> String { - let mut text_changes = vec![]; - for (key, value) in entries { - match obj.properties.iter().enumerate().find_map(|(idx, k)| { - if k.name.as_str() == key { - Some((idx, k)) - } else { - None - } - }) { - Some(( - idx, - ObjectProp { - value: Value::Object(lit), - range, - .. - }, - )) => { - if let Some(value) = value { - text_changes.push(TextChange { - range: (lit.range.start + 1)..(lit.range.end - 1), - new_text: value, - }) - } else { - text_changes.push(TextChange { - // remove field entirely, making sure to - // remove the comma if it's not the last field - range: range.start..(if idx == obj.properties.len() - 1 { - range.end - } else { - obj.properties[idx + 1].range.start - }), - new_text: "".to_string(), - }) - } - } - - // need to add field - None => { - if let Some(value) = value { - let insert_position = obj.range.end - 1; - text_changes.push(TextChange { - range: insert_position..insert_position, - // NOTE(bartlomieju): adding `\n` here to force the formatter to always - // produce a config file that is multiline, like so: - // ``` - // { - // "imports": { - // "": ":@" - // } - // } - new_text: format!("\"{key}\": {{\n {value} }}"), - }) - } - } - // we verified the shape of `imports`/`dependencies` above - Some(_) => unreachable!(), - } + if let Some(lockfile) = cli_factory.cli_options()?.maybe_lockfile() { + lockfile.write_if_changed()?; } - let new_text = - deno_ast::apply_text_changes(config_file_contents, text_changes); - - crate::tools::fmt::format_json( - &PathBuf::from(file_name), - &new_text, - &fmt_options, - ) - .ok() - .map(|formatted_text| formatted_text.unwrap_or_else(|| new_text.clone())) - .unwrap_or(new_text) + Ok(()) } #[cfg(test)] diff --git a/cli/tools/registry/pm/cache_deps.rs b/cli/tools/registry/pm/cache_deps.rs index b4cd1c2532..d3c8da868c 100644 --- a/cli/tools/registry/pm/cache_deps.rs +++ b/cli/tools/registry/pm/cache_deps.rs @@ -44,7 +44,11 @@ pub async fn cache_top_level_deps( let mut seen_reqs = std::collections::HashSet::new(); - for entry in import_map.imports().entries() { + for entry in import_map.imports().entries().chain( + import_map + .scopes() + .flat_map(|scope| scope.imports.entries()), + ) { let Some(specifier) = entry.value else { continue; }; @@ -89,10 +93,6 @@ pub async fn cache_top_level_deps( while let Some(info_future) = info_futures.next().await { if let Some((specifier, info)) = info_future { - if info.export(".").is_some() { - roots.push(specifier.clone()); - continue; - } let exports = info.exports(); for (k, _) in exports { if let Ok(spec) = specifier.join(k) { diff --git a/cli/tools/registry/tar.rs 
b/cli/tools/registry/tar.rs index aca125e00b..6d1801ce69 100644 --- a/cli/tools/registry/tar.rs +++ b/cli/tools/registry/tar.rs @@ -120,7 +120,7 @@ fn resolve_content_maybe_unfurling( | MediaType::Unknown | MediaType::Json | MediaType::Wasm - | MediaType::TsBuildInfo => { + | MediaType::Css => { // not unfurlable data return Ok(data); } diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs index 484664dae4..23b0f11ac5 100644 --- a/cli/tools/repl/session.rs +++ b/cli/tools/repl/session.rs @@ -25,6 +25,7 @@ use deno_ast::swc::visit::noop_visit_type; use deno_ast::swc::visit::Visit; use deno_ast::swc::visit::VisitWith; use deno_ast::ImportsNotUsedAsValues; +use deno_ast::ModuleKind; use deno_ast::ModuleSpecifier; use deno_ast::ParseDiagnosticsError; use deno_ast::ParsedSource; @@ -641,6 +642,10 @@ impl ReplSession { jsx_fragment_factory: self.jsx.frag_factory.clone(), jsx_import_source: self.jsx.import_source.clone(), var_decl_imports: true, + verbatim_module_syntax: false, + }, + &deno_ast::TranspileModuleOptions { + module_kind: Some(ModuleKind::Esm), }, &deno_ast::EmitOptions { source_map: deno_ast::SourceMapOption::None, @@ -651,7 +656,6 @@ impl ReplSession { }, )? .into_source() - .into_string()? .text; let value = self diff --git a/cli/tools/run/hmr.rs b/cli/tools/run/hmr.rs index 6ccf8e344b..6cebedd012 100644 --- a/cli/tools/run/hmr.rs +++ b/cli/tools/run/hmr.rs @@ -1,9 +1,11 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use crate::cdp; -use crate::emit::Emitter; -use crate::util::file_watcher::WatcherCommunicator; -use crate::util::file_watcher::WatcherRestartMode; +use std::collections::HashMap; +use std::path::PathBuf; +use std::sync::Arc; + +use deno_ast::MediaType; +use deno_ast::ModuleKind; use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::futures::StreamExt; @@ -12,11 +14,14 @@ use deno_core::serde_json::{self}; use deno_core::url::Url; use deno_core::LocalInspectorSession; use deno_terminal::colors; -use std::collections::HashMap; -use std::path::PathBuf; -use std::sync::Arc; use tokio::select; +use crate::cdp; +use crate::emit::Emitter; +use crate::resolver::CjsTracker; +use crate::util::file_watcher::WatcherCommunicator; +use crate::util::file_watcher::WatcherRestartMode; + fn explain(status: &cdp::Status) -> &'static str { match status { cdp::Status::Ok => "OK", @@ -58,6 +63,7 @@ pub struct HmrRunner { session: LocalInspectorSession, watcher_communicator: Arc, script_ids: HashMap, + cjs_tracker: Arc, emitter: Arc, } @@ -139,7 +145,8 @@ impl crate::worker::HmrRunner for HmrRunner { }; let source_code = self.emitter.load_and_emit_for_hmr( - &module_url + &module_url, + ModuleKind::from_is_cjs(self.cjs_tracker.is_maybe_cjs(&module_url, MediaType::from_specifier(&module_url))?), ).await?; let mut tries = 1; @@ -172,12 +179,14 @@ impl crate::worker::HmrRunner for HmrRunner { impl HmrRunner { pub fn new( + cjs_tracker: Arc, emitter: Arc, session: LocalInspectorSession, watcher_communicator: Arc, ) -> Self { Self { session, + cjs_tracker, emitter, watcher_communicator, script_ids: HashMap::new(), diff --git a/cli/tools/run/mod.rs b/cli/tools/run/mod.rs index 152e2650bd..bebb3f5881 100644 --- a/cli/tools/run/mod.rs +++ b/cli/tools/run/mod.rs @@ -30,6 +30,16 @@ To grant permissions, set them before the script argument. 
For example:
 }
 }

+fn set_npm_user_agent() {
+  static ONCE: std::sync::Once = std::sync::Once::new();
+  ONCE.call_once(|| {
+    std::env::set_var(
+      crate::npm::NPM_CONFIG_USER_AGENT_ENV_VAR,
+      crate::npm::get_npm_config_user_agent(),
+    );
+  });
+}
+
 pub async fn run_script(
   mode: WorkerExecutionMode,
   flags: Arc<Flags>,
@@ -58,6 +68,10 @@ pub async fn run_script(

   let main_module = cli_options.resolve_main_module()?;

+  if main_module.scheme() == "npm" {
+    set_npm_user_agent();
+  }
+
   maybe_npm_install(&factory).await?;

   let worker_factory = factory.create_cli_main_worker_factory().await?;
@@ -119,6 +133,10 @@ async fn run_with_watch(
       let cli_options = factory.cli_options()?;
       let main_module = cli_options.resolve_main_module()?;

+      if main_module.scheme() == "npm" {
+        set_npm_user_agent();
+      }
+
       maybe_npm_install(&factory).await?;

       let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
diff --git a/cli/tools/serve.rs b/cli/tools/serve.rs
index 4ce1cad6f2..e3f9e94f8e 100644
--- a/cli/tools/serve.rs
+++ b/cli/tools/serve.rs
@@ -44,12 +44,15 @@ pub async fn serve(
   maybe_npm_install(&factory).await?;

   let worker_factory = factory.create_cli_main_worker_factory().await?;
-
+  let hmr = serve_flags
+    .watch
+    .map(|watch_flags| watch_flags.hmr)
+    .unwrap_or(false);
   do_serve(
     worker_factory,
     main_module.clone(),
     serve_flags.worker_count,
-    false,
+    hmr,
   )
   .await
 }
@@ -109,8 +112,6 @@ async fn do_serve(
     }
   }
   Ok(exit_code)
-
-  // main.await?
 }

 async fn run_worker(
@@ -119,7 +120,7 @@ async fn run_worker(
   main_module: ModuleSpecifier,
   hmr: bool,
 ) -> Result<i32, AnyError> {
-  let mut worker = worker_factory
+  let mut worker: crate::worker::CliMainWorker = worker_factory
     .create_main_worker(
       deno_runtime::WorkerExecutionMode::Serve {
         is_main: false,
diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs
index e81abad0b2..fa849614fa 100644
--- a/cli/tools/test/mod.rs
+++ b/cli/tools/test/mod.rs
@@ -631,7 +631,7 @@ async fn configure_main_worker(
       "Deno[Deno.internal].core.setLeakTracingEnabled(true);",
     )?;
   }
-  let res = worker.execute_side_module_possibly_with_npm().await;
+  let res = worker.execute_side_module().await;
   let mut worker = worker.into_main_worker();
   match res {
     Ok(()) => Ok(()),
diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs
index b1b09d1a6f..77a9f72b80 100644
--- a/cli/tools/upgrade.rs
+++ b/cli/tools/upgrade.rs
@@ -579,6 +579,10 @@ pub async fn upgrade(

   let output_exe_path =
     full_path_output_flag.as_ref().unwrap_or(&current_exe_path);
+
+  #[cfg(windows)]
+  kill_running_deno_lsp_processes();
+
   let output_result = if *output_exe_path == current_exe_path {
     replace_exe(&new_exe_path, output_exe_path)
   } else {
@@ -966,6 +970,34 @@ fn check_windows_access_denied_error(
   })
 }

+#[cfg(windows)]
+fn kill_running_deno_lsp_processes() {
+  // limit this to `deno lsp` invocations to avoid killing important programs someone might be running
+  let is_debug = log::log_enabled!(log::Level::Debug);
+  let get_pipe = || {
+    if is_debug {
+      std::process::Stdio::inherit()
+    } else {
+      std::process::Stdio::null()
+    }
+  };
+  let _ = Command::new("powershell.exe")
+    .args([
+      "-Command",
+      r#"Get-WmiObject Win32_Process | Where-Object {
+  $_.Name -eq 'deno.exe' -and
+  $_.CommandLine -match '^(?:\"[^\"]+\"|\S+)\s+lsp\b'
+} | ForEach-Object {
+  if ($_.Terminate()) {
+    Write-Host 'Terminated:' $_.ProcessId
+  }
+}"#,
+    ])
+    .stdout(get_pipe())
+    .stderr(get_pipe())
+    .output();
+}
+
 fn set_exe_permissions(
   current_exe_path: &Path,
   output_exe_path: &Path,
diff --git a/cli/tsc/99_main_compiler.js b/cli/tsc/99_main_compiler.js
index 719f2b9824..52c9134dad 100644 --- a/cli/tsc/99_main_compiler.js +++ b/cli/tsc/99_main_compiler.js @@ -516,7 +516,6 @@ delete Object.prototype.__proto__; /** @typedef {{ * ls: ts.LanguageService & { [k:string]: any }, * compilerOptions: ts.CompilerOptions, - * forceEnabledVerbatimModuleSyntax: boolean, * }} LanguageServiceEntry */ /** @type {{ unscoped: LanguageServiceEntry, byScope: Map }} */ const languageServiceEntries = { @@ -802,13 +801,18 @@ delete Object.prototype.__proto__; if (logDebug) { debug(`host.getScriptSnapshot("${specifier}")`); } - const sourceFile = sourceFileCache.get(specifier); - if (sourceFile) { - if (!assetScopes.has(specifier)) { - assetScopes.set(specifier, lastRequestScope); + if (specifier.startsWith(ASSETS_URL_PREFIX)) { + const sourceFile = this.getSourceFile( + specifier, + ts.ScriptTarget.ESNext, + ); + if (sourceFile) { + if (!assetScopes.has(specifier)) { + assetScopes.set(specifier, lastRequestScope); + } + // This case only occurs for assets. + return ts.ScriptSnapshot.fromString(sourceFile.text); } - // This case only occurs for assets. - return ts.ScriptSnapshot.fromString(sourceFile.text); } let sourceText = sourceTextCache.get(specifier); if (sourceText == undefined) { @@ -846,6 +850,8 @@ delete Object.prototype.__proto__; jqueryMessage, "Cannot_find_name_0_Do_you_need_to_install_type_definitions_for_jQuery_Try_npm_i_save_dev_types_Slash_2592": jqueryMessage, + "Module_0_was_resolved_to_1_but_allowArbitraryExtensions_is_not_set_6263": + "Module '{0}' was resolved to '{1}', but importing these modules is not supported.", }; })()); @@ -1026,7 +1032,7 @@ delete Object.prototype.__proto__; : ts.sortAndDeduplicateDiagnostics( checkFiles.map((s) => program.getSemanticDiagnostics(s)).flat(), )), - ].filter(filterMapDiagnostic.bind(null, false)); + ].filter(filterMapDiagnostic); // emit the tsbuildinfo file // @ts-ignore: emitBuildInfo is not exposed (https://github.com/microsoft/TypeScript/issues/49871) @@ -1041,28 +1047,11 @@ delete Object.prototype.__proto__; debug("<<< exec stop"); } - /** - * @param {boolean} isLsp - * @param {ts.Diagnostic} diagnostic - */ - function filterMapDiagnostic(isLsp, diagnostic) { + /** @param {ts.Diagnostic} diagnostic */ + function filterMapDiagnostic(diagnostic) { if (IGNORED_DIAGNOSTICS.includes(diagnostic.code)) { return false; } - if (isLsp) { - // TS1484: `...` is a type and must be imported using a type-only import when 'verbatimModuleSyntax' is enabled. - // We force-enable `verbatimModuleSyntax` in the LSP so the `type` - // modifier is used when auto-importing types. But we don't want this - // diagnostic unless it was explicitly enabled by the user. - if (diagnostic.code == 1484) { - const entry = (lastRequestScope - ? languageServiceEntries.byScope.get(lastRequestScope) - : null) ?? languageServiceEntries.unscoped; - if (entry.forceEnabledVerbatimModuleSyntax) { - return false; - } - } - } // make the diagnostic for using an `export =` in an es module a warning if (diagnostic.code === 1203) { diagnostic.category = ts.DiagnosticCategory.Warning; @@ -1159,12 +1148,10 @@ delete Object.prototype.__proto__; "strict": true, "target": "esnext", "useDefineForClassFields": true, - "verbatimModuleSyntax": true, "jsx": "react", "jsxFactory": "React.createElement", "jsxFragmentFactory": "React.Fragment", }), - forceEnabledVerbatimModuleSyntax: true, }; setLogDebug(enableDebugLogging, "TSLS"); debug("serverInit()"); @@ -1230,17 +1217,8 @@ delete Object.prototype.__proto__; const ls = oldEntry ? 
oldEntry.ls : ts.createLanguageService(host, documentRegistry); - let forceEnabledVerbatimModuleSyntax = false; - if (!config["verbatimModuleSyntax"]) { - config["verbatimModuleSyntax"] = true; - forceEnabledVerbatimModuleSyntax = true; - } const compilerOptions = lspTsConfigToCompilerOptions(config); - newByScope.set(scope, { - ls, - compilerOptions, - forceEnabledVerbatimModuleSyntax, - }); + newByScope.set(scope, { ls, compilerOptions }); languageServiceEntries.byScope.delete(scope); } for (const oldEntry of languageServiceEntries.byScope.values()) { @@ -1305,7 +1283,7 @@ delete Object.prototype.__proto__; ...ls.getSemanticDiagnostics(specifier), ...ls.getSuggestionDiagnostics(specifier), ...ls.getSyntacticDiagnostics(specifier), - ].filter(filterMapDiagnostic.bind(null, true))); + ].filter(filterMapDiagnostic)); } return respond(id, diagnosticMap); } catch (e) { @@ -1366,18 +1344,12 @@ delete Object.prototype.__proto__; "console", "Console", "ErrorConstructor", - "exports", "gc", "Global", "ImportMeta", "localStorage", - "module", - "NodeModule", - "NodeRequire", - "process", "queueMicrotask", "RequestInit", - "require", "ResponseInit", "sessionStorage", "setImmediate", diff --git a/cli/tsc/diagnostics.rs b/cli/tsc/diagnostics.rs index b0394ec177..d3795706eb 100644 --- a/cli/tsc/diagnostics.rs +++ b/cli/tsc/diagnostics.rs @@ -323,7 +323,7 @@ impl Diagnostics { // todo(dsherret): use a short lived cache to prevent parsing // source maps so often if let Ok(source_map) = - SourceMap::from_slice(&fast_check_module.source_map) + SourceMap::from_slice(fast_check_module.source_map.as_bytes()) { if let Some(start) = d.start.as_mut() { let maybe_token = source_map diff --git a/cli/tsc/dts/lib.deno.ns.d.ts b/cli/tsc/dts/lib.deno.ns.d.ts index 36592e10dc..6e0e84b687 100644 --- a/cli/tsc/dts/lib.deno.ns.d.ts +++ b/cli/tsc/dts/lib.deno.ns.d.ts @@ -556,14 +556,23 @@ declare namespace Deno { */ env?: "inherit" | boolean | string[]; - /** Specifies if the `sys` permission should be requested or revoked. - * If set to `"inherit"`, the current `sys` permission will be inherited. - * If set to `true`, the global `sys` permission will be requested. - * If set to `false`, the global `sys` permission will be revoked. + /** Specifies if the `ffi` permission should be requested or revoked. + * If set to `"inherit"`, the current `ffi` permission will be inherited. + * If set to `true`, the global `ffi` permission will be requested. + * If set to `false`, the global `ffi` permission will be revoked. * * @default {false} */ - sys?: "inherit" | boolean | string[]; + ffi?: "inherit" | boolean | Array; + + /** Specifies if the `import` permission should be requested or revoked. + * If set to `"inherit"` the current `import` permission will be inherited. + * If set to `true`, the global `import` permission will be requested. + * If set to `false`, the global `import` permission will be revoked. + * If set to `Array`, the `import` permissions will be requested with the + * specified domains. + */ + import?: "inherit" | boolean | Array; /** Specifies if the `net` permission should be requested or revoked. * if set to `"inherit"`, the current `net` permission will be inherited. @@ -638,15 +647,6 @@ declare namespace Deno { */ net?: "inherit" | boolean | string[]; - /** Specifies if the `ffi` permission should be requested or revoked. - * If set to `"inherit"`, the current `ffi` permission will be inherited. - * If set to `true`, the global `ffi` permission will be requested. 
- * If set to `false`, the global `ffi` permission will be revoked. - * - * @default {false} - */ - ffi?: "inherit" | boolean | Array; - /** Specifies if the `read` permission should be requested or revoked. * If set to `"inherit"`, the current `read` permission will be inherited. * If set to `true`, the global `read` permission will be requested. @@ -667,6 +667,15 @@ declare namespace Deno { */ run?: "inherit" | boolean | Array; + /** Specifies if the `sys` permission should be requested or revoked. + * If set to `"inherit"`, the current `sys` permission will be inherited. + * If set to `true`, the global `sys` permission will be requested. + * If set to `false`, the global `sys` permission will be revoked. + * + * @default {false} + */ + sys?: "inherit" | boolean | string[]; + /** Specifies if the `write` permission should be requested or revoked. * If set to `"inherit"`, the current `write` permission will be inherited. * If set to `true`, the global `write` permission will be requested. diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index 0e3387494a..dc7fc38f7a 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -3,8 +3,11 @@ use crate::args::TsConfig; use crate::args::TypeCheckMode; use crate::cache::FastInsecureHasher; +use crate::cache::ModuleInfoCache; use crate::node; use crate::npm::CliNpmResolver; +use crate::npm::ResolvePkgFolderFromDenoReqError; +use crate::resolver::CjsTracker; use crate::util::checksum; use crate::util::path::mapped_specifier_for_tsc; @@ -31,12 +34,13 @@ use deno_graph::GraphKind; use deno_graph::Module; use deno_graph::ModuleGraph; use deno_graph::ResolutionResolved; +use deno_runtime::deno_fs; use deno_runtime::deno_node::NodeResolver; use deno_semver::npm::NpmPackageReqReference; use node_resolver::errors::NodeJsErrorCode; use node_resolver::errors::NodeJsErrorCoded; +use node_resolver::errors::PackageSubpathResolveError; use node_resolver::NodeModuleKind; -use node_resolver::NodeResolution; use node_resolver::NodeResolutionMode; use once_cell::sync::Lazy; use std::borrow::Cow; @@ -45,6 +49,7 @@ use std::fmt; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; +use thiserror::Error; mod diagnostics; @@ -299,8 +304,76 @@ pub struct EmittedFile { pub media_type: MediaType, } +pub fn into_specifier_and_media_type( + specifier: Option, +) -> (ModuleSpecifier, MediaType) { + match specifier { + Some(specifier) => { + let media_type = MediaType::from_specifier(&specifier); + + (specifier, media_type) + } + None => ( + Url::parse("internal:///missing_dependency.d.ts").unwrap(), + MediaType::Dts, + ), + } +} + +#[derive(Debug)] +pub struct TypeCheckingCjsTracker { + cjs_tracker: Arc, + module_info_cache: Arc, +} + +impl TypeCheckingCjsTracker { + pub fn new( + cjs_tracker: Arc, + module_info_cache: Arc, + ) -> Self { + Self { + cjs_tracker, + module_info_cache, + } + } + + pub fn is_cjs( + &self, + specifier: &ModuleSpecifier, + media_type: MediaType, + code: &Arc, + ) -> bool { + if let Some(module_kind) = + self.cjs_tracker.get_known_kind(specifier, media_type) + { + module_kind.is_cjs() + } else { + let maybe_is_script = self + .module_info_cache + .as_module_analyzer() + .analyze_sync(specifier, media_type, code) + .ok() + .map(|info| info.is_script); + maybe_is_script + .and_then(|is_script| { + self + .cjs_tracker + .is_cjs_with_known_is_script(specifier, media_type, is_script) + .ok() + }) + .unwrap_or_else(|| { + self + .cjs_tracker + .is_maybe_cjs(specifier, media_type) + .unwrap_or(false) + }) + } + } +} + #[derive(Debug)] pub struct 
RequestNpmState { + pub cjs_tracker: Arc, pub node_resolver: Arc, pub npm_resolver: Arc, } @@ -453,7 +526,7 @@ pub fn as_ts_script_kind(media_type: MediaType) -> i32 { MediaType::Tsx => 4, MediaType::Json => 6, MediaType::SourceMap - | MediaType::TsBuildInfo + | MediaType::Css | MediaType::Wasm | MediaType::Unknown => 0, } @@ -486,25 +559,22 @@ fn op_load_inner( ) -> Result, AnyError> { fn load_from_node_modules( specifier: &ModuleSpecifier, - node_resolver: Option<&NodeResolver>, + npm_state: Option<&RequestNpmState>, media_type: &mut MediaType, is_cjs: &mut bool, ) -> Result { *media_type = MediaType::from_specifier(specifier); - *is_cjs = node_resolver - .map(|node_resolver| { - match node_resolver.url_to_node_resolution(specifier.clone()) { - Ok(NodeResolution::CommonJs(_)) => true, - Ok(NodeResolution::Esm(_)) - | Ok(NodeResolution::BuiltIn(_)) - | Err(_) => false, - } - }) - .unwrap_or(false); let file_path = specifier.to_file_path().unwrap(); let code = std::fs::read_to_string(&file_path) .with_context(|| format!("Unable to load {}", file_path.display()))?; - Ok(code) + let code: Arc = code.into(); + *is_cjs = npm_state + .map(|npm_state| { + npm_state.cjs_tracker.is_cjs(specifier, *media_type, &code) + }) + .unwrap_or(false); + // todo(dsherret): how to avoid cloning here? + Ok(code.to_string()) } let state = state.borrow_mut::(); @@ -557,6 +627,9 @@ fn op_load_inner( match module { Module::Js(module) => { media_type = module.media_type; + if matches!(media_type, MediaType::Cjs | MediaType::Cts) { + is_cjs = true; + } let source = module .fast_check_module() .map(|m| &*m.source) @@ -570,11 +643,13 @@ fn op_load_inner( Module::Npm(_) | Module::Node(_) => None, Module::External(module) => { // means it's Deno code importing an npm module - let specifier = - node::resolve_specifier_into_node_modules(&module.specifier); + let specifier = node::resolve_specifier_into_node_modules( + &module.specifier, + &deno_fs::RealFs, + ); Some(Cow::Owned(load_from_node_modules( &specifier, - state.maybe_npm.as_ref().map(|n| n.node_resolver.as_ref()), + state.maybe_npm.as_ref(), &mut media_type, &mut is_cjs, )?)) @@ -587,7 +662,7 @@ fn op_load_inner( { Some(Cow::Owned(load_from_node_modules( specifier, - Some(npm.node_resolver.as_ref()), + Some(npm), &mut media_type, &mut is_cjs, )?)) @@ -688,12 +763,30 @@ fn op_resolve_inner( Some(ResolutionResolved { specifier, .. }) => { resolve_graph_specifier_types(specifier, &referrer, state)? 
} - _ => resolve_non_graph_specifier_types( - &specifier, - &referrer, - referrer_kind, - state, - )?, + _ => { + match resolve_non_graph_specifier_types( + &specifier, + &referrer, + referrer_kind, + state, + ) { + Ok(maybe_result) => maybe_result, + Err( + err @ ResolveNonGraphSpecifierTypesError::ResolvePkgFolderFromDenoReq( + ResolvePkgFolderFromDenoReqError::Managed(_), + ), + ) => { + // it's most likely requesting the jsxImportSource, which isn't loaded + // into the graph when not using jsx, so just ignore this error + if specifier.ends_with("/jsx-runtime") { + None + } else { + return Err(err.into()); + } + } + Err(err) => return Err(err.into()), + } + } }; let result = match maybe_result { Some((specifier, media_type)) => { @@ -718,7 +811,13 @@ fn op_resolve_inner( } } }; - (specifier_str, media_type.as_ts_extension()) + ( + specifier_str, + match media_type { + MediaType::Css => ".js", // surface these as .js for typescript + media_type => media_type.as_ts_extension(), + }, + ) } None => ( MISSING_DEPENDENCY_SPECIFIER.to_string(), @@ -789,41 +888,50 @@ fn resolve_graph_specifier_types( Some(referrer), NodeResolutionMode::Types, ); - let maybe_resolution = match res_result { - Ok(res) => Some(res), + let maybe_url = match res_result { + Ok(url) => Some(url), Err(err) => match err.code() { NodeJsErrorCode::ERR_TYPES_NOT_FOUND | NodeJsErrorCode::ERR_MODULE_NOT_FOUND => None, _ => return Err(err.into()), }, }; - Ok(Some(NodeResolution::into_specifier_and_media_type( - maybe_resolution, - ))) + Ok(Some(into_specifier_and_media_type(maybe_url))) } else { Ok(None) } } Some(Module::External(module)) => { // we currently only use "External" for when the module is in an npm package - Ok(state.maybe_npm.as_ref().map(|npm| { - let specifier = - node::resolve_specifier_into_node_modules(&module.specifier); - NodeResolution::into_specifier_and_media_type( - npm.node_resolver.url_to_node_resolution(specifier).ok(), - ) + Ok(state.maybe_npm.as_ref().map(|_| { + let specifier = node::resolve_specifier_into_node_modules( + &module.specifier, + &deno_fs::RealFs, + ); + into_specifier_and_media_type(Some(specifier)) })) } Some(Module::Node(_)) | None => Ok(None), } } +#[derive(Debug, Error)] +enum ResolveNonGraphSpecifierTypesError { + #[error(transparent)] + ResolvePkgFolderFromDenoReq(#[from] ResolvePkgFolderFromDenoReqError), + #[error(transparent)] + PackageSubpathResolve(#[from] PackageSubpathResolveError), +} + fn resolve_non_graph_specifier_types( raw_specifier: &str, referrer: &ModuleSpecifier, referrer_kind: NodeModuleKind, state: &State, -) -> Result, AnyError> { +) -> Result< + Option<(ModuleSpecifier, MediaType)>, + ResolveNonGraphSpecifierTypesError, +> { let npm = match state.maybe_npm.as_ref() { Some(npm) => npm, None => return Ok(None), // we only support non-graph types for npm packages @@ -831,7 +939,7 @@ fn resolve_non_graph_specifier_types( let node_resolver = &npm.node_resolver; if node_resolver.in_npm_package(referrer) { // we're in an npm package, so use node resolution - Ok(Some(NodeResolution::into_specifier_and_media_type( + Ok(Some(into_specifier_and_media_type( node_resolver .resolve( raw_specifier, @@ -839,7 +947,8 @@ fn resolve_non_graph_specifier_types( referrer_kind, NodeResolutionMode::Types, ) - .ok(), + .ok() + .map(|res| res.into_url()), ))) } else if let Ok(npm_req_ref) = NpmPackageReqReference::from_str(raw_specifier) @@ -858,17 +967,15 @@ fn resolve_non_graph_specifier_types( Some(referrer), NodeResolutionMode::Types, ); - let maybe_resolution = match 
res_result { - Ok(res) => Some(res), + let maybe_url = match res_result { + Ok(url) => Some(url), Err(err) => match err.code() { NodeJsErrorCode::ERR_TYPES_NOT_FOUND | NodeJsErrorCode::ERR_MODULE_NOT_FOUND => None, _ => return Err(err.into()), }, }; - Ok(Some(NodeResolution::into_specifier_and_media_type( - maybe_resolution, - ))) + Ok(Some(into_specifier_and_media_type(maybe_url))) } else { Ok(None) } diff --git a/cli/util/extract.rs b/cli/util/extract.rs index 873b7e7f2d..f577cbefec 100644 --- a/cli/util/extract.rs +++ b/cli/util/extract.rs @@ -64,7 +64,7 @@ fn extract_inner( }) { Ok(parsed) => { let mut c = ExportCollector::default(); - c.visit_program(parsed.program_ref()); + c.visit_program(parsed.program().as_ref()); c } Err(_) => ExportCollector::default(), @@ -570,14 +570,14 @@ fn generate_pseudo_file( })?; let top_level_atoms = swc_utils::collect_decls_with_ctxt::( - parsed.program_ref(), + &parsed.program_ref(), parsed.top_level_context(), ); let transformed = parsed .program_ref() - .clone() + .to_owned() .fold_with(&mut as_folder(Transform { specifier: &file.specifier, base_file_specifier, @@ -1416,7 +1416,7 @@ console.log(Foo); }) .unwrap(); - collector.visit_program(parsed.program_ref()); + parsed.program_ref().visit_with(&mut collector); collector } diff --git a/cli/util/file_watcher.rs b/cli/util/file_watcher.rs index d92d880bc1..8d734af88e 100644 --- a/cli/util/file_watcher.rs +++ b/cli/util/file_watcher.rs @@ -30,7 +30,7 @@ use tokio::sync::mpsc; use tokio::sync::mpsc::UnboundedReceiver; use tokio::time::sleep; -const CLEAR_SCREEN: &str = "\x1B[2J\x1B[1;1H"; +const CLEAR_SCREEN: &str = "\x1B[H\x1B[2J\x1B[3J"; const DEBOUNCE_INTERVAL: Duration = Duration::from_millis(200); struct DebouncedReceiver { diff --git a/cli/util/fs.rs b/cli/util/fs.rs index 2c34f486ad..d36c02242c 100644 --- a/cli/util/fs.rs +++ b/cli/util/fs.rs @@ -565,7 +565,9 @@ pub fn symlink_dir(oldpath: &Path, newpath: &Path) -> Result<(), Error> { use std::os::windows::fs::symlink_dir; symlink_dir(oldpath, newpath).map_err(|err| { if let Some(code) = err.raw_os_error() { - if code as u32 == winapi::shared::winerror::ERROR_PRIVILEGE_NOT_HELD { + if code as u32 == winapi::shared::winerror::ERROR_PRIVILEGE_NOT_HELD + || code as u32 == winapi::shared::winerror::ERROR_INVALID_FUNCTION + { return err_mapper(err, Some(ErrorKind::PermissionDenied)); } } diff --git a/cli/util/logger.rs b/cli/util/logger.rs index cdc89411fe..d93753dfd3 100644 --- a/cli/util/logger.rs +++ b/cli/util/logger.rs @@ -65,6 +65,8 @@ pub fn init(maybe_level: Option) { .filter_module("swc_ecma_parser", log::LevelFilter::Error) // Suppress span lifecycle logs since they are too verbose .filter_module("tracing::span", log::LevelFilter::Off) + // for deno_compile, this is too verbose + .filter_module("editpe", log::LevelFilter::Error) .format(|buf, record| { let mut target = record.target().to_string(); if let Some(line_no) = record.line() { diff --git a/cli/util/path.rs b/cli/util/path.rs index e4ae6e7cb1..58bed664f9 100644 --- a/cli/util/path.rs +++ b/cli/util/path.rs @@ -42,21 +42,6 @@ pub fn get_extension(file_path: &Path) -> Option { .map(|e| e.to_lowercase()); } -pub fn specifier_has_extension( - specifier: &ModuleSpecifier, - searching_ext: &str, -) -> bool { - let Some((_, ext)) = specifier.path().rsplit_once('.') else { - return false; - }; - let searching_ext = searching_ext.strip_prefix('.').unwrap_or(searching_ext); - debug_assert!(!searching_ext.contains('.')); // exts like .d.ts are not implemented here - if ext.len() != 
searching_ext.len() {
-    return false;
-  }
-  ext.eq_ignore_ascii_case(searching_ext)
-}
-
 pub fn get_atomic_dir_path(file_path: &Path) -> PathBuf {
   let rand = gen_rand_path_component();
   let new_file_name = format!(
@@ -350,18 +335,6 @@ mod test {
     }
   }

-  #[test]
-  fn test_specifier_has_extension() {
-    fn get(specifier: &str, ext: &str) -> bool {
-      specifier_has_extension(&ModuleSpecifier::parse(specifier).unwrap(), ext)
-    }
-
-    assert!(get("file:///a/b/c.ts", "ts"));
-    assert!(get("file:///a/b/c.ts", ".ts"));
-    assert!(!get("file:///a/b/c.ts", ".cts"));
-    assert!(get("file:///a/b/c.CtS", ".cts"));
-  }
-
   #[test]
   fn test_to_percent_decoded_str() {
     let str = to_percent_decoded_str("%F0%9F%A6%95");
diff --git a/cli/util/progress_bar/renderer.rs b/cli/util/progress_bar/renderer.rs
index a83ceb3334..6b08dada12 100644
--- a/cli/util/progress_bar/renderer.rs
+++ b/cli/util/progress_bar/renderer.rs
@@ -193,10 +193,16 @@ impl ProgressBarRenderer for TextOnlyProgressBarRenderer {
       }
     };

+    // TODO(@marvinhagemeister): We're trying to reconstruct the original
+    // specifier from the resolved one, but we lack the information about
+    // private registries URLs and other things here.
     let message = display_entry
       .message
       .replace("https://registry.npmjs.org/", "npm:")
-      .replace("https://jsr.io/", "jsr:");
+      .replace("https://jsr.io/", "jsr:")
+      .replace("%2f", "/")
+      .replace("%2F", "/");
+
     display_str.push_str(
       &colors::gray(format!(" - {}{}\n", message, bytes_text)).to_string(),
     );
diff --git a/cli/util/text_encoding.rs b/cli/util/text_encoding.rs
index 0b7601cb9c..8524e63ebb 100644
--- a/cli/util/text_encoding.rs
+++ b/cli/util/text_encoding.rs
@@ -97,12 +97,28 @@ fn find_source_map_range(code: &[u8]) -> Option<Range<usize>> {
 }

 /// Converts an `Arc<str>` to an `Arc<[u8]>`.
+#[allow(dead_code)]
 pub fn arc_str_to_bytes(arc_str: Arc<str>) -> Arc<[u8]> {
   let raw = Arc::into_raw(arc_str);
   // SAFETY: This is safe because they have the same memory layout.
   unsafe { Arc::from_raw(raw as *const [u8]) }
 }

+/// Converts an `Arc<[u8]>` to an `Arc<str>` if able.
+#[allow(dead_code)]
+pub fn arc_u8_to_arc_str(
+  arc_u8: Arc<[u8]>,
+) -> Result<Arc<str>, std::str::Utf8Error> {
+  // Check that the string is valid UTF-8.
+  std::str::from_utf8(&arc_u8)?;
+  // SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as
+  // Arc<str>. This is proven by the From<Arc<str>> impl for Arc<[u8]> from the
+  // standard library.
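// A zero-copy round-trip sketch (an editorial aside, not lines from the
// patch; it assumes only `arc_u8_to_arc_str` above and std):
//
//   let ok: Arc<[u8]> = Arc::from(&b"hi"[..]);
//   assert_eq!(&*arc_u8_to_arc_str(ok).unwrap(), "hi");
//   // invalid UTF-8 is rejected instead of being transmuted
//   assert!(arc_u8_to_arc_str(Arc::from(&[0xff_u8][..])).is_err());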
+  Ok(unsafe {
+    std::mem::transmute::<std::sync::Arc<[u8]>, std::sync::Arc<str>>(arc_u8)
+  })
+}
+
 #[cfg(test)]
 mod tests {
   use std::sync::Arc;
diff --git a/cli/worker.rs b/cli/worker.rs
index e230197d2b..baacd681a1 100644
--- a/cli/worker.rs
+++ b/cli/worker.rs
@@ -14,16 +14,17 @@ use deno_core::v8;
 use deno_core::CompiledWasmModuleStore;
 use deno_core::Extension;
 use deno_core::FeatureChecker;
-use deno_core::ModuleId;
 use deno_core::ModuleLoader;
 use deno_core::PollEventLoopOptions;
 use deno_core::SharedArrayBufferStore;
 use deno_runtime::code_cache;
 use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel;
 use deno_runtime::deno_fs;
-use deno_runtime::deno_node;
 use deno_runtime::deno_node::NodeExtInitServices;
+use deno_runtime::deno_node::NodeRequireLoader;
+use deno_runtime::deno_node::NodeRequireLoaderRc;
 use deno_runtime::deno_node::NodeResolver;
+use deno_runtime::deno_node::PackageJsonResolver;
 use deno_runtime::deno_permissions::PermissionsContainer;
 use deno_runtime::deno_tls::RootCertStoreProvider;
 use deno_runtime::deno_web::BlobStore;
@@ -42,7 +43,6 @@ use deno_runtime::WorkerExecutionMode;
 use deno_runtime::WorkerLogLevel;
 use deno_semver::npm::NpmPackageReqReference;
 use deno_terminal::colors;
-use node_resolver::NodeResolution;
 use node_resolver::NodeResolutionMode;
 use tokio::select;

@@ -51,28 +51,27 @@ use crate::args::DenoSubcommand;
 use crate::args::StorageKeyResolver;
 use crate::errors;
 use crate::npm::CliNpmResolver;
-use crate::resolver::CjsResolutionStore;
 use crate::util::checksum;
 use crate::util::file_watcher::WatcherCommunicator;
 use crate::util::file_watcher::WatcherRestartMode;
-use crate::util::path::specifier_has_extension;
 use crate::version;

-pub struct ModuleLoaderAndSourceMapGetter {
+pub struct CreateModuleLoaderResult {
   pub module_loader: Rc<dyn ModuleLoader>,
+  pub node_require_loader: Rc<dyn NodeRequireLoader>,
 }

 pub trait ModuleLoaderFactory: Send + Sync {
   fn create_for_main(
     &self,
     root_permissions: PermissionsContainer,
-  ) -> ModuleLoaderAndSourceMapGetter;
+  ) -> CreateModuleLoaderResult;

   fn create_for_worker(
     &self,
     parent_permissions: PermissionsContainer,
     permissions: PermissionsContainer,
-  ) -> ModuleLoaderAndSourceMapGetter;
+  ) -> CreateModuleLoaderResult;
 }

 #[async_trait::async_trait(?Send)]
@@ -109,7 +108,6 @@ pub struct CliMainWorkerOptions {
   pub inspect_wait: bool,
   pub strace_ops: Option<Vec<String>>,
   pub is_inspecting: bool,
-  pub is_npm_main: bool,
   pub location: Option<Url>,
   pub argv0: Option<String>,
   pub node_debug: Option<String>,
@@ -122,13 +120,11 @@ pub struct CliMainWorkerOptions {
   pub node_ipc: Option<i64>,
   pub serve_port: Option<u16>,
   pub serve_host: Option<String>,
-  pub unstable_detect_cjs: bool,
 }

 struct SharedWorkerState {
   blob_store: Arc<BlobStore>,
   broadcast_channel: InMemoryBroadcastChannel,
-  cjs_resolution_store: Arc<CjsResolutionStore>,
   code_cache: Option<Arc<dyn code_cache::CodeCache>>,
   compiled_wasm_module_store: CompiledWasmModuleStore,
   feature_checker: Arc<FeatureChecker>,
@@ -139,6 +135,7 @@ struct SharedWorkerState {
   module_loader_factory: Box<dyn ModuleLoaderFactory>,
   node_resolver: Arc<NodeResolver>,
   npm_resolver: Arc<dyn CliNpmResolver>,
+  pkg_json_resolver: Arc<PackageJsonResolver>,
   root_cert_store_provider: Arc<dyn RootCertStoreProvider>,
   root_permissions: PermissionsContainer,
   shared_array_buffer_store: SharedArrayBufferStore,
@@ -148,11 +145,15 @@ struct SharedWorkerState {
 }

 impl SharedWorkerState {
-  pub fn create_node_init_services(&self) -> NodeExtInitServices {
+  pub fn create_node_init_services(
+    &self,
+    node_require_loader: NodeRequireLoaderRc,
+  ) -> NodeExtInitServices {
     NodeExtInitServices {
-      node_require_resolver: self.npm_resolver.clone().into_require_resolver(),
+      node_require_loader,
       node_resolver: self.node_resolver.clone(),
       npm_resolver:
self.npm_resolver.clone().into_npm_resolver(), + pkg_json_resolver: self.pkg_json_resolver.clone(), } } @@ -163,7 +164,6 @@ impl SharedWorkerState { pub struct CliMainWorker { main_module: ModuleSpecifier, - is_main_cjs: bool, worker: MainWorker, shared: Arc, } @@ -185,17 +185,7 @@ impl CliMainWorker { log::debug!("main_module {}", self.main_module); - if self.is_main_cjs { - deno_node::load_cjs_module( - &mut self.worker.js_runtime, - &self.main_module.to_file_path().unwrap().to_string_lossy(), - true, - self.shared.options.inspect_brk, - )?; - } else { - self.execute_main_module_possibly_with_npm().await?; - } - + self.execute_main_module().await?; self.worker.dispatch_load_event()?; loop { @@ -283,22 +273,7 @@ impl CliMainWorker { /// Execute the given main module emitting load and unload events before and after execution /// respectively. pub async fn execute(&mut self) -> Result<(), AnyError> { - if self.inner.is_main_cjs { - deno_node::load_cjs_module( - &mut self.inner.worker.js_runtime, - &self - .inner - .main_module - .to_file_path() - .unwrap() - .to_string_lossy(), - true, - self.inner.shared.options.inspect_brk, - )?; - } else { - self.inner.execute_main_module_possibly_with_npm().await?; - } - + self.inner.execute_main_module().await?; self.inner.worker.dispatch_load_event()?; self.pending_unload = true; @@ -339,24 +314,13 @@ impl CliMainWorker { executor.execute().await } - pub async fn execute_main_module_possibly_with_npm( - &mut self, - ) -> Result<(), AnyError> { + pub async fn execute_main_module(&mut self) -> Result<(), AnyError> { let id = self.worker.preload_main_module(&self.main_module).await?; - self.evaluate_module_possibly_with_npm(id).await + self.worker.evaluate_module(id).await } - pub async fn execute_side_module_possibly_with_npm( - &mut self, - ) -> Result<(), AnyError> { + pub async fn execute_side_module(&mut self) -> Result<(), AnyError> { let id = self.worker.preload_side_module(&self.main_module).await?; - self.evaluate_module_possibly_with_npm(id).await - } - - async fn evaluate_module_possibly_with_npm( - &mut self, - id: ModuleId, - ) -> Result<(), AnyError> { self.worker.evaluate_module(id).await } @@ -426,7 +390,6 @@ impl CliMainWorkerFactory { #[allow(clippy::too_many_arguments)] pub fn new( blob_store: Arc, - cjs_resolution_store: Arc, code_cache: Option>, feature_checker: Arc, fs: Arc, @@ -436,6 +399,7 @@ impl CliMainWorkerFactory { module_loader_factory: Box, node_resolver: Arc, npm_resolver: Arc, + pkg_json_resolver: Arc, root_cert_store_provider: Arc, root_permissions: PermissionsContainer, storage_key_resolver: StorageKeyResolver, @@ -446,7 +410,6 @@ impl CliMainWorkerFactory { shared: Arc::new(SharedWorkerState { blob_store, broadcast_channel: Default::default(), - cjs_resolution_store, code_cache, compiled_wasm_module_store: Default::default(), feature_checker, @@ -457,6 +420,7 @@ impl CliMainWorkerFactory { module_loader_factory, node_resolver, npm_resolver, + pkg_json_resolver, root_cert_store_provider, root_permissions, shared_array_buffer_store: Default::default(), @@ -492,10 +456,13 @@ impl CliMainWorkerFactory { stdio: deno_runtime::deno_io::Stdio, ) -> Result { let shared = &self.shared; - let ModuleLoaderAndSourceMapGetter { module_loader } = shared + let CreateModuleLoaderResult { + module_loader, + node_require_loader, + } = shared .module_loader_factory .create_for_main(permissions.clone()); - let (main_module, is_main_cjs) = if let Ok(package_ref) = + let main_module = if let Ok(package_ref) = 
NpmPackageReqReference::from_specifier(&main_module) { if let Some(npm_resolver) = shared.npm_resolver.as_managed() { @@ -515,9 +482,8 @@ impl CliMainWorkerFactory { package_ref.req(), &referrer, )?; - let node_resolution = self + let main_module = self .resolve_binary_entrypoint(&package_folder, package_ref.sub_path())?; - let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); if let Some(lockfile) = &shared.maybe_lockfile { // For npm binary commands, ensure that the lockfile gets updated @@ -526,36 +492,9 @@ impl CliMainWorkerFactory { lockfile.write_if_changed()?; } - (node_resolution.into_url(), is_main_cjs) - } else if shared.options.is_npm_main - || shared.node_resolver.in_npm_package(&main_module) - { - let node_resolution = - shared.node_resolver.url_to_node_resolution(main_module)?; - let is_main_cjs = matches!(node_resolution, NodeResolution::CommonJs(_)); - (node_resolution.into_url(), is_main_cjs) + main_module } else { - let is_maybe_cjs_js_ext = self.shared.options.unstable_detect_cjs - && specifier_has_extension(&main_module, "js") - && self - .shared - .node_resolver - .get_closest_package_json(&main_module) - .ok() - .flatten() - .map(|pkg_json| pkg_json.typ == "commonjs") - .unwrap_or(false); - let is_cjs = if is_maybe_cjs_js_ext { - // fill the cjs resolution store by preparing the module load - module_loader - .prepare_load(&main_module, None, false) - .await?; - self.shared.cjs_resolution_store.is_known_cjs(&main_module) - } else { - main_module.scheme() == "file" - && specifier_has_extension(&main_module, "cjs") - }; - (main_module, is_cjs) + main_module }; let maybe_inspector_server = shared.maybe_inspector_server.clone(); @@ -597,7 +536,9 @@ impl CliMainWorkerFactory { root_cert_store_provider: Some(shared.root_cert_store_provider.clone()), module_loader, fs: shared.fs.clone(), - node_services: Some(shared.create_node_init_services()), + node_services: Some( + shared.create_node_init_services(node_require_loader), + ), npm_process_state_provider: Some(shared.npm_process_state_provider()), blob_store: shared.blob_store.clone(), broadcast_channel: shared.broadcast_channel.clone(), @@ -682,7 +623,6 @@ impl CliMainWorkerFactory { Ok(CliMainWorker { main_module, - is_main_cjs, worker, shared: shared.clone(), }) @@ -692,19 +632,19 @@ impl CliMainWorkerFactory { &self, package_folder: &Path, sub_path: Option<&str>, - ) -> Result { + ) -> Result { match self .shared .node_resolver .resolve_binary_export(package_folder, sub_path) { - Ok(node_resolution) => Ok(node_resolution), + Ok(specifier) => Ok(specifier), Err(original_err) => { // if the binary entrypoint was not found, fallback to regular node resolution let result = self.resolve_binary_entrypoint_fallback(package_folder, sub_path); match result { - Ok(Some(resolution)) => Ok(resolution), + Ok(Some(specifier)) => Ok(specifier), Ok(None) => Err(original_err.into()), Err(fallback_err) => { bail!("{:#}\n\nFallback failed: {:#}", original_err, fallback_err) @@ -719,7 +659,7 @@ impl CliMainWorkerFactory { &self, package_folder: &Path, sub_path: Option<&str>, - ) -> Result, AnyError> { + ) -> Result, AnyError> { // only fallback if the user specified a sub path if sub_path.is_none() { // it's confusing to users if the package doesn't have any binary @@ -728,7 +668,7 @@ impl CliMainWorkerFactory { return Ok(None); } - let resolution = self + let specifier = self .shared .node_resolver .resolve_package_subpath_from_deno_module( @@ -737,19 +677,14 @@ impl CliMainWorkerFactory { /* referrer */ None, 
NodeResolutionMode::Execution, )?; - match &resolution { - NodeResolution::BuiltIn(_) => Ok(None), - NodeResolution::CommonJs(specifier) | NodeResolution::Esm(specifier) => { - if specifier - .to_file_path() - .map(|p| p.exists()) - .unwrap_or(false) - { - Ok(Some(resolution)) - } else { - bail!("Cannot find module '{}'", specifier) - } - } + if specifier + .to_file_path() + .map(|p| p.exists()) + .unwrap_or(false) + { + Ok(Some(specifier)) + } else { + bail!("Cannot find module '{}'", specifier) } } } @@ -761,11 +696,13 @@ fn create_web_worker_callback( Arc::new(move |args| { let maybe_inspector_server = shared.maybe_inspector_server.clone(); - let ModuleLoaderAndSourceMapGetter { module_loader } = - shared.module_loader_factory.create_for_worker( - args.parent_permissions.clone(), - args.permissions.clone(), - ); + let CreateModuleLoaderResult { + module_loader, + node_require_loader, + } = shared.module_loader_factory.create_for_worker( + args.parent_permissions.clone(), + args.permissions.clone(), + ); let create_web_worker_cb = create_web_worker_callback(shared.clone(), stdio.clone()); @@ -795,7 +732,9 @@ fn create_web_worker_callback( root_cert_store_provider: Some(shared.root_cert_store_provider.clone()), module_loader, fs: shared.fs.clone(), - node_services: Some(shared.create_node_init_services()), + node_services: Some( + shared.create_node_init_services(node_require_loader), + ), blob_store: shared.blob_store.clone(), broadcast_channel: shared.broadcast_channel.clone(), shared_array_buffer_store: Some(shared.shared_array_buffer_store.clone()), diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml index 7ca058a44a..90ac038357 100644 --- a/ext/broadcast_channel/Cargo.toml +++ b/ext/broadcast_channel/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_broadcast_channel" -version = "0.167.0" +version = "0.171.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml index dee4d7274d..56fa0a527f 100644 --- a/ext/cache/Cargo.toml +++ b/ext/cache/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cache" -version = "0.105.0" +version = "0.109.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cache/lib.rs b/ext/cache/lib.rs index b9cc5427c2..524d4cea05 100644 --- a/ext/cache/lib.rs +++ b/ext/cache/lib.rs @@ -33,7 +33,9 @@ pub enum CacheError { } #[derive(Clone)] -pub struct CreateCache(pub Arc C>); +pub struct CreateCache( + pub Arc Result>, +); deno_core::extension!(deno_cache, deps = [ deno_webidl, deno_web, deno_url, deno_fetch ], @@ -231,7 +233,7 @@ where if let Some(cache) = state.try_borrow::() { Ok(cache.clone()) } else if let Some(create_cache) = state.try_borrow::>() { - let cache = create_cache.0(); + let cache = create_cache.0()?; state.put(cache); Ok(state.borrow::().clone()) } else { diff --git a/ext/cache/sqlite.rs b/ext/cache/sqlite.rs index e4991c32f1..6efceda11e 100644 --- a/ext/cache/sqlite.rs +++ b/ext/cache/sqlite.rs @@ -42,7 +42,7 @@ pub struct SqliteBackedCache { } impl SqliteBackedCache { - pub fn new(cache_storage_dir: PathBuf) -> Self { + pub fn new(cache_storage_dir: PathBuf) -> Result { { std::fs::create_dir_all(&cache_storage_dir) .expect("failed to create cache dir"); @@ -57,18 +57,14 @@ impl SqliteBackedCache { PRAGMA synchronous=NORMAL; PRAGMA optimize; "; - connection - .execute_batch(initial_pragmas) - .expect("failed to execute pragmas"); - connection - .execute( - "CREATE TABLE IF NOT EXISTS cache_storage ( + 
connection.execute_batch(initial_pragmas)?; + connection.execute( + "CREATE TABLE IF NOT EXISTS cache_storage ( id INTEGER PRIMARY KEY, cache_name TEXT NOT NULL UNIQUE )", - (), - ) - .expect("failed to create cache_storage table"); + (), + )?; connection .execute( "CREATE TABLE IF NOT EXISTS request_response_list ( @@ -86,12 +82,11 @@ impl SqliteBackedCache { UNIQUE (cache_id, request_url) )", (), - ) - .expect("failed to create request_response_list table"); - SqliteBackedCache { + )?; + Ok(SqliteBackedCache { connection: Arc::new(Mutex::new(connection)), cache_storage_dir, - } + }) } } } diff --git a/ext/canvas/Cargo.toml b/ext/canvas/Cargo.toml index 6ca3d76c05..4231d7c84d 100644 --- a/ext/canvas/Cargo.toml +++ b/ext/canvas/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_canvas" -version = "0.42.0" +version = "0.46.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/console/01_console.js b/ext/console/01_console.js index 3c07cf64aa..3803492b90 100644 --- a/ext/console/01_console.js +++ b/ext/console/01_console.js @@ -2653,6 +2653,7 @@ const HSL_PATTERN = new SafeRegExp( ); function parseCssColor(colorString) { + colorString = StringPrototypeToLowerCase(colorString); if (colorKeywords.has(colorString)) { colorString = colorKeywords.get(colorString); } diff --git a/ext/console/Cargo.toml b/ext/console/Cargo.toml index f83f7138d8..80f1cca84d 100644 --- a/ext/console/Cargo.toml +++ b/ext/console/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_console" -version = "0.173.0" +version = "0.177.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/cron/Cargo.toml b/ext/cron/Cargo.toml index ccd81de0cc..966ccdc958 100644 --- a/ext/cron/Cargo.toml +++ b/ext/cron/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_cron" -version = "0.53.0" +version = "0.57.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/crypto/Cargo.toml b/ext/crypto/Cargo.toml index 2f970ca535..a5794dc68b 100644 --- a/ext/crypto/Cargo.toml +++ b/ext/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.187.0" +version = "0.191.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/fetch/23_request.js b/ext/fetch/23_request.js index 6211e927d9..61cac22d2e 100644 --- a/ext/fetch/23_request.js +++ b/ext/fetch/23_request.js @@ -269,12 +269,6 @@ class Request { /** @type {AbortSignal} */ get [_signal]() { const signal = this[_signalCache]; - // This signal not been created yet, and the request is still in progress - if (signal === undefined) { - const signal = newSignal(); - this[_signalCache] = signal; - return signal; - } // This signal has not been created yet, but the request has already completed if (signal === false) { const signal = newSignal(); @@ -282,6 +276,18 @@ class Request { signal[signalAbort](signalAbortError); return signal; } + + // This signal not been created yet, and the request is still in progress + if (signal === undefined) { + const signal = newSignal(); + this[_signalCache] = signal; + this[_request].onCancel?.(() => { + signal[signalAbort](signalAbortError); + }); + + return signal; + } + return signal; } get [_mimeType]() { diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index afffd3ffb1..56d416bbb8 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.197.0" +version = "0.201.0" authors.workspace = true edition.workspace = true 
license.workspace = true diff --git a/ext/fetch/lib.rs b/ext/fetch/lib.rs index 4df8dc3d72..7ef26431c2 100644 --- a/ext/fetch/lib.rs +++ b/ext/fetch/lib.rs @@ -39,6 +39,7 @@ use deno_core::OpState; use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; +use deno_permissions::PermissionCheckError; use deno_tls::rustls::RootCertStore; use deno_tls::Proxy; use deno_tls::RootCertStoreProvider; @@ -149,7 +150,7 @@ pub enum FetchError { #[error(transparent)] Resource(deno_core::error::AnyError), #[error(transparent)] - Permission(deno_core::error::AnyError), + Permission(#[from] PermissionCheckError), #[error("NetworkError when attempting to fetch resource")] NetworkError, #[error("Fetching files only supports the GET method: received {0}")] @@ -346,13 +347,13 @@ pub trait FetchPermissions { &mut self, url: &Url, api_name: &str, - ) -> Result<(), deno_core::error::AnyError>; + ) -> Result<(), PermissionCheckError>; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn check_read<'a>( &mut self, p: &'a Path, api_name: &str, - ) -> Result, deno_core::error::AnyError>; + ) -> Result, PermissionCheckError>; } impl FetchPermissions for deno_permissions::PermissionsContainer { @@ -361,7 +362,7 @@ impl FetchPermissions for deno_permissions::PermissionsContainer { &mut self, url: &Url, api_name: &str, - ) -> Result<(), deno_core::error::AnyError> { + ) -> Result<(), PermissionCheckError> { deno_permissions::PermissionsContainer::check_net_url(self, url, api_name) } @@ -370,7 +371,7 @@ impl FetchPermissions for deno_permissions::PermissionsContainer { &mut self, path: &'a Path, api_name: &str, - ) -> Result, deno_core::error::AnyError> { + ) -> Result, PermissionCheckError> { deno_permissions::PermissionsContainer::check_read_path( self, path, @@ -414,9 +415,7 @@ where "file" => { let path = url.to_file_path().map_err(|_| FetchError::NetworkError)?; let permissions = state.borrow_mut::(); - let path = permissions - .check_read(&path, "fetch()") - .map_err(FetchError::Permission)?; + let path = permissions.check_read(&path, "fetch()")?; let url = match path { Cow::Owned(path) => Url::from_file_path(path).unwrap(), Cow::Borrowed(_) => url, @@ -442,9 +441,7 @@ where } "http" | "https" => { let permissions = state.borrow_mut::(); - permissions - .check_net_url(&url, "fetch()") - .map_err(FetchError::Resource)?; + permissions.check_net_url(&url, "fetch()")?; let maybe_authority = extract_authority(&mut url); let uri = url @@ -863,9 +860,7 @@ where if let Some(proxy) = args.proxy.clone() { let permissions = state.borrow_mut::(); let url = Url::parse(&proxy.url)?; - permissions - .check_net_url(&url, "Deno.createHttpClient()") - .map_err(FetchError::Permission)?; + permissions.check_net_url(&url, "Deno.createHttpClient()")?; } let options = state.borrow::(); diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index 80b0180d0b..295e8be846 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_ffi" -version = "0.160.0" +version = "0.164.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -21,6 +21,7 @@ dynasmrt = "1.2.3" libffi = "=3.2.0" libffi-sys = "=2.3.0" log.workspace = true +num-bigint.workspace = true serde.workspace = true serde-value = "0.7" serde_json = "1.0" diff --git a/ext/ffi/call.rs b/ext/ffi/call.rs index d337b29b00..bbff0ee48f 100644 --- a/ext/ffi/call.rs +++ b/ext/ffi/call.rs @@ -9,12 +9,14 @@ use crate::FfiPermissions; use crate::ForeignFunction; use deno_core::op2; 
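// Context for the V8BigInt import added just below (a sketch, not part of
// the patch): serde_json's Value reaches V8 as an f64-backed number, so
// u64/i64 call results above 2^53 - 1 silently lose precision, while
// serde_v8::BigInt preserves the full 64-bit range.
//
//   let n: u64 = (1u64 << 53) + 1;   // 9007199254740993
//   assert_ne!(n as f64 as u64, n);  // not exactly representable as f64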
use deno_core::serde_json::Value; +use deno_core::serde_v8::BigInt as V8BigInt; use deno_core::serde_v8::ExternalPointer; use deno_core::unsync::spawn_blocking; use deno_core::v8; use deno_core::OpState; use deno_core::ResourceId; use libffi::middle::Arg; +use num_bigint::BigInt; use serde::Serialize; use std::cell::RefCell; use std::ffi::c_void; @@ -30,7 +32,9 @@ pub enum CallError { #[error("Invalid FFI symbol name: '{0}'")] InvalidSymbol(String), #[error(transparent)] - Permission(deno_core::error::AnyError), + Permission(#[from] deno_permissions::PermissionCheckError), + #[error(transparent)] + Resource(deno_core::error::AnyError), #[error(transparent)] Callback(#[from] super::CallbackError), } @@ -202,6 +206,7 @@ where #[serde(untagged)] pub enum FfiValue { Value(Value), + BigInt(V8BigInt), External(ExternalPointer), } @@ -251,18 +256,18 @@ fn ffi_call( NativeType::I32 => { FfiValue::Value(Value::from(cif.call::(fun_ptr, &call_args))) } - NativeType::U64 => { - FfiValue::Value(Value::from(cif.call::(fun_ptr, &call_args))) - } - NativeType::I64 => { - FfiValue::Value(Value::from(cif.call::(fun_ptr, &call_args))) - } - NativeType::USize => { - FfiValue::Value(Value::from(cif.call::(fun_ptr, &call_args))) - } - NativeType::ISize => { - FfiValue::Value(Value::from(cif.call::(fun_ptr, &call_args))) - } + NativeType::U64 => FfiValue::BigInt(V8BigInt::from(BigInt::from( + cif.call::(fun_ptr, &call_args), + ))), + NativeType::I64 => FfiValue::BigInt(V8BigInt::from(BigInt::from( + cif.call::(fun_ptr, &call_args), + ))), + NativeType::USize => FfiValue::BigInt(V8BigInt::from(BigInt::from( + cif.call::(fun_ptr, &call_args), + ))), + NativeType::ISize => FfiValue::BigInt(V8BigInt::from(BigInt::from( + cif.call::(fun_ptr, &call_args), + ))), NativeType::F32 => { FfiValue::Value(Value::from(cif.call::(fun_ptr, &call_args))) } @@ -298,9 +303,7 @@ where { let mut state = state.borrow_mut(); let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(CallError::Permission)?; + permissions.check_partial_no_path()?; }; let symbol = PtrSymbol::new(pointer, &def)?; @@ -344,7 +347,7 @@ pub fn op_ffi_call_nonblocking( let resource = state .resource_table .get::(rid) - .map_err(CallError::Permission)?; + .map_err(CallError::Resource)?; let symbols = &resource.symbols; *symbols .get(&symbol) @@ -398,9 +401,7 @@ where { let mut state = state.borrow_mut(); let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(CallError::Permission)?; + permissions.check_partial_no_path()?; }; let symbol = PtrSymbol::new(pointer, &def)?; diff --git a/ext/ffi/callback.rs b/ext/ffi/callback.rs index f33e0413a3..29583c800c 100644 --- a/ext/ffi/callback.rs +++ b/ext/ffi/callback.rs @@ -38,7 +38,7 @@ pub enum CallbackError { #[error(transparent)] Resource(deno_core::error::AnyError), #[error(transparent)] - Permission(deno_core::error::AnyError), + Permission(#[from] deno_permissions::PermissionCheckError), #[error(transparent)] Other(deno_core::error::AnyError), } @@ -572,9 +572,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(CallbackError::Permission)?; + permissions.check_partial_no_path()?; let thread_id: u32 = LOCAL_THREAD_ID.with(|s| { let value = *s.borrow(); diff --git a/ext/ffi/dlfcn.rs b/ext/ffi/dlfcn.rs index 53bdcbc5cc..55909468f8 100644 --- a/ext/ffi/dlfcn.rs +++ b/ext/ffi/dlfcn.rs @@ -30,7 +30,7 @@ pub enum DlfcnError { #[error(transparent)] Dlopen(#[from] 
dlopen2::Error), #[error(transparent)] - Permission(deno_core::error::AnyError), + Permission(#[from] deno_permissions::PermissionCheckError), #[error(transparent)] Other(deno_core::error::AnyError), } @@ -133,9 +133,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - let path = permissions - .check_partial_with_path(&args.path) - .map_err(DlfcnError::Permission)?; + let path = permissions.check_partial_with_path(&args.path)?; let lib = Library::open(&path).map_err(|e| { dlopen2::Error::OpeningLibraryError(std::io::Error::new( diff --git a/ext/ffi/lib.rs b/ext/ffi/lib.rs index 237f8c3b05..73ec7757ab 100644 --- a/ext/ffi/lib.rs +++ b/ext/ffi/lib.rs @@ -1,7 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use deno_core::error::AnyError; - use std::mem::size_of; use std::os::raw::c_char; use std::os::raw::c_short; @@ -31,6 +29,7 @@ use symbol::Symbol; pub use call::CallError; pub use callback::CallbackError; +use deno_permissions::PermissionCheckError; pub use dlfcn::DlfcnError; pub use ir::IRError; pub use r#static::StaticError; @@ -48,17 +47,17 @@ const _: () = { pub const UNSTABLE_FEATURE_NAME: &str = "ffi"; pub trait FfiPermissions { - fn check_partial_no_path(&mut self) -> Result<(), AnyError>; + fn check_partial_no_path(&mut self) -> Result<(), PermissionCheckError>; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn check_partial_with_path( &mut self, path: &str, - ) -> Result; + ) -> Result; } impl FfiPermissions for deno_permissions::PermissionsContainer { #[inline(always)] - fn check_partial_no_path(&mut self) -> Result<(), AnyError> { + fn check_partial_no_path(&mut self) -> Result<(), PermissionCheckError> { deno_permissions::PermissionsContainer::check_ffi_partial_no_path(self) } @@ -66,7 +65,7 @@ impl FfiPermissions for deno_permissions::PermissionsContainer { fn check_partial_with_path( &mut self, path: &str, - ) -> Result { + ) -> Result { deno_permissions::PermissionsContainer::check_ffi_partial_with_path( self, path, ) diff --git a/ext/ffi/repr.rs b/ext/ffi/repr.rs index 2f04f4feb4..fd8a2c8e70 100644 --- a/ext/ffi/repr.rs +++ b/ext/ffi/repr.rs @@ -46,7 +46,7 @@ pub enum ReprError { #[error("Invalid pointer pointer, pointer is null")] InvalidPointer, #[error(transparent)] - Permission(deno_core::error::AnyError), + Permission(#[from] deno_permissions::PermissionCheckError), } #[op2(fast)] @@ -58,9 +58,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; Ok(ptr_number as *mut c_void) } @@ -75,9 +73,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; Ok(a == b) } @@ -91,9 +87,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; Ok(buf as *mut c_void) } @@ -107,9 +101,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; let Some(buf) = buf.get_backing_store() else { return Ok(0 as _); @@ -130,9 +122,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - 
permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidOffset); @@ -162,9 +152,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; Ok(ptr as usize) } @@ -181,9 +169,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidArrayBuffer); @@ -215,9 +201,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if src.is_null() { Err(ReprError::InvalidArrayBuffer) @@ -246,9 +230,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidCString); @@ -272,9 +254,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidBool); @@ -294,9 +274,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidU8); @@ -318,9 +296,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidI8); @@ -342,9 +318,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidU16); @@ -366,9 +340,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidI16); @@ -390,9 +362,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidU32); @@ -412,9 +382,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidI32); @@ -437,9 +405,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidU64); @@ -465,9 +431,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + 
permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidI64); @@ -490,9 +454,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidF32); @@ -512,9 +474,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidF64); @@ -534,9 +494,7 @@ where FP: FfiPermissions + 'static, { let permissions = state.borrow_mut::(); - permissions - .check_partial_no_path() - .map_err(ReprError::Permission)?; + permissions.check_partial_no_path()?; if ptr.is_null() { return Err(ReprError::InvalidPointer); diff --git a/ext/fs/Cargo.toml b/ext/fs/Cargo.toml index ab0bf22fd3..e85f349b15 100644 --- a/ext/fs/Cargo.toml +++ b/ext/fs/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fs" -version = "0.83.0" +version = "0.87.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -31,7 +31,7 @@ serde.workspace = true thiserror.workspace = true [target.'cfg(unix)'.dependencies] -nix = { workspace = true, features = ["user"] } +nix = { workspace = true, features = ["fs", "user"] } [target.'cfg(windows)'.dependencies] winapi = { workspace = true, features = ["winbase"] } diff --git a/ext/fs/lib.rs b/ext/fs/lib.rs index cd2baf22a9..dd852e6be8 100644 --- a/ext/fs/lib.rs +++ b/ext/fs/lib.rs @@ -22,8 +22,8 @@ pub use crate::sync::MaybeSync; use crate::ops::*; -use deno_core::error::AnyError; use deno_io::fs::FsError; +use deno_permissions::PermissionCheckError; use std::borrow::Cow; use std::path::Path; use std::path::PathBuf; @@ -42,45 +42,51 @@ pub trait FsPermissions { &mut self, path: &str, api_name: &str, - ) -> Result; + ) -> Result; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn check_read_path<'a>( &mut self, path: &'a Path, api_name: &str, - ) -> Result, AnyError>; - fn check_read_all(&mut self, api_name: &str) -> Result<(), AnyError>; + ) -> Result, PermissionCheckError>; + fn check_read_all( + &mut self, + api_name: &str, + ) -> Result<(), PermissionCheckError>; fn check_read_blind( &mut self, p: &Path, display: &str, api_name: &str, - ) -> Result<(), AnyError>; + ) -> Result<(), PermissionCheckError>; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn check_write( &mut self, path: &str, api_name: &str, - ) -> Result; + ) -> Result; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn check_write_path<'a>( &mut self, path: &'a Path, api_name: &str, - ) -> Result, AnyError>; + ) -> Result, PermissionCheckError>; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn check_write_partial( &mut self, path: &str, api_name: &str, - ) -> Result; - fn check_write_all(&mut self, api_name: &str) -> Result<(), AnyError>; + ) -> Result; + fn check_write_all( + &mut self, + api_name: &str, + ) -> Result<(), PermissionCheckError>; fn check_write_blind( &mut self, p: &Path, display: &str, api_name: &str, - ) -> Result<(), AnyError>; + ) -> Result<(), PermissionCheckError>; fn check<'a>( &mut self, @@ -140,7 +146,7 @@ impl FsPermissions for deno_permissions::PermissionsContainer { &mut self, path: &str, api_name: &str, - ) -> Result { + ) 
-> Result { deno_permissions::PermissionsContainer::check_read(self, path, api_name) } @@ -148,7 +154,7 @@ impl FsPermissions for deno_permissions::PermissionsContainer { &mut self, path: &'a Path, api_name: &str, - ) -> Result, AnyError> { + ) -> Result, PermissionCheckError> { deno_permissions::PermissionsContainer::check_read_path( self, path, @@ -160,7 +166,7 @@ impl FsPermissions for deno_permissions::PermissionsContainer { path: &Path, display: &str, api_name: &str, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionCheckError> { deno_permissions::PermissionsContainer::check_read_blind( self, path, display, api_name, ) @@ -170,7 +176,7 @@ impl FsPermissions for deno_permissions::PermissionsContainer { &mut self, path: &str, api_name: &str, - ) -> Result { + ) -> Result { deno_permissions::PermissionsContainer::check_write(self, path, api_name) } @@ -178,7 +184,7 @@ impl FsPermissions for deno_permissions::PermissionsContainer { &mut self, path: &'a Path, api_name: &str, - ) -> Result, AnyError> { + ) -> Result, PermissionCheckError> { deno_permissions::PermissionsContainer::check_write_path( self, path, api_name, ) @@ -188,7 +194,7 @@ impl FsPermissions for deno_permissions::PermissionsContainer { &mut self, path: &str, api_name: &str, - ) -> Result { + ) -> Result { deno_permissions::PermissionsContainer::check_write_partial( self, path, api_name, ) @@ -199,17 +205,23 @@ impl FsPermissions for deno_permissions::PermissionsContainer { p: &Path, display: &str, api_name: &str, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionCheckError> { deno_permissions::PermissionsContainer::check_write_blind( self, p, display, api_name, ) } - fn check_read_all(&mut self, api_name: &str) -> Result<(), AnyError> { + fn check_read_all( + &mut self, + api_name: &str, + ) -> Result<(), PermissionCheckError> { deno_permissions::PermissionsContainer::check_read_all(self, api_name) } - fn check_write_all(&mut self, api_name: &str) -> Result<(), AnyError> { + fn check_write_all( + &mut self, + api_name: &str, + ) -> Result<(), PermissionCheckError> { deno_permissions::PermissionsContainer::check_write_all(self, api_name) } } diff --git a/ext/fs/ops.rs b/ext/fs/ops.rs index a3f59da4ea..9b76b49e61 100644 --- a/ext/fs/ops.rs +++ b/ext/fs/ops.rs @@ -10,6 +10,12 @@ use std::path::PathBuf; use std::path::StripPrefixError; use std::rc::Rc; +use crate::interface::AccessCheckFn; +use crate::interface::FileSystemRc; +use crate::interface::FsDirEntry; +use crate::interface::FsFileType; +use crate::FsPermissions; +use crate::OpenOptions; use deno_core::op2; use deno_core::CancelFuture; use deno_core::CancelHandle; @@ -20,18 +26,12 @@ use deno_core::ToJsBuffer; use deno_io::fs::FileResource; use deno_io::fs::FsError; use deno_io::fs::FsStat; +use deno_permissions::PermissionCheckError; use rand::rngs::ThreadRng; use rand::thread_rng; use rand::Rng; use serde::Serialize; -use crate::interface::AccessCheckFn; -use crate::interface::FileSystemRc; -use crate::interface::FsDirEntry; -use crate::interface::FsFileType; -use crate::FsPermissions; -use crate::OpenOptions; - #[derive(Debug, thiserror::Error)] pub enum FsOpsError { #[error("{0}")] @@ -39,7 +39,7 @@ pub enum FsOpsError { #[error("{0}")] OperationError(#[source] OperationError), #[error(transparent)] - Permission(deno_core::error::AnyError), + Permission(#[from] PermissionCheckError), #[error(transparent)] Resource(deno_core::error::AnyError), #[error("File name or path {0:?} is not valid UTF-8")] @@ -150,8 +150,7 @@ where let path = fs.cwd()?; state 
.borrow_mut::
<P>
() - .check_read_blind(&path, "CWD", "Deno.cwd()") - .map_err(FsOpsError::Permission)?; + .check_read_blind(&path, "CWD", "Deno.cwd()")?; let path_str = path_into_string(path.into_os_string())?; Ok(path_str) } @@ -166,8 +165,7 @@ where { let d = state .borrow_mut::
<P>
() - .check_read(directory, "Deno.chdir()") - .map_err(FsOpsError::Permission)?; + .check_read(directory, "Deno.chdir()")?; state .borrow::() .chdir(&d) @@ -253,8 +251,7 @@ where let path = state .borrow_mut::
<P>
() - .check_write(&path, "Deno.mkdirSync()") - .map_err(FsOpsError::Permission)?; + .check_write(&path, "Deno.mkdirSync()")?; let fs = state.borrow::(); fs.mkdir_sync(&path, recursive, Some(mode)) @@ -277,10 +274,7 @@ where let (fs, path) = { let mut state = state.borrow_mut(); - let path = state - .borrow_mut::
<P>
() - .check_write(&path, "Deno.mkdir()") - .map_err(FsOpsError::Permission)?; + let path = state.borrow_mut::
<P>
().check_write(&path, "Deno.mkdir()")?; (state.borrow::().clone(), path) }; @@ -302,8 +296,7 @@ where { let path = state .borrow_mut::
<P>
() - .check_write(&path, "Deno.chmodSync()") - .map_err(FsOpsError::Permission)?; + .check_write(&path, "Deno.chmodSync()")?; let fs = state.borrow::(); fs.chmod_sync(&path, mode).context_path("chmod", &path)?; Ok(()) @@ -320,10 +313,7 @@ where { let (fs, path) = { let mut state = state.borrow_mut(); - let path = state - .borrow_mut::
<P>
() - .check_write(&path, "Deno.chmod()") - .map_err(FsOpsError::Permission)?; + let path = state.borrow_mut::
<P>
().check_write(&path, "Deno.chmod()")?; (state.borrow::().clone(), path) }; fs.chmod_async(path.clone(), mode) @@ -344,8 +334,7 @@ where { let path = state .borrow_mut::
<P>
() - .check_write(&path, "Deno.chownSync()") - .map_err(FsOpsError::Permission)?; + .check_write(&path, "Deno.chownSync()")?; let fs = state.borrow::(); fs.chown_sync(&path, uid, gid) .context_path("chown", &path)?; @@ -364,10 +353,7 @@ where { let (fs, path) = { let mut state = state.borrow_mut(); - let path = state - .borrow_mut::
<P>
() - .check_write(&path, "Deno.chown()") - .map_err(FsOpsError::Permission)?; + let path = state.borrow_mut::
<P>
().check_write(&path, "Deno.chown()")?; (state.borrow::().clone(), path) }; fs.chown_async(path.clone(), uid, gid) @@ -387,8 +373,7 @@ where { let path = state .borrow_mut::
<P>
() - .check_write(path, "Deno.removeSync()") - .map_err(FsOpsError::Permission)?; + .check_write(path, "Deno.removeSync()")?; let fs = state.borrow::(); fs.remove_sync(&path, recursive) @@ -411,13 +396,11 @@ where let path = if recursive { state .borrow_mut::
<P>
() - .check_write(&path, "Deno.remove()") - .map_err(FsOpsError::Permission)? + .check_write(&path, "Deno.remove()")? } else { state .borrow_mut::
<P>
() - .check_write_partial(&path, "Deno.remove()") - .map_err(FsOpsError::Permission)? + .check_write_partial(&path, "Deno.remove()")? }; (state.borrow::().clone(), path) @@ -440,12 +423,8 @@ where P: FsPermissions + 'static, { let permissions = state.borrow_mut::
<P>
(); - let from = permissions - .check_read(from, "Deno.copyFileSync()") - .map_err(FsOpsError::Permission)?; - let to = permissions - .check_write(to, "Deno.copyFileSync()") - .map_err(FsOpsError::Permission)?; + let from = permissions.check_read(from, "Deno.copyFileSync()")?; + let to = permissions.check_write(to, "Deno.copyFileSync()")?; let fs = state.borrow::(); fs.copy_file_sync(&from, &to) @@ -466,12 +445,8 @@ where let (fs, from, to) = { let mut state = state.borrow_mut(); let permissions = state.borrow_mut::
<P>
(); - let from = permissions - .check_read(&from, "Deno.copyFile()") - .map_err(FsOpsError::Permission)?; - let to = permissions - .check_write(&to, "Deno.copyFile()") - .map_err(FsOpsError::Permission)?; + let from = permissions.check_read(&from, "Deno.copyFile()")?; + let to = permissions.check_write(&to, "Deno.copyFile()")?; (state.borrow::().clone(), from, to) }; @@ -493,8 +468,7 @@ where { let path = state .borrow_mut::
<P>
() - .check_read(&path, "Deno.statSync()") - .map_err(FsOpsError::Permission)?; + .check_read(&path, "Deno.statSync()")?; let fs = state.borrow::(); let stat = fs.stat_sync(&path).context_path("stat", &path)?; let serializable_stat = SerializableStat::from(stat); @@ -514,9 +488,7 @@ where let (fs, path) = { let mut state = state.borrow_mut(); let permissions = state.borrow_mut::
<P>
(); - let path = permissions - .check_read(&path, "Deno.stat()") - .map_err(FsOpsError::Permission)?; + let path = permissions.check_read(&path, "Deno.stat()")?; (state.borrow::().clone(), path) }; let stat = fs @@ -537,8 +509,7 @@ where { let path = state .borrow_mut::
<P>
() - .check_read(&path, "Deno.lstatSync()") - .map_err(FsOpsError::Permission)?; + .check_read(&path, "Deno.lstatSync()")?; let fs = state.borrow::(); let stat = fs.lstat_sync(&path).context_path("lstat", &path)?; let serializable_stat = SerializableStat::from(stat); @@ -558,9 +529,7 @@ where let (fs, path) = { let mut state = state.borrow_mut(); let permissions = state.borrow_mut::
<P>
(); - let path = permissions - .check_read(&path, "Deno.lstat()") - .map_err(FsOpsError::Permission)?; + let path = permissions.check_read(&path, "Deno.lstat()")?; (state.borrow::().clone(), path) }; let stat = fs @@ -581,13 +550,9 @@ where { let fs = state.borrow::().clone(); let permissions = state.borrow_mut::
<P>
(); - let path = permissions - .check_read(&path, "Deno.realPathSync()") - .map_err(FsOpsError::Permission)?; + let path = permissions.check_read(&path, "Deno.realPathSync()")?; if path.is_relative() { - permissions - .check_read_blind(&fs.cwd()?, "CWD", "Deno.realPathSync()") - .map_err(FsOpsError::Permission)?; + permissions.check_read_blind(&fs.cwd()?, "CWD", "Deno.realPathSync()")?; } let resolved_path = @@ -610,13 +575,9 @@ where let mut state = state.borrow_mut(); let fs = state.borrow::().clone(); let permissions = state.borrow_mut::
<P>
(); - let path = permissions - .check_read(&path, "Deno.realPath()") - .map_err(FsOpsError::Permission)?; + let path = permissions.check_read(&path, "Deno.realPath()")?; if path.is_relative() { - permissions - .check_read_blind(&fs.cwd()?, "CWD", "Deno.realPath()") - .map_err(FsOpsError::Permission)?; + permissions.check_read_blind(&fs.cwd()?, "CWD", "Deno.realPath()")?; } (fs, path) }; @@ -640,8 +601,7 @@ where { let path = state .borrow_mut::
<P>
() - .check_read(&path, "Deno.readDirSync()") - .map_err(FsOpsError::Permission)?; + .check_read(&path, "Deno.readDirSync()")?; let fs = state.borrow::(); let entries = fs.read_dir_sync(&path).context_path("readdir", &path)?; @@ -662,8 +622,7 @@ where let mut state = state.borrow_mut(); let path = state .borrow_mut::
<P>
() - .check_read(&path, "Deno.readDir()") - .map_err(FsOpsError::Permission)?; + .check_read(&path, "Deno.readDir()")?; (state.borrow::().clone(), path) }; @@ -685,15 +644,9 @@ where P: FsPermissions + 'static, { let permissions = state.borrow_mut::
<P>
(); - let _ = permissions - .check_read(&oldpath, "Deno.renameSync()") - .map_err(FsOpsError::Permission)?; - let oldpath = permissions - .check_write(&oldpath, "Deno.renameSync()") - .map_err(FsOpsError::Permission)?; - let newpath = permissions - .check_write(&newpath, "Deno.renameSync()") - .map_err(FsOpsError::Permission)?; + let _ = permissions.check_read(&oldpath, "Deno.renameSync()")?; + let oldpath = permissions.check_write(&oldpath, "Deno.renameSync()")?; + let newpath = permissions.check_write(&newpath, "Deno.renameSync()")?; let fs = state.borrow::(); fs.rename_sync(&oldpath, &newpath) @@ -714,15 +667,9 @@ where let (fs, oldpath, newpath) = { let mut state = state.borrow_mut(); let permissions = state.borrow_mut::
<P>
(); - _ = permissions - .check_read(&oldpath, "Deno.rename()") - .map_err(FsOpsError::Permission)?; - let oldpath = permissions - .check_write(&oldpath, "Deno.rename()") - .map_err(FsOpsError::Permission)?; - let newpath = permissions - .check_write(&newpath, "Deno.rename()") - .map_err(FsOpsError::Permission)?; + _ = permissions.check_read(&oldpath, "Deno.rename()")?; + let oldpath = permissions.check_write(&oldpath, "Deno.rename()")?; + let newpath = permissions.check_write(&newpath, "Deno.rename()")?; (state.borrow::().clone(), oldpath, newpath) }; @@ -743,18 +690,10 @@ where P: FsPermissions + 'static, { let permissions = state.borrow_mut::
<P>
(); - _ = permissions - .check_read(oldpath, "Deno.linkSync()") - .map_err(FsOpsError::Permission)?; - let oldpath = permissions - .check_write(oldpath, "Deno.linkSync()") - .map_err(FsOpsError::Permission)?; - _ = permissions - .check_read(newpath, "Deno.linkSync()") - .map_err(FsOpsError::Permission)?; - let newpath = permissions - .check_write(newpath, "Deno.linkSync()") - .map_err(FsOpsError::Permission)?; + _ = permissions.check_read(oldpath, "Deno.linkSync()")?; + let oldpath = permissions.check_write(oldpath, "Deno.linkSync()")?; + _ = permissions.check_read(newpath, "Deno.linkSync()")?; + let newpath = permissions.check_write(newpath, "Deno.linkSync()")?; let fs = state.borrow::(); fs.link_sync(&oldpath, &newpath) @@ -775,18 +714,10 @@ where let (fs, oldpath, newpath) = { let mut state = state.borrow_mut(); let permissions = state.borrow_mut::
<P>
(); - _ = permissions - .check_read(&oldpath, "Deno.link()") - .map_err(FsOpsError::Permission)?; - let oldpath = permissions - .check_write(&oldpath, "Deno.link()") - .map_err(FsOpsError::Permission)?; - _ = permissions - .check_read(&newpath, "Deno.link()") - .map_err(FsOpsError::Permission)?; - let newpath = permissions - .check_write(&newpath, "Deno.link()") - .map_err(FsOpsError::Permission)?; + _ = permissions.check_read(&oldpath, "Deno.link()")?; + let oldpath = permissions.check_write(&oldpath, "Deno.link()")?; + _ = permissions.check_read(&newpath, "Deno.link()")?; + let newpath = permissions.check_write(&newpath, "Deno.link()")?; (state.borrow::().clone(), oldpath, newpath) }; @@ -811,12 +742,8 @@ where let newpath = PathBuf::from(newpath); let permissions = state.borrow_mut::
<P>
(); - permissions - .check_write_all("Deno.symlinkSync()") - .map_err(FsOpsError::Permission)?; - permissions - .check_read_all("Deno.symlinkSync()") - .map_err(FsOpsError::Permission)?; + permissions.check_write_all("Deno.symlinkSync()")?; + permissions.check_read_all("Deno.symlinkSync()")?; let fs = state.borrow::(); fs.symlink_sync(&oldpath, &newpath, file_type) @@ -841,12 +768,8 @@ where let fs = { let mut state = state.borrow_mut(); let permissions = state.borrow_mut::
<P>
(); - permissions - .check_write_all("Deno.symlink()") - .map_err(FsOpsError::Permission)?; - permissions - .check_read_all("Deno.symlink()") - .map_err(FsOpsError::Permission)?; + permissions.check_write_all("Deno.symlink()")?; + permissions.check_read_all("Deno.symlink()")?; state.borrow::().clone() }; @@ -868,8 +791,7 @@ where { let path = state .borrow_mut::
<P>
() - .check_read(&path, "Deno.readLink()") - .map_err(FsOpsError::Permission)?; + .check_read(&path, "Deno.readLink()")?; let fs = state.borrow::(); @@ -891,8 +813,7 @@ where let mut state = state.borrow_mut(); let path = state .borrow_mut::
<P>
() - .check_read(&path, "Deno.readLink()") - .map_err(FsOpsError::Permission)?; + .check_read(&path, "Deno.readLink()")?; (state.borrow::().clone(), path) }; @@ -915,8 +836,7 @@ where { let path = state .borrow_mut::
<P>
() - .check_write(path, "Deno.truncateSync()") - .map_err(FsOpsError::Permission)?; + .check_write(path, "Deno.truncateSync()")?; let fs = state.borrow::(); fs.truncate_sync(&path, len) @@ -938,8 +858,7 @@ where let mut state = state.borrow_mut(); let path = state .borrow_mut::
<P>
() - .check_write(&path, "Deno.truncate()") - .map_err(FsOpsError::Permission)?; + .check_write(&path, "Deno.truncate()")?; (state.borrow::().clone(), path) }; @@ -962,10 +881,7 @@ pub fn op_fs_utime_sync
<P>
( where P: FsPermissions + 'static, { - let path = state - .borrow_mut::
<P>
() - .check_write(path, "Deno.utime()") - .map_err(FsOpsError::Permission)?; + let path = state.borrow_mut::
<P>
().check_write(path, "Deno.utime()")?; let fs = state.borrow::(); fs.utime_sync(&path, atime_secs, atime_nanos, mtime_secs, mtime_nanos) @@ -988,10 +904,7 @@ where { let (fs, path) = { let mut state = state.borrow_mut(); - let path = state - .borrow_mut::
<P>
() - .check_write(&path, "Deno.utime()") - .map_err(FsOpsError::Permission)?; + let path = state.borrow_mut::
<P>
().check_write(&path, "Deno.utime()")?; (state.borrow::().clone(), path) }; @@ -1219,16 +1132,12 @@ where { let fs = state.borrow::().clone(); let dir = match dir { - Some(dir) => state - .borrow_mut::
<P>
() - .check_write(dir, api_name) - .map_err(FsOpsError::Permission)?, + Some(dir) => state.borrow_mut::
<P>
().check_write(dir, api_name)?, None => { let dir = fs.tmp_dir().context("tmpdir")?; state .borrow_mut::
<P>
() - .check_write_blind(&dir, "TMP", api_name) - .map_err(FsOpsError::Permission)?; + .check_write_blind(&dir, "TMP", api_name)?; dir } }; @@ -1246,16 +1155,12 @@ where let mut state = state.borrow_mut(); let fs = state.borrow::().clone(); let dir = match dir { - Some(dir) => state - .borrow_mut::
<P>
() - .check_write(dir, api_name) - .map_err(FsOpsError::Permission)?, + Some(dir) => state.borrow_mut::
<P>
().check_write(dir, api_name)?, None => { let dir = fs.tmp_dir().context("tmpdir")?; state .borrow_mut::
<P>
() - .check_write_blind(&dir, "TMP", api_name) - .map_err(FsOpsError::Permission)?; + .check_write_blind(&dir, "TMP", api_name)?; dir } }; diff --git a/ext/http/00_serve.ts b/ext/http/00_serve.ts index 1b70cf2129..7bf83e49c3 100644 --- a/ext/http/00_serve.ts +++ b/ext/http/00_serve.ts @@ -14,6 +14,7 @@ import { op_http_get_request_headers, op_http_get_request_method_and_url, op_http_read_request_body, + op_http_request_on_cancel, op_http_serve, op_http_serve_on, op_http_set_promise_complete, @@ -373,6 +374,18 @@ class InnerRequest { get external() { return this.#external; } + + onCancel(callback) { + if (this.#external === null) { + callback(); + return; + } + + PromisePrototypeThen( + op_http_request_on_cancel(this.#external), + callback, + ); + } } class CallbackContext { diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index 6fa7598cb5..ed98fe349c 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_http" -version = "0.171.0" +version = "0.175.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index 56c46de925..1251f00cc0 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -564,6 +564,7 @@ fn is_request_compressible( match accept_encoding.to_str() { // Firefox and Chrome send this -- no need to parse Ok("gzip, deflate, br") => return Compression::Brotli, + Ok("gzip, deflate, br, zstd") => return Compression::Brotli, Ok("gzip") => return Compression::GZip, Ok("br") => return Compression::Brotli, _ => (), @@ -700,6 +701,27 @@ fn set_response( http.complete(); } +#[op2(fast)] +pub fn op_http_get_request_cancelled(external: *const c_void) -> bool { + let http = + // SAFETY: op is called with external. + unsafe { clone_external!(external, "op_http_get_request_cancelled") }; + http.cancelled() +} + +#[op2(async)] +pub async fn op_http_request_on_cancel(external: *const c_void) { + let http = + // SAFETY: op is called with external. + unsafe { clone_external!(external, "op_http_request_on_cancel") }; + let (tx, rx) = tokio::sync::oneshot::channel(); + + http.on_cancel(tx); + drop(http); + + rx.await.ok(); +} + /// Returned promise resolves when body streaming finishes. /// Call [`op_http_close_after_finish`] when done with the external. 
#[op2(async)] diff --git a/ext/http/lib.rs b/ext/http/lib.rs index 6243804a14..49893b1b92 100644 --- a/ext/http/lib.rs +++ b/ext/http/lib.rs @@ -112,7 +112,9 @@ deno_core::extension!( http_next::op_http_close_after_finish, http_next::op_http_get_request_header, http_next::op_http_get_request_headers, + http_next::op_http_request_on_cancel, http_next::op_http_get_request_method_and_url, + http_next::op_http_get_request_cancelled, http_next::op_http_read_request_body, http_next::op_http_serve_on, http_next::op_http_serve, diff --git a/ext/http/service.rs b/ext/http/service.rs index 75f93d77c2..ce24dea43f 100644 --- a/ext/http/service.rs +++ b/ext/http/service.rs @@ -27,6 +27,7 @@ use std::rc::Rc; use std::task::Context; use std::task::Poll; use std::task::Waker; +use tokio::sync::oneshot; pub type Request = hyper::Request; pub type Response = hyper::Response; @@ -211,6 +212,7 @@ pub struct UpgradeUnavailableError; struct HttpRecordInner { server_state: SignallingRc, + closed_channel: Option>, request_info: HttpConnectionProperties, request_parts: http::request::Parts, request_body: Option, @@ -276,6 +278,7 @@ impl HttpRecord { response_body_finished: false, response_body_waker: None, trailers: None, + closed_channel: None, been_dropped: false, finished: false, needs_close_after_finish: false, @@ -312,6 +315,10 @@ impl HttpRecord { RefMut::map(self.self_mut(), |inner| &mut inner.needs_close_after_finish) } + pub fn on_cancel(&self, sender: oneshot::Sender<()>) { + self.self_mut().closed_channel = Some(sender); + } + fn recycle(self: Rc) { assert!( Rc::strong_count(&self) == 1, @@ -390,6 +397,9 @@ impl HttpRecord { inner.been_dropped = true; // The request body might include actual resources. inner.request_body.take(); + if let Some(closed_channel) = inner.closed_channel.take() { + let _ = closed_channel.send(()); + } } /// Complete this record, potentially expunging it if it is fully complete (ie: cancelled as well). 
diff --git a/ext/io/Cargo.toml b/ext/io/Cargo.toml index 8f407c820c..6ef049ff9b 100644 --- a/ext/io/Cargo.toml +++ b/ext/io/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_io" -version = "0.83.0" +version = "0.87.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/io/bi_pipe.rs b/ext/io/bi_pipe.rs index b6fc70ca24..3492e2f441 100644 --- a/ext/io/bi_pipe.rs +++ b/ext/io/bi_pipe.rs @@ -183,9 +183,10 @@ fn from_raw( ) -> Result<(BiPipeRead, BiPipeWrite), std::io::Error> { use std::os::fd::FromRawFd; // Safety: The fd is part of a pair of connected sockets - let unix_stream = tokio::net::UnixStream::from_std(unsafe { - std::os::unix::net::UnixStream::from_raw_fd(stream) - })?; + let unix_stream = + unsafe { std::os::unix::net::UnixStream::from_raw_fd(stream) }; + unix_stream.set_nonblocking(true)?; + let unix_stream = tokio::net::UnixStream::from_std(unix_stream)?; let (read, write) = unix_stream.into_split(); Ok((BiPipeRead { inner: read }, BiPipeWrite { inner: write })) } @@ -280,7 +281,7 @@ pub fn bi_pipe_pair_raw( // https://github.com/nix-rust/nix/issues/861 let mut fds = [-1, -1]; #[cfg(not(target_os = "macos"))] - let flags = libc::SOCK_CLOEXEC | libc::SOCK_NONBLOCK; + let flags = libc::SOCK_CLOEXEC; #[cfg(target_os = "macos")] let flags = 0; @@ -301,13 +302,13 @@ pub fn bi_pipe_pair_raw( if cfg!(target_os = "macos") { let fcntl = |fd: i32, flag: libc::c_int| -> Result<(), std::io::Error> { // SAFETY: libc call, fd is valid - let flags = unsafe { libc::fcntl(fd, libc::F_GETFL) }; + let flags = unsafe { libc::fcntl(fd, libc::F_GETFD) }; if flags == -1 { return Err(fail(fds)); } // SAFETY: libc call, fd is valid - let ret = unsafe { libc::fcntl(fd, libc::F_SETFL, flags | flag) }; + let ret = unsafe { libc::fcntl(fd, libc::F_SETFD, flags | flag) }; if ret == -1 { return Err(fail(fds)); } @@ -323,13 +324,9 @@ pub fn bi_pipe_pair_raw( std::io::Error::last_os_error() } - // SOCK_NONBLOCK is not supported on macOS. - (fcntl)(fds[0], libc::O_NONBLOCK)?; - (fcntl)(fds[1], libc::O_NONBLOCK)?; - // SOCK_CLOEXEC is not supported on macOS. - (fcntl)(fds[0], libc::FD_CLOEXEC)?; - (fcntl)(fds[1], libc::FD_CLOEXEC)?; + fcntl(fds[0], libc::FD_CLOEXEC)?; + fcntl(fds[1], libc::FD_CLOEXEC)?; } let fd1 = fds[0]; diff --git a/ext/io/fs.rs b/ext/io/fs.rs index 06fc3da098..8854265209 100644 --- a/ext/io/fs.rs +++ b/ext/io/fs.rs @@ -1,6 +1,7 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
use std::borrow::Cow; +use std::fmt::Formatter; use std::io; use std::rc::Rc; use std::time::SystemTime; @@ -21,6 +22,21 @@ pub enum FsError { NotCapable(&'static str), } +impl std::fmt::Display for FsError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + FsError::Io(err) => std::fmt::Display::fmt(err, f), + FsError::FileBusy => f.write_str("file busy"), + FsError::NotSupported => f.write_str("not supported"), + FsError::NotCapable(err) => { + f.write_str(&format!("requires {err} access")) + } + } + } +} + +impl std::error::Error for FsError {} + impl FsError { pub fn kind(&self) -> io::ErrorKind { match self { @@ -55,20 +71,6 @@ impl From for FsError { } } -impl From for deno_core::error::AnyError { - fn from(err: FsError) -> Self { - match err { - FsError::Io(err) => err.into(), - FsError::FileBusy => deno_core::error::resource_unavailable(), - FsError::NotSupported => deno_core::error::not_supported(), - FsError::NotCapable(err) => deno_core::error::custom_error( - "NotCapable", - format!("permission denied: {err}"), - ), - } - } -} - impl From for FsError { fn from(err: JoinError) -> Self { if err.is_cancelled() { diff --git a/ext/kv/Cargo.toml b/ext/kv/Cargo.toml index f7b28af675..1d7b91770f 100644 --- a/ext/kv/Cargo.toml +++ b/ext/kv/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_kv" -version = "0.81.0" +version = "0.85.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/kv/remote.rs b/ext/kv/remote.rs index 922853588a..4930aacfe3 100644 --- a/ext/kv/remote.rs +++ b/ext/kv/remote.rs @@ -15,6 +15,7 @@ use deno_core::futures::Stream; use deno_core::OpState; use deno_fetch::create_http_client; use deno_fetch::CreateHttpClientOptions; +use deno_permissions::PermissionCheckError; use deno_tls::rustls::RootCertStore; use deno_tls::Proxy; use deno_tls::RootCertStoreProvider; @@ -45,17 +46,17 @@ impl HttpOptions { } pub trait RemoteDbHandlerPermissions { - fn check_env(&mut self, var: &str) -> Result<(), AnyError>; + fn check_env(&mut self, var: &str) -> Result<(), PermissionCheckError>; fn check_net_url( &mut self, url: &Url, api_name: &str, - ) -> Result<(), AnyError>; + ) -> Result<(), PermissionCheckError>; } impl RemoteDbHandlerPermissions for deno_permissions::PermissionsContainer { #[inline(always)] - fn check_env(&mut self, var: &str) -> Result<(), AnyError> { + fn check_env(&mut self, var: &str) -> Result<(), PermissionCheckError> { deno_permissions::PermissionsContainer::check_env(self, var) } @@ -64,7 +65,7 @@ impl RemoteDbHandlerPermissions for deno_permissions::PermissionsContainer { &mut self, url: &Url, api_name: &str, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionCheckError> { deno_permissions::PermissionsContainer::check_net_url(self, url, api_name) } } @@ -103,7 +104,9 @@ impl denokv_remote::RemotePermissions fn check_net_url(&self, url: &Url) -> Result<(), anyhow::Error> { let mut state = self.state.borrow_mut(); let permissions = state.borrow_mut::
<P>
(); - permissions.check_net_url(url, "Deno.openKv") + permissions + .check_net_url(url, "Deno.openKv") + .map_err(Into::into) } } diff --git a/ext/kv/sqlite.rs b/ext/kv/sqlite.rs index 0b4a3693c4..9de5209275 100644 --- a/ext/kv/sqlite.rs +++ b/ext/kv/sqlite.rs @@ -13,20 +13,20 @@ use std::sync::Arc; use std::sync::Mutex; use std::sync::OnceLock; +use crate::DatabaseHandler; use async_trait::async_trait; use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::unsync::spawn_blocking; use deno_core::OpState; use deno_path_util::normalize_path; +use deno_permissions::PermissionCheckError; pub use denokv_sqlite::SqliteBackendError; use denokv_sqlite::SqliteConfig; use denokv_sqlite::SqliteNotifier; use rand::SeedableRng; use rusqlite::OpenFlags; -use crate::DatabaseHandler; - static SQLITE_NOTIFIERS_MAP: OnceLock>> = OnceLock::new(); @@ -42,13 +42,13 @@ pub trait SqliteDbHandlerPermissions { &mut self, p: &str, api_name: &str, - ) -> Result; + ) -> Result; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn check_write<'a>( &mut self, p: &'a Path, api_name: &str, - ) -> Result, AnyError>; + ) -> Result, PermissionCheckError>; } impl SqliteDbHandlerPermissions for deno_permissions::PermissionsContainer { @@ -57,7 +57,7 @@ impl SqliteDbHandlerPermissions for deno_permissions::PermissionsContainer { &mut self, p: &str, api_name: &str, - ) -> Result { + ) -> Result { deno_permissions::PermissionsContainer::check_read(self, p, api_name) } @@ -66,7 +66,7 @@ impl SqliteDbHandlerPermissions for deno_permissions::PermissionsContainer { &mut self, p: &'a Path, api_name: &str, - ) -> Result, AnyError> { + ) -> Result, PermissionCheckError> { deno_permissions::PermissionsContainer::check_write_path(self, p, api_name) } } diff --git a/ext/napi/Cargo.toml b/ext/napi/Cargo.toml index ef2e41d57e..df3ec0287b 100644 --- a/ext/napi/Cargo.toml +++ b/ext/napi/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_napi" -version = "0.104.0" +version = "0.108.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -16,5 +16,14 @@ path = "lib.rs" [dependencies] deno_core.workspace = true deno_permissions.workspace = true +libc.workspace = true libloading = { version = "0.7" } +log.workspace = true +napi_sym.workspace = true thiserror.workspace = true + +[target.'cfg(windows)'.dependencies] +windows-sys.workspace = true + +[dev-dependencies] +libuv-sys-lite = "=1.48.2" diff --git a/ext/napi/README.md b/ext/napi/README.md index e69de29bb2..b479295244 100644 --- a/ext/napi/README.md +++ b/ext/napi/README.md @@ -0,0 +1,114 @@ +# napi + +This directory contains source for Deno's Node-API implementation. It depends on +`napi_sym` and `deno_napi`. + +Files are generally organized the same as in Node.js's implementation to ease in +ensuring compatibility. + +## Adding a new function + +Add the symbol name to +[`cli/napi_sym/symbol_exports.json`](../napi_sym/symbol_exports.json). + +```diff +{ + "symbols": [ + ... + "napi_get_undefined", +- "napi_get_null" ++ "napi_get_null", ++ "napi_get_boolean" + ] +} +``` + +Determine where to place the implementation. `napi_get_boolean` is related to JS +values so we will place it in `js_native_api.rs`. If something is not clear, +just create a new file module. + +See [`napi_sym`](../napi_sym/) for writing the implementation: + +```rust +#[napi_sym::napi_sym] +fn napi_get_boolean( + env: *mut Env, + value: bool, + result: *mut napi_value, +) -> Result { + // ... 
+ Ok(()) +} +``` + +Update the generated symbol lists using the script: + +``` +deno run --allow-write tools/napi/generate_symbols_lists.js +``` + +Add a test in [`/tests/napi`](../../tests/napi/). You can also refer to Node.js +test suite for Node-API. + +```js +// tests/napi/boolean_test.js +import { assertEquals, loadTestLibrary } from "./common.js"; +const lib = loadTestLibrary(); +Deno.test("napi get boolean", function () { + assertEquals(lib.test_get_boolean(true), true); + assertEquals(lib.test_get_boolean(false), false); +}); +``` + +```rust +// tests/napi/src/boolean.rs + +use napi_sys::Status::napi_ok; +use napi_sys::ValueType::napi_boolean; +use napi_sys::*; + +extern "C" fn test_boolean( + env: napi_env, + info: napi_callback_info, +) -> napi_value { + let (args, argc, _) = crate::get_callback_info!(env, info, 1); + assert_eq!(argc, 1); + + let mut ty = -1; + assert!(unsafe { napi_typeof(env, args[0], &mut ty) } == napi_ok); + assert_eq!(ty, napi_boolean); + + // Use napi_get_boolean here... + + value +} + +pub fn init(env: napi_env, exports: napi_value) { + let properties = &[crate::new_property!(env, "test_boolean\0", test_boolean)]; + + unsafe { + napi_define_properties(env, exports, properties.len(), properties.as_ptr()) + }; +} +``` + +```diff +// tests/napi/src/lib.rs + ++ mod boolean; + +... + +#[no_mangle] +unsafe extern "C" fn napi_register_module_v1( + env: napi_env, + exports: napi_value, +) -> napi_value { + ... ++ boolean::init(env, exports); + + exports +} +``` + +Run the test using `cargo test -p tests/napi`. diff --git a/ext/napi/build.rs b/ext/napi/build.rs new file mode 100644 index 0000000000..8705830a95 --- /dev/null +++ b/ext/napi/build.rs @@ -0,0 +1,22 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +fn main() { + let symbols_file_name = match std::env::consts::OS { + "android" | "freebsd" | "openbsd" => { + "generated_symbol_exports_list_linux.def".to_string() + } + os => format!("generated_symbol_exports_list_{}.def", os), + }; + let symbols_path = std::path::Path::new(".") + .join(symbols_file_name) + .canonicalize() + .expect( + "Missing symbols list! 
Generate using tools/napi/generate_symbols_lists.js", + ); + + println!("cargo:rustc-rerun-if-changed={}", symbols_path.display()); + + let path = std::path::PathBuf::from(std::env::var("OUT_DIR").unwrap()) + .join("napi_symbol_path.txt"); + std::fs::write(path, symbols_path.as_os_str().as_encoded_bytes()).unwrap(); +} diff --git a/cli/napi/generated_symbol_exports_list_linux.def b/ext/napi/generated_symbol_exports_list_linux.def similarity index 100% rename from cli/napi/generated_symbol_exports_list_linux.def rename to ext/napi/generated_symbol_exports_list_linux.def diff --git a/cli/napi/generated_symbol_exports_list_macos.def b/ext/napi/generated_symbol_exports_list_macos.def similarity index 100% rename from cli/napi/generated_symbol_exports_list_macos.def rename to ext/napi/generated_symbol_exports_list_macos.def diff --git a/cli/napi/generated_symbol_exports_list_windows.def b/ext/napi/generated_symbol_exports_list_windows.def similarity index 100% rename from cli/napi/generated_symbol_exports_list_windows.def rename to ext/napi/generated_symbol_exports_list_windows.def diff --git a/cli/napi/js_native_api.rs b/ext/napi/js_native_api.rs similarity index 99% rename from cli/napi/js_native_api.rs rename to ext/napi/js_native_api.rs index 35e7690c3c..53a12d6eba 100644 --- a/cli/napi/js_native_api.rs +++ b/ext/napi/js_native_api.rs @@ -5,7 +5,7 @@ const NAPI_VERSION: u32 = 9; -use deno_runtime::deno_napi::*; +use crate::*; use libc::INT_MAX; use super::util::check_new_from_utf8; @@ -17,9 +17,9 @@ use super::util::napi_set_last_error; use super::util::v8_name_from_property_descriptor; use crate::check_arg; use crate::check_env; -use deno_runtime::deno_napi::function::create_function; -use deno_runtime::deno_napi::function::create_function_template; -use deno_runtime::deno_napi::function::CallbackInfo; +use crate::function::create_function; +use crate::function::create_function_template; +use crate::function::CallbackInfo; use napi_sym::napi_sym; use std::ptr::NonNull; @@ -1083,7 +1083,7 @@ fn napi_create_string_latin1( } #[napi_sym] -fn napi_create_string_utf8( +pub(crate) fn napi_create_string_utf8( env_ptr: *mut Env, string: *const c_char, length: usize, @@ -1647,7 +1647,7 @@ fn napi_get_cb_info( check_arg!(env, argc); let argc = unsafe { *argc as usize }; for i in 0..argc { - let mut arg = args.get(i as _); + let arg = args.get(i as _); unsafe { *argv.add(i) = arg.into(); } diff --git a/ext/napi/lib.rs b/ext/napi/lib.rs index 0b2b3eb5e7..88b8c238df 100644 --- a/ext/napi/lib.rs +++ b/ext/napi/lib.rs @@ -5,6 +5,22 @@ #![allow(clippy::undocumented_unsafe_blocks)] #![deny(clippy::missing_safety_doc)] +//! Symbols to be exported are now defined in this JSON file. +//! The `#[napi_sym]` macro checks for missing entries and panics. +//! +//! `./tools/napi/generate_symbols_list.js` is used to generate the LINK `cli/exports.def` on Windows, +//! which is also checked into git. +//! +//! To add a new napi function: +//! 1. Place `#[napi_sym]` on top of your implementation. +//! 2. Add the function's identifier to this JSON list. +//! 3. Finally, run `tools/napi/generate_symbols_list.js` to update `ext/napi/generated_symbol_exports_list_*.def`. 
+ +pub mod js_native_api; +pub mod node_api; +pub mod util; +pub mod uv; + use core::ptr::NonNull; use deno_core::op2; use deno_core::parking_lot::RwLock; @@ -27,7 +43,7 @@ pub enum NApiError { #[error("Unable to find register Node-API module at {}", .0.display())] ModuleNotFound(PathBuf), #[error(transparent)] - Permission(deno_core::error::AnyError), + Permission(#[from] PermissionCheckError), } #[cfg(unix)] @@ -39,6 +55,7 @@ use libloading::os::windows::*; // Expose common stuff for ease of use. // `use deno_napi::*` pub use deno_core::v8; +use deno_permissions::PermissionCheckError; pub use std::ffi::CStr; pub use std::os::raw::c_char; pub use std::os::raw::c_void; @@ -492,20 +509,14 @@ deno_core::extension!(deno_napi, pub trait NapiPermissions { #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] - fn check( - &mut self, - path: &str, - ) -> Result; + fn check(&mut self, path: &str) -> Result; } // NOTE(bartlomieju): for now, NAPI uses `--allow-ffi` flag, but that might // change in the future. impl NapiPermissions for deno_permissions::PermissionsContainer { #[inline(always)] - fn check( - &mut self, - path: &str, - ) -> Result { + fn check(&mut self, path: &str) -> Result { deno_permissions::PermissionsContainer::check_ffi(self, path) } } @@ -537,7 +548,7 @@ where let (async_work_sender, cleanup_hooks, external_ops_tracker, path) = { let mut op_state = op_state.borrow_mut(); let permissions = op_state.borrow_mut::(); - let path = permissions.check(&path).map_err(NApiError::Permission)?; + let path = permissions.check(&path)?; let napi_state = op_state.borrow::(); ( op_state.borrow::().clone(), @@ -631,3 +642,34 @@ where Ok(exports) } + +#[allow(clippy::print_stdout)] +pub fn print_linker_flags(name: &str) { + let symbols_path = + include_str!(concat!(env!("OUT_DIR"), "/napi_symbol_path.txt")); + + #[cfg(target_os = "windows")] + println!("cargo:rustc-link-arg-bin={name}=/DEF:{}", symbols_path); + + #[cfg(target_os = "macos")] + println!( + "cargo:rustc-link-arg-bin={name}=-Wl,-exported_symbols_list,{}", + symbols_path, + ); + + #[cfg(any( + target_os = "linux", + target_os = "freebsd", + target_os = "openbsd" + ))] + println!( + "cargo:rustc-link-arg-bin={name}=-Wl,--export-dynamic-symbol-list={}", + symbols_path, + ); + + #[cfg(target_os = "android")] + println!( + "cargo:rustc-link-arg-bin={name}=-Wl,--export-dynamic-symbol-list={}", + symbols_path, + ); +} diff --git a/cli/napi/node_api.rs b/ext/napi/node_api.rs similarity index 98% rename from cli/napi/node_api.rs rename to ext/napi/node_api.rs index 4497a4695a..186ae42c48 100644 --- a/cli/napi/node_api.rs +++ b/ext/napi/node_api.rs @@ -9,10 +9,10 @@ use super::util::napi_set_last_error; use super::util::SendPtr; use crate::check_arg; use crate::check_env; +use crate::*; use deno_core::parking_lot::Condvar; use deno_core::parking_lot::Mutex; use deno_core::V8CrossThreadTaskSpawner; -use deno_runtime::deno_napi::*; use napi_sym::napi_sym; use std::sync::atomic::AtomicBool; use std::sync::atomic::AtomicU8; @@ -488,7 +488,7 @@ impl AsyncWork { } #[napi_sym] -fn napi_create_async_work( +pub(crate) fn napi_create_async_work( env: *mut Env, async_resource: napi_value, async_resource_name: napi_value, @@ -537,7 +537,10 @@ fn napi_create_async_work( } #[napi_sym] -fn napi_delete_async_work(env: *mut Env, work: napi_async_work) -> napi_status { +pub(crate) fn napi_delete_async_work( + env: *mut Env, + work: napi_async_work, +) -> napi_status { let env = check_env!(env); check_arg!(env, work); @@ 
-560,7 +563,10 @@ fn napi_get_uv_event_loop( } #[napi_sym] -fn napi_queue_async_work(env: *mut Env, work: napi_async_work) -> napi_status { +pub(crate) fn napi_queue_async_work( + env: *mut Env, + work: napi_async_work, +) -> napi_status { let env = check_env!(env); check_arg!(env, work); @@ -897,7 +903,7 @@ fn napi_create_threadsafe_function( }; let resource_name = resource_name.to_rust_string_lossy(&mut env.scope()); - let mut tsfn = Box::new(TsFn { + let tsfn = Box::new(TsFn { env, func, max_queue_size, diff --git a/cli/napi/sym/Cargo.toml b/ext/napi/sym/Cargo.toml similarity index 95% rename from cli/napi/sym/Cargo.toml rename to ext/napi/sym/Cargo.toml index 7c9bf208c7..7c13a9165c 100644 --- a/cli/napi/sym/Cargo.toml +++ b/ext/napi/sym/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "napi_sym" -version = "0.103.0" +version = "0.107.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/cli/napi/sym/README.md b/ext/napi/sym/README.md similarity index 92% rename from cli/napi/sym/README.md rename to ext/napi/sym/README.md index de08a8e174..66eb4bff26 100644 --- a/cli/napi/sym/README.md +++ b/ext/napi/sym/README.md @@ -2,7 +2,8 @@ A proc_macro for Deno's Node-API implementation. It does the following things: -- Marks the symbol as `#[no_mangle]` and rewrites it as `pub extern "C" $name`. +- Marks the symbol as `#[no_mangle]` and rewrites it as + `unsafe extern "C" $name`. - Asserts that the function symbol is present in [`symbol_exports.json`](./symbol_exports.json). - Maps `deno_napi::Result` to raw `napi_result`. diff --git a/cli/napi/sym/lib.rs b/ext/napi/sym/lib.rs similarity index 100% rename from cli/napi/sym/lib.rs rename to ext/napi/sym/lib.rs diff --git a/cli/napi/sym/symbol_exports.json b/ext/napi/sym/symbol_exports.json similarity index 100% rename from cli/napi/sym/symbol_exports.json rename to ext/napi/sym/symbol_exports.json diff --git a/cli/napi/util.rs b/ext/napi/util.rs similarity index 84% rename from cli/napi/util.rs rename to ext/napi/util.rs index 63d8effbf2..21e9d433aa 100644 --- a/cli/napi/util.rs +++ b/ext/napi/util.rs @@ -1,9 +1,9 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use deno_runtime::deno_napi::*; +use crate::*; use libc::INT_MAX; #[repr(transparent)] -pub struct SendPtr(pub *const T); +pub(crate) struct SendPtr(pub *const T); impl SendPtr { // silly function to get around `clippy::redundant_locals` @@ -37,7 +37,7 @@ impl Drop for BufferFinalizer { } } -pub extern "C" fn backing_store_deleter_callback( +pub(crate) extern "C" fn backing_store_deleter_callback( data: *mut c_void, _byte_length: usize, deleter_data: *mut c_void, @@ -50,7 +50,7 @@ pub extern "C" fn backing_store_deleter_callback( drop(finalizer); } -pub fn make_external_backing_store( +pub(crate) fn make_external_backing_store( env: *mut Env, data: *mut c_void, byte_length: usize, @@ -90,9 +90,7 @@ macro_rules! check_env { macro_rules! return_error_status_if_false { ($env: expr, $condition: expr, $status: ident) => { if !$condition { - return Err( - $crate::napi::util::napi_set_last_error($env, $status).into(), - ); + return Err($crate::util::napi_set_last_error($env, $status).into()); } }; } @@ -101,7 +99,7 @@ macro_rules! return_error_status_if_false { macro_rules! return_status_if_false { ($env: expr, $condition: expr, $status: ident) => { if !$condition { - return $crate::napi::util::napi_set_last_error($env, $status); + return $crate::util::napi_set_last_error($env, $status); } }; } @@ -222,7 +220,7 @@ macro_rules! 
check_arg { ($env: expr, $ptr: expr) => { $crate::return_status_if_false!( $env, - !$crate::napi::util::Nullable::is_null(&$ptr), + !$crate::util::Nullable::is_null(&$ptr), napi_invalid_arg ); }; @@ -230,17 +228,17 @@ macro_rules! check_arg { #[macro_export] macro_rules! napi_wrap { - ( $( # $attr:tt )* fn $name:ident $( < $( $x:lifetime ),* > )? ( $env:ident : & $( $lt:lifetime )? mut Env $( , $ident:ident : $ty:ty )* $(,)? ) -> napi_status $body:block ) => { - $( # $attr )* + ( $( # [ $attr:meta ] )* $vis:vis fn $name:ident $( < $( $x:lifetime ),* > )? ( $env:ident : & $( $lt:lifetime )? mut Env $( , $ident:ident : $ty:ty )* $(,)? ) -> napi_status $body:block ) => { + $( # [ $attr ] )* #[no_mangle] - pub unsafe extern "C" fn $name $( < $( $x ),* > )? ( env_ptr : *mut Env , $( $ident : $ty ),* ) -> napi_status { + $vis unsafe extern "C" fn $name $( < $( $x ),* > )? ( env_ptr : *mut Env , $( $ident : $ty ),* ) -> napi_status { let env: & $( $lt )? mut Env = $crate::check_env!(env_ptr); if env.last_exception.is_some() { return napi_pending_exception; } - $crate::napi::util::napi_clear_last_error(env); + $crate::util::napi_clear_last_error(env); let scope_env = unsafe { &mut *env_ptr }; let scope = &mut scope_env.scope(); @@ -259,21 +257,21 @@ macro_rules! napi_wrap { let env = unsafe { &mut *env_ptr }; let global = v8::Global::new(env.isolate(), exception); env.last_exception = Some(global); - return $crate::napi::util::napi_set_last_error(env_ptr, napi_pending_exception); + return $crate::util::napi_set_last_error(env_ptr, napi_pending_exception); } if result != napi_ok { - return $crate::napi::util::napi_set_last_error(env_ptr, result); + return $crate::util::napi_set_last_error(env_ptr, result); } return result; } }; - ( $( # $attr:tt )* fn $name:ident $( < $( $x:lifetime ),* > )? ( $( $ident:ident : $ty:ty ),* $(,)? ) -> napi_status $body:block ) => { - $( # $attr )* + ( $( # [ $attr:meta ] )* $vis:vis fn $name:ident $( < $( $x:lifetime ),* > )? ( $( $ident:ident : $ty:ty ),* $(,)? ) -> napi_status $body:block ) => { + $( # [ $attr ] )* #[no_mangle] - pub unsafe extern "C" fn $name $( < $( $x ),* > )? ( $( $ident : $ty ),* ) -> napi_status { + $vis unsafe extern "C" fn $name $( < $( $x ),* > )? ( $( $ident : $ty ),* ) -> napi_status { #[inline(always)] fn inner $( < $( $x ),* > )? ( $( $ident : $ty ),* ) -> napi_status $body diff --git a/cli/napi/uv.rs b/ext/napi/uv.rs similarity index 95% rename from cli/napi/uv.rs rename to ext/napi/uv.rs index d4cb5c0b36..6f728a92b3 100644 --- a/cli/napi/uv.rs +++ b/ext/napi/uv.rs @@ -1,7 +1,7 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
+use crate::*;
 use deno_core::parking_lot::Mutex;
-use deno_runtime::deno_napi::*;
 use std::mem::MaybeUninit;
 use std::ptr::addr_of_mut;
 
@@ -16,10 +16,10 @@ fn assert_ok(res: c_int) -> c_int {
   res
 }
 
-use crate::napi::js_native_api::napi_create_string_utf8;
-use crate::napi::node_api::napi_create_async_work;
-use crate::napi::node_api::napi_delete_async_work;
-use crate::napi::node_api::napi_queue_async_work;
+use js_native_api::napi_create_string_utf8;
+use node_api::napi_create_async_work;
+use node_api::napi_delete_async_work;
+use node_api::napi_queue_async_work;
 use std::ffi::c_int;
 
 const UV_MUTEX_SIZE: usize = {
diff --git a/ext/net/Cargo.toml b/ext/net/Cargo.toml
index 634dd7dda4..245deedd2d 100644
--- a/ext/net/Cargo.toml
+++ b/ext/net/Cargo.toml
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_net"
-version = "0.165.0"
+version = "0.169.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -17,11 +17,11 @@ path = "lib.rs"
 deno_core.workspace = true
 deno_permissions.workspace = true
 deno_tls.workspace = true
+hickory-proto = "0.24"
+hickory-resolver = { version = "0.24", features = ["tokio-runtime", "serde-config"] }
 pin-project.workspace = true
 rustls-tokio-stream.workspace = true
 serde.workspace = true
 socket2.workspace = true
 thiserror.workspace = true
 tokio.workspace = true
-trust-dns-proto = "0.23"
-trust-dns-resolver = { version = "0.23", features = ["tokio-runtime", "serde-config"] }
diff --git a/ext/net/lib.rs b/ext/net/lib.rs
index b039965d4c..bf8f58aa27 100644
--- a/ext/net/lib.rs
+++ b/ext/net/lib.rs
@@ -11,6 +11,7 @@ mod tcp;
 
 use deno_core::error::AnyError;
 use deno_core::OpState;
+use deno_permissions::PermissionCheckError;
 use deno_tls::rustls::RootCertStore;
 use deno_tls::RootCertStoreProvider;
 use std::borrow::Cow;
@@ -25,25 +26,25 @@ pub trait NetPermissions {
     &mut self,
     host: &(T, Option<u16>),
     api_name: &str,
-  ) -> Result<(), AnyError>;
+  ) -> Result<(), PermissionCheckError>;
 
   #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
   fn check_read(
     &mut self,
     p: &str,
     api_name: &str,
-  ) -> Result<PathBuf, AnyError>;
+  ) -> Result<PathBuf, PermissionCheckError>;
 
   #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
   fn check_write(
     &mut self,
     p: &str,
     api_name: &str,
-  ) -> Result<PathBuf, AnyError>;
+  ) -> Result<PathBuf, PermissionCheckError>;
 
   #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
   fn check_write_path<'a>(
     &mut self,
     p: &'a Path,
     api_name: &str,
-  ) -> Result<Cow<'a, Path>, AnyError>;
+  ) -> Result<Cow<'a, Path>, PermissionCheckError>;
 }
 
 impl NetPermissions for deno_permissions::PermissionsContainer {
@@ -52,7 +53,7 @@ impl NetPermissions for deno_permissions::PermissionsContainer {
     &mut self,
     host: &(T, Option<u16>),
     api_name: &str,
-  ) -> Result<(), AnyError> {
+  ) -> Result<(), PermissionCheckError> {
     deno_permissions::PermissionsContainer::check_net(self, host, api_name)
   }
 
@@ -61,7 +62,7 @@ impl NetPermissions for deno_permissions::PermissionsContainer {
     &mut self,
     path: &str,
     api_name: &str,
-  ) -> Result<PathBuf, AnyError> {
+  ) -> Result<PathBuf, PermissionCheckError> {
     deno_permissions::PermissionsContainer::check_read(self, path, api_name)
   }
 
@@ -70,7 +71,7 @@ impl NetPermissions for deno_permissions::PermissionsContainer {
     &mut self,
     path: &str,
     api_name: &str,
-  ) -> Result<PathBuf, AnyError> {
+  ) -> Result<PathBuf, PermissionCheckError> {
     deno_permissions::PermissionsContainer::check_write(self, path, api_name)
   }
 
@@ -79,7 +80,7 @@ impl NetPermissions for deno_permissions::PermissionsContainer {
     &mut self,
     path: &'a Path,
     api_name: &str,
-  ) -> Result<Cow<'a, Path>, AnyError> {
+  ) -> Result<Cow<'a, Path>, PermissionCheckError> {
deno_permissions::PermissionsContainer::check_write_path( self, path, api_name, ) diff --git a/ext/net/ops.rs b/ext/net/ops.rs index 0f92dead0c..9a8b70f0f6 100644 --- a/ext/net/ops.rs +++ b/ext/net/ops.rs @@ -18,6 +18,16 @@ use deno_core::OpState; use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; +use hickory_proto::rr::rdata::caa::Value; +use hickory_proto::rr::record_data::RData; +use hickory_proto::rr::record_type::RecordType; +use hickory_resolver::config::NameServerConfigGroup; +use hickory_resolver::config::ResolverConfig; +use hickory_resolver::config::ResolverOpts; +use hickory_resolver::error::ResolveError; +use hickory_resolver::error::ResolveErrorKind; +use hickory_resolver::system_conf; +use hickory_resolver::AsyncResolver; use serde::Deserialize; use serde::Serialize; use socket2::Domain; @@ -33,16 +43,6 @@ use std::rc::Rc; use std::str::FromStr; use tokio::net::TcpStream; use tokio::net::UdpSocket; -use trust_dns_proto::rr::rdata::caa::Value; -use trust_dns_proto::rr::record_data::RData; -use trust_dns_proto::rr::record_type::RecordType; -use trust_dns_resolver::config::NameServerConfigGroup; -use trust_dns_resolver::config::ResolverConfig; -use trust_dns_resolver::config::ResolverOpts; -use trust_dns_resolver::error::ResolveError; -use trust_dns_resolver::error::ResolveErrorKind; -use trust_dns_resolver::system_conf; -use trust_dns_resolver::AsyncResolver; #[derive(Serialize, Clone, Debug)] #[serde(rename_all = "camelCase")] @@ -81,8 +81,8 @@ pub enum NetError { Io(#[from] std::io::Error), #[error("Another accept task is ongoing")] AcceptTaskOngoing, - #[error("{0}")] - Permission(deno_core::error::AnyError), + #[error(transparent)] + Permission(#[from] deno_permissions::PermissionCheckError), #[error("{0}")] Resource(deno_core::error::AnyError), #[error("No resolved address found")] @@ -195,12 +195,10 @@ where { { let mut s = state.borrow_mut(); - s.borrow_mut::() - .check_net( - &(&addr.hostname, Some(addr.port)), - "Deno.DatagramConn.send()", - ) - .map_err(NetError::Permission)?; + s.borrow_mut::().check_net( + &(&addr.hostname, Some(addr.port)), + "Deno.DatagramConn.send()", + )?; } let addr = resolve_addr(&addr.hostname, addr.port) .await? @@ -369,8 +367,7 @@ where let mut state_ = state.borrow_mut(); state_ .borrow_mut::() - .check_net(&(&addr.hostname, Some(addr.port)), "Deno.connect()") - .map_err(NetError::Permission)?; + .check_net(&(&addr.hostname, Some(addr.port)), "Deno.connect()")?; } let addr = resolve_addr(&addr.hostname, addr.port) @@ -420,8 +417,7 @@ where } state .borrow_mut::() - .check_net(&(&addr.hostname, Some(addr.port)), "Deno.listen()") - .map_err(NetError::Permission)?; + .check_net(&(&addr.hostname, Some(addr.port)), "Deno.listen()")?; let addr = resolve_addr_sync(&addr.hostname, addr.port)? .next() .ok_or_else(|| NetError::NoResolvedAddress)?; @@ -449,8 +445,7 @@ where { state .borrow_mut::() - .check_net(&(&addr.hostname, Some(addr.port)), "Deno.listenDatagram()") - .map_err(NetError::Permission)?; + .check_net(&(&addr.hostname, Some(addr.port)), "Deno.listenDatagram()")?; let addr = resolve_addr_sync(&addr.hostname, addr.port)? 
.next() .ok_or_else(|| NetError::NoResolvedAddress)?; @@ -647,9 +642,7 @@ where let socker_addr = &ns.socket_addr; let ip = socker_addr.ip().to_string(); let port = socker_addr.port(); - perm - .check_net(&(ip, Some(port)), "Deno.resolveDns()") - .map_err(NetError::Permission)?; + perm.check_net(&(ip, Some(port)), "Deno.resolveDns()")?; } } @@ -834,6 +827,22 @@ mod tests { use deno_core::futures::FutureExt; use deno_core::JsRuntime; use deno_core::RuntimeOptions; + use deno_permissions::PermissionCheckError; + use hickory_proto::rr::rdata::a::A; + use hickory_proto::rr::rdata::aaaa::AAAA; + use hickory_proto::rr::rdata::caa::KeyValue; + use hickory_proto::rr::rdata::caa::CAA; + use hickory_proto::rr::rdata::mx::MX; + use hickory_proto::rr::rdata::name::ANAME; + use hickory_proto::rr::rdata::name::CNAME; + use hickory_proto::rr::rdata::name::NS; + use hickory_proto::rr::rdata::name::PTR; + use hickory_proto::rr::rdata::naptr::NAPTR; + use hickory_proto::rr::rdata::srv::SRV; + use hickory_proto::rr::rdata::txt::TXT; + use hickory_proto::rr::rdata::SOA; + use hickory_proto::rr::record_data::RData; + use hickory_proto::rr::Name; use socket2::SockRef; use std::net::Ipv4Addr; use std::net::Ipv6Addr; @@ -842,21 +851,6 @@ mod tests { use std::path::PathBuf; use std::sync::Arc; use std::sync::Mutex; - use trust_dns_proto::rr::rdata::a::A; - use trust_dns_proto::rr::rdata::aaaa::AAAA; - use trust_dns_proto::rr::rdata::caa::KeyValue; - use trust_dns_proto::rr::rdata::caa::CAA; - use trust_dns_proto::rr::rdata::mx::MX; - use trust_dns_proto::rr::rdata::name::ANAME; - use trust_dns_proto::rr::rdata::name::CNAME; - use trust_dns_proto::rr::rdata::name::NS; - use trust_dns_proto::rr::rdata::name::PTR; - use trust_dns_proto::rr::rdata::naptr::NAPTR; - use trust_dns_proto::rr::rdata::srv::SRV; - use trust_dns_proto::rr::rdata::txt::TXT; - use trust_dns_proto::rr::rdata::SOA; - use trust_dns_proto::rr::record_data::RData; - use trust_dns_proto::rr::Name; #[test] fn rdata_to_return_record_a() { @@ -1041,7 +1035,7 @@ mod tests { &mut self, _host: &(T, Option), _api_name: &str, - ) -> Result<(), deno_core::error::AnyError> { + ) -> Result<(), PermissionCheckError> { Ok(()) } @@ -1049,7 +1043,7 @@ mod tests { &mut self, p: &str, _api_name: &str, - ) -> Result { + ) -> Result { Ok(PathBuf::from(p)) } @@ -1057,7 +1051,7 @@ mod tests { &mut self, p: &str, _api_name: &str, - ) -> Result { + ) -> Result { Ok(PathBuf::from(p)) } @@ -1065,7 +1059,7 @@ mod tests { &mut self, p: &'a Path, _api_name: &str, - ) -> Result, deno_core::error::AnyError> { + ) -> Result, PermissionCheckError> { Ok(Cow::Borrowed(p)) } } diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml index 55d53d0ce0..9e1a3495b5 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_node" -version = "0.110.0" +version = "0.114.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/node/lib.rs b/ext/node/lib.rs index 9b22add453..9ca21e9941 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -9,25 +9,22 @@ use std::path::Path; use std::path::PathBuf; use deno_core::error::AnyError; -use deno_core::located_script_name; use deno_core::op2; use deno_core::url::Url; #[allow(unused_imports)] use deno_core::v8; use deno_core::v8::ExternalReference; -use deno_core::JsRuntime; -use deno_fs::sync::MaybeSend; -use deno_fs::sync::MaybeSync; use node_resolver::NpmResolverRc; use once_cell::sync::Lazy; extern crate libz_sys as zlib; mod global; -mod ops; +pub mod ops; mod polyfill; pub 
use deno_package_json::PackageJson; +use deno_permissions::PermissionCheckError; pub use node_resolver::PathClean; pub use ops::ipc::ChildPipeFd; pub use ops::ipc::IpcJsonStreamResource; @@ -49,10 +46,18 @@ pub trait NodePermissions { &mut self, url: &Url, api_name: &str, - ) -> Result<(), AnyError>; + ) -> Result<(), PermissionCheckError>; + fn check_net( + &mut self, + host: (&str, Option), + api_name: &str, + ) -> Result<(), PermissionCheckError>; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] #[inline(always)] - fn check_read(&mut self, path: &str) -> Result { + fn check_read( + &mut self, + path: &str, + ) -> Result { self.check_read_with_api_name(path, None) } #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] @@ -60,20 +65,24 @@ pub trait NodePermissions { &mut self, path: &str, api_name: Option<&str>, - ) -> Result; + ) -> Result; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn check_read_path<'a>( &mut self, path: &'a Path, - ) -> Result, AnyError>; + ) -> Result, PermissionCheckError>; fn query_read_all(&mut self) -> bool; - fn check_sys(&mut self, kind: &str, api_name: &str) -> Result<(), AnyError>; + fn check_sys( + &mut self, + kind: &str, + api_name: &str, + ) -> Result<(), PermissionCheckError>; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn check_write_with_api_name( &mut self, path: &str, api_name: Option<&str>, - ) -> Result; + ) -> Result; } impl NodePermissions for deno_permissions::PermissionsContainer { @@ -82,16 +91,24 @@ impl NodePermissions for deno_permissions::PermissionsContainer { &mut self, url: &Url, api_name: &str, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionCheckError> { deno_permissions::PermissionsContainer::check_net_url(self, url, api_name) } + fn check_net( + &mut self, + host: (&str, Option), + api_name: &str, + ) -> Result<(), PermissionCheckError> { + deno_permissions::PermissionsContainer::check_net(self, &host, api_name) + } + #[inline(always)] fn check_read_with_api_name( &mut self, path: &str, api_name: Option<&str>, - ) -> Result { + ) -> Result { deno_permissions::PermissionsContainer::check_read_with_api_name( self, path, api_name, ) @@ -100,7 +117,7 @@ impl NodePermissions for deno_permissions::PermissionsContainer { fn check_read_path<'a>( &mut self, path: &'a Path, - ) -> Result, AnyError> { + ) -> Result, PermissionCheckError> { deno_permissions::PermissionsContainer::check_read_path(self, path, None) } @@ -113,28 +130,33 @@ impl NodePermissions for deno_permissions::PermissionsContainer { &mut self, path: &str, api_name: Option<&str>, - ) -> Result { + ) -> Result { deno_permissions::PermissionsContainer::check_write_with_api_name( self, path, api_name, ) } - fn check_sys(&mut self, kind: &str, api_name: &str) -> Result<(), AnyError> { + fn check_sys( + &mut self, + kind: &str, + api_name: &str, + ) -> Result<(), PermissionCheckError> { deno_permissions::PermissionsContainer::check_sys(self, kind, api_name) } } #[allow(clippy::disallowed_types)] -pub type NodeRequireResolverRc = - deno_fs::sync::MaybeArc; +pub type NodeRequireLoaderRc = std::rc::Rc; -pub trait NodeRequireResolver: std::fmt::Debug + MaybeSend + MaybeSync { +pub trait NodeRequireLoader { #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn ensure_read_permission<'a>( &self, permissions: &mut dyn NodePermissions, path: &'a Path, ) -> Result, 
AnyError>; + + fn load_text_file_lossy(&self, path: &Path) -> Result; } pub static NODE_ENV_VAR_ALLOWLIST: Lazy> = Lazy::new(|| { @@ -152,10 +174,12 @@ fn op_node_build_os() -> String { env!("TARGET").split('-').nth(2).unwrap().to_string() } +#[derive(Clone)] pub struct NodeExtInitServices { - pub node_require_resolver: NodeRequireResolverRc, + pub node_require_loader: NodeRequireLoaderRc, pub node_resolver: NodeResolverRc, pub npm_resolver: NpmResolverRc, + pub pkg_json_resolver: PackageJsonResolverRc, } deno_core::extension!(deno_node, @@ -348,7 +372,7 @@ deno_core::extension!(deno_node, ops::http2::op_http2_send_response, ops::os::op_node_os_get_priority
<P>,
    ops::os::op_node_os_set_priority<P>,
-    ops::os::op_node_os_username<P>,
+    ops::os::op_node_os_user_info<P>,
    ops::os::op_geteuid<P>,
    ops::os::op_getegid<P>,
    ops::os::op_cpus<P>,
@@ -387,6 +411,15 @@ deno_core::extension!(deno_node,
    ops::process::op_node_process_kill,
    ops::process::op_process_abort,
    ops::tls::op_get_root_certificates,
+    ops::inspector::op_inspector_open<P>,
+    ops::inspector::op_inspector_close,
+    ops::inspector::op_inspector_url,
+    ops::inspector::op_inspector_wait,
+    ops::inspector::op_inspector_connect<P>
, + ops::inspector::op_inspector_dispatch, + ops::inspector::op_inspector_disconnect, + ops::inspector::op_inspector_emit_protocol_event, + ops::inspector::op_inspector_enabled, ], esm_entry_point = "ext:deno_node/02_init.js", esm = [ @@ -595,8 +628,8 @@ deno_core::extension!(deno_node, "node:http" = "http.ts", "node:http2" = "http2.ts", "node:https" = "https.ts", - "node:inspector" = "inspector.ts", - "node:inspector/promises" = "inspector.ts", + "node:inspector" = "inspector.js", + "node:inspector/promises" = "inspector/promises.js", "node:module" = "01_require.js", "node:net" = "net.ts", "node:os" = "os.ts", @@ -639,9 +672,10 @@ deno_core::extension!(deno_node, state.put(options.fs.clone()); if let Some(init) = &options.maybe_init { - state.put(init.node_require_resolver.clone()); + state.put(init.node_require_loader.clone()); state.put(init.node_resolver.clone()); state.put(init.npm_resolver.clone()); + state.put(init.pkg_json_resolver.clone()); } }, global_template_middleware = global_template_middleware, @@ -761,33 +795,16 @@ deno_core::extension!(deno_node, }, ); -pub fn load_cjs_module( - js_runtime: &mut JsRuntime, - module: &str, - main: bool, - inspect_brk: bool, -) -> Result<(), AnyError> { - fn escape_for_single_quote_string(text: &str) -> String { - text.replace('\\', r"\\").replace('\'', r"\'") - } - - let source_code = format!( - r#"(function loadCjsModule(moduleName, isMain, inspectBrk) {{ - Deno[Deno.internal].node.loadCjsModule(moduleName, isMain, inspectBrk); - }})('{module}', {main}, {inspect_brk});"#, - main = main, - module = escape_for_single_quote_string(module), - inspect_brk = inspect_brk, - ); - - js_runtime.execute_script(located_script_name!(), source_code)?; - Ok(()) -} - pub type NodeResolver = node_resolver::NodeResolver; #[allow(clippy::disallowed_types)] pub type NodeResolverRc = deno_fs::sync::MaybeArc>; +pub type PackageJsonResolver = + node_resolver::PackageJsonResolver; +#[allow(clippy::disallowed_types)] +pub type PackageJsonResolverRc = deno_fs::sync::MaybeArc< + node_resolver::PackageJsonResolver, +>; #[derive(Debug)] pub struct DenoFsNodeResolverEnv { diff --git a/ext/node/ops/blocklist.rs b/ext/node/ops/blocklist.rs index 332cdda8f9..6c64d68eca 100644 --- a/ext/node/ops/blocklist.rs +++ b/ext/node/ops/blocklist.rs @@ -7,9 +7,6 @@ use std::net::Ipv4Addr; use std::net::Ipv6Addr; use std::net::SocketAddr; -use deno_core::anyhow::anyhow; -use deno_core::anyhow::bail; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::OpState; @@ -27,13 +24,25 @@ impl deno_core::GarbageCollected for BlockListResource {} #[derive(Serialize)] struct SocketAddressSerialization(String, String); +#[derive(Debug, thiserror::Error)] +pub enum BlocklistError { + #[error("{0}")] + AddrParse(#[from] std::net::AddrParseError), + #[error("{0}")] + IpNetwork(#[from] ipnetwork::IpNetworkError), + #[error("Invalid address")] + InvalidAddress, + #[error("IP version mismatch between start and end addresses")] + IpVersionMismatch, +} + #[op2(fast)] pub fn op_socket_address_parse( state: &mut OpState, #[string] addr: &str, #[smi] port: u16, #[string] family: &str, -) -> Result { +) -> Result { let ip = addr.parse::()?; let parsed: SocketAddr = SocketAddr::new(ip, port); let parsed_ip_str = parsed.ip().to_string(); @@ -52,7 +61,7 @@ pub fn op_socket_address_parse( Ok(false) } } else { - Err(anyhow!("Invalid address")) + Err(BlocklistError::InvalidAddress) } } @@ -60,8 +69,8 @@ pub fn op_socket_address_parse( #[serde] pub fn op_socket_address_get_serialization( state: 
&mut OpState, -) -> Result { - Ok(state.take::()) +) -> SocketAddressSerialization { + state.take::() } #[op2] @@ -77,7 +86,7 @@ pub fn op_blocklist_new() -> BlockListResource { pub fn op_blocklist_add_address( #[cppgc] wrap: &BlockListResource, #[string] addr: &str, -) -> Result<(), AnyError> { +) -> Result<(), BlocklistError> { wrap.blocklist.borrow_mut().add_address(addr) } @@ -86,7 +95,7 @@ pub fn op_blocklist_add_range( #[cppgc] wrap: &BlockListResource, #[string] start: &str, #[string] end: &str, -) -> Result { +) -> Result { wrap.blocklist.borrow_mut().add_range(start, end) } @@ -95,7 +104,7 @@ pub fn op_blocklist_add_subnet( #[cppgc] wrap: &BlockListResource, #[string] addr: &str, #[smi] prefix: u8, -) -> Result<(), AnyError> { +) -> Result<(), BlocklistError> { wrap.blocklist.borrow_mut().add_subnet(addr, prefix) } @@ -104,7 +113,7 @@ pub fn op_blocklist_check( #[cppgc] wrap: &BlockListResource, #[string] addr: &str, #[string] r#type: &str, -) -> Result { +) -> Result { wrap.blocklist.borrow().check(addr, r#type) } @@ -123,7 +132,7 @@ impl BlockList { &mut self, addr: IpAddr, prefix: Option, - ) -> Result<(), AnyError> { + ) -> Result<(), BlocklistError> { match addr { IpAddr::V4(addr) => { let ipv4_prefix = prefix.unwrap_or(32); @@ -154,7 +163,7 @@ impl BlockList { Ok(()) } - pub fn add_address(&mut self, address: &str) -> Result<(), AnyError> { + pub fn add_address(&mut self, address: &str) -> Result<(), BlocklistError> { let ip: IpAddr = address.parse()?; self.map_addr_add_network(ip, None)?; Ok(()) @@ -164,7 +173,7 @@ impl BlockList { &mut self, start: &str, end: &str, - ) -> Result { + ) -> Result { let start_ip: IpAddr = start.parse()?; let end_ip: IpAddr = end.parse()?; @@ -193,25 +202,33 @@ impl BlockList { self.map_addr_add_network(IpAddr::V6(addr), None)?; } } - _ => bail!("IP version mismatch between start and end addresses"), + _ => return Err(BlocklistError::IpVersionMismatch), } Ok(true) } - pub fn add_subnet(&mut self, addr: &str, prefix: u8) -> Result<(), AnyError> { + pub fn add_subnet( + &mut self, + addr: &str, + prefix: u8, + ) -> Result<(), BlocklistError> { let ip: IpAddr = addr.parse()?; self.map_addr_add_network(ip, Some(prefix))?; Ok(()) } - pub fn check(&self, addr: &str, r#type: &str) -> Result { + pub fn check( + &self, + addr: &str, + r#type: &str, + ) -> Result { let addr: IpAddr = addr.parse()?; let family = r#type.to_lowercase(); if family == "ipv4" && addr.is_ipv4() || family == "ipv6" && addr.is_ipv6() { Ok(self.rules.iter().any(|net| net.contains(addr))) } else { - Err(anyhow!("Invalid address")) + Err(BlocklistError::InvalidAddress) } } } diff --git a/ext/node/ops/crypto/cipher.rs b/ext/node/ops/crypto/cipher.rs index b80aa33fe8..ec45146b49 100644 --- a/ext/node/ops/crypto/cipher.rs +++ b/ext/node/ops/crypto/cipher.rs @@ -4,9 +4,6 @@ use aes::cipher::block_padding::Pkcs7; use aes::cipher::BlockDecryptMut; use aes::cipher::BlockEncryptMut; use aes::cipher::KeyIvInit; -use deno_core::error::range_error; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::Resource; use digest::generic_array::GenericArray; use digest::KeyInit; @@ -50,8 +47,22 @@ pub struct DecipherContext { decipher: Rc>, } +#[derive(Debug, thiserror::Error)] +pub enum CipherContextError { + #[error("Cipher context is already in use")] + ContextInUse, + #[error("{0}")] + Resource(deno_core::error::AnyError), + #[error(transparent)] + Cipher(#[from] CipherError), +} + impl CipherContext { - pub fn new(algorithm: &str, key: &[u8], iv: &[u8]) -> Result { 
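The `CipherContextError`/`DecipherContextError` hunks here follow the PR's recurring error-refactor recipe: drop `deno_core::error::AnyError` and its ad-hoc `type_error(...)` strings in favor of a dedicated `thiserror` enum per module, with `#[error(transparent)]`/`#[from]` variants chaining the inner errors. A self-contained sketch of the mechanics, using stand-in functions rather than the real cipher state:

```rust
use thiserror::Error; // assumes the thiserror crate, as the diff does

#[derive(Debug, Error)]
pub enum CipherError {
  #[error("IV length must be 12 bytes")]
  InvalidIvLength,
}

#[derive(Debug, Error)]
pub enum CipherContextError {
  #[error("Cipher context is already in use")]
  ContextInUse,
  // `transparent` forwards Display to the inner error, and `#[from]`
  // generates a From impl so `?` converts CipherError automatically.
  #[error(transparent)]
  Cipher(#[from] CipherError),
}

fn new_cipher(iv: &[u8]) -> Result<(), CipherError> {
  if iv.len() != 12 {
    return Err(CipherError::InvalidIvLength);
  }
  Ok(())
}

fn new_context(iv: &[u8], in_use: bool) -> Result<(), CipherContextError> {
  if in_use {
    return Err(CipherContextError::ContextInUse);
  }
  new_cipher(iv)?; // no map_err needed, unlike the AnyError version
  Ok(())
}

fn main() {
  let err = new_context(&[0u8; 16], false).unwrap_err();
  // The transparent variant displays the inner message unchanged.
  assert_eq!(err.to_string(), "IV length must be 12 bytes");
}
```

The typed enums keep the exact same user-facing messages while letting callers match on variants instead of string-matching an opaque `AnyError`.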
+ pub fn new( + algorithm: &str, + key: &[u8], + iv: &[u8], + ) -> Result { Ok(Self { cipher: Rc::new(RefCell::new(Cipher::new(algorithm, key, iv)?)), }) @@ -74,16 +85,31 @@ impl CipherContext { auto_pad: bool, input: &[u8], output: &mut [u8], - ) -> Result { + ) -> Result { Rc::try_unwrap(self.cipher) - .map_err(|_| type_error("Cipher context is already in use"))? + .map_err(|_| CipherContextError::ContextInUse)? .into_inner() .r#final(auto_pad, input, output) + .map_err(Into::into) } } +#[derive(Debug, thiserror::Error)] +pub enum DecipherContextError { + #[error("Decipher context is already in use")] + ContextInUse, + #[error("{0}")] + Resource(deno_core::error::AnyError), + #[error(transparent)] + Decipher(#[from] DecipherError), +} + impl DecipherContext { - pub fn new(algorithm: &str, key: &[u8], iv: &[u8]) -> Result { + pub fn new( + algorithm: &str, + key: &[u8], + iv: &[u8], + ) -> Result { Ok(Self { decipher: Rc::new(RefCell::new(Decipher::new(algorithm, key, iv)?)), }) @@ -103,11 +129,12 @@ impl DecipherContext { input: &[u8], output: &mut [u8], auth_tag: &[u8], - ) -> Result<(), AnyError> { + ) -> Result<(), DecipherContextError> { Rc::try_unwrap(self.decipher) - .map_err(|_| type_error("Decipher context is already in use"))? + .map_err(|_| DecipherContextError::ContextInUse)? .into_inner() .r#final(auto_pad, input, output, auth_tag) + .map_err(Into::into) } } @@ -123,12 +150,26 @@ impl Resource for DecipherContext { } } +#[derive(Debug, thiserror::Error)] +pub enum CipherError { + #[error("IV length must be 12 bytes")] + InvalidIvLength, + #[error("Invalid key length")] + InvalidKeyLength, + #[error("Invalid initialization vector")] + InvalidInitializationVector, + #[error("Cannot pad the input data")] + CannotPadInputData, + #[error("Unknown cipher {0}")] + UnknownCipher(String), +} + impl Cipher { fn new( algorithm_name: &str, key: &[u8], iv: &[u8], - ) -> Result { + ) -> Result { use Cipher::*; Ok(match algorithm_name { "aes-128-cbc" => { @@ -139,7 +180,7 @@ impl Cipher { "aes-256-ecb" => Aes256Ecb(Box::new(ecb::Encryptor::new(key.into()))), "aes-128-gcm" => { if iv.len() != 12 { - return Err(type_error("IV length must be 12 bytes")); + return Err(CipherError::InvalidIvLength); } let cipher = @@ -149,7 +190,7 @@ impl Cipher { } "aes-256-gcm" => { if iv.len() != 12 { - return Err(type_error("IV length must be 12 bytes")); + return Err(CipherError::InvalidIvLength); } let cipher = @@ -159,15 +200,15 @@ impl Cipher { } "aes256" | "aes-256-cbc" => { if key.len() != 32 { - return Err(range_error("Invalid key length")); + return Err(CipherError::InvalidKeyLength); } if iv.len() != 16 { - return Err(type_error("Invalid initialization vector")); + return Err(CipherError::InvalidInitializationVector); } Aes256Cbc(Box::new(cbc::Encryptor::new(key.into(), iv.into()))) } - _ => return Err(type_error(format!("Unknown cipher {algorithm_name}"))), + _ => return Err(CipherError::UnknownCipher(algorithm_name.to_string())), }) } @@ -235,14 +276,14 @@ impl Cipher { auto_pad: bool, input: &[u8], output: &mut [u8], - ) -> Result { + ) -> Result { assert!(input.len() < 16); use Cipher::*; match (self, auto_pad) { (Aes128Cbc(encryptor), true) => { let _ = (*encryptor) .encrypt_padded_b2b_mut::(input, output) - .map_err(|_| type_error("Cannot pad the input data"))?; + .map_err(|_| CipherError::CannotPadInputData)?; Ok(None) } (Aes128Cbc(mut encryptor), false) => { @@ -255,7 +296,7 @@ impl Cipher { (Aes128Ecb(encryptor), true) => { let _ = (*encryptor) .encrypt_padded_b2b_mut::(input, output) - 
.map_err(|_| type_error("Cannot pad the input data"))?; + .map_err(|_| CipherError::CannotPadInputData)?; Ok(None) } (Aes128Ecb(mut encryptor), false) => { @@ -268,7 +309,7 @@ impl Cipher { (Aes192Ecb(encryptor), true) => { let _ = (*encryptor) .encrypt_padded_b2b_mut::(input, output) - .map_err(|_| type_error("Cannot pad the input data"))?; + .map_err(|_| CipherError::CannotPadInputData)?; Ok(None) } (Aes192Ecb(mut encryptor), false) => { @@ -281,7 +322,7 @@ impl Cipher { (Aes256Ecb(encryptor), true) => { let _ = (*encryptor) .encrypt_padded_b2b_mut::(input, output) - .map_err(|_| type_error("Cannot pad the input data"))?; + .map_err(|_| CipherError::CannotPadInputData)?; Ok(None) } (Aes256Ecb(mut encryptor), false) => { @@ -296,7 +337,7 @@ impl Cipher { (Aes256Cbc(encryptor), true) => { let _ = (*encryptor) .encrypt_padded_b2b_mut::(input, output) - .map_err(|_| type_error("Cannot pad the input data"))?; + .map_err(|_| CipherError::CannotPadInputData)?; Ok(None) } (Aes256Cbc(mut encryptor), false) => { @@ -319,12 +360,32 @@ impl Cipher { } } +#[derive(Debug, thiserror::Error)] +pub enum DecipherError { + #[error("IV length must be 12 bytes")] + InvalidIvLength, + #[error("Invalid key length")] + InvalidKeyLength, + #[error("Invalid initialization vector")] + InvalidInitializationVector, + #[error("Cannot unpad the input data")] + CannotUnpadInputData, + #[error("Failed to authenticate data")] + DataAuthenticationFailed, + #[error("setAutoPadding(false) not supported for Aes128Gcm yet")] + SetAutoPaddingFalseAes128GcmUnsupported, + #[error("setAutoPadding(false) not supported for Aes256Gcm yet")] + SetAutoPaddingFalseAes256GcmUnsupported, + #[error("Unknown cipher {0}")] + UnknownCipher(String), +} + impl Decipher { fn new( algorithm_name: &str, key: &[u8], iv: &[u8], - ) -> Result { + ) -> Result { use Decipher::*; Ok(match algorithm_name { "aes-128-cbc" => { @@ -335,7 +396,7 @@ impl Decipher { "aes-256-ecb" => Aes256Ecb(Box::new(ecb::Decryptor::new(key.into()))), "aes-128-gcm" => { if iv.len() != 12 { - return Err(type_error("IV length must be 12 bytes")); + return Err(DecipherError::InvalidIvLength); } let decipher = @@ -345,7 +406,7 @@ impl Decipher { } "aes-256-gcm" => { if iv.len() != 12 { - return Err(type_error("IV length must be 12 bytes")); + return Err(DecipherError::InvalidIvLength); } let decipher = @@ -355,15 +416,17 @@ impl Decipher { } "aes256" | "aes-256-cbc" => { if key.len() != 32 { - return Err(range_error("Invalid key length")); + return Err(DecipherError::InvalidKeyLength); } if iv.len() != 16 { - return Err(type_error("Invalid initialization vector")); + return Err(DecipherError::InvalidInitializationVector); } Aes256Cbc(Box::new(cbc::Decryptor::new(key.into(), iv.into()))) } - _ => return Err(type_error(format!("Unknown cipher {algorithm_name}"))), + _ => { + return Err(DecipherError::UnknownCipher(algorithm_name.to_string())) + } }) } @@ -432,14 +495,14 @@ impl Decipher { input: &[u8], output: &mut [u8], auth_tag: &[u8], - ) -> Result<(), AnyError> { + ) -> Result<(), DecipherError> { use Decipher::*; match (self, auto_pad) { (Aes128Cbc(decryptor), true) => { assert!(input.len() == 16); let _ = (*decryptor) .decrypt_padded_b2b_mut::(input, output) - .map_err(|_| type_error("Cannot unpad the input data"))?; + .map_err(|_| DecipherError::CannotUnpadInputData)?; Ok(()) } (Aes128Cbc(mut decryptor), false) => { @@ -453,7 +516,7 @@ impl Decipher { assert!(input.len() == 16); let _ = (*decryptor) .decrypt_padded_b2b_mut::(input, output) - .map_err(|_| type_error("Cannot 
unpad the input data"))?; + .map_err(|_| DecipherError::CannotUnpadInputData)?; Ok(()) } (Aes128Ecb(mut decryptor), false) => { @@ -467,7 +530,7 @@ impl Decipher { assert!(input.len() == 16); let _ = (*decryptor) .decrypt_padded_b2b_mut::(input, output) - .map_err(|_| type_error("Cannot unpad the input data"))?; + .map_err(|_| DecipherError::CannotUnpadInputData)?; Ok(()) } (Aes192Ecb(mut decryptor), false) => { @@ -481,7 +544,7 @@ impl Decipher { assert!(input.len() == 16); let _ = (*decryptor) .decrypt_padded_b2b_mut::(input, output) - .map_err(|_| type_error("Cannot unpad the input data"))?; + .map_err(|_| DecipherError::CannotUnpadInputData)?; Ok(()) } (Aes256Ecb(mut decryptor), false) => { @@ -496,28 +559,28 @@ impl Decipher { if tag.as_slice() == auth_tag { Ok(()) } else { - Err(type_error("Failed to authenticate data")) + Err(DecipherError::DataAuthenticationFailed) } } - (Aes128Gcm(_), false) => Err(type_error( - "setAutoPadding(false) not supported for Aes256Gcm yet", - )), + (Aes128Gcm(_), false) => { + Err(DecipherError::SetAutoPaddingFalseAes128GcmUnsupported) + } (Aes256Gcm(decipher), true) => { let tag = decipher.finish(); if tag.as_slice() == auth_tag { Ok(()) } else { - Err(type_error("Failed to authenticate data")) + Err(DecipherError::DataAuthenticationFailed) } } - (Aes256Gcm(_), false) => Err(type_error( - "setAutoPadding(false) not supported for Aes256Gcm yet", - )), + (Aes256Gcm(_), false) => { + Err(DecipherError::SetAutoPaddingFalseAes256GcmUnsupported) + } (Aes256Cbc(decryptor), true) => { assert!(input.len() == 16); let _ = (*decryptor) .decrypt_padded_b2b_mut::(input, output) - .map_err(|_| type_error("Cannot unpad the input data"))?; + .map_err(|_| DecipherError::CannotUnpadInputData)?; Ok(()) } (Aes256Cbc(mut decryptor), false) => { diff --git a/ext/node/ops/crypto/digest.rs b/ext/node/ops/crypto/digest.rs index 293e8e0637..a7d8fb51f1 100644 --- a/ext/node/ops/crypto/digest.rs +++ b/ext/node/ops/crypto/digest.rs @@ -1,6 +1,4 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
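The `digest.rs` hunk below gives `Hasher`/`Hash` the same typed-error treatment (`HashError`), and the behavior it guards is worth spelling out: a caller-supplied output length must equal the native digest size for fixed-output algorithms, while shake128/shake256 are extendable-output functions (XOFs) that accept any length. A hedged sketch of that rule written against the `sha2`/`sha3` crates directly; the real code dispatches through `DynDigest`:

```rust
use sha2::{Digest, Sha256};
use sha3::{digest::{ExtendableOutput, Update, XofReader}, Shake128};

// Fixed-output digest: reject a mismatched requested length, as the
// OutputLengthMismatch variant in the diff does.
fn sha256_with_len(data: &[u8], output_length: Option<usize>) -> Result<Vec<u8>, String> {
  if let Some(len) = output_length {
    if len != Sha256::output_size() {
      return Err("Output length mismatch for non-extendable algorithm".into());
    }
  }
  Ok(Sha256::digest(data).to_vec())
}

// XOF: any requested length is valid; the reader just produces more bytes.
fn shake128_with_len(data: &[u8], output_length: usize) -> Vec<u8> {
  let mut hasher = Shake128::default();
  hasher.update(data);
  let mut out = vec![0u8; output_length];
  hasher.finalize_xof().read(&mut out);
  out
}

fn main() {
  assert_eq!(sha256_with_len(b"deno", Some(32)).unwrap().len(), 32);
  assert!(sha256_with_len(b"deno", Some(16)).is_err());
  assert_eq!(shake128_with_len(b"deno", 16).len(), 16);
}
```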
-use deno_core::error::generic_error; -use deno_core::error::AnyError; use deno_core::GarbageCollected; use digest::Digest; use digest::DynDigest; @@ -19,7 +17,7 @@ impl Hasher { pub fn new( algorithm: &str, output_length: Option, - ) -> Result { + ) -> Result { let hash = Hash::new(algorithm, output_length)?; Ok(Self { @@ -44,7 +42,7 @@ impl Hasher { pub fn clone_inner( &self, output_length: Option, - ) -> Result, AnyError> { + ) -> Result, HashError> { let hash = self.hash.borrow(); let Some(hash) = hash.as_ref() else { return Ok(None); @@ -184,11 +182,19 @@ pub enum Hash { use Hash::*; +#[derive(Debug, thiserror::Error)] +pub enum HashError { + #[error("Output length mismatch for non-extendable algorithm")] + OutputLengthMismatch, + #[error("Digest method not supported: {0}")] + DigestMethodUnsupported(String), +} + impl Hash { pub fn new( algorithm_name: &str, output_length: Option, - ) -> Result { + ) -> Result { match algorithm_name { "shake128" => return Ok(Shake128(Default::default(), output_length)), "shake256" => return Ok(Shake256(Default::default(), output_length)), @@ -201,17 +207,13 @@ impl Hash { let digest: D = Digest::new(); if let Some(length) = output_length { if length != digest.output_size() { - return Err(generic_error( - "Output length mismatch for non-extendable algorithm", - )); + return Err(HashError::OutputLengthMismatch); } } FixedSize(Box::new(digest)) }, _ => { - return Err(generic_error(format!( - "Digest method not supported: {algorithm_name}" - ))) + return Err(HashError::DigestMethodUnsupported(algorithm_name.to_string())) } ); @@ -243,14 +245,12 @@ impl Hash { pub fn clone_hash( &self, output_length: Option, - ) -> Result { + ) -> Result { let hash = match self { FixedSize(context) => { if let Some(length) = output_length { if length != context.output_size() { - return Err(generic_error( - "Output length mismatch for non-extendable algorithm", - )); + return Err(HashError::OutputLengthMismatch); } } FixedSize(context.box_clone()) diff --git a/ext/node/ops/crypto/keys.rs b/ext/node/ops/crypto/keys.rs index ac62f5ccae..f164972d48 100644 --- a/ext/node/ops/crypto/keys.rs +++ b/ext/node/ops/crypto/keys.rs @@ -4,9 +4,7 @@ use std::borrow::Cow; use std::cell::RefCell; use base64::Engine; -use deno_core::error::generic_error; use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::serde_v8::BigInt as V8BigInt; use deno_core::unsync::spawn_blocking; @@ -46,6 +44,7 @@ use spki::der::Reader as _; use spki::DecodePublicKey as _; use spki::EncodePublicKey as _; use spki::SubjectPublicKeyInfoRef; +use x509_parser::error::X509Error; use x509_parser::x509; use super::dh; @@ -236,9 +235,11 @@ impl RsaPssPrivateKey { } impl EcPublicKey { - pub fn to_jwk(&self) -> Result { + pub fn to_jwk(&self) -> Result { match self { - EcPublicKey::P224(_) => Err(type_error("Unsupported JWK EC curve: P224")), + EcPublicKey::P224(_) => { + Err(AsymmetricPublicKeyJwkError::UnsupportedJwkEcCurveP224) + } EcPublicKey::P256(key) => Ok(key.to_jwk()), EcPublicKey::P384(key) => Ok(key.to_jwk()), } @@ -363,49 +364,201 @@ impl<'a> TryFrom> for RsaPssParameters<'a> { } } +#[derive(Debug, thiserror::Error)] +pub enum X509PublicKeyError { + #[error(transparent)] + X509(#[from] x509_parser::error::X509Error), + #[error(transparent)] + Rsa(#[from] rsa::Error), + #[error(transparent)] + Asn1(#[from] x509_parser::der_parser::asn1_rs::Error), + #[error(transparent)] + Ec(#[from] elliptic_curve::Error), + #[error("unsupported ec named curve")] + 
UnsupportedEcNamedCurve, + #[error("missing ec parameters")] + MissingEcParameters, + #[error("malformed DSS public key")] + MalformedDssPublicKey, + #[error("unsupported x509 public key type")] + UnsupportedX509KeyType, +} + +#[derive(Debug, thiserror::Error)] +pub enum RsaJwkError { + #[error(transparent)] + Base64(#[from] base64::DecodeError), + #[error(transparent)] + Rsa(#[from] rsa::Error), + #[error("missing RSA private component")] + MissingRsaPrivateComponent, +} + +#[derive(Debug, thiserror::Error)] +pub enum EcJwkError { + #[error(transparent)] + Ec(#[from] elliptic_curve::Error), + #[error("unsupported curve: {0}")] + UnsupportedCurve(String), +} + +#[derive(Debug, thiserror::Error)] +pub enum EdRawError { + #[error(transparent)] + Ed25519Signature(#[from] ed25519_dalek::SignatureError), + #[error("invalid Ed25519 key")] + InvalidEd25519Key, + #[error("unsupported curve")] + UnsupportedCurve, +} + +#[derive(Debug, thiserror::Error)] +pub enum AsymmetricPrivateKeyError { + #[error("invalid PEM private key: not valid utf8 starting at byte {0}")] + InvalidPemPrivateKeyInvalidUtf8(usize), + #[error("invalid encrypted PEM private key")] + InvalidEncryptedPemPrivateKey, + #[error("invalid PEM private key")] + InvalidPemPrivateKey, + #[error("encrypted private key requires a passphrase to decrypt")] + EncryptedPrivateKeyRequiresPassphraseToDecrypt, + #[error("invalid PKCS#1 private key")] + InvalidPkcs1PrivateKey, + #[error("invalid SEC1 private key")] + InvalidSec1PrivateKey, + #[error("unsupported PEM label: {0}")] + UnsupportedPemLabel(String), + #[error(transparent)] + RsaPssParamsParse(#[from] RsaPssParamsParseError), + #[error("invalid encrypted PKCS#8 private key")] + InvalidEncryptedPkcs8PrivateKey, + #[error("invalid PKCS#8 private key")] + InvalidPkcs8PrivateKey, + #[error("PKCS#1 private key does not support encryption with passphrase")] + Pkcs1PrivateKeyDoesNotSupportEncryptionWithPassphrase, + #[error("SEC1 private key does not support encryption with passphrase")] + Sec1PrivateKeyDoesNotSupportEncryptionWithPassphrase, + #[error("unsupported ec named curve")] + UnsupportedEcNamedCurve, + #[error("invalid private key")] + InvalidPrivateKey, + #[error("invalid DSA private key")] + InvalidDsaPrivateKey, + #[error("malformed or missing named curve in ec parameters")] + MalformedOrMissingNamedCurveInEcParameters, + #[error("unsupported key type: {0}")] + UnsupportedKeyType(String), + #[error("unsupported key format: {0}")] + UnsupportedKeyFormat(String), + #[error("invalid x25519 private key")] + InvalidX25519PrivateKey, + #[error("x25519 private key is the wrong length")] + X25519PrivateKeyIsWrongLength, + #[error("invalid Ed25519 private key")] + InvalidEd25519PrivateKey, + #[error("missing dh parameters")] + MissingDhParameters, + #[error("unsupported private key oid")] + UnsupportedPrivateKeyOid, +} + +#[derive(Debug, thiserror::Error)] +pub enum AsymmetricPublicKeyError { + #[error("invalid PEM private key: not valid utf8 starting at byte {0}")] + InvalidPemPrivateKeyInvalidUtf8(usize), + #[error("invalid PEM public key")] + InvalidPemPublicKey, + #[error("invalid PKCS#1 public key")] + InvalidPkcs1PublicKey, + #[error(transparent)] + AsymmetricPrivateKey(#[from] AsymmetricPrivateKeyError), + #[error("invalid x509 certificate")] + InvalidX509Certificate, + #[error(transparent)] + X509(#[from] x509_parser::nom::Err), + #[error(transparent)] + X509PublicKey(#[from] X509PublicKeyError), + #[error("unsupported PEM label: {0}")] + UnsupportedPemLabel(String), + 
#[error("invalid SPKI public key")] + InvalidSpkiPublicKey, + #[error("unsupported key type: {0}")] + UnsupportedKeyType(String), + #[error("unsupported key format: {0}")] + UnsupportedKeyFormat(String), + #[error(transparent)] + Spki(#[from] spki::Error), + #[error(transparent)] + Pkcs1(#[from] rsa::pkcs1::Error), + #[error(transparent)] + RsaPssParamsParse(#[from] RsaPssParamsParseError), + #[error("malformed DSS public key")] + MalformedDssPublicKey, + #[error("malformed or missing named curve in ec parameters")] + MalformedOrMissingNamedCurveInEcParameters, + #[error("malformed or missing public key in ec spki")] + MalformedOrMissingPublicKeyInEcSpki, + #[error(transparent)] + Ec(#[from] elliptic_curve::Error), + #[error("unsupported ec named curve")] + UnsupportedEcNamedCurve, + #[error("malformed or missing public key in x25519 spki")] + MalformedOrMissingPublicKeyInX25519Spki, + #[error("x25519 public key is too short")] + X25519PublicKeyIsTooShort, + #[error("invalid Ed25519 public key")] + InvalidEd25519PublicKey, + #[error("missing dh parameters")] + MissingDhParameters, + #[error("malformed dh parameters")] + MalformedDhParameters, + #[error("malformed or missing public key in dh spki")] + MalformedOrMissingPublicKeyInDhSpki, + #[error("unsupported private key oid")] + UnsupportedPrivateKeyOid, +} + impl KeyObjectHandle { pub fn new_asymmetric_private_key_from_js( key: &[u8], format: &str, typ: &str, passphrase: Option<&[u8]>, - ) -> Result { + ) -> Result { let document = match format { "pem" => { let pem = std::str::from_utf8(key).map_err(|err| { - type_error(format!( - "invalid PEM private key: not valid utf8 starting at byte {}", - err.valid_up_to() - )) + AsymmetricPrivateKeyError::InvalidPemPrivateKeyInvalidUtf8( + err.valid_up_to(), + ) })?; if let Some(passphrase) = passphrase { - SecretDocument::from_pkcs8_encrypted_pem(pem, passphrase) - .map_err(|_| type_error("invalid encrypted PEM private key"))? + SecretDocument::from_pkcs8_encrypted_pem(pem, passphrase).map_err( + |_| AsymmetricPrivateKeyError::InvalidEncryptedPemPrivateKey, + )? } else { let (label, doc) = SecretDocument::from_pem(pem) - .map_err(|_| type_error("invalid PEM private key"))?; + .map_err(|_| AsymmetricPrivateKeyError::InvalidPemPrivateKey)?; match label { EncryptedPrivateKeyInfo::PEM_LABEL => { - return Err(type_error( - "encrypted private key requires a passphrase to decrypt", - )) + return Err(AsymmetricPrivateKeyError::EncryptedPrivateKeyRequiresPassphraseToDecrypt); } PrivateKeyInfo::PEM_LABEL => doc, rsa::pkcs1::RsaPrivateKey::PEM_LABEL => { - SecretDocument::from_pkcs1_der(doc.as_bytes()) - .map_err(|_| type_error("invalid PKCS#1 private key"))? + SecretDocument::from_pkcs1_der(doc.as_bytes()).map_err(|_| { + AsymmetricPrivateKeyError::InvalidPkcs1PrivateKey + })? } sec1::EcPrivateKey::PEM_LABEL => { SecretDocument::from_sec1_der(doc.as_bytes()) - .map_err(|_| type_error("invalid SEC1 private key"))? + .map_err(|_| AsymmetricPrivateKeyError::InvalidSec1PrivateKey)? } _ => { - return Err(type_error(format!( - "unsupported PEM label: {}", - label - ))) + return Err(AsymmetricPrivateKeyError::UnsupportedPemLabel( + label.to_string(), + )) } } } @@ -413,54 +566,57 @@ impl KeyObjectHandle { "der" => match typ { "pkcs8" => { if let Some(passphrase) = passphrase { - SecretDocument::from_pkcs8_encrypted_der(key, passphrase) - .map_err(|_| type_error("invalid encrypted PKCS#8 private key"))? 
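The private-key import path being rewritten at this point is dense, so here is the shape of just the DER/PKCS#8 step in isolation: an encrypted PKCS#8 (PBES2) document needs the passphrase up front, a plain PKCS#8 document parses directly, and each failure maps to one of the new `AsymmetricPrivateKeyError`-style variants. A sketch assuming the `pkcs8` crate with its encryption feature enabled, using the same `SecretDocument` constructors the diff calls:

```rust
use pkcs8::SecretDocument;

// Hypothetical local error type; the variant names mirror the diff.
#[derive(Debug)]
pub enum Pkcs8ImportError {
  InvalidEncryptedPkcs8PrivateKey,
  InvalidPkcs8PrivateKey,
}

/// Parse a DER-encoded PKCS#8 document, decrypting it when a passphrase
/// is supplied.
fn parse_pkcs8_der(
  key: &[u8],
  passphrase: Option<&[u8]>,
) -> Result<SecretDocument, Pkcs8ImportError> {
  if let Some(passphrase) = passphrase {
    // PBES2-encrypted container: a wrong passphrase surfaces as a parse error.
    SecretDocument::from_pkcs8_encrypted_der(key, passphrase)
      .map_err(|_| Pkcs8ImportError::InvalidEncryptedPkcs8PrivateKey)
  } else {
    SecretDocument::from_pkcs8_der(key)
      .map_err(|_| Pkcs8ImportError::InvalidPkcs8PrivateKey)
  }
}
```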
+ SecretDocument::from_pkcs8_encrypted_der(key, passphrase).map_err( + |_| AsymmetricPrivateKeyError::InvalidEncryptedPkcs8PrivateKey, + )? } else { SecretDocument::from_pkcs8_der(key) - .map_err(|_| type_error("invalid PKCS#8 private key"))? + .map_err(|_| AsymmetricPrivateKeyError::InvalidPkcs8PrivateKey)? } } "pkcs1" => { if passphrase.is_some() { - return Err(type_error( - "PKCS#1 private key does not support encryption with passphrase", - )); + return Err(AsymmetricPrivateKeyError::Pkcs1PrivateKeyDoesNotSupportEncryptionWithPassphrase); } SecretDocument::from_pkcs1_der(key) - .map_err(|_| type_error("invalid PKCS#1 private key"))? + .map_err(|_| AsymmetricPrivateKeyError::InvalidPkcs1PrivateKey)? } "sec1" => { if passphrase.is_some() { - return Err(type_error( - "SEC1 private key does not support encryption with passphrase", - )); + return Err(AsymmetricPrivateKeyError::Sec1PrivateKeyDoesNotSupportEncryptionWithPassphrase); } SecretDocument::from_sec1_der(key) - .map_err(|_| type_error("invalid SEC1 private key"))? + .map_err(|_| AsymmetricPrivateKeyError::InvalidSec1PrivateKey)? + } + _ => { + return Err(AsymmetricPrivateKeyError::UnsupportedKeyType( + typ.to_string(), + )) } - _ => return Err(type_error(format!("unsupported key type: {}", typ))), }, _ => { - return Err(type_error(format!("unsupported key format: {}", format))) + return Err(AsymmetricPrivateKeyError::UnsupportedKeyFormat( + format.to_string(), + )) } }; let pk_info = PrivateKeyInfo::try_from(document.as_bytes()) - .map_err(|_| type_error("invalid private key"))?; + .map_err(|_| AsymmetricPrivateKeyError::InvalidPrivateKey)?; let alg = pk_info.algorithm.oid; let private_key = match alg { RSA_ENCRYPTION_OID => { let private_key = rsa::RsaPrivateKey::from_pkcs1_der(pk_info.private_key) - .map_err(|_| type_error("invalid PKCS#1 private key"))?; + .map_err(|_| AsymmetricPrivateKeyError::InvalidPkcs1PrivateKey)?; AsymmetricPrivateKey::Rsa(private_key) } RSASSA_PSS_OID => { let details = parse_rsa_pss_params(pk_info.algorithm.parameters)?; let private_key = rsa::RsaPrivateKey::from_pkcs1_der(pk_info.private_key) - .map_err(|_| type_error("invalid PKCS#1 private key"))?; + .map_err(|_| AsymmetricPrivateKeyError::InvalidPkcs1PrivateKey)?; AsymmetricPrivateKey::RsaPss(RsaPssPrivateKey { key: private_key, details, @@ -468,40 +624,43 @@ impl KeyObjectHandle { } DSA_OID => { let private_key = dsa::SigningKey::try_from(pk_info) - .map_err(|_| type_error("invalid DSA private key"))?; + .map_err(|_| AsymmetricPrivateKeyError::InvalidDsaPrivateKey)?; AsymmetricPrivateKey::Dsa(private_key) } EC_OID => { let named_curve = pk_info.algorithm.parameters_oid().map_err(|_| { - type_error("malformed or missing named curve in ec parameters") + AsymmetricPrivateKeyError::MalformedOrMissingNamedCurveInEcParameters })?; match named_curve { ID_SECP224R1_OID => { - let secret_key = - p224::SecretKey::from_sec1_der(pk_info.private_key) - .map_err(|_| type_error("invalid SEC1 private key"))?; + let secret_key = p224::SecretKey::from_sec1_der( + pk_info.private_key, + ) + .map_err(|_| AsymmetricPrivateKeyError::InvalidSec1PrivateKey)?; AsymmetricPrivateKey::Ec(EcPrivateKey::P224(secret_key)) } ID_SECP256R1_OID => { - let secret_key = - p256::SecretKey::from_sec1_der(pk_info.private_key) - .map_err(|_| type_error("invalid SEC1 private key"))?; + let secret_key = p256::SecretKey::from_sec1_der( + pk_info.private_key, + ) + .map_err(|_| AsymmetricPrivateKeyError::InvalidSec1PrivateKey)?; AsymmetricPrivateKey::Ec(EcPrivateKey::P256(secret_key)) } 
ID_SECP384R1_OID => { - let secret_key = - p384::SecretKey::from_sec1_der(pk_info.private_key) - .map_err(|_| type_error("invalid SEC1 private key"))?; + let secret_key = p384::SecretKey::from_sec1_der( + pk_info.private_key, + ) + .map_err(|_| AsymmetricPrivateKeyError::InvalidSec1PrivateKey)?; AsymmetricPrivateKey::Ec(EcPrivateKey::P384(secret_key)) } - _ => return Err(type_error("unsupported ec named curve")), + _ => return Err(AsymmetricPrivateKeyError::UnsupportedEcNamedCurve), } } X25519_OID => { let string_ref = OctetStringRef::from_der(pk_info.private_key) - .map_err(|_| type_error("invalid x25519 private key"))?; + .map_err(|_| AsymmetricPrivateKeyError::InvalidX25519PrivateKey)?; if string_ref.as_bytes().len() != 32 { - return Err(type_error("x25519 private key is the wrong length")); + return Err(AsymmetricPrivateKeyError::X25519PrivateKeyIsWrongLength); } let mut bytes = [0; 32]; bytes.copy_from_slice(string_ref.as_bytes()); @@ -509,22 +668,22 @@ impl KeyObjectHandle { } ED25519_OID => { let signing_key = ed25519_dalek::SigningKey::try_from(pk_info) - .map_err(|_| type_error("invalid Ed25519 private key"))?; + .map_err(|_| AsymmetricPrivateKeyError::InvalidEd25519PrivateKey)?; AsymmetricPrivateKey::Ed25519(signing_key) } DH_KEY_AGREEMENT_OID => { let params = pk_info .algorithm .parameters - .ok_or_else(|| type_error("missing dh parameters"))?; + .ok_or(AsymmetricPrivateKeyError::MissingDhParameters)?; let params = pkcs3::DhParameter::from_der(¶ms.to_der().unwrap()) - .map_err(|_| type_error("malformed dh parameters"))?; + .map_err(|_| AsymmetricPrivateKeyError::MissingDhParameters)?; AsymmetricPrivateKey::Dh(DhPrivateKey { key: dh::PrivateKey::from_bytes(pk_info.private_key), params, }) } - _ => return Err(type_error("unsupported private key oid")), + _ => return Err(AsymmetricPrivateKeyError::UnsupportedPrivateKeyOid), }; Ok(KeyObjectHandle::AsymmetricPrivate(private_key)) @@ -532,7 +691,7 @@ impl KeyObjectHandle { pub fn new_x509_public_key( spki: &x509::SubjectPublicKeyInfo, - ) -> Result { + ) -> Result { use x509_parser::der_parser::asn1_rs::oid; use x509_parser::public_key::PublicKey; @@ -565,18 +724,18 @@ impl KeyObjectHandle { let public_key = p384::PublicKey::from_sec1_bytes(data)?; AsymmetricPublicKey::Ec(EcPublicKey::P384(public_key)) } - _ => return Err(type_error("unsupported ec named curve")), + _ => return Err(X509PublicKeyError::UnsupportedEcNamedCurve), } } else { - return Err(type_error("missing ec parameters")); + return Err(X509PublicKeyError::MissingEcParameters); } } PublicKey::DSA(_) => { let verifying_key = dsa::VerifyingKey::from_public_key_der(spki.raw) - .map_err(|_| type_error("malformed DSS public key"))?; + .map_err(|_| X509PublicKeyError::MalformedDssPublicKey)?; AsymmetricPublicKey::Dsa(verifying_key) } - _ => return Err(type_error("unsupported x509 public key type")), + _ => return Err(X509PublicKeyError::UnsupportedX509KeyType), }; Ok(KeyObjectHandle::AsymmetricPublic(key)) @@ -585,7 +744,7 @@ impl KeyObjectHandle { pub fn new_rsa_jwk( jwk: RsaJwkKey, is_public: bool, - ) -> Result { + ) -> Result { use base64::prelude::BASE64_URL_SAFE_NO_PAD; let n = BASE64_URL_SAFE_NO_PAD.decode(jwk.n.as_bytes())?; @@ -604,19 +763,19 @@ impl KeyObjectHandle { let d = BASE64_URL_SAFE_NO_PAD.decode( jwk .d - .ok_or_else(|| type_error("missing RSA private component"))? + .ok_or(RsaJwkError::MissingRsaPrivateComponent)? .as_bytes(), )?; let p = BASE64_URL_SAFE_NO_PAD.decode( jwk .p - .ok_or_else(|| type_error("missing RSA private component"))? 
+ .ok_or(RsaJwkError::MissingRsaPrivateComponent)? .as_bytes(), )?; let q = BASE64_URL_SAFE_NO_PAD.decode( jwk .q - .ok_or_else(|| type_error("missing RSA private component"))? + .ok_or(RsaJwkError::MissingRsaPrivateComponent)? .as_bytes(), )?; @@ -640,7 +799,7 @@ impl KeyObjectHandle { pub fn new_ec_jwk( jwk: &JwkEcKey, is_public: bool, - ) -> Result { + ) -> Result { // https://datatracker.ietf.org/doc/html/rfc7518#section-6.2.1.1 let handle = match jwk.crv() { "P-256" if is_public => { @@ -660,7 +819,7 @@ impl KeyObjectHandle { EcPrivateKey::P384(p384::SecretKey::from_jwk(jwk)?), )), _ => { - return Err(type_error(format!("unsupported curve: {}", jwk.crv()))); + return Err(EcJwkError::UnsupportedCurve(jwk.crv().to_string())); } }; @@ -671,12 +830,11 @@ impl KeyObjectHandle { curve: &str, data: &[u8], is_public: bool, - ) -> Result { + ) -> Result { match curve { "Ed25519" => { - let data = data - .try_into() - .map_err(|_| type_error("invalid Ed25519 key"))?; + let data = + data.try_into().map_err(|_| EdRawError::InvalidEd25519Key)?; if !is_public { Ok(KeyObjectHandle::AsymmetricPrivate( AsymmetricPrivateKey::Ed25519( @@ -692,9 +850,8 @@ impl KeyObjectHandle { } } "X25519" => { - let data: [u8; 32] = data - .try_into() - .map_err(|_| type_error("invalid x25519 key"))?; + let data: [u8; 32] = + data.try_into().map_err(|_| EdRawError::InvalidEd25519Key)?; if !is_public { Ok(KeyObjectHandle::AsymmetricPrivate( AsymmetricPrivateKey::X25519(x25519_dalek::StaticSecret::from( @@ -707,7 +864,7 @@ impl KeyObjectHandle { )) } } - _ => Err(type_error("unsupported curve")), + _ => Err(EdRawError::UnsupportedCurve), } } @@ -716,24 +873,23 @@ impl KeyObjectHandle { format: &str, typ: &str, passphrase: Option<&[u8]>, - ) -> Result { + ) -> Result { let document = match format { "pem" => { let pem = std::str::from_utf8(key).map_err(|err| { - type_error(format!( - "invalid PEM public key: not valid utf8 starting at byte {}", - err.valid_up_to() - )) + AsymmetricPublicKeyError::InvalidPemPrivateKeyInvalidUtf8( + err.valid_up_to(), + ) })?; let (label, document) = Document::from_pem(pem) - .map_err(|_| type_error("invalid PEM public key"))?; + .map_err(|_| AsymmetricPublicKeyError::InvalidPemPublicKey)?; match label { SubjectPublicKeyInfoRef::PEM_LABEL => document, rsa::pkcs1::RsaPublicKey::PEM_LABEL => { Document::from_pkcs1_der(document.as_bytes()) - .map_err(|_| type_error("invalid PKCS#1 public key"))? + .map_err(|_| AsymmetricPublicKeyError::InvalidPkcs1PublicKey)? 
} EncryptedPrivateKeyInfo::PEM_LABEL | PrivateKeyInfo::PEM_LABEL @@ -754,27 +910,36 @@ impl KeyObjectHandle { } "CERTIFICATE" => { let (_, pem) = x509_parser::pem::parse_x509_pem(pem.as_bytes()) - .map_err(|_| type_error("invalid x509 certificate"))?; + .map_err(|_| AsymmetricPublicKeyError::InvalidX509Certificate)?; let cert = pem.parse_x509()?; let public_key = cert.tbs_certificate.subject_pki; - return KeyObjectHandle::new_x509_public_key(&public_key); + return KeyObjectHandle::new_x509_public_key(&public_key) + .map_err(Into::into); } _ => { - return Err(type_error(format!("unsupported PEM label: {}", label))) + return Err(AsymmetricPublicKeyError::UnsupportedPemLabel( + label.to_string(), + )) } } } "der" => match typ { "pkcs1" => Document::from_pkcs1_der(key) - .map_err(|_| type_error("invalid PKCS#1 public key"))?, + .map_err(|_| AsymmetricPublicKeyError::InvalidPkcs1PublicKey)?, "spki" => Document::from_public_key_der(key) - .map_err(|_| type_error("invalid SPKI public key"))?, - _ => return Err(type_error(format!("unsupported key type: {}", typ))), + .map_err(|_| AsymmetricPublicKeyError::InvalidSpkiPublicKey)?, + _ => { + return Err(AsymmetricPublicKeyError::UnsupportedKeyType( + typ.to_string(), + )) + } }, _ => { - return Err(type_error(format!("unsupported key format: {}", format))) + return Err(AsymmetricPublicKeyError::UnsupportedKeyType( + format.to_string(), + )) } }; @@ -799,16 +964,16 @@ impl KeyObjectHandle { } DSA_OID => { let verifying_key = dsa::VerifyingKey::try_from(spki) - .map_err(|_| type_error("malformed DSS public key"))?; + .map_err(|_| AsymmetricPublicKeyError::MalformedDssPublicKey)?; AsymmetricPublicKey::Dsa(verifying_key) } EC_OID => { let named_curve = spki.algorithm.parameters_oid().map_err(|_| { - type_error("malformed or missing named curve in ec parameters") - })?; - let data = spki.subject_public_key.as_bytes().ok_or_else(|| { - type_error("malformed or missing public key in ec spki") + AsymmetricPublicKeyError::MalformedOrMissingNamedCurveInEcParameters })?; + let data = spki.subject_public_key.as_bytes().ok_or( + AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInEcSpki, + )?; match named_curve { ID_SECP224R1_OID => { @@ -823,54 +988,68 @@ impl KeyObjectHandle { let public_key = p384::PublicKey::from_sec1_bytes(data)?; AsymmetricPublicKey::Ec(EcPublicKey::P384(public_key)) } - _ => return Err(type_error("unsupported ec named curve")), + _ => return Err(AsymmetricPublicKeyError::UnsupportedEcNamedCurve), } } X25519_OID => { let mut bytes = [0; 32]; - let data = spki.subject_public_key.as_bytes().ok_or_else(|| { - type_error("malformed or missing public key in x25519 spki") - })?; + let data = spki.subject_public_key.as_bytes().ok_or( + AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInX25519Spki, + )?; if data.len() < 32 { - return Err(type_error("x25519 public key is too short")); + return Err(AsymmetricPublicKeyError::X25519PublicKeyIsTooShort); } bytes.copy_from_slice(&data[0..32]); AsymmetricPublicKey::X25519(x25519_dalek::PublicKey::from(bytes)) } ED25519_OID => { let verifying_key = ed25519_dalek::VerifyingKey::try_from(spki) - .map_err(|_| type_error("invalid Ed25519 private key"))?; + .map_err(|_| AsymmetricPublicKeyError::InvalidEd25519PublicKey)?; AsymmetricPublicKey::Ed25519(verifying_key) } DH_KEY_AGREEMENT_OID => { let params = spki .algorithm .parameters - .ok_or_else(|| type_error("missing dh parameters"))?; + .ok_or(AsymmetricPublicKeyError::MissingDhParameters)?; let params = 
pkcs3::DhParameter::from_der(¶ms.to_der().unwrap()) - .map_err(|_| type_error("malformed dh parameters"))?; + .map_err(|_| AsymmetricPublicKeyError::MalformedDhParameters)?; let Some(subject_public_key) = spki.subject_public_key.as_bytes() else { - return Err(type_error("malformed or missing public key in dh spki")); + return Err( + AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInDhSpki, + ); }; AsymmetricPublicKey::Dh(DhPublicKey { key: dh::PublicKey::from_bytes(subject_public_key), params, }) } - _ => return Err(type_error("unsupported public key oid")), + _ => return Err(AsymmetricPublicKeyError::UnsupportedPrivateKeyOid), }; Ok(KeyObjectHandle::AsymmetricPublic(public_key)) } } +#[derive(Debug, thiserror::Error)] +pub enum RsaPssParamsParseError { + #[error("malformed pss private key parameters")] + MalformedPssPrivateKeyParameters, + #[error("unsupported pss hash algorithm")] + UnsupportedPssHashAlgorithm, + #[error("unsupported pss mask gen algorithm")] + UnsupportedPssMaskGenAlgorithm, + #[error("malformed or missing pss mask gen algorithm parameters")] + MalformedOrMissingPssMaskGenAlgorithm, +} + fn parse_rsa_pss_params( parameters: Option>, -) -> Result, deno_core::anyhow::Error> { +) -> Result, RsaPssParamsParseError> { let details = if let Some(parameters) = parameters { let params = RsaPssParameters::try_from(parameters) - .map_err(|_| type_error("malformed pss private key parameters"))?; + .map_err(|_| RsaPssParamsParseError::MalformedPssPrivateKeyParameters)?; let hash_algorithm = match params.hash_algorithm.map(|k| k.oid) { Some(ID_SHA1_OID) => RsaPssHashAlgorithm::Sha1, @@ -881,16 +1060,16 @@ fn parse_rsa_pss_params( Some(ID_SHA512_224_OID) => RsaPssHashAlgorithm::Sha512_224, Some(ID_SHA512_256_OID) => RsaPssHashAlgorithm::Sha512_256, None => RsaPssHashAlgorithm::Sha1, - _ => return Err(type_error("unsupported pss hash algorithm")), + _ => return Err(RsaPssParamsParseError::UnsupportedPssHashAlgorithm), }; let mf1_hash_algorithm = match params.mask_gen_algorithm { Some(alg) => { if alg.oid != ID_MFG1 { - return Err(type_error("unsupported pss mask gen algorithm")); + return Err(RsaPssParamsParseError::UnsupportedPssMaskGenAlgorithm); } let params = alg.parameters_oid().map_err(|_| { - type_error("malformed or missing pss mask gen algorithm parameters") + RsaPssParamsParseError::MalformedOrMissingPssMaskGenAlgorithm })?; match params { ID_SHA1_OID => RsaPssHashAlgorithm::Sha1, @@ -900,7 +1079,9 @@ fn parse_rsa_pss_params( ID_SHA512_OID => RsaPssHashAlgorithm::Sha512, ID_SHA512_224_OID => RsaPssHashAlgorithm::Sha512_224, ID_SHA512_256_OID => RsaPssHashAlgorithm::Sha512_256, - _ => return Err(type_error("unsupported pss mask gen algorithm")), + _ => { + return Err(RsaPssParamsParseError::UnsupportedPssMaskGenAlgorithm) + } } } None => hash_algorithm, @@ -921,14 +1102,49 @@ fn parse_rsa_pss_params( Ok(details) } -use base64::prelude::BASE64_URL_SAFE_NO_PAD; - fn bytes_to_b64(bytes: &[u8]) -> String { + use base64::prelude::BASE64_URL_SAFE_NO_PAD; BASE64_URL_SAFE_NO_PAD.encode(bytes) } +#[derive(Debug, thiserror::Error)] +pub enum AsymmetricPublicKeyJwkError { + #[error("key is not an asymmetric public key")] + KeyIsNotAsymmetricPublicKey, + #[error("Unsupported JWK EC curve: P224")] + UnsupportedJwkEcCurveP224, + #[error("jwk export not implemented for this key type")] + JwkExportNotImplementedForKeyType, +} + +#[derive(Debug, thiserror::Error)] +pub enum AsymmetricPublicKeyDerError { + #[error("key is not an asymmetric public key")] + KeyIsNotAsymmetricPublicKey, + 
#[error("invalid RSA public key")] + InvalidRsaPublicKey, + #[error("exporting non-RSA public key as PKCS#1 is not supported")] + ExportingNonRsaPublicKeyAsPkcs1Unsupported, + #[error("invalid EC public key")] + InvalidEcPublicKey, + #[error("exporting RSA-PSS public key as SPKI is not supported yet")] + ExportingNonRsaPssPublicKeyAsSpkiUnsupported, + #[error("invalid DSA public key")] + InvalidDsaPublicKey, + #[error("invalid X25519 public key")] + InvalidX25519PublicKey, + #[error("invalid Ed25519 public key")] + InvalidEd25519PublicKey, + #[error("invalid DH public key")] + InvalidDhPublicKey, + #[error("unsupported key type: {0}")] + UnsupportedKeyType(String), +} + impl AsymmetricPublicKey { - fn export_jwk(&self) -> Result<deno_core::serde_json::Value, AnyError> { + fn export_jwk( + &self, + ) -> Result<deno_core::serde_json::Value, AsymmetricPublicKeyJwkError> { match self { AsymmetricPublicKey::Ec(key) => { let jwk = key.to_jwk()?; @@ -974,40 +1190,39 @@ impl AsymmetricPublicKey { }); Ok(jwk) } - _ => Err(type_error("jwk export not implemented for this key type")), + _ => Err(AsymmetricPublicKeyJwkError::JwkExportNotImplementedForKeyType), } } - fn export_der(&self, typ: &str) -> Result<Box<[u8]>, AnyError> { + fn export_der( + &self, + typ: &str, + ) -> Result<Box<[u8]>, AsymmetricPublicKeyDerError> { match typ { "pkcs1" => match self { AsymmetricPublicKey::Rsa(key) => { let der = key .to_pkcs1_der() - .map_err(|_| type_error("invalid RSA public key"))? + .map_err(|_| AsymmetricPublicKeyDerError::InvalidRsaPublicKey)? .into_vec() .into_boxed_slice(); Ok(der) } - _ => Err(type_error( - "exporting non-RSA public key as PKCS#1 is not supported", - )), + _ => Err(AsymmetricPublicKeyDerError::ExportingNonRsaPublicKeyAsPkcs1Unsupported), }, "spki" => { let der = match self { AsymmetricPublicKey::Rsa(key) => key .to_public_key_der() - .map_err(|_| type_error("invalid RSA public key"))? + .map_err(|_| AsymmetricPublicKeyDerError::InvalidRsaPublicKey)? .into_vec() .into_boxed_slice(), AsymmetricPublicKey::RsaPss(_key) => { - return Err(generic_error( - "exporting RSA-PSS public key as SPKI is not supported yet", - )) + return Err(AsymmetricPublicKeyDerError::ExportingNonRsaPssPublicKeyAsSpkiUnsupported) } AsymmetricPublicKey::Dsa(key) => key .to_public_key_der() - .map_err(|_| type_error("invalid DSA public key"))? + .map_err(|_| AsymmetricPublicKeyDerError::InvalidDsaPublicKey)? .into_vec() .into_boxed_slice(), AsymmetricPublicKey::Ec(key) => { @@ -1023,12 +1238,12 @@ impl AsymmetricPublicKey { parameters: Some(asn1::AnyRef::from(&oid)), }, subject_public_key: BitStringRef::from_bytes(&sec1) - .map_err(|_| type_error("invalid EC public key"))?, + .map_err(|_| AsymmetricPublicKeyDerError::InvalidEcPublicKey)?, }; spki .to_der() - .map_err(|_| type_error("invalid EC public key"))? + .map_err(|_| AsymmetricPublicKeyDerError::InvalidEcPublicKey)? .into_boxed_slice() } AsymmetricPublicKey::X25519(key) => { @@ -1038,12 +1253,12 @@ impl AsymmetricPublicKey { parameters: None, }, subject_public_key: BitStringRef::from_bytes(key.as_bytes()) - .map_err(|_| type_error("invalid X25519 public key"))?, + .map_err(|_| AsymmetricPublicKeyDerError::InvalidX25519PublicKey)?, }; spki .to_der() - .map_err(|_| type_error("invalid X25519 public key"))? + .map_err(|_| AsymmetricPublicKeyDerError::InvalidX25519PublicKey)?
.into_boxed_slice() } AsymmetricPublicKey::Ed25519(key) => { @@ -1053,12 +1268,12 @@ impl AsymmetricPublicKey { parameters: None, }, subject_public_key: BitStringRef::from_bytes(key.as_bytes()) - .map_err(|_| type_error("invalid Ed25519 public key"))?, + .map_err(|_| AsymmetricPublicKeyDerError::InvalidEd25519PublicKey)?, }; spki .to_der() - .map_err(|_| type_error("invalid Ed25519 public key"))? + .map_err(|_| AsymmetricPublicKeyDerError::InvalidEd25519PublicKey)? .into_boxed_slice() } AsymmetricPublicKey::Dh(key) => { @@ -1071,43 +1286,67 @@ impl AsymmetricPublicKey { }, subject_public_key: BitStringRef::from_bytes(&public_key_bytes) .map_err(|_| { - type_error("invalid DH public key") + AsymmetricPublicKeyDerError::InvalidDhPublicKey })?, }; spki .to_der() - .map_err(|_| type_error("invalid DH public key"))? + .map_err(|_| AsymmetricPublicKeyDerError::InvalidDhPublicKey)? .into_boxed_slice() } }; Ok(der) } - _ => Err(type_error(format!("unsupported key type: {}", typ))), + _ => Err(AsymmetricPublicKeyDerError::UnsupportedKeyType(typ.to_string())), } } } +#[derive(Debug, thiserror::Error)] +pub enum AsymmetricPrivateKeyDerError { + #[error("key is not an asymmetric private key")] + KeyIsNotAsymmetricPrivateKey, + #[error("invalid RSA private key")] + InvalidRsaPrivateKey, + #[error("exporting non-RSA private key as PKCS#1 is not supported")] + ExportingNonRsaPrivateKeyAsPkcs1Unsupported, + #[error("invalid EC private key")] + InvalidEcPrivateKey, + #[error("exporting non-EC private key as SEC1 is not supported")] + ExportingNonEcPrivateKeyAsSec1Unsupported, + #[error("exporting RSA-PSS private key as PKCS#8 is not supported yet")] + ExportingNonRsaPssPrivateKeyAsPkcs8Unsupported, + #[error("invalid DSA private key")] + InvalidDsaPrivateKey, + #[error("invalid X25519 private key")] + InvalidX25519PrivateKey, + #[error("invalid Ed25519 private key")] + InvalidEd25519PrivateKey, + #[error("invalid DH private key")] + InvalidDhPrivateKey, + #[error("unsupported key type: {0}")] + UnsupportedKeyType(String), +} + impl AsymmetricPrivateKey { fn export_der( &self, typ: &str, // cipher: Option<&str>, // passphrase: Option<&str>, - ) -> Result<Box<[u8]>, AnyError> { + ) -> Result<Box<[u8]>, AsymmetricPrivateKeyDerError> { match typ { "pkcs1" => match self { AsymmetricPrivateKey::Rsa(key) => { let der = key .to_pkcs1_der() - .map_err(|_| type_error("invalid RSA private key"))? + .map_err(|_| AsymmetricPrivateKeyDerError::InvalidRsaPrivateKey)?
.to_bytes() .to_vec() .into_boxed_slice(); Ok(der) } - _ => Err(type_error( - "exporting non-RSA private key as PKCS#1 is not supported", - )), + _ => Err(AsymmetricPrivateKeyDerError::ExportingNonRsaPrivateKeyAsPkcs1Unsupported), }, "sec1" => match self { AsymmetricPrivateKey::Ec(key) => { @@ -1116,30 +1355,26 @@ impl AsymmetricPrivateKey { EcPrivateKey::P256(key) => key.to_sec1_der(), EcPrivateKey::P384(key) => key.to_sec1_der(), } - .map_err(|_| type_error("invalid EC private key"))?; + .map_err(|_| AsymmetricPrivateKeyDerError::InvalidEcPrivateKey)?; Ok(sec1.to_vec().into_boxed_slice()) } - _ => Err(type_error( - "exporting non-EC private key as SEC1 is not supported", - )), + _ => Err(AsymmetricPrivateKeyDerError::ExportingNonEcPrivateKeyAsSec1Unsupported), }, "pkcs8" => { let der = match self { AsymmetricPrivateKey::Rsa(key) => { let document = key .to_pkcs8_der() - .map_err(|_| type_error("invalid RSA private key"))?; + .map_err(|_| AsymmetricPrivateKeyDerError::InvalidRsaPrivateKey)?; document.to_bytes().to_vec().into_boxed_slice() } AsymmetricPrivateKey::RsaPss(_key) => { - return Err(generic_error( - "exporting RSA-PSS private key as PKCS#8 is not supported yet", - )) + return Err(AsymmetricPrivateKeyDerError::ExportingNonRsaPssPrivateKeyAsPkcs8Unsupported) } AsymmetricPrivateKey::Dsa(key) => { let document = key .to_pkcs8_der() - .map_err(|_| type_error("invalid DSA private key"))?; + .map_err(|_| AsymmetricPrivateKeyDerError::InvalidDsaPrivateKey)?; document.to_bytes().to_vec().into_boxed_slice() } AsymmetricPrivateKey::Ec(key) => { @@ -1148,14 +1383,14 @@ impl AsymmetricPrivateKey { EcPrivateKey::P256(key) => key.to_pkcs8_der(), EcPrivateKey::P384(key) => key.to_pkcs8_der(), } - .map_err(|_| type_error("invalid EC private key"))?; + .map_err(|_| AsymmetricPrivateKeyDerError::InvalidEcPrivateKey)?; document.to_bytes().to_vec().into_boxed_slice() } AsymmetricPrivateKey::X25519(key) => { let private_key = OctetStringRef::new(key.as_bytes()) - .map_err(|_| type_error("invalid X25519 private key"))? + .map_err(|_| AsymmetricPrivateKeyDerError::InvalidX25519PrivateKey)? .to_der() - .map_err(|_| type_error("invalid X25519 private key"))?; + .map_err(|_| AsymmetricPrivateKeyDerError::InvalidX25519PrivateKey)?; let private_key = PrivateKeyInfo { algorithm: rsa::pkcs8::AlgorithmIdentifierRef { @@ -1168,15 +1403,15 @@ impl AsymmetricPrivateKey { let der = private_key .to_der() - .map_err(|_| type_error("invalid X25519 private key"))? + .map_err(|_| AsymmetricPrivateKeyDerError::InvalidX25519PrivateKey)? .into_boxed_slice(); return Ok(der); } AsymmetricPrivateKey::Ed25519(key) => { let private_key = OctetStringRef::new(key.as_bytes()) - .map_err(|_| type_error("invalid Ed25519 private key"))? + .map_err(|_| AsymmetricPrivateKeyDerError::InvalidEd25519PrivateKey)? .to_der() - .map_err(|_| type_error("invalid Ed25519 private key"))?; + .map_err(|_| AsymmetricPrivateKeyDerError::InvalidEd25519PrivateKey)?; let private_key = PrivateKeyInfo { algorithm: rsa::pkcs8::AlgorithmIdentifierRef { @@ -1189,7 +1424,7 @@ impl AsymmetricPrivateKey { private_key .to_der() - .map_err(|_| type_error("invalid ED25519 private key"))? + .map_err(|_| AsymmetricPrivateKeyDerError::InvalidEd25519PrivateKey)? .into_boxed_slice() } AsymmetricPrivateKey::Dh(key) => { @@ -1206,14 +1441,14 @@ impl AsymmetricPrivateKey { private_key .to_der() - .map_err(|_| type_error("invalid DH private key"))? + .map_err(|_| AsymmetricPrivateKeyDerError::InvalidDhPrivateKey)? 
.into_boxed_slice() } }; Ok(der) } - _ => Err(type_error(format!("unsupported key type: {}", typ))), + _ => Err(AsymmetricPrivateKeyDerError::UnsupportedKeyType(typ.to_string())), } } } @@ -1225,7 +1460,7 @@ pub fn op_node_create_private_key( #[string] format: &str, #[string] typ: &str, #[buffer] passphrase: Option<&[u8]>, -) -> Result<KeyObjectHandle, AnyError> { +) -> Result<KeyObjectHandle, AsymmetricPrivateKeyError> { KeyObjectHandle::new_asymmetric_private_key_from_js( key, format, typ, passphrase, ) @@ -1237,7 +1472,7 @@ pub fn op_node_create_ed_raw( #[string] curve: &str, #[buffer] key: &[u8], is_public: bool, -) -> Result<KeyObjectHandle, AnyError> { +) -> Result<KeyObjectHandle, EdRawError> { KeyObjectHandle::new_ed_raw(curve, key, is_public) } @@ -1255,16 +1490,16 @@ pub struct RsaJwkKey { pub fn op_node_create_rsa_jwk( #[serde] jwk: RsaJwkKey, is_public: bool, -) -> Result<KeyObjectHandle, AnyError> { +) -> Result<KeyObjectHandle, RsaJwkError> { KeyObjectHandle::new_rsa_jwk(jwk, is_public) } #[op2] #[cppgc] pub fn op_node_create_ec_jwk( - #[serde] jwk: elliptic_curve::JwkEcKey, + #[serde] jwk: JwkEcKey, is_public: bool, -) -> Result<KeyObjectHandle, AnyError> { +) -> Result<KeyObjectHandle, EcJwkError> { KeyObjectHandle::new_ec_jwk(&jwk, is_public) } @@ -1275,7 +1510,7 @@ pub fn op_node_create_public_key( #[string] format: &str, #[string] typ: &str, #[buffer] passphrase: Option<&[u8]>, -) -> Result<KeyObjectHandle, AnyError> { +) -> Result<KeyObjectHandle, AsymmetricPublicKeyError> { KeyObjectHandle::new_asymmetric_public_key_from_js( key, format, typ, passphrase, ) @@ -1293,7 +1528,7 @@ pub fn op_node_create_secret_key( #[string] pub fn op_node_get_asymmetric_key_type( #[cppgc] handle: &KeyObjectHandle, -) -> Result<&'static str, AnyError> { +) -> Result<&'static str, deno_core::error::AnyError> { match handle { KeyObjectHandle::AsymmetricPrivate(AsymmetricPrivateKey::Rsa(_)) | KeyObjectHandle::AsymmetricPublic(AsymmetricPublicKey::Rsa(_)) => { @@ -1364,7 +1599,7 @@ pub enum AsymmetricKeyDetails { #[serde] pub fn op_node_get_asymmetric_key_details( #[cppgc] handle: &KeyObjectHandle, -) -> Result<AsymmetricKeyDetails, AnyError> { +) -> Result<AsymmetricKeyDetails, deno_core::error::AnyError> { match handle { KeyObjectHandle::AsymmetricPrivate(private_key) => match private_key { AsymmetricPrivateKey::Rsa(key) => { @@ -1482,12 +1717,10 @@ pub fn op_node_get_asymmetric_key_details( #[smi] pub fn op_node_get_symmetric_key_size( #[cppgc] handle: &KeyObjectHandle, -) -> Result<usize, AnyError> { +) -> Result<usize, deno_core::error::AnyError> { match handle { - KeyObjectHandle::AsymmetricPrivate(_) => { - Err(type_error("asymmetric key is not a symmetric key")) - } - KeyObjectHandle::AsymmetricPublic(_) => { + KeyObjectHandle::AsymmetricPrivate(_) + | KeyObjectHandle::AsymmetricPublic(_) => { Err(type_error("asymmetric key is not a symmetric key")) } KeyObjectHandle::Secret(key) => Ok(key.len() * 8), @@ -1592,13 +1825,17 @@ pub async fn op_node_generate_rsa_key_async( .unwrap() } +#[derive(Debug, thiserror::Error)] +#[error("digest not allowed for RSA-PSS keys{}", .0.as_ref().map(|digest| format!(": {digest}")).unwrap_or_default())] +pub struct GenerateRsaPssError(Option<String>); + fn generate_rsa_pss( modulus_length: usize, public_exponent: usize, hash_algorithm: Option<&str>, mf1_hash_algorithm: Option<&str>, salt_length: Option<u32>, -) -> Result<KeyObjectHandlePair, AnyError> { +) -> Result<KeyObjectHandlePair, GenerateRsaPssError> { let key = RsaPrivateKey::new_with_exp( &mut thread_rng(), modulus_length, @@ -1617,25 +1854,19 @@ fn generate_rsa_pss( let hash_algorithm = match_fixed_digest_with_oid!( hash_algorithm, fn (algorithm: Option<RsaPssHashAlgorithm>) { - algorithm.ok_or_else(|| type_error("digest not allowed for RSA-PSS keys: {}"))? + algorithm.ok_or(GenerateRsaPssError(None))?
}, _ => { - return Err(type_error(format!( - "digest not allowed for RSA-PSS keys: {}", - hash_algorithm - ))) + return Err(GenerateRsaPssError(Some(hash_algorithm.to_string()))) } ); let mf1_hash_algorithm = match_fixed_digest_with_oid!( mf1_hash_algorithm, fn (algorithm: Option<RsaPssHashAlgorithm>) { - algorithm.ok_or_else(|| type_error("digest not allowed for RSA-PSS keys: {}"))? + algorithm.ok_or(GenerateRsaPssError(None))? }, _ => { - return Err(type_error(format!( - "digest not allowed for RSA-PSS keys: {}", - mf1_hash_algorithm - ))) + return Err(GenerateRsaPssError(Some(mf1_hash_algorithm.to_string()))) } ); let salt_length = @@ -1663,7 +1894,7 @@ pub fn op_node_generate_rsa_pss_key( #[string] hash_algorithm: Option<String>, // todo: Option<&str> not supproted in ops yet #[string] mf1_hash_algorithm: Option<String>, // todo: Option<&str> not supproted in ops yet #[smi] salt_length: Option<u32>, -) -> Result<KeyObjectHandlePair, AnyError> { +) -> Result<KeyObjectHandlePair, GenerateRsaPssError> { generate_rsa_pss( modulus_length, public_exponent, @@ -1681,7 +1912,7 @@ pub async fn op_node_generate_rsa_pss_key_async( #[string] hash_algorithm: Option<String>, // todo: Option<&str> not supproted in ops yet #[string] mf1_hash_algorithm: Option<String>, // todo: Option<&str> not supproted in ops yet #[smi] salt_length: Option<u32>, -) -> Result<KeyObjectHandlePair, AnyError> { +) -> Result<KeyObjectHandlePair, GenerateRsaPssError> { spawn_blocking(move || { generate_rsa_pss( modulus_length, @@ -1698,7 +1929,7 @@ pub async fn op_node_generate_rsa_pss_key_async( fn dsa_generate( modulus_length: usize, divisor_length: usize, -) -> Result<KeyObjectHandlePair, AnyError> { +) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> { let mut rng = rand::thread_rng(); use dsa::Components; use dsa::KeySize; @@ -1729,7 +1960,7 @@ fn dsa_generate( pub fn op_node_generate_dsa_key( #[smi] modulus_length: usize, #[smi] divisor_length: usize, -) -> Result<KeyObjectHandlePair, AnyError> { +) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> { dsa_generate(modulus_length, divisor_length) } @@ -1738,13 +1969,15 @@ pub fn op_node_generate_dsa_key( pub async fn op_node_generate_dsa_key_async( #[smi] modulus_length: usize, #[smi] divisor_length: usize, -) -> Result<KeyObjectHandlePair, AnyError> { +) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> { spawn_blocking(move || dsa_generate(modulus_length, divisor_length)) .await .unwrap() } -fn ec_generate(named_curve: &str) -> Result<KeyObjectHandlePair, AnyError> { +fn ec_generate( + named_curve: &str, +) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> { let mut rng = rand::thread_rng(); // TODO(@littledivy): Support public key point encoding. // Default is uncompressed.
@@ -1776,7 +2009,7 @@ fn ec_generate(named_curve: &str) -> Result<KeyObjectHandlePair, AnyError> { #[cppgc] pub fn op_node_generate_ec_key( #[string] named_curve: &str, -) -> Result<KeyObjectHandlePair, AnyError> { +) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> { ec_generate(named_curve) } @@ -1784,7 +2017,7 @@ pub fn op_node_generate_ec_key( #[cppgc] pub async fn op_node_generate_ec_key_async( #[string] named_curve: String, -) -> Result<KeyObjectHandlePair, AnyError> { +) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> { spawn_blocking(move || ec_generate(&named_curve)) .await .unwrap() @@ -1840,7 +2073,7 @@ fn u32_slice_to_u8_slice(slice: &[u32]) -> &[u8] { fn dh_group_generate( group_name: &str, -) -> Result<KeyObjectHandlePair, AnyError> { +) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> { let (dh, prime, generator) = match group_name { "modp5" => ( dh::DiffieHellman::group::(), prime, @@ -1895,7 +2128,7 @@ fn dh_group_generate( #[cppgc] pub fn op_node_generate_dh_group_key( #[string] group_name: &str, -) -> Result<KeyObjectHandlePair, AnyError> { +) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> { dh_group_generate(group_name) } @@ -1903,7 +2136,7 @@ pub fn op_node_generate_dh_group_key( #[cppgc] pub async fn op_node_generate_dh_group_key_async( #[string] group_name: String, -) -> Result<KeyObjectHandlePair, AnyError> { +) -> Result<KeyObjectHandlePair, deno_core::error::AnyError> { spawn_blocking(move || dh_group_generate(&group_name)) .await .unwrap() @@ -1913,7 +2146,7 @@ fn dh_generate( prime: Option<&[u8]>, prime_len: usize, generator: usize, -) -> Result<KeyObjectHandlePair, AnyError> { +) -> KeyObjectHandlePair { let prime = prime .map(|p| p.into()) .unwrap_or_else(|| Prime::generate(prime_len)); @@ -1923,7 +2156,7 @@ fn dh_generate( base: asn1::Int::new(generator.to_be_bytes().as_slice()).unwrap(), private_value_length: None, }; - Ok(KeyObjectHandlePair::new( + KeyObjectHandlePair::new( AsymmetricPrivateKey::Dh(DhPrivateKey { key: dh.private_key, params: params.clone(), @@ -1932,7 +2165,7 @@ fn dh_generate( key: dh.public_key, params, }), - )) + ) } #[op2] @@ -1941,7 +2174,7 @@ pub fn op_node_generate_dh_key( #[buffer] prime: Option<&[u8]>, #[smi] prime_len: usize, #[smi] generator: usize, -) -> Result<KeyObjectHandlePair, AnyError> { +) -> KeyObjectHandlePair { dh_generate(prime, prime_len, generator) } @@ -1951,7 +2184,7 @@ pub async fn op_node_generate_dh_key_async( #[buffer(copy)] prime: Option<JsBuffer>, #[smi] prime_len: usize, #[smi] generator: usize, -) -> Result<KeyObjectHandlePair, AnyError> { +) -> KeyObjectHandlePair { spawn_blocking(move || dh_generate(prime.as_deref(), prime_len, generator)) .await .unwrap() @@ -1963,21 +2196,21 @@ pub fn op_node_dh_keys_generate_and_export( #[buffer] prime: Option<&[u8]>, #[smi] prime_len: usize, #[smi] generator: usize, -) -> Result<(ToJsBuffer, ToJsBuffer), AnyError> { +) -> (ToJsBuffer, ToJsBuffer) { let prime = prime .map(|p| p.into()) .unwrap_or_else(|| Prime::generate(prime_len)); let dh = dh::DiffieHellman::new(prime, generator); let private_key = dh.private_key.into_vec().into_boxed_slice(); let public_key = dh.public_key.into_vec().into_boxed_slice(); - Ok((private_key.into(), public_key.into())) + (private_key.into(), public_key.into()) } #[op2] #[buffer] pub fn op_node_export_secret_key( #[cppgc] handle: &KeyObjectHandle, -) -> Result<Vec<u8>, AnyError> { +) -> Result<Vec<u8>, deno_core::error::AnyError> { let key = handle .as_secret_key() .ok_or_else(|| type_error("key is not a secret key"))?; @@ -1988,7 +2221,7 @@ pub fn op_node_export_secret_key( #[string] pub fn op_node_export_secret_key_b64url( #[cppgc] handle: &KeyObjectHandle, -) -> Result<String, AnyError> { +) -> Result<String, deno_core::error::AnyError> { let key = handle .as_secret_key() .ok_or_else(|| type_error("key is not a secret key"))?; @@ -1999,23 +2232,33 @@ pub fn op_node_export_secret_key_b64url( #[serde] pub fn op_node_export_public_key_jwk( #[cppgc] handle: &KeyObjectHandle, -) -> Result<deno_core::serde_json::Value, AnyError> { +) -> Result<deno_core::serde_json::Value, AsymmetricPublicKeyJwkError> { let public_key = handle .as_public_key() - .ok_or_else(|| type_error("key is not an
asymmetric public key"))?; + .ok_or(AsymmetricPublicKeyJwkError::KeyIsNotAsymmetricPublicKey)?; public_key.export_jwk() } +#[derive(Debug, thiserror::Error)] +pub enum ExportPublicKeyPemError { + #[error(transparent)] + AsymmetricPublicKeyDer(#[from] AsymmetricPublicKeyDerError), + #[error("very large data")] + VeryLargeData, + #[error(transparent)] + Der(#[from] der::Error), +} + #[op2] #[string] pub fn op_node_export_public_key_pem( #[cppgc] handle: &KeyObjectHandle, #[string] typ: &str, -) -> Result<String, AnyError> { +) -> Result<String, ExportPublicKeyPemError> { let public_key = handle .as_public_key() - .ok_or_else(|| type_error("key is not an asymmetric public key"))?; + .ok_or(AsymmetricPublicKeyDerError::KeyIsNotAsymmetricPublicKey)?; let data = public_key.export_der(typ)?; let label = match typ { @@ -2025,7 +2268,7 @@ pub fn op_node_export_public_key_pem( }; let pem_len = der::pem::encapsulated_len(label, LineEnding::LF, data.len()) - .map_err(|_| type_error("very large data"))?; + .map_err(|_| ExportPublicKeyPemError::VeryLargeData)?; let mut out = vec![0; pem_len]; let mut writer = PemWriter::new(label, LineEnding::LF, &mut out)?; writer.write(&data)?; @@ -2040,22 +2283,32 @@ pub fn op_node_export_public_key_pem( pub fn op_node_export_public_key_der( #[cppgc] handle: &KeyObjectHandle, #[string] typ: &str, -) -> Result<Box<[u8]>, AnyError> { +) -> Result<Box<[u8]>, AsymmetricPublicKeyDerError> { let public_key = handle .as_public_key() - .ok_or_else(|| type_error("key is not an asymmetric public key"))?; + .ok_or(AsymmetricPublicKeyDerError::KeyIsNotAsymmetricPublicKey)?; public_key.export_der(typ) } +#[derive(Debug, thiserror::Error)] +pub enum ExportPrivateKeyPemError { + #[error(transparent)] + AsymmetricPublicKeyDer(#[from] AsymmetricPrivateKeyDerError), + #[error("very large data")] + VeryLargeData, + #[error(transparent)] + Der(#[from] der::Error), +} + #[op2] #[string] pub fn op_node_export_private_key_pem( #[cppgc] handle: &KeyObjectHandle, #[string] typ: &str, -) -> Result<String, AnyError> { +) -> Result<String, ExportPrivateKeyPemError> { let private_key = handle .as_private_key() - .ok_or_else(|| type_error("key is not an asymmetric private key"))?; + .ok_or(AsymmetricPrivateKeyDerError::KeyIsNotAsymmetricPrivateKey)?; let data = private_key.export_der(typ)?; let label = match typ { @@ -2066,7 +2319,7 @@ pub fn op_node_export_private_key_pem( }; let pem_len = der::pem::encapsulated_len(label, LineEnding::LF, data.len()) - .map_err(|_| type_error("very large data"))?; + .map_err(|_| ExportPrivateKeyPemError::VeryLargeData)?; let mut out = vec![0; pem_len]; let mut writer = PemWriter::new(label, LineEnding::LF, &mut out)?; writer.write(&data)?; @@ -2081,10 +2334,10 @@ pub fn op_node_export_private_key_pem( pub fn op_node_export_private_key_der( #[cppgc] handle: &KeyObjectHandle, #[string] typ: &str, -) -> Result<Box<[u8]>, AnyError> { +) -> Result<Box<[u8]>, AsymmetricPrivateKeyDerError> { let private_key = handle .as_private_key() - .ok_or_else(|| type_error("key is not an asymmetric private key"))?; + .ok_or(AsymmetricPrivateKeyDerError::KeyIsNotAsymmetricPrivateKey)?; private_key.export_der(typ) } @@ -2102,7 +2355,7 @@ pub fn op_node_key_type(#[cppgc] handle: &KeyObjectHandle) -> &'static str { #[cppgc] pub fn op_node_derive_public_key_from_private_key( #[cppgc] handle: &KeyObjectHandle, -) -> Result<KeyObjectHandle, AnyError> { +) -> Result<KeyObjectHandle, deno_core::error::AnyError> { let Some(private_key) = handle.as_private_key() else { return Err(type_error("expected private key")); }; diff --git a/ext/node/ops/crypto/mod.rs b/ext/node/ops/crypto/mod.rs index 600d315587..e90e820909 100644 --- a/ext/node/ops/crypto/mod.rs +++ b/ext/node/ops/crypto/mod.rs @@ -1,7 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. use deno_core::error::generic_error; use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::unsync::spawn_blocking; use deno_core::JsBuffer; @@ -34,14 +33,14 @@ use rsa::Pkcs1v15Encrypt; use rsa::RsaPrivateKey; use rsa::RsaPublicKey; -mod cipher; +pub mod cipher; mod dh; -mod digest; +pub mod digest; pub mod keys; mod md5_sha1; mod pkcs3; mod primes; -mod sign; +pub mod sign; pub mod x509; use self::digest::match_fixed_digest_with_eager_block_buffer; @@ -58,38 +57,31 @@ pub fn op_node_check_prime( pub fn op_node_check_prime_bytes( #[anybuffer] bytes: &[u8], #[number] checks: usize, -) -> Result<bool, AnyError> { +) -> bool { let candidate = BigInt::from_bytes_be(num_bigint::Sign::Plus, bytes); - Ok(primes::is_probably_prime(&candidate, checks)) + primes::is_probably_prime(&candidate, checks) } #[op2(async)] pub async fn op_node_check_prime_async( #[bigint] num: i64, #[number] checks: usize, -) -> Result<bool, AnyError> { +) -> Result<bool, tokio::task::JoinError> { // TODO(@littledivy): use rayon for CPU-bound tasks - Ok( - spawn_blocking(move || { - primes::is_probably_prime(&BigInt::from(num), checks) - }) - .await?, - ) + spawn_blocking(move || primes::is_probably_prime(&BigInt::from(num), checks)) .await } #[op2(async)] pub fn op_node_check_prime_bytes_async( #[anybuffer] bytes: &[u8], #[number] checks: usize, -) -> Result<impl Future<Output = Result<bool, AnyError>>, AnyError> { +) -> impl Future<Output = Result<bool, tokio::task::JoinError>> { let candidate = BigInt::from_bytes_be(num_bigint::Sign::Plus, bytes); // TODO(@littledivy): use rayon for CPU-bound tasks - Ok(async move { - Ok( - spawn_blocking(move || primes::is_probably_prime(&candidate, checks)) - .await?, - ) - }) + async move { + spawn_blocking(move || primes::is_probably_prime(&candidate, checks)).await + } } #[op2] @@ -97,7 +89,7 @@ pub fn op_node_check_prime_bytes_async( pub fn op_node_create_hash( #[string] algorithm: &str, output_length: Option<u32>, -) -> Result<digest::Hasher, AnyError> { +) -> Result<digest::Hasher, digest::HashError> { digest::Hasher::new(algorithm, output_length.map(|l| l as usize)) } @@ -145,17 +137,31 @@ pub fn op_node_hash_digest_hex( pub fn op_node_hash_clone( #[cppgc] hasher: &digest::Hasher, output_length: Option<u32>, -) -> Result<Option<digest::Hasher>, AnyError> { +) -> Result<Option<digest::Hasher>, digest::HashError> { hasher.clone_inner(output_length.map(|l| l as usize)) } +#[derive(Debug, thiserror::Error)] +pub enum PrivateEncryptDecryptError { + #[error(transparent)] + Pkcs8(#[from] pkcs8::Error), + #[error(transparent)] + Spki(#[from] spki::Error), + #[error(transparent)] + Utf8(#[from] std::str::Utf8Error), + #[error(transparent)] + Rsa(#[from] rsa::Error), + #[error("Unknown padding")] + UnknownPadding, +} + #[op2] #[serde] pub fn op_node_private_encrypt( #[serde] key: StringOrBuffer, #[serde] msg: StringOrBuffer, #[smi] padding: u32, -) -> Result<ToJsBuffer, AnyError> { +) -> Result<ToJsBuffer, PrivateEncryptDecryptError> { let key = RsaPrivateKey::from_pkcs8_pem((&key).try_into()?)?; let mut rng = rand::thread_rng(); @@ -172,7 +178,7 @@ pub fn op_node_private_encrypt( .encrypt(&mut rng, Oaep::new::<sha1::Sha1>(), &msg)?
.into(), ), - _ => Err(type_error("Unknown padding")), + _ => Err(PrivateEncryptDecryptError::UnknownPadding), } } @@ -182,13 +188,13 @@ pub fn op_node_private_decrypt( #[serde] key: StringOrBuffer, #[serde] msg: StringOrBuffer, #[smi] padding: u32, -) -> Result<ToJsBuffer, AnyError> { +) -> Result<ToJsBuffer, PrivateEncryptDecryptError> { let key = RsaPrivateKey::from_pkcs8_pem((&key).try_into()?)?; match padding { 1 => Ok(key.decrypt(Pkcs1v15Encrypt, &msg)?.into()), 4 => Ok(key.decrypt(Oaep::new::<sha1::Sha1>(), &msg)?.into()), - _ => Err(type_error("Unknown padding")), + _ => Err(PrivateEncryptDecryptError::UnknownPadding), } } @@ -198,7 +204,7 @@ pub fn op_node_public_encrypt( #[serde] key: StringOrBuffer, #[serde] msg: StringOrBuffer, #[smi] padding: u32, -) -> Result<ToJsBuffer, AnyError> { +) -> Result<ToJsBuffer, PrivateEncryptDecryptError> { let key = RsaPublicKey::from_public_key_pem((&key).try_into()?)?; let mut rng = rand::thread_rng(); @@ -209,7 +215,7 @@ pub fn op_node_public_encrypt( .encrypt(&mut rng, Oaep::new::<sha1::Sha1>(), &msg)? .into(), ), - _ => Err(type_error("Unknown padding")), + _ => Err(PrivateEncryptDecryptError::UnknownPadding), } } @@ -220,7 +226,7 @@ pub fn op_node_create_cipheriv( #[string] algorithm: &str, #[buffer] key: &[u8], #[buffer] iv: &[u8], -) -> Result<u32, AnyError> { +) -> Result<u32, cipher::CipherContextError> { let context = cipher::CipherContext::new(algorithm, key, iv)?; Ok(state.resource_table.add(context)) } @@ -262,11 +268,14 @@ pub fn op_node_cipheriv_final( auto_pad: bool, #[buffer] input: &[u8], #[anybuffer] output: &mut [u8], -) -> Result<Option<Vec<u8>>, AnyError> { - let context = state.resource_table.take::<cipher::CipherContext>(rid)?; +) -> Result<Option<Vec<u8>>, cipher::CipherContextError> { + let context = state + .resource_table + .take::<cipher::CipherContext>(rid) + .map_err(cipher::CipherContextError::Resource)?; let context = Rc::try_unwrap(context) - .map_err(|_| type_error("Cipher context is already in use"))?; - context.r#final(auto_pad, input, output) + .map_err(|_| cipher::CipherContextError::ContextInUse)?; + context.r#final(auto_pad, input, output).map_err(Into::into) } #[op2] @@ -274,10 +283,13 @@ pub fn op_node_cipheriv_final( pub fn op_node_cipheriv_take( state: &mut OpState, #[smi] rid: u32, -) -> Result<Option<Vec<u8>>, AnyError> { - let context = state.resource_table.take::<cipher::CipherContext>(rid)?; +) -> Result<Option<Vec<u8>>, cipher::CipherContextError> { + let context = state + .resource_table + .take::<cipher::CipherContext>(rid) + .map_err(cipher::CipherContextError::Resource)?; let context = Rc::try_unwrap(context) - .map_err(|_| type_error("Cipher context is already in use"))?; + .map_err(|_| cipher::CipherContextError::ContextInUse)?; Ok(context.take_tag()) } @@ -288,7 +300,7 @@ pub fn op_node_create_decipheriv( #[string] algorithm: &str, #[buffer] key: &[u8], #[buffer] iv: &[u8], -) -> Result<u32, AnyError> { +) -> Result<u32, cipher::DecipherContextError> { let context = cipher::DecipherContext::new(algorithm, key, iv)?; Ok(state.resource_table.add(context)) } @@ -326,10 +338,13 @@ pub fn op_node_decipheriv_decrypt( pub fn op_node_decipheriv_take( state: &mut OpState, #[smi] rid: u32, -) -> Result<(), AnyError> { - let context = state.resource_table.take::<cipher::DecipherContext>(rid)?; +) -> Result<(), cipher::DecipherContextError> { + let context = state + .resource_table + .take::<cipher::DecipherContext>(rid) + .map_err(cipher::DecipherContextError::Resource)?; Rc::try_unwrap(context) - .map_err(|_| type_error("Cipher context is already in use"))?; + .map_err(|_| cipher::DecipherContextError::ContextInUse)?; Ok(()) } @@ -341,11 +356,16 @@ pub fn op_node_decipheriv_final( #[buffer] input: &[u8], #[anybuffer] output: &mut [u8], #[buffer] auth_tag: &[u8], -) -> Result<(), AnyError> { - let context = state.resource_table.take::<cipher::DecipherContext>(rid)?; +) -> Result<(), cipher::DecipherContextError> { + let context = state + .resource_table
.take::<cipher::DecipherContext>(rid) + .map_err(cipher::DecipherContextError::Resource)?; let context = Rc::try_unwrap(context) - .map_err(|_| type_error("Cipher context is already in use"))?; - context.r#final(auto_pad, input, output, auth_tag) + .map_err(|_| cipher::DecipherContextError::ContextInUse)?; + context + .r#final(auto_pad, input, output, auth_tag) + .map_err(Into::into) } #[op2] @@ -356,7 +376,7 @@ pub fn op_node_sign( #[string] digest_type: &str, #[smi] pss_salt_length: Option<u32>, #[smi] dsa_signature_encoding: u32, -) -> Result<Box<[u8]>, AnyError> { +) -> Result<Box<[u8]>, sign::KeyObjectHandlePrehashedSignAndVerifyError> { handle.sign_prehashed( digest_type, digest, @@ -373,7 +393,7 @@ pub fn op_node_verify( #[buffer] signature: &[u8], #[smi] pss_salt_length: Option<u32>, #[smi] dsa_signature_encoding: u32, -) -> Result<bool, AnyError> { +) -> Result<bool, sign::KeyObjectHandlePrehashedSignAndVerifyError> { handle.verify_prehashed( digest_type, digest, @@ -383,13 +403,21 @@ pub fn op_node_verify( ) } +#[derive(Debug, thiserror::Error)] +pub enum Pbkdf2Error { + #[error("unsupported digest: {0}")] + UnsupportedDigest(String), + #[error(transparent)] + Join(#[from] tokio::task::JoinError), +} + fn pbkdf2_sync( password: &[u8], salt: &[u8], iterations: u32, algorithm_name: &str, derived_key: &mut [u8], -) -> Result<(), AnyError> { +) -> Result<(), Pbkdf2Error> { match_fixed_digest_with_eager_block_buffer!( algorithm_name, fn () { @@ -397,10 +425,7 @@ fn pbkdf2_sync( Ok(()) }, _ => { - Err(type_error(format!( - "unsupported digest: {}", - algorithm_name - ))) + Err(Pbkdf2Error::UnsupportedDigest(algorithm_name.to_string())) } ) } @@ -424,7 +449,7 @@ pub async fn op_node_pbkdf2_async( #[smi] iterations: u32, #[string] digest: String, #[number] keylen: usize, -) -> Result<ToJsBuffer, AnyError> { +) -> Result<ToJsBuffer, Pbkdf2Error> { spawn_blocking(move || { let mut derived_key = vec![0; keylen]; pbkdf2_sync(&password, &salt, iterations, &digest, &mut derived_key) @@ -450,15 +475,27 @@ pub async fn op_node_fill_random_async(#[smi] len: i32) -> ToJsBuffer { .unwrap() } +#[derive(Debug, thiserror::Error)] +pub enum HkdfError { + #[error("expected secret key")] + ExpectedSecretKey, + #[error("HKDF-Expand failed")] + HkdfExpandFailed, + #[error("Unsupported digest: {0}")] + UnsupportedDigest(String), + #[error(transparent)] + Join(#[from] tokio::task::JoinError), +} + fn hkdf_sync( digest_algorithm: &str, handle: &KeyObjectHandle, salt: &[u8], info: &[u8], okm: &mut [u8], -) -> Result<(), AnyError> { +) -> Result<(), HkdfError> { let Some(ikm) = handle.as_secret_key() else { - return Err(type_error("expected secret key")); + return Err(HkdfError::ExpectedSecretKey); }; match_fixed_digest_with_eager_block_buffer!( @@ -466,10 +503,10 @@ fn hkdf_sync( fn <H>() { let hk = Hkdf::<H>::new(Some(salt), ikm); hk.expand(info, okm) - .map_err(|_| type_error("HKDF-Expand failed")) + .map_err(|_| HkdfError::HkdfExpandFailed) }, _ => { - Err(type_error(format!("Unsupported digest: {}", digest_algorithm))) + Err(HkdfError::UnsupportedDigest(digest_algorithm.to_string())) } ) } @@ -481,7 +518,7 @@ pub fn op_node_hkdf( #[buffer] salt: &[u8], #[buffer] info: &[u8], #[buffer] okm: &mut [u8], -) -> Result<(), AnyError> { +) -> Result<(), HkdfError> { hkdf_sync(digest_algorithm, handle, salt, info, okm) } @@ -493,7 +530,7 @@ pub async fn op_node_hkdf_async( #[buffer] salt: JsBuffer, #[buffer] info: JsBuffer, #[number] okm_len: usize, -) -> Result<ToJsBuffer, AnyError> { +) -> Result<ToJsBuffer, HkdfError> { let handle = handle.clone(); spawn_blocking(move || { let mut okm = vec![0u8; okm_len]; @@ -509,27 +546,24 @@ pub fn op_node_dh_compute_secret( #[buffer] prime: JsBuffer, #[buffer] private_key: JsBuffer, #[buffer]
their_public_key: JsBuffer, -) -> Result<ToJsBuffer, AnyError> { +) -> ToJsBuffer { let pubkey: BigUint = BigUint::from_bytes_be(their_public_key.as_ref()); let privkey: BigUint = BigUint::from_bytes_be(private_key.as_ref()); let primei: BigUint = BigUint::from_bytes_be(prime.as_ref()); let shared_secret: BigUint = pubkey.modpow(&privkey, &primei); - Ok(shared_secret.to_bytes_be().into()) + shared_secret.to_bytes_be().into() } #[op2(fast)] -#[smi] -pub fn op_node_random_int( - #[smi] min: i32, - #[smi] max: i32, -) -> Result<i32, AnyError> { +#[number] +pub fn op_node_random_int(#[number] min: i64, #[number] max: i64) -> i64 { let mut rng = rand::thread_rng(); // Uniform distribution is required to avoid Modulo Bias // https://en.wikipedia.org/wiki/Fisher–Yates_shuffle#Modulo_bias let dist = Uniform::from(min..max); - Ok(dist.sample(&mut rng)) + dist.sample(&mut rng) } #[allow(clippy::too_many_arguments)] @@ -542,7 +576,7 @@ fn scrypt( parallelization: u32, _maxmem: u32, output_buffer: &mut [u8], -) -> Result<(), AnyError> { +) -> Result<(), deno_core::error::AnyError> { // Construct Params let params = scrypt::Params::new( cost as u8, @@ -573,7 +607,7 @@ pub fn op_node_scrypt_sync( #[smi] parallelization: u32, #[smi] maxmem: u32, #[anybuffer] output_buffer: &mut [u8], -) -> Result<(), AnyError> { +) -> Result<(), deno_core::error::AnyError> { scrypt( password, salt, @@ -586,6 +620,14 @@ pub fn op_node_scrypt_sync( ) } +#[derive(Debug, thiserror::Error)] +pub enum ScryptAsyncError { + #[error(transparent)] + Join(#[from] tokio::task::JoinError), + #[error(transparent)] + Other(deno_core::error::AnyError), +} + #[op2(async)] #[serde] pub async fn op_node_scrypt_async( @@ -596,10 +638,11 @@ pub async fn op_node_scrypt_async( #[smi] block_size: u32, #[smi] parallelization: u32, #[smi] maxmem: u32, -) -> Result<ToJsBuffer, AnyError> { +) -> Result<ToJsBuffer, ScryptAsyncError> { spawn_blocking(move || { let mut output_buffer = vec![0u8; keylen as usize]; - let res = scrypt( + + scrypt( password, salt, keylen, @@ -608,25 +651,30 @@ pub async fn op_node_scrypt_async( parallelization, maxmem, &mut output_buffer, - ); - - if res.is_ok() { - Ok(output_buffer.into()) - } else { - // TODO(lev): rethrow the error? - Err(generic_error("scrypt failure")) - } + ) + .map(|_| output_buffer.into()) + .map_err(ScryptAsyncError::Other) }) .await? } +#[derive(Debug, thiserror::Error)] +pub enum EcdhEncodePubKey { + #[error("Invalid public key")] + InvalidPublicKey, + #[error("Unsupported curve")] + UnsupportedCurve, + #[error(transparent)] + Sec1(#[from] sec1::Error), +} + #[op2] #[buffer] pub fn op_node_ecdh_encode_pubkey( #[string] curve: &str, #[buffer] pubkey: &[u8], compress: bool, -) -> Result<Vec<u8>, AnyError> { +) -> Result<Vec<u8>, EcdhEncodePubKey> { use elliptic_curve::sec1::FromEncodedPoint; match curve { @@ -639,7 +687,7 @@ pub fn op_node_ecdh_encode_pubkey( ); // CtOption does not expose its variants. if pubkey.is_none().into() { - return Err(type_error("Invalid public key")); + return Err(EcdhEncodePubKey::InvalidPublicKey); } let pubkey = pubkey.unwrap(); @@ -652,7 +700,7 @@ pub fn op_node_ecdh_encode_pubkey( ); // CtOption does not expose its variants. if pubkey.is_none().into() { - return Err(type_error("Invalid public key")); + return Err(EcdhEncodePubKey::InvalidPublicKey); } let pubkey = pubkey.unwrap(); @@ -665,7 +713,7 @@ pub fn op_node_ecdh_encode_pubkey( ); // CtOption does not expose its variants.
if pubkey.is_none().into() { - return Err(type_error("Invalid public key")); + return Err(EcdhEncodePubKey::InvalidPublicKey); } let pubkey = pubkey.unwrap(); @@ -678,14 +726,14 @@ pub fn op_node_ecdh_encode_pubkey( ); // CtOption does not expose its variants. if pubkey.is_none().into() { - return Err(type_error("Invalid public key")); + return Err(EcdhEncodePubKey::InvalidPublicKey); } let pubkey = pubkey.unwrap(); Ok(pubkey.to_encoded_point(compress).as_ref().to_vec()) } - &_ => Err(type_error("Unsupported curve")), + &_ => Err(EcdhEncodePubKey::UnsupportedCurve), } } @@ -695,7 +743,7 @@ pub fn op_node_ecdh_generate_keys( #[buffer] pubbuf: &mut [u8], #[buffer] privbuf: &mut [u8], #[string] format: &str, -) -> Result<(), AnyError> { +) -> Result<(), deno_core::error::AnyError> { let mut rng = rand::thread_rng(); let compress = format == "compressed"; match curve { @@ -742,7 +790,7 @@ pub fn op_node_ecdh_compute_secret( #[buffer] this_priv: Option<JsBuffer>, #[buffer] their_pub: &mut [u8], #[buffer] secret: &mut [u8], -) -> Result<(), AnyError> { +) { match curve { "secp256k1" => { let their_public_key = @@ -760,8 +808,6 @@ pub fn op_node_ecdh_compute_secret( their_public_key.as_affine(), ); secret.copy_from_slice(shared_secret.raw_secret_bytes()); - - Ok(()) } "prime256v1" | "secp256r1" => { let their_public_key = @@ -776,8 +822,6 @@ pub fn op_node_ecdh_compute_secret( their_public_key.as_affine(), ); secret.copy_from_slice(shared_secret.raw_secret_bytes()); - - Ok(()) } "secp384r1" => { let their_public_key = @@ -792,8 +836,6 @@ pub fn op_node_ecdh_compute_secret( their_public_key.as_affine(), ); secret.copy_from_slice(shared_secret.raw_secret_bytes()); - - Ok(()) } "secp224r1" => { let their_public_key = @@ -808,8 +850,6 @@ pub fn op_node_ecdh_compute_secret( their_public_key.as_affine(), ); secret.copy_from_slice(shared_secret.raw_secret_bytes()); - - Ok(()) } &_ => todo!(), } } @@ -820,7 +860,7 @@ pub fn op_node_ecdh_compute_public_key( #[string] curve: &str, #[buffer] privkey: &[u8], #[buffer] pubkey: &mut [u8], -) -> Result<(), AnyError> { +) { match curve { "secp256k1" => { let this_private_key = @@ -828,8 +868,6 @@ pub fn op_node_ecdh_compute_public_key( .expect("bad private key"); let public_key = this_private_key.public_key(); pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref()); - - Ok(()) } "prime256v1" | "secp256r1" => { let this_private_key = @@ -837,7 +875,6 @@ pub fn op_node_ecdh_compute_public_key( .expect("bad private key"); let public_key = this_private_key.public_key(); pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref()); - Ok(()) } "secp384r1" => { let this_private_key = @@ -845,7 +882,6 @@ pub fn op_node_ecdh_compute_public_key( .expect("bad private key"); let public_key = this_private_key.public_key(); pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref()); - Ok(()) } "secp224r1" => { let this_private_key = @@ -853,7 +889,6 @@ pub fn op_node_ecdh_compute_public_key( .expect("bad private key"); let public_key = this_private_key.public_key(); pubkey.copy_from_slice(public_key.to_sec1_bytes().as_ref()); - Ok(()) } &_ => todo!(), } } @@ -874,8 +909,20 @@ pub fn op_node_gen_prime(#[number] size: usize) -> ToJsBuffer { #[serde] pub async fn op_node_gen_prime_async( #[number] size: usize, -) -> Result<ToJsBuffer, AnyError> { - Ok(spawn_blocking(move || gen_prime(size)).await?)
+) -> Result<ToJsBuffer, tokio::task::JoinError> { + spawn_blocking(move || gen_prime(size)).await +} + +#[derive(Debug, thiserror::Error)] +pub enum DiffieHellmanError { + #[error("Expected private key")] + ExpectedPrivateKey, + #[error("Expected public key")] + ExpectedPublicKey, + #[error("DH parameters mismatch")] + DhParametersMismatch, + #[error("Unsupported key type for diffie hellman, or key type mismatch")] + UnsupportedKeyTypeForDiffieHellmanOrKeyTypeMismatch, } #[op2] @@ -883,117 +930,134 @@ pub async fn op_node_gen_prime_async( pub fn op_node_diffie_hellman( #[cppgc] private: &KeyObjectHandle, #[cppgc] public: &KeyObjectHandle, -) -> Result<Box<[u8]>, AnyError> { +) -> Result<Box<[u8]>, DiffieHellmanError> { let private = private .as_private_key() - .ok_or_else(|| type_error("Expected private key"))?; + .ok_or(DiffieHellmanError::ExpectedPrivateKey)?; let public = public .as_public_key() - .ok_or_else(|| type_error("Expected public key"))?; + .ok_or(DiffieHellmanError::ExpectedPublicKey)?; - let res = match (private, &*public) { - ( - AsymmetricPrivateKey::Ec(EcPrivateKey::P224(private)), - AsymmetricPublicKey::Ec(EcPublicKey::P224(public)), - ) => p224::ecdh::diffie_hellman( - private.to_nonzero_scalar(), - public.as_affine(), - ) - .raw_secret_bytes() - .to_vec() - .into_boxed_slice(), - ( - AsymmetricPrivateKey::Ec(EcPrivateKey::P256(private)), - AsymmetricPublicKey::Ec(EcPublicKey::P256(public)), - ) => p256::ecdh::diffie_hellman( - private.to_nonzero_scalar(), - public.as_affine(), - ) - .raw_secret_bytes() - .to_vec() - .into_boxed_slice(), - ( - AsymmetricPrivateKey::Ec(EcPrivateKey::P384(private)), - AsymmetricPublicKey::Ec(EcPublicKey::P384(public)), - ) => p384::ecdh::diffie_hellman( - private.to_nonzero_scalar(), - public.as_affine(), - ) - .raw_secret_bytes() - .to_vec() - .into_boxed_slice(), - ( - AsymmetricPrivateKey::X25519(private), - AsymmetricPublicKey::X25519(public), - ) => private - .diffie_hellman(public) - .to_bytes() - .into_iter() - .collect(), - (AsymmetricPrivateKey::Dh(private), AsymmetricPublicKey::Dh(public)) => { - if private.params.prime != public.params.prime - || private.params.base != public.params.base - { - return Err(type_error("DH parameters mismatch")); + let res = + match (private, &*public) { + ( + AsymmetricPrivateKey::Ec(EcPrivateKey::P224(private)), + AsymmetricPublicKey::Ec(EcPublicKey::P224(public)), + ) => p224::ecdh::diffie_hellman( + private.to_nonzero_scalar(), + public.as_affine(), + ) + .raw_secret_bytes() + .to_vec() + .into_boxed_slice(), + ( + AsymmetricPrivateKey::Ec(EcPrivateKey::P256(private)), + AsymmetricPublicKey::Ec(EcPublicKey::P256(public)), + ) => p256::ecdh::diffie_hellman( + private.to_nonzero_scalar(), + public.as_affine(), + ) + .raw_secret_bytes() + .to_vec() + .into_boxed_slice(), + ( + AsymmetricPrivateKey::Ec(EcPrivateKey::P384(private)), + AsymmetricPublicKey::Ec(EcPublicKey::P384(public)), + ) => p384::ecdh::diffie_hellman( + private.to_nonzero_scalar(), + public.as_affine(), + ) + .raw_secret_bytes() + .to_vec() + .into_boxed_slice(), + ( + AsymmetricPrivateKey::X25519(private), + AsymmetricPublicKey::X25519(public), + ) => private + .diffie_hellman(public) + .to_bytes() + .into_iter() + .collect(), + (AsymmetricPrivateKey::Dh(private), AsymmetricPublicKey::Dh(public)) => { + if private.params.prime != public.params.prime + || private.params.base != public.params.base + { + return Err(DiffieHellmanError::DhParametersMismatch); + } + + // OSIP - Octet-String-to-Integer primitive + let public_key = public.key.clone().into_vec(); + let pubkey =
BigUint::from_bytes_be(&public_key); + + // Exponentiation (z = y^x mod p) + let prime = BigUint::from_bytes_be(private.params.prime.as_bytes()); + let private_key = private.key.clone().into_vec(); + let private_key = BigUint::from_bytes_be(&private_key); + let shared_secret = pubkey.modpow(&private_key, &prime); + + shared_secret.to_bytes_be().into() } - - // OSIP - Octet-String-to-Integer primitive - let public_key = public.key.clone().into_vec(); - let pubkey = BigUint::from_bytes_be(&public_key); - - // Exponentiation (z = y^x mod p) - let prime = BigUint::from_bytes_be(private.params.prime.as_bytes()); - let private_key = private.key.clone().into_vec(); - let private_key = BigUint::from_bytes_be(&private_key); - let shared_secret = pubkey.modpow(&private_key, &prime); - - shared_secret.to_bytes_be().into() - } - _ => { - return Err(type_error( - "Unsupported key type for diffie hellman, or key type mismatch", - )) - } - }; + _ => return Err( + DiffieHellmanError::UnsupportedKeyTypeForDiffieHellmanOrKeyTypeMismatch, + ), + }; Ok(res) } +#[derive(Debug, thiserror::Error)] +pub enum SignEd25519Error { + #[error("Expected private key")] + ExpectedPrivateKey, + #[error("Expected Ed25519 private key")] + ExpectedEd25519PrivateKey, + #[error("Invalid Ed25519 private key")] + InvalidEd25519PrivateKey, +} + #[op2(fast)] pub fn op_node_sign_ed25519( #[cppgc] key: &KeyObjectHandle, #[buffer] data: &[u8], #[buffer] signature: &mut [u8], -) -> Result<(), AnyError> { +) -> Result<(), SignEd25519Error> { let private = key .as_private_key() - .ok_or_else(|| type_error("Expected private key"))?; + .ok_or(SignEd25519Error::ExpectedPrivateKey)?; let ed25519 = match private { AsymmetricPrivateKey::Ed25519(private) => private, - _ => return Err(type_error("Expected Ed25519 private key")), + _ => return Err(SignEd25519Error::ExpectedEd25519PrivateKey), }; let pair = Ed25519KeyPair::from_seed_unchecked(ed25519.as_bytes().as_slice()) - .map_err(|_| type_error("Invalid Ed25519 private key"))?; + .map_err(|_| SignEd25519Error::InvalidEd25519PrivateKey)?; signature.copy_from_slice(pair.sign(data).as_ref()); Ok(()) } +#[derive(Debug, thiserror::Error)] +pub enum VerifyEd25519Error { + #[error("Expected public key")] + ExpectedPublicKey, + #[error("Expected Ed25519 public key")] + ExpectedEd25519PublicKey, +} + #[op2(fast)] pub fn op_node_verify_ed25519( #[cppgc] key: &KeyObjectHandle, #[buffer] data: &[u8], #[buffer] signature: &[u8], -) -> Result<bool, AnyError> { +) -> Result<bool, VerifyEd25519Error> { let public = key .as_public_key() - .ok_or_else(|| type_error("Expected public key"))?; + .ok_or(VerifyEd25519Error::ExpectedPublicKey)?; let ed25519 = match &*public { AsymmetricPublicKey::Ed25519(public) => public, - _ => return Err(type_error("Expected Ed25519 public key")), + _ => return Err(VerifyEd25519Error::ExpectedEd25519PublicKey), }; let verified = ring::signature::UnparsedPublicKey::new( diff --git a/ext/node/ops/crypto/sign.rs b/ext/node/ops/crypto/sign.rs index b7779a5d80..30094c0765 100644 --- a/ext/node/ops/crypto/sign.rs +++ b/ext/node/ops/crypto/sign.rs @@ -1,7 +1,4 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::generic_error; -use deno_core::error::type_error; -use deno_core::error::AnyError; use rand::rngs::OsRng; use rsa::signature::hazmat::PrehashSigner as _; use rsa::signature::hazmat::PrehashVerifier as _; @@ -26,7 +23,7 @@ use elliptic_curve::FieldBytesSize; fn dsa_signature<C: elliptic_curve::PrimeCurve>( encoding: u32, signature: ecdsa::Signature<C>, -) -> Result<Box<[u8]>, AnyError> +) -> Result<Box<[u8]>, KeyObjectHandlePrehashedSignAndVerifyError> where MaxSize<C>: ArrayLength<u8>, <FieldBytesSize<C> as Add>::Output: Add<MaxOverhead> + ArrayLength<u8>, { @@ -36,10 +33,54 @@ where 0 => Ok(signature.to_der().to_bytes().to_vec().into_boxed_slice()), // IEEE P1363 1 => Ok(signature.to_bytes().to_vec().into_boxed_slice()), - _ => Err(type_error("invalid DSA signature encoding")), + _ => Err( + KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignatureEncoding, + ), } } +#[derive(Debug, thiserror::Error)] +pub enum KeyObjectHandlePrehashedSignAndVerifyError { + #[error("invalid DSA signature encoding")] + InvalidDsaSignatureEncoding, + #[error("key is not a private key")] + KeyIsNotPrivate, + #[error("digest not allowed for RSA signature: {0}")] + DigestNotAllowedForRsaSignature(String), + #[error("failed to sign digest with RSA")] + FailedToSignDigestWithRsa, + #[error("digest not allowed for RSA-PSS signature: {0}")] + DigestNotAllowedForRsaPssSignature(String), + #[error("failed to sign digest with RSA-PSS")] + FailedToSignDigestWithRsaPss, + #[error("failed to sign digest with DSA")] + FailedToSignDigestWithDsa, + #[error("rsa-pss with different mf1 hash algorithm and hash algorithm is not supported")] + RsaPssHashAlgorithmUnsupported, + #[error( + "private key does not allow {actual} to be used, expected {expected}" + )] + PrivateKeyDisallowsUsage { actual: String, expected: String }, + #[error("failed to sign digest")] + FailedToSignDigest, + #[error("x25519 key cannot be used for signing")] + X25519KeyCannotBeUsedForSigning, + #[error("Ed25519 key cannot be used for prehashed signing")] + Ed25519KeyCannotBeUsedForPrehashedSigning, + #[error("DH key cannot be used for signing")] + DhKeyCannotBeUsedForSigning, + #[error("key is not a public or private key")] + KeyIsNotPublicOrPrivate, + #[error("Invalid DSA signature")] + InvalidDsaSignature, + #[error("x25519 key cannot be used for verification")] + X25519KeyCannotBeUsedForVerification, + #[error("Ed25519 key cannot be used for prehashed verification")] + Ed25519KeyCannotBeUsedForPrehashedVerification, + #[error("DH key cannot be used for verification")] + DhKeyCannotBeUsedForVerification, +} + impl KeyObjectHandle { pub fn sign_prehashed( &self, @@ -47,10 +88,10 @@ impl KeyObjectHandle { digest: &[u8], pss_salt_length: Option<u32>, dsa_signature_encoding: u32, - ) -> Result<Box<[u8]>, AnyError> { + ) -> Result<Box<[u8]>, KeyObjectHandlePrehashedSignAndVerifyError> { let private_key = self .as_private_key() - .ok_or_else(|| type_error("key is not a private key"))?; + .ok_or(KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPrivate)?; match private_key { AsymmetricPrivateKey::Rsa(key) => { @@ -63,34 +104,26 @@ impl KeyObjectHandle { rsa::pkcs1v15::Pkcs1v15Sign::new::<D>() }, _ => { - return Err(type_error(format!( - "digest not allowed for RSA signature: {}", - digest_type - ))) + return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string())) } ) }; let signature = signer .sign(Some(&mut OsRng), key, digest) - .map_err(|_| generic_error("failed to sign digest with RSA"))?; + .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsa)?; Ok(signature.into()) }
AsymmetricPrivateKey::RsaPss(key) => { let mut hash_algorithm = None; let mut salt_length = None; - match &key.details { - Some(details) => { - if details.hash_algorithm != details.mf1_hash_algorithm { - return Err(type_error( - "rsa-pss with different mf1 hash algorithm and hash algorithm is not supported", - )); - } - hash_algorithm = Some(details.hash_algorithm); - salt_length = Some(details.salt_length as usize); + if let Some(details) = &key.details { + if details.hash_algorithm != details.mf1_hash_algorithm { + return Err(KeyObjectHandlePrehashedSignAndVerifyError::RsaPssHashAlgorithmUnsupported); } - None => {} - }; + hash_algorithm = Some(details.hash_algorithm); + salt_length = Some(details.salt_length as usize); + } if let Some(s) = pss_salt_length { salt_length = Some(s as usize); } @@ -99,10 +132,10 @@ impl KeyObjectHandle { fn (algorithm: Option<RsaPssHashAlgorithm>) { if let Some(hash_algorithm) = hash_algorithm.take() { if Some(hash_algorithm) != algorithm { - return Err(type_error(format!( - "private key does not allow {} to be used, expected {}", - digest_type, hash_algorithm.as_str() - ))); + return Err(KeyObjectHandlePrehashedSignAndVerifyError::PrivateKeyDisallowsUsage { + actual: digest_type.to_string(), + expected: hash_algorithm.as_str().to_string(), + }); } } if let Some(salt_length) = salt_length { @@ -112,15 +145,12 @@ impl KeyObjectHandle { } }, _ => { - return Err(type_error(format!( - "digest not allowed for RSA-PSS signature: {}", - digest_type - ))) + return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaPssSignature(digest_type.to_string())); } ); let signature = pss .sign(Some(&mut OsRng), &key.key, digest) - .map_err(|_| generic_error("failed to sign digest with RSA-PSS"))?; + .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsaPss)?; Ok(signature.into()) } AsymmetricPrivateKey::Dsa(key) => { @@ -130,15 +160,12 @@ impl KeyObjectHandle { key.sign_prehashed_rfc6979::<D>(digest) }, _ => { - return Err(type_error(format!( - "digest not allowed for RSA signature: {}", - digest_type - ))) + return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string())) } ); let signature = - res.map_err(|_| generic_error("failed to sign digest with DSA"))?; + res.map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithDsa)?; Ok(signature.into()) } AsymmetricPrivateKey::Ec(key) => match key { @@ -146,7 +173,7 @@ impl KeyObjectHandle { let signing_key = p224::ecdsa::SigningKey::from(key); let signature: p224::ecdsa::Signature = signing_key .sign_prehash(digest) - .map_err(|_| type_error("failed to sign digest"))?; + .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?; dsa_signature(dsa_signature_encoding, signature) } @@ -154,7 +181,7 @@ impl KeyObjectHandle { let signing_key = p256::ecdsa::SigningKey::from(key); let signature: p256::ecdsa::Signature = signing_key .sign_prehash(digest) - .map_err(|_| type_error("failed to sign digest"))?; + .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?; dsa_signature(dsa_signature_encoding, signature) } @@ -162,19 +189,17 @@ impl KeyObjectHandle { let signing_key = p384::ecdsa::SigningKey::from(key); let signature: p384::ecdsa::Signature = signing_key .sign_prehash(digest) - .map_err(|_| type_error("failed to sign digest"))?; + .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest)?; dsa_signature(dsa_signature_encoding, signature) } }, AsymmetricPrivateKey::X25519(_)
=> { - Err(type_error("x25519 key cannot be used for signing")) + Err(KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForSigning) } - AsymmetricPrivateKey::Ed25519(_) => Err(type_error( - "Ed25519 key cannot be used for prehashed signing", - )), + AsymmetricPrivateKey::Ed25519(_) => Err(KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedSigning), AsymmetricPrivateKey::Dh(_) => { - Err(type_error("DH key cannot be used for signing")) + Err(KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForSigning) } } } @@ -186,10 +211,10 @@ impl KeyObjectHandle { signature: &[u8], pss_salt_length: Option<u32>, dsa_signature_encoding: u32, - ) -> Result<bool, AnyError> { - let public_key = self - .as_public_key() - .ok_or_else(|| type_error("key is not a public or private key"))?; + ) -> Result<bool, KeyObjectHandlePrehashedSignAndVerifyError> { + let public_key = self.as_public_key().ok_or( + KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPublicOrPrivate, + )?; match &*public_key { AsymmetricPublicKey::Rsa(key) => { @@ -202,10 +227,7 @@ impl KeyObjectHandle { rsa::pkcs1v15::Pkcs1v15Sign::new::<D>() }, _ => { - return Err(type_error(format!( - "digest not allowed for RSA signature: {}", - digest_type - ))) + return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(digest_type.to_string())) } ) }; @@ -215,18 +237,13 @@ impl KeyObjectHandle { AsymmetricPublicKey::RsaPss(key) => { let mut hash_algorithm = None; let mut salt_length = None; - match &key.details { - Some(details) => { - if details.hash_algorithm != details.mf1_hash_algorithm { - return Err(type_error( - "rsa-pss with different mf1 hash algorithm and hash algorithm is not supported", - )); - } - hash_algorithm = Some(details.hash_algorithm); - salt_length = Some(details.salt_length as usize); + if let Some(details) = &key.details { + if details.hash_algorithm != details.mf1_hash_algorithm { + return Err(KeyObjectHandlePrehashedSignAndVerifyError::RsaPssHashAlgorithmUnsupported); } - None => {} - }; + hash_algorithm = Some(details.hash_algorithm); + salt_length = Some(details.salt_length as usize); + } if let Some(s) = pss_salt_length { salt_length = Some(s as usize); } @@ -235,10 +252,10 @@ impl KeyObjectHandle { fn (algorithm: Option<RsaPssHashAlgorithm>) { if let Some(hash_algorithm) = hash_algorithm.take() { if Some(hash_algorithm) != algorithm { - return Err(type_error(format!( - "private key does not allow {} to be used, expected {}", - digest_type, hash_algorithm.as_str() - ))); + return Err(KeyObjectHandlePrehashedSignAndVerifyError::PrivateKeyDisallowsUsage { + actual: digest_type.to_string(), + expected: hash_algorithm.as_str().to_string(), + }); } } if let Some(salt_length) = salt_length { @@ -248,17 +265,14 @@ impl KeyObjectHandle { } }, _ => { - return Err(type_error(format!( - "digest not allowed for RSA-PSS signature: {}", - digest_type - ))) + return Err(KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaPssSignature(digest_type.to_string())); } ); Ok(pss.verify(&key.key, digest, signature).is_ok()) } AsymmetricPublicKey::Dsa(key) => { let signature = dsa::Signature::from_der(signature) - .map_err(|_| type_error("Invalid DSA signature"))?; + .map_err(|_| KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignature)?; Ok(key.verify_prehash(digest, &signature).is_ok()) } AsymmetricPublicKey::Ec(key) => match key { @@ -300,13 +314,11 @@ impl KeyObjectHandle { } }, AsymmetricPublicKey::X25519(_) => { -
Err(KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForVerification) } - AsymmetricPublicKey::Ed25519(_) => Err(type_error( - "Ed25519 key cannot be used for prehashed verification", - )), + AsymmetricPublicKey::Ed25519(_) => Err(KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedVerification), AsymmetricPublicKey::Dh(_) => { - Err(type_error("DH key cannot be used for verification")) + Err(KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForVerification) } } } diff --git a/ext/node/ops/crypto/x509.rs b/ext/node/ops/crypto/x509.rs index b44ff3a4b3..ab8e52f703 100644 --- a/ext/node/ops/crypto/x509.rs +++ b/ext/node/ops/crypto/x509.rs @@ -1,11 +1,11 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use deno_core::error::AnyError; use deno_core::op2; use x509_parser::der_parser::asn1_rs::Any; use x509_parser::der_parser::asn1_rs::Tag; use x509_parser::der_parser::oid::Oid; +pub use x509_parser::error::X509Error; use x509_parser::extensions; use x509_parser::pem; use x509_parser::prelude::*; @@ -65,7 +65,7 @@ impl<'a> Deref for CertificateView<'a> { #[cppgc] pub fn op_node_x509_parse( #[buffer] buf: &[u8], -) -> Result<Certificate, AnyError> { +) -> Result<Certificate, X509Error> { let source = match pem::parse_x509_pem(buf) { Ok((_, pem)) => CertificateSources::Pem(pem), Err(_) => CertificateSources::Der(buf.to_vec().into_boxed_slice()), @@ -81,7 +81,7 @@ pub fn op_node_x509_parse( X509Certificate::from_der(buf).map(|(_, cert)| cert)? } }; - Ok::<_, AnyError>(CertificateView { cert }) + Ok::<_, X509Error>(CertificateView { cert }) }, )?; @@ -89,23 +89,23 @@ } #[op2(fast)] -pub fn op_node_x509_ca(#[cppgc] cert: &Certificate) -> Result<bool, AnyError> { +pub fn op_node_x509_ca(#[cppgc] cert: &Certificate) -> bool { let cert = cert.inner.get().deref(); - Ok(cert.is_ca()) + cert.is_ca() } #[op2(fast)] pub fn op_node_x509_check_email( #[cppgc] cert: &Certificate, #[string] email: &str, -) -> Result<bool, AnyError> { +) -> bool { let cert = cert.inner.get().deref(); let subject = cert.subject(); if subject .iter_email() .any(|e| e.as_str().unwrap_or("") == email) { - return Ok(true); + return true; } let subject_alt = cert @@ -121,62 +121,60 @@ pub fn op_node_x509_check_email( for name in &subject_alt.general_names { if let extensions::GeneralName::RFC822Name(n) = name { if *n == email { - return Ok(true); + return true; } } } } - Ok(false) + false } #[op2] #[string] -pub fn op_node_x509_fingerprint( - #[cppgc] cert: &Certificate, -) -> Result<Option<String>, AnyError> { - Ok(cert.fingerprint::<sha1::Sha1>()) +pub fn op_node_x509_fingerprint(#[cppgc] cert: &Certificate) -> Option<String> { + cert.fingerprint::<sha1::Sha1>() } #[op2] #[string] pub fn op_node_x509_fingerprint256( #[cppgc] cert: &Certificate, -) -> Result<Option<String>, AnyError> { - Ok(cert.fingerprint::<sha2::Sha256>()) +) -> Option<String> { + cert.fingerprint::<sha2::Sha256>() } #[op2] #[string] pub fn op_node_x509_fingerprint512( #[cppgc] cert: &Certificate, -) -> Result<Option<String>, AnyError> { - Ok(cert.fingerprint::<sha2::Sha512>()) +) -> Option<String> { + cert.fingerprint::<sha2::Sha512>() } #[op2] #[string] pub fn op_node_x509_get_issuer( #[cppgc] cert: &Certificate, -) -> Result<String, AnyError> { +) -> Result<String, X509Error> { let cert = cert.inner.get().deref(); - Ok(x509name_to_string(cert.issuer(), oid_registry())?) + x509name_to_string(cert.issuer(), oid_registry()) } #[op2] #[string] pub fn op_node_x509_get_subject( #[cppgc] cert: &Certificate, -) -> Result<String, AnyError> { +) -> Result<String, X509Error> { let cert = cert.inner.get().deref(); - Ok(x509name_to_string(cert.subject(), oid_registry())?)
+ x509name_to_string(cert.subject(), oid_registry()) } #[op2] #[cppgc] pub fn op_node_x509_public_key( #[cppgc] cert: &Certificate, -) -> Result { +) -> Result { let cert = cert.inner.get().deref(); let public_key = &cert.tbs_certificate.subject_pki; @@ -245,37 +243,29 @@ fn x509name_to_string( #[op2] #[string] -pub fn op_node_x509_get_valid_from( - #[cppgc] cert: &Certificate, -) -> Result { +pub fn op_node_x509_get_valid_from(#[cppgc] cert: &Certificate) -> String { let cert = cert.inner.get().deref(); - Ok(cert.validity().not_before.to_string()) + cert.validity().not_before.to_string() } #[op2] #[string] -pub fn op_node_x509_get_valid_to( - #[cppgc] cert: &Certificate, -) -> Result { +pub fn op_node_x509_get_valid_to(#[cppgc] cert: &Certificate) -> String { let cert = cert.inner.get().deref(); - Ok(cert.validity().not_after.to_string()) + cert.validity().not_after.to_string() } #[op2] #[string] -pub fn op_node_x509_get_serial_number( - #[cppgc] cert: &Certificate, -) -> Result { +pub fn op_node_x509_get_serial_number(#[cppgc] cert: &Certificate) -> String { let cert = cert.inner.get().deref(); let mut s = cert.serial.to_str_radix(16); s.make_ascii_uppercase(); - Ok(s) + s } #[op2(fast)] -pub fn op_node_x509_key_usage( - #[cppgc] cert: &Certificate, -) -> Result { +pub fn op_node_x509_key_usage(#[cppgc] cert: &Certificate) -> u16 { let cert = cert.inner.get().deref(); let key_usage = cert .extensions() @@ -286,5 +276,5 @@ pub fn op_node_x509_key_usage( _ => None, }); - Ok(key_usage.map(|k| k.flags).unwrap_or(0)) + key_usage.map(|k| k.flags).unwrap_or(0) } diff --git a/ext/node/ops/fs.rs b/ext/node/ops/fs.rs index 6253f32d05..9c0e4e1ccf 100644 --- a/ext/node/ops/fs.rs +++ b/ext/node/ops/fs.rs @@ -3,7 +3,6 @@ use std::cell::RefCell; use std::rc::Rc; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::OpState; use deno_fs::FileSystemRc; @@ -11,11 +10,27 @@ use serde::Serialize; use crate::NodePermissions; +#[derive(Debug, thiserror::Error)] +pub enum FsError { + #[error(transparent)] + Permission(#[from] deno_permissions::PermissionCheckError), + #[error("{0}")] + Io(#[from] std::io::Error), + #[cfg(windows)] + #[error("Path has no root.")] + PathHasNoRoot, + #[cfg(not(any(unix, windows)))] + #[error("Unsupported platform.")] + UnsupportedPlatform, + #[error(transparent)] + Fs(#[from] deno_io::fs::FsError), +} + #[op2(fast)] pub fn op_node_fs_exists_sync
<P>
( state: &mut OpState, #[string] path: String, -) -> Result +) -> Result where P: NodePermissions + 'static, { @@ -30,7 +45,7 @@ where pub async fn op_node_fs_exists
<P>
( state: Rc>, #[string] path: String, -) -> Result +) -> Result where P: NodePermissions + 'static, { @@ -50,7 +65,7 @@ pub fn op_node_cp_sync
<P>
( state: &mut OpState, #[string] path: &str, #[string] new_path: &str, -) -> Result<(), AnyError> +) -> Result<(), FsError> where P: NodePermissions + 'static, { @@ -71,7 +86,7 @@ pub async fn op_node_cp
<P>
( state: Rc>, #[string] path: String, #[string] new_path: String, -) -> Result<(), AnyError> +) -> Result<(), FsError> where P: NodePermissions + 'static, { @@ -108,7 +123,7 @@ pub fn op_node_statfs
<P>
( state: Rc>, #[string] path: String, bigint: bool, -) -> Result +) -> Result where P: NodePermissions + 'static, { @@ -130,13 +145,21 @@ where let mut cpath = path.as_bytes().to_vec(); cpath.push(0); if bigint { - #[cfg(not(target_os = "macos"))] + #[cfg(not(any( + target_os = "macos", + target_os = "freebsd", + target_os = "openbsd" + )))] // SAFETY: `cpath` is NUL-terminated and result is pointer to valid statfs memory. let (code, result) = unsafe { let mut result: libc::statfs64 = std::mem::zeroed(); (libc::statfs64(cpath.as_ptr() as _, &mut result), result) }; - #[cfg(target_os = "macos")] + #[cfg(any( + target_os = "macos", + target_os = "freebsd", + target_os = "openbsd" + ))] // SAFETY: `cpath` is NUL-terminated and result is pointer to valid statfs memory. let (code, result) = unsafe { let mut result: libc::statfs = std::mem::zeroed(); @@ -146,7 +169,10 @@ where return Err(std::io::Error::last_os_error().into()); } Ok(StatFs { + #[cfg(not(target_os = "openbsd"))] typ: result.f_type as _, + #[cfg(target_os = "openbsd")] + typ: 0 as _, bsize: result.f_bsize as _, blocks: result.f_blocks as _, bfree: result.f_bfree as _, @@ -164,7 +190,10 @@ where return Err(std::io::Error::last_os_error().into()); } Ok(StatFs { + #[cfg(not(target_os = "openbsd"))] typ: result.f_type as _, + #[cfg(target_os = "openbsd")] + typ: 0 as _, bsize: result.f_bsize as _, blocks: result.f_blocks as _, bfree: result.f_bfree as _, @@ -176,7 +205,6 @@ where } #[cfg(windows)] { - use deno_core::anyhow::anyhow; use std::ffi::OsStr; use std::os::windows::ffi::OsStrExt; use windows_sys::Win32::Storage::FileSystem::GetDiskFreeSpaceW; @@ -186,10 +214,7 @@ where // call below. #[allow(clippy::disallowed_methods)] let path = path.canonicalize()?; - let root = path - .ancestors() - .last() - .ok_or(anyhow!("Path has no root."))?; + let root = path.ancestors().last().ok_or(FsError::PathHasNoRoot)?; let mut root = OsStr::new(root).encode_wide().collect::>(); root.push(0); let mut sectors_per_cluster = 0; @@ -229,7 +254,7 @@ where { let _ = path; let _ = bigint; - Err(anyhow!("Unsupported platform.")) + Err(FsError::UnsupportedPlatform) } } @@ -241,7 +266,7 @@ pub fn op_node_lutimes_sync
<P>
( #[smi] atime_nanos: u32, #[number] mtime_secs: i64, #[smi] mtime_nanos: u32, -) -> Result<(), AnyError> +) -> Result<(), FsError> where P: NodePermissions + 'static, { @@ -262,7 +287,7 @@ pub async fn op_node_lutimes
<P>
( #[smi] atime_nanos: u32, #[number] mtime_secs: i64, #[smi] mtime_nanos: u32, -) -> Result<(), AnyError> +) -> Result<(), FsError> where P: NodePermissions + 'static, { @@ -286,7 +311,7 @@ pub fn op_node_lchown_sync
<P>
( #[string] path: String, uid: Option, gid: Option, -) -> Result<(), AnyError> +) -> Result<(), FsError> where P: NodePermissions + 'static, { @@ -304,7 +329,7 @@ pub async fn op_node_lchown
<P>
( #[string] path: String, uid: Option, gid: Option, -) -> Result<(), AnyError> +) -> Result<(), FsError> where P: NodePermissions + 'static, { diff --git a/ext/node/ops/http.rs b/ext/node/ops/http.rs index 730e1e482b..69571078fe 100644 --- a/ext/node/ops/http.rs +++ b/ext/node/ops/http.rs @@ -78,9 +78,7 @@ where { let permissions = state.borrow_mut::
<P>
(); - permissions - .check_net_url(&url, "ClientRequest") - .map_err(FetchError::Permission)?; + permissions.check_net_url(&url, "ClientRequest")?; } let mut header_map = HeaderMap::new(); diff --git a/ext/node/ops/http2.rs b/ext/node/ops/http2.rs index 705a8ecdcb..53dada9f41 100644 --- a/ext/node/ops/http2.rs +++ b/ext/node/ops/http2.rs @@ -7,7 +7,6 @@ use std::rc::Rc; use std::task::Poll; use bytes::Bytes; -use deno_core::error::AnyError; use deno_core::futures::future::poll_fn; use deno_core::op2; use deno_core::serde::Serialize; @@ -110,17 +109,28 @@ impl Resource for Http2ServerSendResponse { } } +#[derive(Debug, thiserror::Error)] +pub enum Http2Error { + #[error(transparent)] + Resource(deno_core::error::AnyError), + #[error(transparent)] + UrlParse(#[from] url::ParseError), + #[error(transparent)] + H2(#[from] h2::Error), +} + #[op2(async)] #[serde] pub async fn op_http2_connect( state: Rc>, #[smi] rid: ResourceId, #[string] url: String, -) -> Result<(ResourceId, ResourceId), AnyError> { +) -> Result<(ResourceId, ResourceId), Http2Error> { // No permission check necessary because we're using an existing connection let network_stream = { let mut state = state.borrow_mut(); - take_network_stream_resource(&mut state.resource_table, rid)? + take_network_stream_resource(&mut state.resource_table, rid) + .map_err(Http2Error::Resource)? }; let url = Url::parse(&url)?; @@ -144,9 +154,10 @@ pub async fn op_http2_connect( pub async fn op_http2_listen( state: Rc>, #[smi] rid: ResourceId, -) -> Result { +) -> Result { let stream = - take_network_stream_resource(&mut state.borrow_mut().resource_table, rid)?; + take_network_stream_resource(&mut state.borrow_mut().resource_table, rid) + .map_err(Http2Error::Resource)?; let conn = h2::server::Builder::new().handshake(stream).await?; Ok( @@ -166,12 +177,13 @@ pub async fn op_http2_accept( #[smi] rid: ResourceId, ) -> Result< Option<(Vec<(ByteString, ByteString)>, ResourceId, ResourceId)>, - AnyError, + Http2Error, > { let resource = state .borrow() .resource_table - .get::(rid)?; + .get::(rid) + .map_err(Http2Error::Resource)?; let mut conn = RcRef::map(&resource, |r| &r.conn).borrow_mut().await; if let Some(res) = conn.accept().await { let (req, resp) = res?; @@ -233,11 +245,12 @@ pub async fn op_http2_send_response( #[smi] rid: ResourceId, #[smi] status: u16, #[serde] headers: Vec<(ByteString, ByteString)>, -) -> Result<(ResourceId, u32), AnyError> { +) -> Result<(ResourceId, u32), Http2Error> { let resource = state .borrow() .resource_table - .get::(rid)?; + .get::(rid) + .map_err(Http2Error::Resource)?; let mut send_response = RcRef::map(resource, |r| &r.send_response) .borrow_mut() .await; @@ -262,8 +275,12 @@ pub async fn op_http2_send_response( pub async fn op_http2_poll_client_connection( state: Rc>, #[smi] rid: ResourceId, -) -> Result<(), AnyError> { - let resource = state.borrow().resource_table.get::(rid)?; +) -> Result<(), Http2Error> { + let resource = state + .borrow() + .resource_table + .get::(rid) + .map_err(Http2Error::Resource)?; let cancel_handle = RcRef::map(resource.clone(), |this| &this.cancel_handle); let mut conn = RcRef::map(resource, |this| &this.conn).borrow_mut().await; @@ -289,11 +306,12 @@ pub async fn op_http2_client_request( // 4 strings of keys? 
#[serde] mut pseudo_headers: HashMap, #[serde] headers: Vec<(ByteString, ByteString)>, -) -> Result<(ResourceId, u32), AnyError> { +) -> Result<(ResourceId, u32), Http2Error> { let resource = state .borrow() .resource_table - .get::(client_rid)?; + .get::(client_rid) + .map_err(Http2Error::Resource)?; let url = resource.url.clone(); @@ -326,7 +344,10 @@ pub async fn op_http2_client_request( let resource = { let state = state.borrow(); - state.resource_table.get::(client_rid)? + state + .resource_table + .get::(client_rid) + .map_err(Http2Error::Resource)? }; let mut client = RcRef::map(&resource, |r| &r.client).borrow_mut().await; poll_fn(|cx| client.poll_ready(cx)).await?; @@ -345,11 +366,12 @@ pub async fn op_http2_client_send_data( #[smi] stream_rid: ResourceId, #[buffer] data: JsBuffer, end_of_stream: bool, -) -> Result<(), AnyError> { +) -> Result<(), Http2Error> { let resource = state .borrow() .resource_table - .get::(stream_rid)?; + .get::(stream_rid) + .map_err(Http2Error::Resource)?; let mut stream = RcRef::map(&resource, |r| &r.stream).borrow_mut().await; stream.send_data(data.to_vec().into(), end_of_stream)?; @@ -361,7 +383,7 @@ pub async fn op_http2_client_reset_stream( state: Rc>, #[smi] stream_rid: ResourceId, #[smi] code: u32, -) -> Result<(), AnyError> { +) -> Result<(), deno_core::error::AnyError> { let resource = state .borrow() .resource_table @@ -376,11 +398,12 @@ pub async fn op_http2_client_send_trailers( state: Rc>, #[smi] stream_rid: ResourceId, #[serde] trailers: Vec<(ByteString, ByteString)>, -) -> Result<(), AnyError> { +) -> Result<(), Http2Error> { let resource = state .borrow() .resource_table - .get::(stream_rid)?; + .get::(stream_rid) + .map_err(Http2Error::Resource)?; let mut stream = RcRef::map(&resource, |r| &r.stream).borrow_mut().await; let mut trailers_map = http::HeaderMap::new(); @@ -408,11 +431,12 @@ pub struct Http2ClientResponse { pub async fn op_http2_client_get_response( state: Rc>, #[smi] stream_rid: ResourceId, -) -> Result<(Http2ClientResponse, bool), AnyError> { +) -> Result<(Http2ClientResponse, bool), Http2Error> { let resource = state .borrow() .resource_table - .get::(stream_rid)?; + .get::(stream_rid) + .map_err(Http2Error::Resource)?; let mut response_future = RcRef::map(&resource, |r| &r.response).borrow_mut().await; @@ -478,11 +502,12 @@ fn poll_data_or_trailers( pub async fn op_http2_client_get_response_body_chunk( state: Rc>, #[smi] body_rid: ResourceId, -) -> Result<(Option>, bool, bool), AnyError> { +) -> Result<(Option>, bool, bool), Http2Error> { let resource = state .borrow() .resource_table - .get::(body_rid)?; + .get::(body_rid) + .map_err(Http2Error::Resource)?; let mut body = RcRef::map(&resource, |r| &r.body).borrow_mut().await; loop { @@ -525,7 +550,7 @@ pub async fn op_http2_client_get_response_body_chunk( pub async fn op_http2_client_get_response_trailers( state: Rc>, #[smi] body_rid: ResourceId, -) -> Result>, AnyError> { +) -> Result>, deno_core::error::AnyError> { let resource = state .borrow() .resource_table diff --git a/ext/node/ops/idna.rs b/ext/node/ops/idna.rs index 9c9450c70f..a3d85e77c2 100644 --- a/ext/node/ops/idna.rs +++ b/ext/node/ops/idna.rs @@ -1,7 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
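The http2.rs rewrite above shows the pattern this PR applies file by file: a thiserror enum in which errors with an obvious conversion get `#[from]`, while the resource-table lookup, whose error is still an untyped `AnyError` in deno_core, is wrapped explicitly at every call site with `.map_err(Http2Error::Resource)`. A self-contained sketch of that split, with a hypothetical `BadResource` standing in for the untyped error:

```rust
use thiserror::Error;

// Stand-in for deno_core's untyped resource-table failure; only the
// shape matters for this sketch.
#[derive(Debug, Error)]
#[error("bad resource id")]
pub struct BadResource;

#[derive(Debug, Error)]
pub enum Http2Error {
    // No #[from]: callers opt in with `.map_err(Http2Error::Resource)`.
    #[error(transparent)]
    Resource(BadResource),
    // #[from] variants convert silently through `?`.
    #[error(transparent)]
    UrlParse(#[from] url::ParseError),
}

fn get_resource(ok: bool) -> Result<u32, BadResource> {
    if ok { Ok(0) } else { Err(BadResource) }
}

fn connect(url: &str) -> Result<(u32, url::Url), Http2Error> {
    let rid = get_resource(true).map_err(Http2Error::Resource)?;
    let url = url::Url::parse(url)?; // url::ParseError -> Http2Error via #[from]
    Ok((rid, url))
}
```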
-use deno_core::anyhow::Error; -use deno_core::error::range_error; use deno_core::op2; use std::borrow::Cow; @@ -11,19 +9,21 @@ use std::borrow::Cow; const PUNY_PREFIX: &str = "xn--"; -fn invalid_input_err() -> Error { - range_error("Invalid input") -} - -fn not_basic_err() -> Error { - range_error("Illegal input >= 0x80 (not a basic code point)") +#[derive(Debug, thiserror::Error)] +pub enum IdnaError { + #[error("Invalid input")] + InvalidInput, + #[error("Input would take more than 63 characters to encode")] + InputTooLong, + #[error("Illegal input >= 0x80 (not a basic code point)")] + IllegalInput, } /// map a domain by mapping each label with the given function -fn map_domain( +fn map_domain( domain: &str, - f: impl Fn(&str) -> Result, E>, -) -> Result { + f: impl Fn(&str) -> Result, IdnaError>, +) -> Result { let mut result = String::with_capacity(domain.len()); let mut domain = domain; @@ -48,7 +48,7 @@ fn map_domain( /// Maps a unicode domain to ascii by punycode encoding each label /// /// Note this is not IDNA2003 or IDNA2008 compliant, rather it matches node.js's punycode implementation -fn to_ascii(input: &str) -> Result { +fn to_ascii(input: &str) -> Result { if input.is_ascii() { return Ok(input.into()); } @@ -61,9 +61,7 @@ fn to_ascii(input: &str) -> Result { } else { idna::punycode::encode_str(label) .map(|encoded| [PUNY_PREFIX, &encoded].join("").into()) // add the prefix - .ok_or_else(|| { - Error::msg("Input would take more than 63 characters to encode") // only error possible per the docs - }) + .ok_or(IdnaError::InputTooLong) // only error possible per the docs } })?; @@ -74,13 +72,13 @@ fn to_ascii(input: &str) -> Result { /// Maps an ascii domain to unicode by punycode decoding each label /// /// Note this is not IDNA2003 or IDNA2008 compliant, rather it matches node.js's punycode implementation -fn to_unicode(input: &str) -> Result { +fn to_unicode(input: &str) -> Result { map_domain(input, |s| { if let Some(puny) = s.strip_prefix(PUNY_PREFIX) { // it's a punycode encoded label Ok( idna::punycode::decode_to_string(&puny.to_lowercase()) - .ok_or_else(invalid_input_err)? + .ok_or(IdnaError::InvalidInput)? 
.into(), ) } else { @@ -95,7 +93,7 @@ fn to_unicode(input: &str) -> Result { #[string] pub fn op_node_idna_punycode_to_ascii( #[string] domain: String, -) -> Result { +) -> Result { to_ascii(&domain) } @@ -105,7 +103,7 @@ pub fn op_node_idna_punycode_to_ascii( #[string] pub fn op_node_idna_punycode_to_unicode( #[string] domain: String, -) -> Result { +) -> Result { to_unicode(&domain) } @@ -115,8 +113,8 @@ pub fn op_node_idna_punycode_to_unicode( #[string] pub fn op_node_idna_domain_to_ascii( #[string] domain: String, -) -> Result { - idna::domain_to_ascii(&domain).map_err(|e| e.into()) +) -> Result { + idna::domain_to_ascii(&domain) } /// Converts a domain to Unicode as per the IDNA spec @@ -131,7 +129,7 @@ pub fn op_node_idna_domain_to_unicode(#[string] domain: String) -> String { #[string] pub fn op_node_idna_punycode_decode( #[string] domain: String, -) -> Result { +) -> Result { if domain.is_empty() { return Ok(domain); } @@ -147,11 +145,10 @@ pub fn op_node_idna_punycode_decode( .unwrap_or(domain.len() - 1); if !domain[..last_dash].is_ascii() { - return Err(not_basic_err()); + return Err(IdnaError::IllegalInput); } - idna::punycode::decode_to_string(&domain) - .ok_or_else(|| deno_core::error::range_error("Invalid input")) + idna::punycode::decode_to_string(&domain).ok_or(IdnaError::InvalidInput) } #[op2] diff --git a/ext/node/ops/inspector.rs b/ext/node/ops/inspector.rs new file mode 100644 index 0000000000..34a7e004c1 --- /dev/null +++ b/ext/node/ops/inspector.rs @@ -0,0 +1,161 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. + +use crate::NodePermissions; +use deno_core::anyhow::Error; +use deno_core::error::generic_error; +use deno_core::futures::channel::mpsc; +use deno_core::op2; +use deno_core::v8; +use deno_core::GarbageCollected; +use deno_core::InspectorSessionKind; +use deno_core::InspectorSessionOptions; +use deno_core::JsRuntimeInspector; +use deno_core::OpState; +use std::cell::RefCell; +use std::rc::Rc; + +#[op2(fast)] +pub fn op_inspector_enabled() -> bool { + // TODO: hook up to InspectorServer + false +} + +#[op2] +pub fn op_inspector_open
<P>
( + _state: &mut OpState, + _port: Option, + #[string] _host: Option, +) -> Result<(), Error> +where + P: NodePermissions + 'static, +{ + // TODO: hook up to InspectorServer + /* + let server = state.borrow_mut::(); + if let Some(host) = host { + server.set_host(host); + } + if let Some(port) = port { + server.set_port(port); + } + state + .borrow_mut::
<P>
() + .check_net((server.host(), Some(server.port())), "inspector.open")?; + */ + + Ok(()) +} + +#[op2(fast)] +pub fn op_inspector_close() { + // TODO: hook up to InspectorServer +} + +#[op2] +#[string] +pub fn op_inspector_url() -> Option { + // TODO: hook up to InspectorServer + None +} + +#[op2(fast)] +pub fn op_inspector_wait(state: &OpState) -> bool { + match state.try_borrow::>>() { + Some(inspector) => { + inspector + .borrow_mut() + .wait_for_session_and_break_on_next_statement(); + true + } + None => false, + } +} + +#[op2(fast)] +pub fn op_inspector_emit_protocol_event( + #[string] _event_name: String, + #[string] _params: String, +) { + // TODO: inspector channel & protocol notifications +} + +struct JSInspectorSession { + tx: RefCell>>, +} + +impl GarbageCollected for JSInspectorSession {} + +#[op2] +#[cppgc] +pub fn op_inspector_connect<'s, P>( + isolate: *mut v8::Isolate, + scope: &mut v8::HandleScope<'s>, + state: &mut OpState, + connect_to_main_thread: bool, + callback: v8::Local<'s, v8::Function>, +) -> Result +where + P: NodePermissions + 'static, +{ + state + .borrow_mut::
<P>
() + .check_sys("inspector", "inspector.Session.connect")?; + + if connect_to_main_thread { + return Err(generic_error("connectToMainThread not supported")); + } + + let context = scope.get_current_context(); + let context = v8::Global::new(scope, context); + let callback = v8::Global::new(scope, callback); + + let inspector = state + .borrow::>>() + .borrow_mut(); + + let tx = inspector.create_raw_session( + InspectorSessionOptions { + kind: InspectorSessionKind::NonBlocking { + wait_for_disconnect: false, + }, + }, + // The inspector connection does not keep the event loop alive but + // when the inspector sends a message to the frontend, the JS that + // that runs may keep the event loop alive so we have to call back + // synchronously, instead of using the usual LocalInspectorSession + // UnboundedReceiver API. + Box::new(move |message| { + // SAFETY: This function is called directly by the inspector, so + // 1) The isolate is still valid + // 2) We are on the same thread as the Isolate + let scope = unsafe { &mut v8::CallbackScope::new(&mut *isolate) }; + let context = v8::Local::new(scope, context.clone()); + let scope = &mut v8::ContextScope::new(scope, context); + let scope = &mut v8::TryCatch::new(scope); + let recv = v8::undefined(scope); + if let Some(message) = v8::String::new(scope, &message.content) { + let callback = v8::Local::new(scope, callback.clone()); + callback.call(scope, recv.into(), &[message.into()]); + } + }), + ); + + Ok(JSInspectorSession { + tx: RefCell::new(Some(tx)), + }) +} + +#[op2(fast)] +pub fn op_inspector_dispatch( + #[cppgc] session: &JSInspectorSession, + #[string] message: String, +) { + if let Some(tx) = &*session.tx.borrow() { + let _ = tx.unbounded_send(message); + } +} + +#[op2(fast)] +pub fn op_inspector_disconnect(#[cppgc] session: &JSInspectorSession) { + drop(session.tx.borrow_mut().take()); +} diff --git a/ext/node/ops/ipc.rs b/ext/node/ops/ipc.rs index 59b6fece14..672cf0d707 100644 --- a/ext/node/ops/ipc.rs +++ b/ext/node/ops/ipc.rs @@ -17,8 +17,6 @@ mod impl_ { use std::task::Context; use std::task::Poll; - use deno_core::error::bad_resource_id; - use deno_core::error::AnyError; use deno_core::op2; use deno_core::serde; use deno_core::serde::Serializer; @@ -167,7 +165,7 @@ mod impl_ { #[smi] pub fn op_node_child_ipc_pipe( state: &mut OpState, - ) -> Result, AnyError> { + ) -> Result, io::Error> { let fd = match state.try_borrow_mut::() { Some(child_pipe_fd) => child_pipe_fd.0, None => return Ok(None), @@ -180,6 +178,18 @@ mod impl_ { )) } + #[derive(Debug, thiserror::Error)] + pub enum IpcError { + #[error(transparent)] + Resource(deno_core::error::AnyError), + #[error(transparent)] + IpcJsonStream(#[from] IpcJsonStreamError), + #[error(transparent)] + Canceled(#[from] deno_core::Canceled), + #[error("failed to serialize json value: {0}")] + SerdeJson(serde_json::Error), + } + #[op2(async)] pub fn op_node_ipc_write<'a>( scope: &mut v8::HandleScope<'a>, @@ -192,34 +202,37 @@ mod impl_ { // ideally we would just return `Result<(impl Future, bool), ..>`, but that's not // supported by `op2` currently. 
queue_ok: v8::Local<'a, v8::Array>, - ) -> Result>, AnyError> { + ) -> Result>, IpcError> { let mut serialized = Vec::with_capacity(64); let mut ser = serde_json::Serializer::new(&mut serialized); - serialize_v8_value(scope, value, &mut ser).map_err(|e| { - deno_core::error::type_error(format!( - "failed to serialize json value: {e}" - )) - })?; + serialize_v8_value(scope, value, &mut ser).map_err(IpcError::SerdeJson)?; serialized.push(b'\n'); let stream = state .borrow() .resource_table .get::(rid) - .map_err(|_| bad_resource_id())?; + .map_err(IpcError::Resource)?; let old = stream .queued_bytes .fetch_add(serialized.len(), std::sync::atomic::Ordering::Relaxed); if old + serialized.len() > 2 * INITIAL_CAPACITY { // sending messages too fast - let v = false.to_v8(scope)?; + let v = false.to_v8(scope).unwrap(); // Infallible queue_ok.set_index(scope, 0, v); } Ok(async move { - stream.clone().write_msg_bytes(&serialized).await?; + let cancel = stream.cancel.clone(); + let result = stream + .clone() + .write_msg_bytes(&serialized) + .or_cancel(cancel) + .await; + // adjust count even on error stream .queued_bytes .fetch_sub(serialized.len(), std::sync::atomic::Ordering::Relaxed); + result??; Ok(()) }) } @@ -239,12 +252,12 @@ mod impl_ { pub async fn op_node_ipc_read( state: Rc>, #[smi] rid: ResourceId, - ) -> Result { + ) -> Result { let stream = state .borrow() .resource_table .get::(rid) - .map_err(|_| bad_resource_id())?; + .map_err(IpcError::Resource)?; let cancel = stream.cancel.clone(); let mut stream = RcRef::map(stream, |r| &r.read_half).borrow_mut().await; @@ -400,7 +413,7 @@ mod impl_ { async fn write_msg_bytes( self: Rc, msg: &[u8], - ) -> Result<(), AnyError> { + ) -> Result<(), io::Error> { let mut write_half = RcRef::map(self, |r| &r.write_half).borrow_mut().await; write_half.write_all(msg).await?; @@ -455,6 +468,14 @@ mod impl_ { } } + #[derive(Debug, thiserror::Error)] + pub enum IpcJsonStreamError { + #[error("{0}")] + Io(#[source] std::io::Error), + #[error("{0}")] + SimdJson(#[source] simd_json::Error), + } + // JSON serialization stream over IPC pipe. // // `\n` is used as a delimiter between messages. @@ -475,7 +496,7 @@ mod impl_ { async fn read_msg( &mut self, - ) -> Result, AnyError> { + ) -> Result, IpcJsonStreamError> { let mut json = None; let nread = read_msg_inner( &mut self.pipe, @@ -483,7 +504,8 @@ mod impl_ { &mut json, &mut self.read_buffer, ) - .await?; + .await + .map_err(IpcJsonStreamError::Io)?; if nread == 0 { // EOF. return Ok(None); @@ -493,7 +515,8 @@ mod impl_ { Some(v) => v, None => { // Took more than a single read and some buffering. - simd_json::from_slice(&mut self.buffer[..nread])? + simd_json::from_slice(&mut self.buffer[..nread]) + .map_err(IpcJsonStreamError::SimdJson)? } }; diff --git a/ext/node/ops/mod.rs b/ext/node/ops/mod.rs index b562261f39..b53f19dc23 100644 --- a/ext/node/ops/mod.rs +++ b/ext/node/ops/mod.rs @@ -7,6 +7,7 @@ pub mod fs; pub mod http; pub mod http2; pub mod idna; +pub mod inspector; pub mod ipc; pub mod os; pub mod process; diff --git a/ext/node/ops/os/mod.rs b/ext/node/ops/os/mod.rs index ca91895f22..d291277ad4 100644 --- a/ext/node/ops/os/mod.rs +++ b/ext/node/ops/os/mod.rs @@ -1,19 +1,31 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
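Besides typing the errors, the ipc.rs hunk above changes `op_node_ipc_write` to race the pipe write against the stream's `CancelHandle` and to decrement the queued-byte counter even when the write fails. A minimal sketch of the nested-`Result` shape that `result??` flattens; `Canceled` here is a stand-in for `deno_core::Canceled`:

```rust
use std::io;
use std::sync::atomic::{AtomicUsize, Ordering};

#[derive(Debug, thiserror::Error)]
#[error("operation canceled")]
pub struct Canceled; // stand-in for deno_core::Canceled

#[derive(Debug, thiserror::Error)]
pub enum IpcError {
    #[error(transparent)]
    Canceled(#[from] Canceled),
    #[error(transparent)]
    Io(#[from] io::Error),
}

// Stand-in for `write_msg_bytes(..).or_cancel(cancel).await`: the outer
// Result reports cancellation, the inner one the write itself.
fn write_or_cancel(_msg: &[u8]) -> Result<Result<(), io::Error>, Canceled> {
    Ok(Ok(()))
}

fn send(queued: &AtomicUsize, msg: &[u8]) -> Result<(), IpcError> {
    let result = write_or_cancel(msg);
    // Adjust the back-pressure counter even on error, as the diff notes.
    queued.fetch_sub(msg.len(), Ordering::Relaxed);
    result??; // first ? surfaces Canceled, second ? the io::Error
    Ok(())
}
```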
+use std::mem::MaybeUninit; + use crate::NodePermissions; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::OpState; mod cpus; -mod priority; +pub mod priority; + +#[derive(Debug, thiserror::Error)] +pub enum OsError { + #[error(transparent)] + Priority(priority::PriorityError), + #[error(transparent)] + Permission(#[from] deno_permissions::PermissionCheckError), + #[error("Failed to get cpu info")] + FailedToGetCpuInfo, + #[error("Failed to get user info")] + FailedToGetUserInfo(#[source] std::io::Error), +} #[op2(fast)] pub fn op_node_os_get_priority
<P>
( state: &mut OpState, pid: u32, -) -> Result +) -> Result where P: NodePermissions + 'static, { @@ -22,7 +34,7 @@ where permissions.check_sys("getPriority", "node:os.getPriority()")?; } - priority::get_priority(pid) + priority::get_priority(pid).map_err(OsError::Priority) } #[op2(fast)] @@ -30,7 +42,7 @@ pub fn op_node_os_set_priority
<P>
( state: &mut OpState, pid: u32, priority: i32, -) -> Result<(), AnyError> +) -> Result<(), OsError> where P: NodePermissions + 'static, { @@ -39,25 +51,171 @@ where permissions.check_sys("setPriority", "node:os.setPriority()")?; } - priority::set_priority(pid, priority) + priority::set_priority(pid, priority).map_err(OsError::Priority) +} + +#[derive(serde::Serialize)] +pub struct UserInfo { + username: String, + homedir: String, + shell: Option, +} + +#[cfg(unix)] +fn get_user_info(uid: u32) -> Result { + use std::ffi::CStr; + let mut pw: MaybeUninit = MaybeUninit::uninit(); + let mut result: *mut libc::passwd = std::ptr::null_mut(); + // SAFETY: libc call, no invariants + let max_buf_size = unsafe { libc::sysconf(libc::_SC_GETPW_R_SIZE_MAX) }; + let buf_size = if max_buf_size < 0 { + // from the man page + 16_384 + } else { + max_buf_size as usize + }; + let mut buf = { + let mut b = Vec::>::with_capacity(buf_size); + // SAFETY: MaybeUninit has no initialization invariants, and len == cap + unsafe { + b.set_len(buf_size); + } + b + }; + // SAFETY: libc call, args are correct + let s = unsafe { + libc::getpwuid_r( + uid, + pw.as_mut_ptr(), + buf.as_mut_ptr().cast(), + buf_size, + std::ptr::addr_of_mut!(result), + ) + }; + if result.is_null() { + if s != 0 { + return Err( + OsError::FailedToGetUserInfo(std::io::Error::last_os_error()), + ); + } else { + return Err(OsError::FailedToGetUserInfo(std::io::Error::from( + std::io::ErrorKind::NotFound, + ))); + } + } + // SAFETY: pw was initialized by the call to `getpwuid_r` above + let pw = unsafe { pw.assume_init() }; + // SAFETY: initialized above, pw alive until end of function, nul terminated + let username = unsafe { CStr::from_ptr(pw.pw_name) }; + // SAFETY: initialized above, pw alive until end of function, nul terminated + let homedir = unsafe { CStr::from_ptr(pw.pw_dir) }; + // SAFETY: initialized above, pw alive until end of function, nul terminated + let shell = unsafe { CStr::from_ptr(pw.pw_shell) }; + Ok(UserInfo { + username: username.to_string_lossy().into_owned(), + homedir: homedir.to_string_lossy().into_owned(), + shell: Some(shell.to_string_lossy().into_owned()), + }) +} + +#[cfg(windows)] +fn get_user_info(_uid: u32) -> Result { + use std::ffi::OsString; + use std::os::windows::ffi::OsStringExt; + + use windows_sys::Win32::Foundation::CloseHandle; + use windows_sys::Win32::Foundation::GetLastError; + use windows_sys::Win32::Foundation::ERROR_INSUFFICIENT_BUFFER; + use windows_sys::Win32::Foundation::HANDLE; + use windows_sys::Win32::System::Threading::GetCurrentProcess; + use windows_sys::Win32::System::Threading::OpenProcessToken; + use windows_sys::Win32::UI::Shell::GetUserProfileDirectoryW; + struct Handle(HANDLE); + impl Drop for Handle { + fn drop(&mut self) { + // SAFETY: win32 call + unsafe { + CloseHandle(self.0); + } + } + } + let mut token: MaybeUninit = MaybeUninit::uninit(); + + // Get a handle to the current process + // SAFETY: win32 call + unsafe { + if OpenProcessToken( + GetCurrentProcess(), + windows_sys::Win32::Security::TOKEN_READ, + token.as_mut_ptr(), + ) == 0 + { + return Err( + OsError::FailedToGetUserInfo(std::io::Error::last_os_error()), + ); + } + } + + // SAFETY: initialized by call above + let token = Handle(unsafe { token.assume_init() }); + + let mut bufsize = 0; + // get the size for the homedir buf (it'll end up in `bufsize`) + // SAFETY: win32 call + unsafe { + GetUserProfileDirectoryW(token.0, std::ptr::null_mut(), &mut bufsize); + let err = GetLastError(); + if err != 
ERROR_INSUFFICIENT_BUFFER { + return Err(OsError::FailedToGetUserInfo( + std::io::Error::from_raw_os_error(err as i32), + )); + } + } + let mut path = vec![0; bufsize as usize]; + // Actually get the homedir + // SAFETY: path is `bufsize` elements + unsafe { + if GetUserProfileDirectoryW(token.0, path.as_mut_ptr(), &mut bufsize) == 0 { + return Err( + OsError::FailedToGetUserInfo(std::io::Error::last_os_error()), + ); + } + } + // remove trailing nul + path.pop(); + let homedir_wide = OsString::from_wide(&path); + let homedir = homedir_wide.to_string_lossy().into_owned(); + + Ok(UserInfo { + username: deno_whoami::username(), + homedir, + shell: None, + }) } #[op2] -#[string] -pub fn op_node_os_username
<P>
(state: &mut OpState) -> Result +#[serde] +pub fn op_node_os_user_info
<P>
( + state: &mut OpState, + #[smi] uid: u32, +) -> Result where P: NodePermissions + 'static, { { let permissions = state.borrow_mut::
<P>
(); - permissions.check_sys("username", "node:os.userInfo()")?; + permissions + .check_sys("userInfo", "node:os.userInfo()") + .map_err(OsError::Permission)?; } - Ok(deno_whoami::username()) + get_user_info(uid) } #[op2(fast)] -pub fn op_geteuid
<P>
(state: &mut OpState) -> Result +pub fn op_geteuid
<P>
( + state: &mut OpState, +) -> Result where P: NodePermissions + 'static, { @@ -76,7 +234,9 @@ where } #[op2(fast)] -pub fn op_getegid
<P>
(state: &mut OpState) -> Result +pub fn op_getegid
<P>
( + state: &mut OpState, +) -> Result where P: NodePermissions + 'static, { @@ -96,7 +256,7 @@ where #[op2] #[serde] -pub fn op_cpus
<P>
(state: &mut OpState) -> Result, AnyError> +pub fn op_cpus
<P>
(state: &mut OpState) -> Result, OsError> where P: NodePermissions + 'static, { @@ -105,12 +265,14 @@ where permissions.check_sys("cpus", "node:os.cpus()")?; } - cpus::cpu_info().ok_or_else(|| type_error("Failed to get cpu info")) + cpus::cpu_info().ok_or(OsError::FailedToGetCpuInfo) } #[op2] #[string] -pub fn op_homedir
<P>
(state: &mut OpState) -> Result, AnyError> +pub fn op_homedir
<P>
( + state: &mut OpState, +) -> Result, deno_core::error::AnyError> where P: NodePermissions + 'static, { diff --git a/ext/node/ops/os/priority.rs b/ext/node/ops/os/priority.rs index 043928e2a6..9a1ebcca70 100644 --- a/ext/node/ops/os/priority.rs +++ b/ext/node/ops/os/priority.rs @@ -1,12 +1,18 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use deno_core::error::AnyError; - pub use impl_::*; +#[derive(Debug, thiserror::Error)] +pub enum PriorityError { + #[error("{0}")] + Io(#[from] std::io::Error), + #[cfg(windows)] + #[error("Invalid priority")] + InvalidPriority, +} + #[cfg(unix)] mod impl_ { - use super::*; use errno::errno; use errno::set_errno; use errno::Errno; @@ -16,7 +22,7 @@ mod impl_ { const PRIORITY_HIGH: i32 = -14; // Ref: https://github.com/libuv/libuv/blob/55376b044b74db40772e8a6e24d67a8673998e02/src/unix/core.c#L1533-L1547 - pub fn get_priority(pid: u32) -> Result { + pub fn get_priority(pid: u32) -> Result { set_errno(Errno(0)); match ( // SAFETY: libc::getpriority is unsafe @@ -29,7 +35,10 @@ mod impl_ { } } - pub fn set_priority(pid: u32, priority: i32) -> Result<(), AnyError> { + pub fn set_priority( + pid: u32, + priority: i32, + ) -> Result<(), super::PriorityError> { // SAFETY: libc::setpriority is unsafe match unsafe { libc::setpriority(PRIO_PROCESS, pid as id_t, priority) } { -1 => Err(std::io::Error::last_os_error().into()), @@ -40,8 +49,6 @@ mod impl_ { #[cfg(windows)] mod impl_ { - use super::*; - use deno_core::error::type_error; use winapi::shared::minwindef::DWORD; use winapi::shared::minwindef::FALSE; use winapi::shared::ntdef::NULL; @@ -67,7 +74,7 @@ mod impl_ { const PRIORITY_HIGHEST: i32 = -20; // Ported from: https://github.com/libuv/libuv/blob/a877ca2435134ef86315326ef4ef0c16bdbabf17/src/win/util.c#L1649-L1685 - pub fn get_priority(pid: u32) -> Result { + pub fn get_priority(pid: u32) -> Result { // SAFETY: Windows API calls unsafe { let handle = if pid == 0 { @@ -95,7 +102,10 @@ mod impl_ { } // Ported from: https://github.com/libuv/libuv/blob/a877ca2435134ef86315326ef4ef0c16bdbabf17/src/win/util.c#L1688-L1719 - pub fn set_priority(pid: u32, priority: i32) -> Result<(), AnyError> { + pub fn set_priority( + pid: u32, + priority: i32, + ) -> Result<(), super::PriorityError> { // SAFETY: Windows API calls unsafe { let handle = if pid == 0 { @@ -109,7 +119,7 @@ mod impl_ { #[allow(clippy::manual_range_contains)] let priority_class = if priority < PRIORITY_HIGHEST || priority > PRIORITY_LOW { - return Err(type_error("Invalid priority")); + return Err(super::PriorityError::InvalidPriority); } else if priority < PRIORITY_HIGH { REALTIME_PRIORITY_CLASS } else if priority < PRIORITY_ABOVE_NORMAL { diff --git a/ext/node/ops/process.rs b/ext/node/ops/process.rs index 0992c46c62..282567226e 100644 --- a/ext/node/ops/process.rs +++ b/ext/node/ops/process.rs @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use deno_core::error::AnyError; use deno_core::op2; use deno_core::OpState; use deno_permissions::PermissionsContainer; @@ -51,7 +50,7 @@ pub fn op_node_process_kill( state: &mut OpState, #[smi] pid: i32, #[smi] sig: i32, -) -> Result { +) -> Result { state .borrow_mut::() .check_run_all("process.kill")?; diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs index 7d85ee8532..30db8b6293 100644 --- a/ext/node/ops/require.rs +++ b/ext/node/ops/require.rs @@ -1,8 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
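The priority.rs hunks above keep libuv's errno convention: `getpriority(2)` may legitimately return -1, so errno is cleared before the call and consulted afterwards instead of trusting the return value alone. A Unix-only sketch of that dance, assuming the same libc and errno crates the file already uses:

```rust
#[cfg(unix)]
fn get_priority(pid: u32) -> Result<i32, std::io::Error> {
    use errno::{errno, set_errno, Errno};

    // -1 is a valid niceness value, so clear errno first...
    set_errno(Errno(0));
    // SAFETY: plain libc call with integer arguments.
    let prio =
        unsafe { libc::getpriority(libc::PRIO_PROCESS, pid as libc::id_t) };
    // ...and treat -1 as an error only if errno was actually set.
    if prio == -1 && errno().0 != 0 {
        return Err(std::io::Error::last_os_error());
    }
    Ok(prio)
}
```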
-use deno_core::anyhow::Context; -use deno_core::error::generic_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::url::Url; use deno_core::v8; @@ -12,6 +9,7 @@ use deno_core::OpState; use deno_fs::FileSystemRc; use deno_package_json::PackageJsonRc; use deno_path_util::normalize_path; +use deno_path_util::url_to_file_path; use node_resolver::NodeModuleKind; use node_resolver::NodeResolutionMode; use node_resolver::REQUIRE_CONDITIONS; @@ -22,21 +20,52 @@ use std::path::PathBuf; use std::rc::Rc; use crate::NodePermissions; -use crate::NodeRequireResolverRc; +use crate::NodeRequireLoaderRc; use crate::NodeResolverRc; use crate::NpmResolverRc; +use crate::PackageJsonResolverRc; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn ensure_read_permission<'a, P>( state: &mut OpState, file_path: &'a Path, -) -> Result, AnyError> +) -> Result, deno_core::error::AnyError> where P: NodePermissions + 'static, { - let resolver = state.borrow::().clone(); + let loader = state.borrow::().clone(); let permissions = state.borrow_mut::
<P>
(); - resolver.ensure_read_permission(permissions, file_path) + loader.ensure_read_permission(permissions, file_path) +} + +#[derive(Debug, thiserror::Error)] +pub enum RequireError { + #[error(transparent)] + UrlParse(#[from] url::ParseError), + #[error(transparent)] + Permission(deno_core::error::AnyError), + #[error(transparent)] + PackageExportsResolve( + #[from] node_resolver::errors::PackageExportsResolveError, + ), + #[error(transparent)] + PackageJsonLoad(#[from] node_resolver::errors::PackageJsonLoadError), + #[error(transparent)] + ClosestPkgJson(#[from] node_resolver::errors::ClosestPkgJsonError), + #[error(transparent)] + PackageImportsResolve( + #[from] node_resolver::errors::PackageImportsResolveError, + ), + #[error(transparent)] + FilePathConversion(#[from] deno_path_util::UrlToFilePathError), + #[error(transparent)] + UrlConversion(#[from] deno_path_util::PathToUrlError), + #[error(transparent)] + Fs(#[from] deno_io::fs::FsError), + #[error(transparent)] + ReadModule(deno_core::error::AnyError), + #[error("Unable to get CWD: {0}")] + UnableToGetCwd(deno_io::fs::FsError), } #[op2] @@ -95,7 +124,7 @@ pub fn op_require_init_paths() -> Vec { pub fn op_require_node_module_paths
<P>
( state: &mut OpState, #[string] from: String, -) -> Result, AnyError> +) -> Result, RequireError> where P: NodePermissions + 'static, { @@ -104,13 +133,10 @@ where let from = if from.starts_with("file:///") { url_to_file_path(&Url::parse(&from)?)? } else { - let current_dir = - &(fs.cwd().map_err(AnyError::from)).context("Unable to get CWD")?; - deno_path_util::normalize_path(current_dir.join(from)) + let current_dir = &fs.cwd().map_err(RequireError::UnableToGetCwd)?; + normalize_path(current_dir.join(from)) }; - let from = ensure_read_permission::
<P>
(state, &from)?; - if cfg!(windows) { // return root node_modules when path is 'D:\\'. let from_str = from.to_str().unwrap(); @@ -131,7 +157,7 @@ where } let mut paths = Vec::with_capacity(from.components().count()); - let mut current_path = from.as_ref(); + let mut current_path = from.as_path(); let mut maybe_parent = Some(current_path); while let Some(parent) = maybe_parent { if !parent.ends_with("node_modules") { @@ -209,8 +235,11 @@ pub fn op_require_is_deno_dir_package( state: &mut OpState, #[string] path: String, ) -> bool { - let resolver = state.borrow::(); - resolver.in_npm_package_at_file_path(&PathBuf::from(path)) + let resolver = state.borrow::(); + match deno_path_util::url_from_file_path(&PathBuf::from(path)) { + Ok(specifier) => resolver.in_npm_package(&specifier), + Err(_) => false, + } } #[op2] @@ -264,7 +293,7 @@ pub fn op_require_path_is_absolute(#[string] p: String) -> bool { pub fn op_require_stat
<P>
( state: &mut OpState, #[string] path: String, -) -> Result +) -> Result where P: NodePermissions + 'static, { @@ -287,12 +316,13 @@ where pub fn op_require_real_path
<P>
( state: &mut OpState, #[string] request: String, -) -> Result +) -> Result where P: NodePermissions + 'static, { let path = PathBuf::from(request); - let path = ensure_read_permission::
<P>
(state, &path)?; + let path = ensure_read_permission::
<P>
(state, &path) + .map_err(RequireError::Permission)?; let fs = state.borrow::(); let canonicalized_path = deno_path_util::strip_unc_prefix(fs.realpath_sync(&path)?); @@ -319,12 +349,14 @@ pub fn op_require_path_resolve(#[serde] parts: Vec) -> String { #[string] pub fn op_require_path_dirname( #[string] request: String, -) -> Result { +) -> Result { let p = PathBuf::from(request); if let Some(parent) = p.parent() { Ok(parent.to_string_lossy().into_owned()) } else { - Err(generic_error("Path doesn't have a parent")) + Err(deno_core::error::generic_error( + "Path doesn't have a parent", + )) } } @@ -332,12 +364,14 @@ pub fn op_require_path_dirname( #[string] pub fn op_require_path_basename( #[string] request: String, -) -> Result { +) -> Result { let p = PathBuf::from(request); if let Some(path) = p.file_name() { Ok(path.to_string_lossy().into_owned()) } else { - Err(generic_error("Path doesn't have a file name")) + Err(deno_core::error::generic_error( + "Path doesn't have a file name", + )) } } @@ -348,7 +382,7 @@ pub fn op_require_try_self_parent_path
<P>
( has_parent: bool, #[string] maybe_parent_filename: Option, #[string] maybe_parent_id: Option, -) -> Result, AnyError> +) -> Result, deno_core::error::AnyError> where P: NodePermissions + 'static, { @@ -378,7 +412,7 @@ pub fn op_require_try_self
<P>
( state: &mut OpState, #[string] parent_path: Option, #[string] request: String, -) -> Result, AnyError> +) -> Result, RequireError> where P: NodePermissions + 'static, { @@ -386,8 +420,8 @@ where return Ok(None); } - let node_resolver = state.borrow::(); - let pkg = node_resolver + let pkg_json_resolver = state.borrow::(); + let pkg = pkg_json_resolver .get_closest_package_json_from_path(&PathBuf::from(parent_path.unwrap())) .ok() .flatten(); @@ -416,6 +450,7 @@ where let referrer = deno_core::url::Url::from_file_path(&pkg.path).unwrap(); if let Some(exports) = &pkg.exports { + let node_resolver = state.borrow::(); let r = node_resolver.package_exports_resolve( &pkg.path, &expansion, @@ -440,14 +475,18 @@ where pub fn op_require_read_file
<P>
( state: &mut OpState, #[string] file_path: String, -) -> Result +) -> Result where P: NodePermissions + 'static, { let file_path = PathBuf::from(file_path); - let file_path = ensure_read_permission::
<P>
(state, &file_path)?; - let fs = state.borrow::(); - Ok(fs.read_text_file_lossy_sync(&file_path, None)?) + // todo(dsherret): there's multiple borrows to NodeRequireLoaderRc here + let file_path = ensure_read_permission::
<P>
(state, &file_path) + .map_err(RequireError::Permission)?; + let loader = state.borrow::(); + loader + .load_text_file_lossy(&file_path) + .map_err(RequireError::ReadModule) } #[op2] @@ -472,16 +511,17 @@ pub fn op_require_resolve_exports
<P>
( #[string] name: String, #[string] expansion: String, #[string] parent_path: String, -) -> Result, AnyError> +) -> Result, RequireError> where P: NodePermissions + 'static, { let fs = state.borrow::(); - let npm_resolver = state.borrow::(); let node_resolver = state.borrow::(); + let pkg_json_resolver = state.borrow::(); let modules_path = PathBuf::from(&modules_path_str); - let pkg_path = if npm_resolver.in_npm_package_at_file_path(&modules_path) + let modules_specifier = deno_path_util::url_from_file_path(&modules_path)?; + let pkg_path = if node_resolver.in_npm_package(&modules_specifier) && !uses_local_node_modules_dir { modules_path @@ -495,7 +535,7 @@ where } }; let Some(pkg) = - node_resolver.load_package_json(&pkg_path.join("package.json"))? + pkg_json_resolver.load_package_json(&pkg_path.join("package.json"))? else { return Ok(None); }; @@ -503,12 +543,16 @@ where return Ok(None); }; - let referrer = Url::from_file_path(parent_path).unwrap(); + let referrer = if parent_path.is_empty() { + None + } else { + Some(Url::from_file_path(parent_path).unwrap()) + }; let r = node_resolver.package_exports_resolve( &pkg.path, &format!(".{expansion}"), exports, - Some(&referrer), + referrer.as_ref(), NodeModuleKind::Cjs, REQUIRE_CONDITIONS, NodeResolutionMode::Execution, @@ -525,16 +569,14 @@ where pub fn op_require_read_closest_package_json
<P>
( state: &mut OpState, #[string] filename: String, -) -> Result, AnyError> +) -> Result, node_resolver::errors::ClosestPkgJsonError> where P: NodePermissions + 'static, { let filename = PathBuf::from(filename); // permissions: allow reading the closest package.json files - let node_resolver = state.borrow::().clone(); - node_resolver - .get_closest_package_json_from_path(&filename) - .map_err(AnyError::from) + let pkg_json_resolver = state.borrow::(); + pkg_json_resolver.get_closest_package_json_from_path(&filename) } #[op2] @@ -546,13 +588,13 @@ pub fn op_require_read_package_scope
<P>
( where P: NodePermissions + 'static, { - let node_resolver = state.borrow::().clone(); + let pkg_json_resolver = state.borrow::(); let package_json_path = PathBuf::from(package_json_path); if package_json_path.file_name() != Some("package.json".as_ref()) { // permissions: do not allow reading a non-package.json file return None; } - node_resolver + pkg_json_resolver .load_package_json(&package_json_path) .ok() .flatten() @@ -564,22 +606,23 @@ pub fn op_require_package_imports_resolve
<P>
( state: &mut OpState, #[string] referrer_filename: String, #[string] request: String, -) -> Result, AnyError> +) -> Result, RequireError> where P: NodePermissions + 'static, { let referrer_path = PathBuf::from(&referrer_filename); - let referrer_path = ensure_read_permission::
<P>
(state, &referrer_path)?; - let node_resolver = state.borrow::(); + let referrer_path = ensure_read_permission::
<P>
(state, &referrer_path) + .map_err(RequireError::Permission)?; + let pkg_json_resolver = state.borrow::(); let Some(pkg) = - node_resolver.get_closest_package_json_from_path(&referrer_path)? + pkg_json_resolver.get_closest_package_json_from_path(&referrer_path)? else { return Ok(None); }; if pkg.imports.is_some() { - let referrer_url = - deno_core::url::Url::from_file_path(&referrer_filename).unwrap(); + let node_resolver = state.borrow::(); + let referrer_url = Url::from_file_path(&referrer_filename).unwrap(); let url = node_resolver.package_imports_resolve( &request, Some(&referrer_url), @@ -604,20 +647,11 @@ pub fn op_require_break_on_next_statement(state: Rc>) { inspector.wait_for_session_and_break_on_next_statement() } -fn url_to_file_path_string(url: &Url) -> Result { +fn url_to_file_path_string(url: &Url) -> Result { let file_path = url_to_file_path(url)?; Ok(file_path.to_string_lossy().into_owned()) } -fn url_to_file_path(url: &Url) -> Result { - match url.to_file_path() { - Ok(file_path) => Ok(file_path), - Err(()) => { - deno_core::anyhow::bail!("failed to convert '{}' to file path", url) - } - } -} - #[op2(fast)] pub fn op_require_can_parse_as_esm( scope: &mut v8::HandleScope, diff --git a/ext/node/ops/util.rs b/ext/node/ops/util.rs index 533d51c92c..1c177ac043 100644 --- a/ext/node/ops/util.rs +++ b/ext/node/ops/util.rs @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -use deno_core::error::AnyError; use deno_core::op2; use deno_core::OpState; use deno_core::ResourceHandle; @@ -22,7 +21,7 @@ enum HandleType { pub fn op_node_guess_handle_type( state: &mut OpState, rid: u32, -) -> Result { +) -> Result { let handle = state.resource_table.get_handle(rid)?; let handle_type = match handle { diff --git a/ext/node/ops/v8.rs b/ext/node/ops/v8.rs index 8813d2e18e..61f67f11f7 100644 --- a/ext/node/ops/v8.rs +++ b/ext/node/ops/v8.rs @@ -1,7 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
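The require.rs changes above also delete the local `url_to_file_path` helper, which bailed with an untyped anyhow error, in favor of `deno_path_util::url_to_file_path`, so the failure now surfaces as the typed `FilePathConversion` variant. For reference, a typed equivalent of the removed helper could look like this (the error name mirrors the diff's `UrlToFilePathError`; the real type lives in deno_path_util):

```rust
use std::path::PathBuf;
use url::Url;

#[derive(Debug, thiserror::Error)]
#[error("failed to convert '{0}' to file path")]
pub struct UrlToFilePathError(Url);

// Same behavior as the removed helper, minus the anyhow::bail!.
fn url_to_file_path(url: &Url) -> Result<PathBuf, UrlToFilePathError> {
    url.to_file_path().map_err(|_| UrlToFilePathError(url.clone()))
}
```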
-use deno_core::error::generic_error; -use deno_core::error::type_error; -use deno_core::error::AnyError; + use deno_core::op2; use deno_core::v8; use deno_core::FastString; @@ -206,10 +204,9 @@ pub fn op_v8_write_value( scope: &mut v8::HandleScope, #[cppgc] ser: &Serializer, value: v8::Local, -) -> Result<(), AnyError> { +) { let context = scope.get_current_context(); ser.inner.write_value(context, value); - Ok(()) } struct DeserBuffer { @@ -271,11 +268,13 @@ pub fn op_v8_new_deserializer( scope: &mut v8::HandleScope, obj: v8::Local, buffer: v8::Local, -) -> Result, AnyError> { +) -> Result, deno_core::error::AnyError> { let offset = buffer.byte_offset(); let len = buffer.byte_length(); let backing_store = buffer.get_backing_store().ok_or_else(|| { - generic_error("deserialization buffer has no backing store") + deno_core::error::generic_error( + "deserialization buffer has no backing store", + ) })?; let (buf_slice, buf_ptr) = if let Some(data) = backing_store.data() { // SAFETY: the offset is valid for the underlying buffer because we're getting it directly from v8 @@ -317,10 +316,10 @@ pub fn op_v8_transfer_array_buffer_de( #[op2(fast)] pub fn op_v8_read_double( #[cppgc] deser: &Deserializer, -) -> Result { +) -> Result { let mut double = 0f64; if !deser.inner.read_double(&mut double) { - return Err(type_error("ReadDouble() failed")); + return Err(deno_core::error::type_error("ReadDouble() failed")); } Ok(double) } @@ -355,10 +354,10 @@ pub fn op_v8_read_raw_bytes( #[op2(fast)] pub fn op_v8_read_uint32( #[cppgc] deser: &Deserializer, -) -> Result { +) -> Result { let mut value = 0; if !deser.inner.read_uint32(&mut value) { - return Err(type_error("ReadUint32() failed")); + return Err(deno_core::error::type_error("ReadUint32() failed")); } Ok(value) @@ -368,10 +367,10 @@ pub fn op_v8_read_uint32( #[serde] pub fn op_v8_read_uint64( #[cppgc] deser: &Deserializer, -) -> Result<(u32, u32), AnyError> { +) -> Result<(u32, u32), deno_core::error::AnyError> { let mut val = 0; if !deser.inner.read_uint64(&mut val) { - return Err(type_error("ReadUint64() failed")); + return Err(deno_core::error::type_error("ReadUint64() failed")); } Ok(((val >> 32) as u32, val as u32)) diff --git a/ext/node/ops/winerror.rs b/ext/node/ops/winerror.rs index e9dbadb6f7..cb053774ef 100644 --- a/ext/node/ops/winerror.rs +++ b/ext/node/ops/winerror.rs @@ -62,7 +62,7 @@ pub fn op_node_sys_to_uv_error(err: i32) -> String { WSAEHOSTUNREACH => "EHOSTUNREACH", ERROR_INSUFFICIENT_BUFFER => "EINVAL", ERROR_INVALID_DATA => "EINVAL", - ERROR_INVALID_NAME => "EINVAL", + ERROR_INVALID_NAME => "ENOENT", ERROR_INVALID_PARAMETER => "EINVAL", WSAEINVAL => "EINVAL", WSAEPFNOSUPPORT => "EINVAL", diff --git a/ext/node/ops/worker_threads.rs b/ext/node/ops/worker_threads.rs index 5f139c5dc8..d2e5758826 100644 --- a/ext/node/ops/worker_threads.rs +++ b/ext/node/ops/worker_threads.rs @@ -1,39 +1,56 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
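The winerror.rs hunk above is a one-line behavior change: `ERROR_INVALID_NAME` now surfaces as `ENOENT` rather than `EINVAL`, matching what Node reports for malformed paths. A reduced sketch of the mapping style, with the two Win32 codes inlined (123 and 87 are their standard values):

```rust
const ERROR_INVALID_NAME: i32 = 123; // "The filename, directory name, or volume label syntax is incorrect."
const ERROR_INVALID_PARAMETER: i32 = 87;

fn sys_to_uv_error(err: i32) -> &'static str {
    match err {
        ERROR_INVALID_NAME => "ENOENT", // previously mapped to "EINVAL"
        ERROR_INVALID_PARAMETER => "EINVAL",
        _ => "UNKNOWN",
    }
}
```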
-use deno_core::error::generic_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::url::Url; use deno_core::OpState; use deno_fs::FileSystemRc; -use node_resolver::NodeResolution; use std::borrow::Cow; use std::path::Path; use std::path::PathBuf; use crate::NodePermissions; -use crate::NodeRequireResolverRc; -use crate::NodeResolverRc; +use crate::NodeRequireLoaderRc; #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn ensure_read_permission<'a, P>( state: &mut OpState, file_path: &'a Path, -) -> Result, AnyError> +) -> Result, deno_core::error::AnyError> where P: NodePermissions + 'static, { - let resolver = state.borrow::().clone(); + let loader = state.borrow::().clone(); let permissions = state.borrow_mut::
<P>
(); - resolver.ensure_read_permission(permissions, file_path) + loader.ensure_read_permission(permissions, file_path) } +#[derive(Debug, thiserror::Error)] +pub enum WorkerThreadsFilenameError { + #[error(transparent)] + Permission(deno_core::error::AnyError), + #[error("{0}")] + UrlParse(#[from] url::ParseError), + #[error("Relative path entries must start with '.' or '..'")] + InvalidRelativeUrl, + #[error("URL from Path-String")] + UrlFromPathString, + #[error("URL to Path-String")] + UrlToPathString, + #[error("URL to Path")] + UrlToPath, + #[error("File not found [{0:?}]")] + FileNotFound(PathBuf), + #[error(transparent)] + Fs(#[from] deno_io::fs::FsError), +} + +// todo(dsherret): we should remove this and do all this work inside op_create_worker #[op2] #[string] pub fn op_worker_threads_filename
<P>
( state: &mut OpState, #[string] specifier: String, -) -> Result +) -> Result where P: NodePermissions + 'static, { @@ -45,44 +62,26 @@ where } else { let path = PathBuf::from(&specifier); if path.is_relative() && !specifier.starts_with('.') { - return Err(generic_error( - "Relative path entries must start with '.' or '..'", - )); + return Err(WorkerThreadsFilenameError::InvalidRelativeUrl); } - let path = ensure_read_permission::
<P>
(state, &path)?; + let path = ensure_read_permission::
<P>
(state, &path) + .map_err(WorkerThreadsFilenameError::Permission)?; let fs = state.borrow::(); let canonicalized_path = deno_path_util::strip_unc_prefix(fs.realpath_sync(&path)?); Url::from_file_path(canonicalized_path) - .map_err(|e| generic_error(format!("URL from Path-String: {:#?}", e)))? + .map_err(|_| WorkerThreadsFilenameError::UrlFromPathString)? }; let url_path = url .to_file_path() - .map_err(|e| generic_error(format!("URL to Path-String: {:#?}", e)))?; - let url_path = ensure_read_permission::
<P>
(state, &url_path)?; + .map_err(|_| WorkerThreadsFilenameError::UrlToPathString)?; + let url_path = ensure_read_permission::
<P>
(state, &url_path) + .map_err(WorkerThreadsFilenameError::Permission)?; let fs = state.borrow::(); if !fs.exists_sync(&url_path) { - return Err(generic_error(format!("File not found [{:?}]", url_path))); - } - let node_resolver = state.borrow::(); - match node_resolver.url_to_node_resolution(url)? { - NodeResolution::Esm(u) => Ok(u.to_string()), - NodeResolution::CommonJs(u) => wrap_cjs(u), - NodeResolution::BuiltIn(_) => Err(generic_error("Neither ESM nor CJS")), + return Err(WorkerThreadsFilenameError::FileNotFound( + url_path.to_path_buf(), + )); } -} - -/// -/// Wrap a CJS file-URL and the required setup in a stringified `data:`-URL -/// -fn wrap_cjs(url: Url) -> Result { - let path = url - .to_file_path() - .map_err(|e| generic_error(format!("URL to Path: {:#?}", e)))?; - let filename = path.file_name().unwrap().to_string_lossy(); - Ok(format!( - "data:text/javascript,import {{ createRequire }} from \"node:module\";\ - const require = createRequire(\"{}\"); require(\"./{}\");", - url, filename, - )) + Ok(url.to_string()) } diff --git a/ext/node/ops/zlib/brotli.rs b/ext/node/ops/zlib/brotli.rs index 3e3905fc3d..1a681ff7f7 100644 --- a/ext/node/ops/zlib/brotli.rs +++ b/ext/node/ops/zlib/brotli.rs @@ -9,8 +9,6 @@ use brotli::BrotliDecompressStream; use brotli::BrotliResult; use brotli::BrotliState; use brotli::Decompressor; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::JsBuffer; use deno_core::OpState; @@ -19,7 +17,23 @@ use deno_core::ToJsBuffer; use std::cell::RefCell; use std::io::Read; -fn encoder_mode(mode: u32) -> Result { +#[derive(Debug, thiserror::Error)] +pub enum BrotliError { + #[error("Invalid encoder mode")] + InvalidEncoderMode, + #[error("Failed to compress")] + CompressFailed, + #[error("Failed to decompress")] + DecompressFailed, + #[error(transparent)] + Join(#[from] tokio::task::JoinError), + #[error(transparent)] + Resource(deno_core::error::AnyError), + #[error("{0}")] + Io(std::io::Error), +} + +fn encoder_mode(mode: u32) -> Result { Ok(match mode { 0 => BrotliEncoderMode::BROTLI_MODE_GENERIC, 1 => BrotliEncoderMode::BROTLI_MODE_TEXT, @@ -28,7 +42,7 @@ fn encoder_mode(mode: u32) -> Result { 4 => BrotliEncoderMode::BROTLI_FORCE_MSB_PRIOR, 5 => BrotliEncoderMode::BROTLI_FORCE_UTF8_PRIOR, 6 => BrotliEncoderMode::BROTLI_FORCE_SIGNED_PRIOR, - _ => return Err(type_error("Invalid encoder mode")), + _ => return Err(BrotliError::InvalidEncoderMode), }) } @@ -40,7 +54,7 @@ pub fn op_brotli_compress( #[smi] quality: i32, #[smi] lgwin: i32, #[smi] mode: u32, -) -> Result { +) -> Result { let mode = encoder_mode(mode)?; let mut out_size = out.len(); @@ -57,7 +71,7 @@ pub fn op_brotli_compress( &mut |_, _, _, _| (), ); if result != 1 { - return Err(type_error("Failed to compress")); + return Err(BrotliError::CompressFailed); } Ok(out_size) @@ -87,7 +101,7 @@ pub async fn op_brotli_compress_async( #[smi] quality: i32, #[smi] lgwin: i32, #[smi] mode: u32, -) -> Result { +) -> Result { let mode = encoder_mode(mode)?; tokio::task::spawn_blocking(move || { let input = &*input; @@ -107,7 +121,7 @@ pub async fn op_brotli_compress_async( &mut |_, _, _, _| (), ); if result != 1 { - return Err(type_error("Failed to compress")); + return Err(BrotliError::CompressFailed); } out.truncate(out_size); @@ -151,8 +165,11 @@ pub fn op_brotli_compress_stream( #[smi] rid: u32, #[buffer] input: &[u8], #[buffer] output: &mut [u8], -) -> Result { - let ctx = state.resource_table.get::(rid)?; +) -> Result { + let ctx = state + .resource_table + 
.get::(rid) + .map_err(BrotliError::Resource)?; let mut inst = ctx.inst.borrow_mut(); let mut output_offset = 0; @@ -168,7 +185,7 @@ pub fn op_brotli_compress_stream( &mut |_, _, _, _| (), ); if !result { - return Err(type_error("Failed to compress")); + return Err(BrotliError::CompressFailed); } Ok(output_offset) @@ -180,8 +197,11 @@ pub fn op_brotli_compress_stream_end( state: &mut OpState, #[smi] rid: u32, #[buffer] output: &mut [u8], -) -> Result { - let ctx = state.resource_table.get::(rid)?; +) -> Result { + let ctx = state + .resource_table + .get::(rid) + .map_err(BrotliError::Resource)?; let mut inst = ctx.inst.borrow_mut(); let mut output_offset = 0; @@ -197,13 +217,13 @@ pub fn op_brotli_compress_stream_end( &mut |_, _, _, _| (), ); if !result { - return Err(type_error("Failed to compress")); + return Err(BrotliError::CompressFailed); } Ok(output_offset) } -fn brotli_decompress(buffer: &[u8]) -> Result { +fn brotli_decompress(buffer: &[u8]) -> Result { let mut output = Vec::with_capacity(4096); let mut decompressor = Decompressor::new(buffer, buffer.len()); decompressor.read_to_end(&mut output)?; @@ -214,7 +234,7 @@ fn brotli_decompress(buffer: &[u8]) -> Result { #[serde] pub fn op_brotli_decompress( #[buffer] buffer: &[u8], -) -> Result { +) -> Result { brotli_decompress(buffer) } @@ -222,8 +242,11 @@ pub fn op_brotli_decompress( #[serde] pub async fn op_brotli_decompress_async( #[buffer] buffer: JsBuffer, -) -> Result { - tokio::task::spawn_blocking(move || brotli_decompress(&buffer)).await? +) -> Result { + tokio::task::spawn_blocking(move || { + brotli_decompress(&buffer).map_err(BrotliError::Io) + }) + .await? } struct BrotliDecompressCtx { @@ -252,8 +275,11 @@ pub fn op_brotli_decompress_stream( #[smi] rid: u32, #[buffer] input: &[u8], #[buffer] output: &mut [u8], -) -> Result { - let ctx = state.resource_table.get::(rid)?; +) -> Result { + let ctx = state + .resource_table + .get::(rid) + .map_err(BrotliError::Resource)?; let mut inst = ctx.inst.borrow_mut(); let mut output_offset = 0; @@ -268,7 +294,7 @@ pub fn op_brotli_decompress_stream( &mut inst, ); if matches!(result, BrotliResult::ResultFailure) { - return Err(type_error("Failed to decompress")); + return Err(BrotliError::DecompressFailed); } Ok(output_offset) @@ -280,8 +306,11 @@ pub fn op_brotli_decompress_stream_end( state: &mut OpState, #[smi] rid: u32, #[buffer] output: &mut [u8], -) -> Result { - let ctx = state.resource_table.get::(rid)?; +) -> Result { + let ctx = state + .resource_table + .get::(rid) + .map_err(BrotliError::Resource)?; let mut inst = ctx.inst.borrow_mut(); let mut output_offset = 0; @@ -296,7 +325,7 @@ pub fn op_brotli_decompress_stream_end( &mut inst, ); if matches!(result, BrotliResult::ResultFailure) { - return Err(type_error("Failed to decompress")); + return Err(BrotliError::DecompressFailed); } Ok(output_offset) diff --git a/ext/node/ops/zlib/mod.rs b/ext/node/ops/zlib/mod.rs index b1d6d21d22..e75ef050d2 100644 --- a/ext/node/ops/zlib/mod.rs +++ b/ext/node/ops/zlib/mod.rs @@ -1,6 +1,5 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
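After the brotli.rs changes above, `op_brotli_decompress_async` carries two error layers: the `JoinError` from `spawn_blocking`, converted by `#[from]`, and the decompression failure itself, mapped to `BrotliError::Io` inside the closure. A compilable sketch of that shape, assuming the tokio, brotli, and thiserror crates the file already depends on:

```rust
use std::io::Read;

#[derive(Debug, thiserror::Error)]
pub enum BrotliError {
    #[error(transparent)]
    Join(#[from] tokio::task::JoinError),
    #[error("{0}")]
    Io(std::io::Error),
}

fn brotli_decompress(buffer: &[u8]) -> Result<Vec<u8>, std::io::Error> {
    let mut output = Vec::with_capacity(4096);
    // Decompressor implements Read over the compressed input.
    let mut decompressor = brotli::Decompressor::new(buffer, buffer.len());
    decompressor.read_to_end(&mut output)?;
    Ok(output)
}

async fn decompress_async(buffer: Vec<u8>) -> Result<Vec<u8>, BrotliError> {
    tokio::task::spawn_blocking(move || {
        brotli_decompress(&buffer).map_err(BrotliError::Io)
    })
    // `?` strips only the JoinError layer; the inner Result is returned as-is.
    .await?
}
```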
-use deno_core::error::type_error; -use deno_core::error::AnyError; + use deno_core::op2; use std::borrow::Cow; use std::cell::RefCell; @@ -8,7 +7,7 @@ use zlib::*; mod alloc; pub mod brotli; -mod mode; +pub mod mode; mod stream; use mode::Flush; @@ -17,11 +16,11 @@ use mode::Mode; use self::stream::StreamWrapper; #[inline] -fn check(condition: bool, msg: &str) -> Result<(), AnyError> { +fn check(condition: bool, msg: &str) -> Result<(), deno_core::error::AnyError> { if condition { Ok(()) } else { - Err(type_error(msg.to_string())) + Err(deno_core::error::type_error(msg.to_string())) } } @@ -56,7 +55,7 @@ impl ZlibInner { out_off: u32, out_len: u32, flush: Flush, - ) -> Result<(), AnyError> { + ) -> Result<(), deno_core::error::AnyError> { check(self.init_done, "write before init")?; check(!self.write_in_progress, "write already in progress")?; check(!self.pending_close, "close already in progress")?; @@ -65,11 +64,11 @@ impl ZlibInner { let next_in = input .get(in_off as usize..in_off as usize + in_len as usize) - .ok_or_else(|| type_error("invalid input range"))? + .ok_or_else(|| deno_core::error::type_error("invalid input range"))? .as_ptr() as *mut _; let next_out = out .get_mut(out_off as usize..out_off as usize + out_len as usize) - .ok_or_else(|| type_error("invalid output range"))? + .ok_or_else(|| deno_core::error::type_error("invalid output range"))? .as_mut_ptr(); self.strm.avail_in = in_len; @@ -81,7 +80,10 @@ impl ZlibInner { Ok(()) } - fn do_write(&mut self, flush: Flush) -> Result<(), AnyError> { + fn do_write( + &mut self, + flush: Flush, + ) -> Result<(), deno_core::error::AnyError> { self.flush = flush; match self.mode { Mode::Deflate | Mode::Gzip | Mode::DeflateRaw => { @@ -127,7 +129,7 @@ impl ZlibInner { self.mode = Mode::Inflate; } } else if next_expected_header_byte.is_some() { - return Err(type_error( + return Err(deno_core::error::type_error( "invalid number of gzip magic number bytes read", )); } @@ -181,7 +183,7 @@ impl ZlibInner { Ok(()) } - fn init_stream(&mut self) -> Result<(), AnyError> { + fn init_stream(&mut self) -> Result<(), deno_core::error::AnyError> { match self.mode { Mode::Gzip | Mode::Gunzip => self.window_bits += 16, Mode::Unzip => self.window_bits += 32, @@ -199,7 +201,7 @@ impl ZlibInner { Mode::Inflate | Mode::Gunzip | Mode::InflateRaw | Mode::Unzip => { self.strm.inflate_init(self.window_bits) } - Mode::None => return Err(type_error("Unknown mode")), + Mode::None => return Err(deno_core::error::type_error("Unknown mode")), }; self.write_in_progress = false; @@ -208,7 +210,7 @@ impl ZlibInner { Ok(()) } - fn close(&mut self) -> Result { + fn close(&mut self) -> Result { if self.write_in_progress { self.pending_close = true; return Ok(false); @@ -222,10 +224,8 @@ impl ZlibInner { Ok(true) } - fn reset_stream(&mut self) -> Result<(), AnyError> { + fn reset_stream(&mut self) { self.err = self.strm.reset(self.mode); - - Ok(()) } } @@ -243,7 +243,7 @@ impl deno_core::Resource for Zlib { #[op2] #[cppgc] -pub fn op_zlib_new(#[smi] mode: i32) -> Result { +pub fn op_zlib_new(#[smi] mode: i32) -> Result { let mode = Mode::try_from(mode)?; let inner = ZlibInner { @@ -256,12 +256,20 @@ pub fn op_zlib_new(#[smi] mode: i32) -> Result { }) } +#[derive(Debug, thiserror::Error)] +pub enum ZlibError { + #[error("zlib not initialized")] + NotInitialized, + #[error(transparent)] + Mode(#[from] mode::ModeError), + #[error(transparent)] + Other(#[from] deno_core::error::AnyError), +} + #[op2(fast)] -pub fn op_zlib_close(#[cppgc] resource: &Zlib) -> Result<(), 
AnyError> { +pub fn op_zlib_close(#[cppgc] resource: &Zlib) -> Result<(), ZlibError> { let mut resource = resource.inner.borrow_mut(); - let zlib = resource - .as_mut() - .ok_or_else(|| type_error("zlib not initialized"))?; + let zlib = resource.as_mut().ok_or(ZlibError::NotInitialized)?; // If there is a pending write, defer the close until the write is done. zlib.close()?; @@ -282,11 +290,9 @@ pub fn op_zlib_write( #[smi] out_off: u32, #[smi] out_len: u32, #[buffer] result: &mut [u32], -) -> Result { +) -> Result { let mut zlib = resource.inner.borrow_mut(); - let zlib = zlib - .as_mut() - .ok_or_else(|| type_error("zlib not initialized"))?; + let zlib = zlib.as_mut().ok_or(ZlibError::NotInitialized)?; let flush = Flush::try_from(flush)?; zlib.start_write(input, in_off, in_len, out, out_off, out_len, flush)?; @@ -307,11 +313,9 @@ pub fn op_zlib_init( #[smi] mem_level: i32, #[smi] strategy: i32, #[buffer] dictionary: &[u8], -) -> Result { +) -> Result { let mut zlib = resource.inner.borrow_mut(); - let zlib = zlib - .as_mut() - .ok_or_else(|| type_error("zlib not initialized"))?; + let zlib = zlib.as_mut().ok_or(ZlibError::NotInitialized)?; check((8..=15).contains(&window_bits), "invalid windowBits")?; check((-1..=9).contains(&level), "invalid level")?; @@ -348,13 +352,11 @@ pub fn op_zlib_init( #[op2(fast)] #[smi] -pub fn op_zlib_reset(#[cppgc] resource: &Zlib) -> Result { +pub fn op_zlib_reset(#[cppgc] resource: &Zlib) -> Result { let mut zlib = resource.inner.borrow_mut(); - let zlib = zlib - .as_mut() - .ok_or_else(|| type_error("zlib not initialized"))?; + let zlib = zlib.as_mut().ok_or(ZlibError::NotInitialized)?; - zlib.reset_stream()?; + zlib.reset_stream(); Ok(zlib.err) } @@ -362,12 +364,10 @@ pub fn op_zlib_reset(#[cppgc] resource: &Zlib) -> Result { #[op2(fast)] pub fn op_zlib_close_if_pending( #[cppgc] resource: &Zlib, -) -> Result<(), AnyError> { +) -> Result<(), ZlibError> { let pending_close = { let mut zlib = resource.inner.borrow_mut(); - let zlib = zlib - .as_mut() - .ok_or_else(|| type_error("zlib not initialized"))?; + let zlib = zlib.as_mut().ok_or(ZlibError::NotInitialized)?; zlib.write_in_progress = false; zlib.pending_close diff --git a/ext/node/ops/zlib/mode.rs b/ext/node/ops/zlib/mode.rs index 753300cc49..41565f9b11 100644 --- a/ext/node/ops/zlib/mode.rs +++ b/ext/node/ops/zlib/mode.rs @@ -1,19 +1,8 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -#[derive(Debug)] -pub enum Error { - BadArgument, -} - -impl std::fmt::Display for Error { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Error::BadArgument => write!(f, "bad argument"), - } - } -} - -impl std::error::Error for Error {} +#[derive(Debug, thiserror::Error)] +#[error("bad argument")] +pub struct ModeError; macro_rules! repr_i32 { ($(#[$meta:meta])* $vis:vis enum $name:ident { @@ -25,12 +14,12 @@ macro_rules! 
repr_i32 { } impl core::convert::TryFrom for $name { - type Error = Error; + type Error = ModeError; fn try_from(v: i32) -> Result { match v { $(x if x == $name::$vname as i32 => Ok($name::$vname),)* - _ => Err(Error::BadArgument), + _ => Err(ModeError), } } } diff --git a/ext/node/polyfills/01_require.js b/ext/node/polyfills/01_require.js index 5b0980c310..d818bb5727 100644 --- a/ext/node/polyfills/01_require.js +++ b/ext/node/polyfills/01_require.js @@ -523,17 +523,13 @@ function resolveExports( return; } - if (!parentPath) { - return false; - } - return op_require_resolve_exports( usesLocalNodeModulesDir, modulesPath, request, name, expansion, - parentPath, + parentPath ?? "", ) ?? false; } @@ -1075,13 +1071,35 @@ Module._extensions[".js"] = function (module, filename) { } else if (pkg?.type === "commonjs") { format = "commonjs"; } - } else if (StringPrototypeEndsWith(filename, ".cjs")) { - format = "commonjs"; } module._compile(content, filename, format); }; +Module._extensions[".ts"] = + Module._extensions[".jsx"] = + Module._extensions[".tsx"] = + function (module, filename) { + const content = op_require_read_file(filename); + + let format; + const pkg = op_require_read_closest_package_json(filename); + if (pkg?.type === "module") { + format = "module"; + } else if (pkg?.type === "commonjs") { + format = "commonjs"; + } + + module._compile(content, filename, format); + }; + +Module._extensions[".cjs"] = + Module._extensions[".cts"] = + function (module, filename) { + const content = op_require_read_file(filename); + module._compile(content, filename, "commonjs"); + }; + function loadESMFromCJS(module, filename, code) { const namespace = op_import_sync( url.pathToFileURL(filename).toString(), @@ -1091,7 +1109,10 @@ function loadESMFromCJS(module, filename, code) { module.exports = namespace; } -Module._extensions[".mjs"] = function (module, filename) { +Module._extensions[".mjs"] = Module._extensions[".mts"] = function ( + module, + filename, +) { loadESMFromCJS(module, filename); }; @@ -1291,6 +1312,8 @@ export function findSourceMap(_path) { return undefined; } +Module.findSourceMap = findSourceMap; + /** * @param {string | URL} _specifier * @param {string | URL} _parentUrl diff --git a/ext/node/polyfills/_fs/_fs_common.ts b/ext/node/polyfills/_fs/_fs_common.ts index ac0bf5a551..a29548bb36 100644 --- a/ext/node/polyfills/_fs/_fs_common.ts +++ b/ext/node/polyfills/_fs/_fs_common.ts @@ -20,6 +20,7 @@ import { notImplemented, TextEncodings, } from "ext:deno_node/_utils.ts"; +import { type Buffer } from "node:buffer"; export type CallbackWithError = (err: ErrnoException | null) => void; diff --git a/ext/node/polyfills/_fs/_fs_open.ts b/ext/node/polyfills/_fs/_fs_open.ts index 8bd989790b..31ca4bb619 100644 --- a/ext/node/polyfills/_fs/_fs_open.ts +++ b/ext/node/polyfills/_fs/_fs_open.ts @@ -147,8 +147,8 @@ export function open( export function openPromise( path: string | Buffer | URL, - flags?: openFlags = "r", - mode? 
= 0o666, + flags: openFlags = "r", + mode = 0o666, ): Promise { return new Promise((resolve, reject) => { open(path, flags, mode, (err, fd) => { diff --git a/ext/node/polyfills/_fs/_fs_readFile.ts b/ext/node/polyfills/_fs/_fs_readFile.ts index 0f05ee1678..cf7e0305d8 100644 --- a/ext/node/polyfills/_fs/_fs_readFile.ts +++ b/ext/node/polyfills/_fs/_fs_readFile.ts @@ -19,6 +19,7 @@ import { TextEncodings, } from "ext:deno_node/_utils.ts"; import { FsFile } from "ext:deno_fs/30_fs.js"; +import { denoErrorToNodeError } from "ext:deno_node/internal/errors.ts"; function maybeDecode(data: Uint8Array, encoding: TextEncodings): string; function maybeDecode( @@ -87,7 +88,7 @@ export function readFile( } const buffer = maybeDecode(data, encoding); (cb as BinaryCallback)(null, buffer); - }, (err) => cb && cb(err)); + }, (err) => cb && cb(denoErrorToNodeError(err))); } } @@ -117,7 +118,12 @@ export function readFileSync( opt?: FileOptionsArgument, ): string | Buffer { path = path instanceof URL ? pathFromURL(path) : path; - const data = Deno.readFileSync(path); + let data; + try { + data = Deno.readFileSync(path); + } catch (err) { + throw denoErrorToNodeError(err); + } const encoding = getEncoding(opt); if (encoding && encoding !== "binary") { const text = maybeDecode(data, encoding); diff --git a/ext/node/polyfills/_fs/_fs_readv.ts b/ext/node/polyfills/_fs/_fs_readv.ts index 384f5e319a..2259f029ae 100644 --- a/ext/node/polyfills/_fs/_fs_readv.ts +++ b/ext/node/polyfills/_fs/_fs_readv.ts @@ -15,6 +15,7 @@ import { maybeCallback } from "ext:deno_node/_fs/_fs_common.ts"; import { validateInteger } from "ext:deno_node/internal/validators.mjs"; import * as io from "ext:deno_io/12_io.js"; import { op_fs_seek_async, op_fs_seek_sync } from "ext:core/ops"; +import process from "node:process"; type Callback = ( err: ErrnoException | null, diff --git a/ext/node/polyfills/_fs/_fs_stat.ts b/ext/node/polyfills/_fs/_fs_stat.ts index c4ed82d577..d00c81ffb6 100644 --- a/ext/node/polyfills/_fs/_fs_stat.ts +++ b/ext/node/polyfills/_fs/_fs_stat.ts @@ -383,7 +383,10 @@ export function stat( Deno.stat(path).then( (stat) => callback(null, CFISBIS(stat, options.bigint)), - (err) => callback(denoErrorToNodeError(err, { syscall: "stat" })), + (err) => + callback( + denoErrorToNodeError(err, { syscall: "stat", path: getPathname(path) }), + ), ); } @@ -417,9 +420,16 @@ export function statSync( return; } if (err instanceof Error) { - throw denoErrorToNodeError(err, { syscall: "stat" }); + throw denoErrorToNodeError(err, { + syscall: "stat", + path: getPathname(path), + }); } else { throw err; } } } + +function getPathname(path: string | URL) { + return typeof path === "string" ? 
path : path.pathname; +} diff --git a/ext/node/polyfills/_next_tick.ts b/ext/node/polyfills/_next_tick.ts index 62470c564e..af306a29c8 100644 --- a/ext/node/polyfills/_next_tick.ts +++ b/ext/node/polyfills/_next_tick.ts @@ -62,6 +62,8 @@ export function processTicksAndRejections() { callback(...args); } } + } catch (e) { + reportError(e); } finally { // FIXME(bartlomieju): Deno currently doesn't support async hooks // if (destroyHooksExist()) diff --git a/ext/node/polyfills/_tls_wrap.ts b/ext/node/polyfills/_tls_wrap.ts index a614b45df0..e36fc637e7 100644 --- a/ext/node/polyfills/_tls_wrap.ts +++ b/ext/node/polyfills/_tls_wrap.ts @@ -68,6 +68,7 @@ export class TLSSocket extends net.Socket { secureConnecting: boolean; _SNICallback: any; servername: string | null; + alpnProtocol: string | boolean | null; alpnProtocols: string[] | null; authorized: boolean; authorizationError: any; @@ -114,6 +115,7 @@ export class TLSSocket extends net.Socket { this.secureConnecting = true; this._SNICallback = null; this.servername = null; + this.alpnProtocol = null; this.alpnProtocols = tlsOptions.ALPNProtocols; this.authorized = false; this.authorizationError = null; @@ -151,10 +153,21 @@ export class TLSSocket extends net.Socket { handle.afterConnect = async (req: any, status: number) => { try { const conn = await Deno.startTls(handle[kStreamBaseField], options); + try { + const hs = await conn.handshake(); + if (hs.alpnProtocol) { + tlssock.alpnProtocol = hs.alpnProtocol; + } else { + tlssock.alpnProtocol = false; + } + } catch { + // Don't interrupt "secure" event to let the first read/write + // operation emit the error. + } handle[kStreamBaseField] = conn; tlssock.emit("secure"); tlssock.removeListener("end", onConnectEnd); - } catch { + } catch (_) { // TODO(kt3k): Handle this } return afterConnect.call(handle, req, status); @@ -269,6 +282,7 @@ export class ServerImpl extends EventEmitter { // Creates TCP handle and socket directly from Deno.TlsConn. // This works as TLS socket. We don't use TLSSocket class for doing // this because Deno.startTls only supports client side tcp connection. + // TODO(@satyarohith): set TLSSocket.alpnProtocol when we use TLSSocket class. const handle = new TCP(TCPConstants.SOCKET, await listener.accept()); const socket = new net.Socket({ handle }); this.emit("secureConnection", socket); diff --git a/ext/node/polyfills/_zlib.mjs b/ext/node/polyfills/_zlib.mjs index 851bd602f2..07fc440ef5 100644 --- a/ext/node/polyfills/_zlib.mjs +++ b/ext/node/polyfills/_zlib.mjs @@ -14,6 +14,7 @@ import { nextTick } from "ext:deno_node/_next_tick.ts"; import { isAnyArrayBuffer, isArrayBufferView, + isUint8Array, } from "ext:deno_node/internal/util/types.ts"; var kRangeErrorMessage = "Cannot create final Buffer. 
It would be larger " + @@ -158,6 +159,12 @@ export const inflateRawSync = function (buffer, opts) { function sanitizeInput(input) { if (typeof input === "string") input = Buffer.from(input); + if (isArrayBufferView(input) && !isUint8Array(input)) { + input = Buffer.from(input.buffer, input.byteOffset, input.byteLength); + } else if (isAnyArrayBuffer(input)) { + input = Buffer.from(input); + } + if ( !Buffer.isBuffer(input) && (input.buffer && !input.buffer.constructor === ArrayBuffer) diff --git a/ext/node/polyfills/http.ts b/ext/node/polyfills/http.ts index 20bef30099..9a920adeee 100644 --- a/ext/node/polyfills/http.ts +++ b/ext/node/polyfills/http.ts @@ -34,6 +34,7 @@ import { finished, Readable as NodeReadable, Writable as NodeWritable, + WritableOptions as NodeWritableOptions, } from "node:stream"; import { kUniqueHeaders, @@ -50,7 +51,6 @@ import { urlToHttpOptions } from "ext:deno_node/internal/url.ts"; import { kEmptyObject } from "ext:deno_node/internal/util.mjs"; import { constants, TCP } from "ext:deno_node/internal_binding/tcp_wrap.ts"; import { notImplemented, warnNotImplemented } from "ext:deno_node/_utils.ts"; -import { isWindows } from "ext:deno_node/_util/os.ts"; import { connResetException, ERR_HTTP_HEADERS_SENT, @@ -70,6 +70,7 @@ import { resourceForReadableStream } from "ext:deno_web/06_streams.js"; import { UpgradedConn } from "ext:deno_net/01_net.js"; import { STATUS_CODES } from "node:_http_server"; import { methods as METHODS } from "node:_http_common"; +import { deprecate } from "node:util"; const { internalRidSymbol } = core; const { ArrayIsArray, StringPrototypeToLowerCase } = primordials; @@ -1184,49 +1185,95 @@ function onError(self, error, cb) { } } -export class ServerResponse extends NodeWritable { - statusCode = 200; - statusMessage?: string = undefined; - #headers: Record = { __proto__: null }; - #hasNonStringHeaders: boolean = false; - #readable: ReadableStream; - override writable = true; - // used by `npm:on-finished` - finished = false; - headersSent = false; - #resolve: (value: Response | PromiseLike) => void; +export type ServerResponse = { + statusCode: number; + statusMessage?: string; + + _headers: Record; + _hasNonStringHeaders: boolean; + + _readable: ReadableStream; + finished: boolean; + headersSent: boolean; + _resolve: (value: Response | PromiseLike) => void; // deno-lint-ignore no-explicit-any - #socketOverride: any | null = null; + _socketOverride: any | null; + // deno-lint-ignore no-explicit-any + socket: any | null; - static #enqueue(controller: ReadableStreamDefaultController, chunk: Chunk) { - try { - if (typeof chunk === "string") { - controller.enqueue(ENCODER.encode(chunk)); - } else { - controller.enqueue(chunk); - } - } catch (_) { - // The stream might have been closed. Ignore the error. 
- } - } + setHeader(name: string, value: string | string[]): void; + appendHeader(name: string, value: string | string[]): void; + getHeader(name: string): string | string[]; + removeHeader(name: string): void; + getHeaderNames(): string[]; + getHeaders(): Record; + hasHeader(name: string): boolean; - /** Returns true if the response body should be null with the given - * http status code */ - static #bodyShouldBeNull(status: number) { - return status === 101 || status === 204 || status === 205 || status === 304; - } + writeHead( + status: number, + statusMessage?: string, + headers?: + | Record + | Array<[string, string]>, + ): void; + writeHead( + status: number, + headers?: + | Record + | Array<[string, string]>, + ): void; - constructor( + _ensureHeaders(singleChunk?: Chunk): void; + + respond(final: boolean, singleChunk?: Chunk): void; + // deno-lint-ignore no-explicit-any + end(chunk?: any, encoding?: any, cb?: any): void; + + flushHeaders(): void; + _implicitHeader(): void; + + // Undocumented field used by `npm:light-my-request`. + _header: string; + + assignSocket(socket): void; + detachSocket(socket): void; +} & { -readonly [K in keyof NodeWritable]: NodeWritable[K] }; + +type ServerResponseStatic = { + new ( resolve: (value: Response | PromiseLike) => void, socket: FakeSocket, - ) { - let controller: ReadableByteStreamController; - const readable = new ReadableStream({ - start(c) { - controller = c as ReadableByteStreamController; - }, - }); - super({ + ): ServerResponse; + _enqueue(controller: ReadableStreamDefaultController, chunk: Chunk): void; + _bodyShouldBeNull(statusCode: number): boolean; +}; + +export const ServerResponse = function ( + this: ServerResponse, + resolve: (value: Response | PromiseLike) => void, + socket: FakeSocket, +) { + this.statusCode = 200; + this.statusMessage = undefined; + this._headers = { __proto__: null }; + this._hasNonStringHeaders = false; + this.writable = true; + + // used by `npm:on-finished` + this.finished = false; + this.headersSent = false; + this._socketOverride = null; + + let controller: ReadableByteStreamController; + const readable = new ReadableStream({ + start(c) { + controller = c as ReadableByteStreamController; + }, + }); + + NodeWritable.call( + this, + { autoDestroy: true, defaultEncoding: "utf-8", emitClose: true, @@ -1235,16 +1282,16 @@ export class ServerResponse extends NodeWritable { write: (chunk, encoding, cb) => { // Writes chunks are directly written to the socket if // one is assigned via assignSocket() - if (this.#socketOverride && this.#socketOverride.writable) { - this.#socketOverride.write(chunk, encoding); + if (this._socketOverride && this._socketOverride.writable) { + this._socketOverride.write(chunk, encoding); return cb(); } if (!this.headersSent) { - ServerResponse.#enqueue(controller, chunk); + ServerResponse._enqueue(controller, chunk); this.respond(false); return cb(); } - ServerResponse.#enqueue(controller, chunk); + ServerResponse._enqueue(controller, chunk); return cb(); }, final: (cb) => { @@ -1260,193 +1307,269 @@ export class ServerResponse extends NodeWritable { } return cb(null); }, - }); - this.#readable = readable; - this.#resolve = resolve; - this.socket = socket; - } + } satisfies NodeWritableOptions, + ); - setHeader(name: string, value: string | string[]) { - if (Array.isArray(value)) { - this.#hasNonStringHeaders = true; - } - this.#headers[StringPrototypeToLowerCase(name)] = value; - return this; - } + this._readable = readable; + this._resolve = resolve; + this.socket = socket; - 
appendHeader(name: string, value: string | string[]) { - const key = StringPrototypeToLowerCase(name); - if (this.#headers[key] === undefined) { - if (Array.isArray(value)) this.#hasNonStringHeaders = true; - this.#headers[key] = value; + this._header = ""; +} as unknown as ServerResponseStatic; + +Object.setPrototypeOf(ServerResponse.prototype, NodeWritable.prototype); +Object.setPrototypeOf(ServerResponse, NodeWritable); + +ServerResponse._enqueue = function ( + this: ServerResponse, + controller: ReadableStreamDefaultController, + chunk: Chunk, +) { + try { + if (typeof chunk === "string") { + controller.enqueue(ENCODER.encode(chunk)); } else { - this.#hasNonStringHeaders = true; - if (!Array.isArray(this.#headers[key])) { - this.#headers[key] = [this.#headers[key]]; + controller.enqueue(chunk); + } + } catch (_) { + // The stream might have been closed. Ignore the error. + } +}; + +/** Returns true if the response body should be null with the given + * http status code */ +ServerResponse._bodyShouldBeNull = function ( + this: ServerResponse, + status: number, +) { + return status === 101 || status === 204 || status === 205 || status === 304; +}; + +ServerResponse.prototype.setHeader = function ( + this: ServerResponse, + name: string, + value: string | string[], +) { + if (Array.isArray(value)) { + this._hasNonStringHeaders = true; + } + this._headers[StringPrototypeToLowerCase(name)] = value; + return this; +}; + +ServerResponse.prototype.appendHeader = function ( + this: ServerResponse, + name: string, + value: string | string[], +) { + const key = StringPrototypeToLowerCase(name); + if (this._headers[key] === undefined) { + if (Array.isArray(value)) this._hasNonStringHeaders = true; + this._headers[key] = value; + } else { + this._hasNonStringHeaders = true; + if (!Array.isArray(this._headers[key])) { + this._headers[key] = [this._headers[key]]; + } + const header = this._headers[key]; + if (Array.isArray(value)) { + header.push(...value); + } else { + header.push(value); + } + } + return this; +}; + +ServerResponse.prototype.getHeader = function ( + this: ServerResponse, + name: string, +) { + return this._headers[StringPrototypeToLowerCase(name)]; +}; + +ServerResponse.prototype.removeHeader = function ( + this: ServerResponse, + name: string, +) { + delete this._headers[StringPrototypeToLowerCase(name)]; +}; + +ServerResponse.prototype.getHeaderNames = function (this: ServerResponse) { + return Object.keys(this._headers); +}; + +ServerResponse.prototype.getHeaders = function ( + this: ServerResponse, +): Record { + return { __proto__: null, ...this._headers }; +}; + +ServerResponse.prototype.hasHeader = function ( + this: ServerResponse, + name: string, +) { + return Object.hasOwn(this._headers, name); +}; + +ServerResponse.prototype.writeHead = function ( + this: ServerResponse, + status: number, + statusMessageOrHeaders?: + | string + | Record + | Array<[string, string]>, + maybeHeaders?: + | Record + | Array<[string, string]>, +) { + this.statusCode = status; + + let headers = null; + if (typeof statusMessageOrHeaders === "string") { + this.statusMessage = statusMessageOrHeaders; + if (maybeHeaders !== undefined) { + headers = maybeHeaders; + } + } else if (statusMessageOrHeaders !== undefined) { + headers = statusMessageOrHeaders; + } + + if (headers !== null) { + if (ArrayIsArray(headers)) { + headers = headers as Array<[string, string]>; + for (let i = 0; i < headers.length; i++) { + this.appendHeader(headers[i][0], headers[i][1]); } - const header = this.#headers[key]; - 
if (Array.isArray(value)) { - header.push(...value); - } else { - header.push(value); + } else { + headers = headers as Record; + for (const k in headers) { + if (Object.hasOwn(headers, k)) { + this.setHeader(k, headers[k]); + } } } - return this; } - getHeader(name: string) { - return this.#headers[StringPrototypeToLowerCase(name)]; - } - removeHeader(name: string) { - delete this.#headers[StringPrototypeToLowerCase(name)]; - } - getHeaderNames() { - return Object.keys(this.#headers); - } - getHeaders(): Record { - // @ts-ignore Ignore null __proto__ - return { __proto__: null, ...this.#headers }; - } - hasHeader(name: string) { - return Object.hasOwn(this.#headers, name); - } + return this; +}; - writeHead( - status: number, - statusMessage?: string, - headers?: - | Record - | Array<[string, string]>, - ): this; - writeHead( - status: number, - headers?: - | Record - | Array<[string, string]>, - ): this; - writeHead( - status: number, - statusMessageOrHeaders?: - | string - | Record - | Array<[string, string]>, - maybeHeaders?: - | Record - | Array<[string, string]>, - ): this { - this.statusCode = status; +ServerResponse.prototype._ensureHeaders = function ( + this: ServerResponse, + singleChunk?: Chunk, +) { + if (this.statusCode === 200 && this.statusMessage === undefined) { + this.statusMessage = "OK"; + } + if (typeof singleChunk === "string" && !this.hasHeader("content-type")) { + this.setHeader("content-type", "text/plain;charset=UTF-8"); + } +}; - let headers = null; - if (typeof statusMessageOrHeaders === "string") { - this.statusMessage = statusMessageOrHeaders; - if (maybeHeaders !== undefined) { - headers = maybeHeaders; - } - } else if (statusMessageOrHeaders !== undefined) { - headers = statusMessageOrHeaders; - } - - if (headers !== null) { - if (ArrayIsArray(headers)) { - headers = headers as Array<[string, string]>; - for (let i = 0; i < headers.length; i++) { - this.appendHeader(headers[i][0], headers[i][1]); +ServerResponse.prototype.respond = function ( + this: ServerResponse, + final: boolean, + singleChunk?: Chunk, +) { + this.headersSent = true; + this._ensureHeaders(singleChunk); + let body = singleChunk ?? (final ? null : this._readable); + if (ServerResponse._bodyShouldBeNull(this.statusCode)) { + body = null; + } + let headers: Record | [string, string][] = this + ._headers as Record; + if (this._hasNonStringHeaders) { + headers = []; + // Guard is not needed as this is a null prototype object. + // deno-lint-ignore guard-for-in + for (const key in this._headers) { + const entry = this._headers[key]; + if (Array.isArray(entry)) { + for (const value of entry) { + headers.push([key, value]); } } else { - headers = headers as Record; - for (const k in headers) { - if (Object.hasOwn(headers, k)) { - this.setHeader(k, headers[k]); - } - } + headers.push([key, entry]); } } - - return this; - } - - #ensureHeaders(singleChunk?: Chunk) { - if (this.statusCode === 200 && this.statusMessage === undefined) { - this.statusMessage = "OK"; - } - if ( - typeof singleChunk === "string" && - !this.hasHeader("content-type") - ) { - this.setHeader("content-type", "text/plain;charset=UTF-8"); - } - } - - respond(final: boolean, singleChunk?: Chunk) { - this.headersSent = true; - this.#ensureHeaders(singleChunk); - let body = singleChunk ?? (final ? 
null : this.#readable); - if (ServerResponse.#bodyShouldBeNull(this.statusCode)) { - body = null; - } - let headers: Record | [string, string][] = this - .#headers as Record; - if (this.#hasNonStringHeaders) { - headers = []; - // Guard is not needed as this is a null prototype object. - // deno-lint-ignore guard-for-in - for (const key in this.#headers) { - const entry = this.#headers[key]; - if (Array.isArray(entry)) { - for (const value of entry) { - headers.push([key, value]); - } - } else { - headers.push([key, entry]); - } - } - } - this.#resolve( - new Response(body, { - headers, - status: this.statusCode, - statusText: this.statusMessage, - }), - ); } + this._resolve( + new Response(body, { + headers, + status: this.statusCode, + statusText: this.statusMessage, + }), + ); +}; +ServerResponse.prototype.end = function ( + this: ServerResponse, // deno-lint-ignore no-explicit-any - override end(chunk?: any, encoding?: any, cb?: any): this { - this.finished = true; - if (!chunk && "transfer-encoding" in this.#headers) { - // FIXME(bnoordhuis) Node sends a zero length chunked body instead, i.e., - // the trailing "0\r\n", but respondWith() just hangs when I try that. - this.#headers["content-length"] = "0"; - delete this.#headers["transfer-encoding"]; - } - - // @ts-expect-error The signature for cb is stricter than the one implemented here - return super.end(chunk, encoding, cb); + chunk?: any, + // deno-lint-ignore no-explicit-any + encoding?: any, + // deno-lint-ignore no-explicit-any + cb?: any, +) { + this.finished = true; + if (!chunk && "transfer-encoding" in this._headers) { + // FIXME(bnoordhuis) Node sends a zero length chunked body instead, i.e., + // the trailing "0\r\n", but respondWith() just hangs when I try that. + this._headers["content-length"] = "0"; + delete this._headers["transfer-encoding"]; } - flushHeaders() { - // no-op - } + // @ts-expect-error The signature for cb is stricter than the one implemented here + NodeWritable.prototype.end.call(this, chunk, encoding, cb); +}; - // Undocumented API used by `npm:compression`. - _implicitHeader() { - this.writeHead(this.statusCode); - } +ServerResponse.prototype.flushHeaders = function (this: ServerResponse) { + // no-op +}; - assignSocket(socket) { - if (socket._httpMessage) { - throw new ERR_HTTP_SOCKET_ASSIGNED(); - } - socket._httpMessage = this; - this.#socketOverride = socket; - } +// Undocumented API used by `npm:compression`. 
+ServerResponse.prototype._implicitHeader = function (this: ServerResponse) { + this.writeHead(this.statusCode); +}; - detachSocket(socket) { - assert(socket._httpMessage === this); - socket._httpMessage = null; - this.#socketOverride = null; +ServerResponse.prototype.assignSocket = function ( + this: ServerResponse, + socket, +) { + if (socket._httpMessage) { + throw new ERR_HTTP_SOCKET_ASSIGNED(); } -} + socket._httpMessage = this; + this._socketOverride = socket; +}; + +ServerResponse.prototype.detachSocket = function ( + this: ServerResponse, + socket, +) { + assert(socket._httpMessage === this); + socket._httpMessage = null; + this._socketOverride = null; +}; + +Object.defineProperty(ServerResponse.prototype, "connection", { + get: deprecate( + function (this: ServerResponse) { + return this._socketOverride; + }, + "ServerResponse.prototype.connection is deprecated", + "DEP0066", + ), + set: deprecate( + // deno-lint-ignore no-explicit-any + function (this: ServerResponse, socket: any) { + this._socketOverride = socket; + }, + "ServerResponse.prototype.connection is deprecated", + "DEP0066", + ), +}); // TODO(@AaronO): optimize export class IncomingMessageForServer extends NodeReadable { @@ -1588,8 +1711,9 @@ export class ServerImpl extends EventEmitter { port = options.port | 0; } - // Use 0.0.0.0 for Windows, and [::] for other platforms. - let hostname = options.host ?? (isWindows ? "0.0.0.0" : "[::]"); + // TODO(bnoordhuis) Node prefers [::] when host is omitted, + // we on the other hand default to 0.0.0.0. + let hostname = options.host ?? "0.0.0.0"; if (hostname == "localhost") { hostname = "127.0.0.1"; } @@ -1678,6 +1802,8 @@ export class ServerImpl extends EventEmitter { this.#server.ref(); } this.#unref = false; + + return this; } unref() { @@ -1685,6 +1811,8 @@ export class ServerImpl extends EventEmitter { this.#server.unref(); } this.#unref = true; + + return this; } close(cb?: (err?: Error) => void): this { diff --git a/ext/node/polyfills/https.ts b/ext/node/polyfills/https.ts index dd24cf048c..f60c5e471a 100644 --- a/ext/node/polyfills/https.ts +++ b/ext/node/polyfills/https.ts @@ -1,6 +1,9 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright Joyent and Node contributors. All rights reserved. MIT license. 
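The `ServerResponse` rewrite above is a class-to-prototype conversion: private `#` fields become `_`-prefixed properties and methods move onto an explicit prototype chain, so userland packages (`npm:on-finished`, `npm:light-my-request`, `npm:compression`) can read and monkey-patch them. A minimal sketch of the same technique, using a hypothetical `Demo` writable rather than the real class:

import { Writable } from "node:stream";

function Demo(this: any) {
  // Equivalent of `super(...)` in the old class body.
  Writable.call(this, { autoDestroy: true });
  this._headers = { __proto__: null };
}
// Wire up instance and static inheritance explicitly.
Object.setPrototypeOf(Demo.prototype, Writable.prototype);
Object.setPrototypeOf(Demo, Writable);

Demo.prototype.setHeader = function (name: string, value: string) {
  this._headers[name.toLowerCase()] = value;
  return this;
};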
+// TODO(petamoriken): enable prefer-primordials for node polyfills +// deno-lint-ignore-file prefer-primordials + import { notImplemented } from "ext:deno_node/_utils.ts"; import { urlToHttpOptions } from "ext:deno_node/internal/url.ts"; import { @@ -14,14 +17,6 @@ import { type ServerHandler, ServerImpl as HttpServer } from "node:http"; import { validateObject } from "ext:deno_node/internal/validators.mjs"; import { kEmptyObject } from "ext:deno_node/internal/util.mjs"; import { Buffer } from "node:buffer"; -import { primordials } from "ext:core/mod.js"; -const { - ArrayPrototypeShift, - ArrayPrototypeUnshift, - ArrayIsArray, - ObjectPrototypeIsPrototypeOf, - ObjectAssign, -} = primordials; export class Server extends HttpServer { constructor(opts, requestListener?: ServerHandler) { @@ -34,11 +29,11 @@ export class Server extends HttpServer { validateObject(opts, "options"); } - if (opts.cert && ArrayIsArray(opts.cert)) { + if (opts.cert && Array.isArray(opts.cert)) { notImplemented("https.Server.opts.cert array type"); } - if (opts.key && ArrayIsArray(opts.key)) { + if (opts.key && Array.isArray(opts.key)) { notImplemented("https.Server.opts.key array type"); } @@ -47,12 +42,10 @@ export class Server extends HttpServer { _additionalServeOptions() { return { - cert: ObjectPrototypeIsPrototypeOf(Buffer, this._opts.cert) - // deno-lint-ignore prefer-primordials + cert: this._opts.cert instanceof Buffer ? this._opts.cert.toString() : this._opts.cert, - key: ObjectPrototypeIsPrototypeOf(Buffer, this._opts.key) - // deno-lint-ignore prefer-primordials + key: this._opts.key instanceof Buffer ? this._opts.key.toString() : this._opts.key, }; @@ -166,18 +159,18 @@ export function request(...args: any[]) { let options = {}; if (typeof args[0] === "string") { - const urlStr = ArrayPrototypeShift(args); + const urlStr = args.shift(); options = urlToHttpOptions(new URL(urlStr)); - } else if (ObjectPrototypeIsPrototypeOf(URL, args[0])) { - options = urlToHttpOptions(ArrayPrototypeShift(args)); + } else if (args[0] instanceof URL) { + options = urlToHttpOptions(args.shift()); } if (args[0] && typeof args[0] !== "function") { - ObjectAssign(options, ArrayPrototypeShift(args)); + Object.assign(options, args.shift()); } options._defaultAgent = globalAgent; - ArrayPrototypeUnshift(args, options); + args.unshift(options); return new HttpsClientRequest(args[0], args[1]); } diff --git a/ext/node/polyfills/inspector.js b/ext/node/polyfills/inspector.js new file mode 100644 index 0000000000..7eb15ce917 --- /dev/null +++ b/ext/node/polyfills/inspector.js @@ -0,0 +1,210 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +// Copyright Joyent and Node contributors. All rights reserved. MIT license. 
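The `request(...args)` normalization in https.ts above accepts a URL string, a `URL` instance, or an options object, optionally followed by a callback; each form is shifted off `args` and folded into one options object with `_defaultAgent` set. A usage sketch (not from the patch; all three calls hit the same code path):

import https from "node:https";

const req = https.request("https://example.com/", (res) => res.resume());
req.end();

const req2 = https.request(new URL("https://example.com/"), (res) => res.resume());
req2.end();

const req3 = https.request({ hostname: "example.com", path: "/" }, (res) => res.resume());
req3.end();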
+ +import process from "node:process"; +import { EventEmitter } from "node:events"; +import { primordials } from "ext:core/mod.js"; +import { + op_get_extras_binding_object, + op_inspector_close, + op_inspector_connect, + op_inspector_disconnect, + op_inspector_dispatch, + op_inspector_emit_protocol_event, + op_inspector_enabled, + op_inspector_open, + op_inspector_url, + op_inspector_wait, +} from "ext:core/ops"; +import { + isUint32, + validateFunction, + validateInt32, + validateObject, + validateString, +} from "ext:deno_node/internal/validators.mjs"; +import { + ERR_INSPECTOR_ALREADY_ACTIVATED, + ERR_INSPECTOR_ALREADY_CONNECTED, + ERR_INSPECTOR_CLOSED, + ERR_INSPECTOR_COMMAND, + ERR_INSPECTOR_NOT_ACTIVE, + ERR_INSPECTOR_NOT_CONNECTED, + ERR_INSPECTOR_NOT_WORKER, +} from "ext:deno_node/internal/errors.ts"; + +const { + SymbolDispose, + JSONParse, + JSONStringify, + SafeMap, +} = primordials; + +class Session extends EventEmitter { + #connection = null; + #nextId = 1; + #messageCallbacks = new SafeMap(); + + connect() { + if (this.#connection) { + throw new ERR_INSPECTOR_ALREADY_CONNECTED("The inspector session"); + } + this.#connection = op_inspector_connect(false, (m) => this.#onMessage(m)); + } + + connectToMainThread() { + if (isMainThread) { + throw new ERR_INSPECTOR_NOT_WORKER(); + } + if (this.#connection) { + throw new ERR_INSPECTOR_ALREADY_CONNECTED("The inspector session"); + } + this.#connection = op_inspector_connect(true, (m) => this.#onMessage(m)); + } + + #onMessage(message) { + const parsed = JSONParse(message); + try { + if (parsed.id) { + const callback = this.#messageCallbacks.get(parsed.id); + this.#messageCallbacks.delete(parsed.id); + if (callback) { + if (parsed.error) { + return callback( + new ERR_INSPECTOR_COMMAND( + parsed.error.code, + parsed.error.message, + ), + ); + } + + callback(null, parsed.result); + } + } else { + this.emit(parsed.method, parsed); + this.emit("inspectorNotification", parsed); + } + } catch (error) { + process.emitWarning(error); + } + } + + post(method, params, callback) { + validateString(method, "method"); + if (!callback && typeof params === "function") { + callback = params; + params = null; + } + if (params) { + validateObject(params, "params"); + } + if (callback) { + validateFunction(callback, "callback"); + } + + if (!this.#connection) { + throw new ERR_INSPECTOR_NOT_CONNECTED(); + } + const id = this.#nextId++; + const message = { id, method }; + if (params) { + message.params = params; + } + if (callback) { + this.#messageCallbacks.set(id, callback); + } + op_inspector_dispatch(this.#connection, JSONStringify(message)); + } + + disconnect() { + if (!this.#connection) { + return; + } + op_inspector_disconnect(this.#connection); + this.#connection = null; + // deno-lint-ignore prefer-primordials + for (const callback of this.#messageCallbacks.values()) { + process.nextTick(callback, new ERR_INSPECTOR_CLOSED()); + } + this.#messageCallbacks.clear(); + this.#nextId = 1; + } +} + +function open(port, host, wait) { + if (op_inspector_enabled()) { + throw new ERR_INSPECTOR_ALREADY_ACTIVATED(); + } + // inspectorOpen() currently does not typecheck its arguments and adding + // such checks would be a potentially breaking change. However, the native + // open() function requires the port to fit into a 16-bit unsigned integer, + // causing an integer overflow otherwise, so we at least need to prevent that. 
+ if (isUint32(port)) { + validateInt32(port, "port", 0, 65535); + } else { + // equiv of handling args[0]->IsUint32() + port = undefined; + } + if (typeof host !== "string") { + // equiv of handling args[1]->IsString() + host = undefined; + } + op_inspector_open(port, host); + if (wait) { + op_inspector_wait(); + } + + return { + __proto__: null, + [SymbolDispose]() { + _debugEnd(); + }, + }; +} + +function close() { + op_inspector_close(); +} + +function url() { + return op_inspector_url(); +} + +function waitForDebugger() { + if (!op_inspector_wait()) { + throw new ERR_INSPECTOR_NOT_ACTIVE(); + } +} + +function broadcastToFrontend(eventName, params) { + validateString(eventName, "eventName"); + if (params) { + validateObject(params, "params"); + } + op_inspector_emit_protocol_event(eventName, JSONStringify(params ?? {})); +} + +const Network = { + requestWillBeSent: (params) => + broadcastToFrontend("Network.requestWillBeSent", params), + responseReceived: (params) => + broadcastToFrontend("Network.responseReceived", params), + loadingFinished: (params) => + broadcastToFrontend("Network.loadingFinished", params), + loadingFailed: (params) => + broadcastToFrontend("Network.loadingFailed", params), +}; + +const console = op_get_extras_binding_object().console; + +export { close, console, Network, open, Session, url, waitForDebugger }; + +export default { + open, + close, + url, + waitForDebugger, + console, + Session, + Network, +}; diff --git a/ext/node/polyfills/inspector.ts b/ext/node/polyfills/inspector.ts deleted file mode 100644 index 9de86ab14f..0000000000 --- a/ext/node/polyfills/inspector.ts +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. -// Copyright Joyent and Node contributors. All rights reserved. MIT license. - -import { EventEmitter } from "node:events"; -import { notImplemented } from "ext:deno_node/_utils.ts"; -import { primordials } from "ext:core/mod.js"; - -const { - SafeMap, -} = primordials; - -class Session extends EventEmitter { - #connection = null; - #nextId = 1; - #messageCallbacks = new SafeMap(); - - /** Connects the session to the inspector back-end. */ - connect() { - notImplemented("inspector.Session.prototype.connect"); - } - - /** Connects the session to the main thread - * inspector back-end. */ - connectToMainThread() { - notImplemented("inspector.Session.prototype.connectToMainThread"); - } - - /** Posts a message to the inspector back-end. */ - post( - _method: string, - _params?: Record, - _callback?: (...args: unknown[]) => void, - ) { - notImplemented("inspector.Session.prototype.post"); - } - - /** Immediately closes the session, all pending - * message callbacks will be called with an - * error. - */ - disconnect() { - notImplemented("inspector.Session.prototype.disconnect"); - } -} - -/** Activates inspector on host and port. - * See https://nodejs.org/api/inspector.html#inspectoropenport-host-wait */ -function open(_port?: number, _host?: string, _wait?: boolean) { - notImplemented("inspector.Session.prototype.open"); -} - -/** Deactivate the inspector. Blocks until there are no active connections. - * See https://nodejs.org/api/inspector.html#inspectorclose */ -function close() { - notImplemented("inspector.Session.prototype.close"); -} - -/** Return the URL of the active inspector, or undefined if there is none. - * See https://nodejs.org/api/inspector.html#inspectorurl */ -function url() { - // TODO(kt3k): returns undefined for now, which means the inspector is not activated. 
- return undefined; -} - -/** Blocks until a client (existing or connected later) has sent Runtime.runIfWaitingForDebugger command. - * See https://nodejs.org/api/inspector.html#inspectorwaitfordebugger */ -function waitForDebugger() { - notImplemented("inspector.wairForDebugger"); -} - -const console = globalThis.console; - -export { close, console, open, Session, url, waitForDebugger }; - -export default { - close, - console, - open, - Session, - url, - waitForDebugger, -}; diff --git a/ext/node/polyfills/inspector/promises.js b/ext/node/polyfills/inspector/promises.js new file mode 100644 index 0000000000..3483e53f5e --- /dev/null +++ b/ext/node/polyfills/inspector/promises.js @@ -0,0 +1,20 @@ +// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. +// Copyright Joyent and Node contributors. All rights reserved. MIT license. + +import inspector from "node:inspector"; +import { promisify } from "ext:deno_node/internal/util.mjs"; + +class Session extends inspector.Session { + constructor() { + super(); + } +} +Session.prototype.post = promisify(inspector.Session.prototype.post); + +export * from "node:inspector"; +export { Session }; + +export default { + ...inspector, + Session, +}; diff --git a/ext/node/polyfills/internal/child_process.ts b/ext/node/polyfills/internal/child_process.ts index 65d825fd25..cfff1079ff 100644 --- a/ext/node/polyfills/internal/child_process.ts +++ b/ext/node/polyfills/internal/child_process.ts @@ -1339,7 +1339,7 @@ export function setupChannel(target: any, ipc: number) { } } - process.nextTick(handleMessage, msg); + nextTick(handleMessage, msg); } } catch (err) { if ( @@ -1400,7 +1400,7 @@ export function setupChannel(target: any, ipc: number) { if (!target.connected) { const err = new ERR_IPC_CHANNEL_CLOSED(); if (typeof callback === "function") { - process.nextTick(callback, err); + nextTick(callback, err); } else { nextTick(() => target.emit("error", err)); } @@ -1416,7 +1416,18 @@ export function setupChannel(target: any, ipc: number) { .then(() => { control.unrefCounted(); if (callback) { - process.nextTick(callback, null); + nextTick(callback, null); + } + }, (err: Error) => { + control.unrefCounted(); + if (err instanceof Deno.errors.Interrupted) { + // Channel closed on us mid-write. + } else { + if (typeof callback === "function") { + nextTick(callback, err); + } else { + nextTick(() => target.emit("error", err)); + } } }); return queueOk[0]; @@ -1433,7 +1444,7 @@ export function setupChannel(target: any, ipc: number) { target.connected = false; target[kCanDisconnect] = false; control[kControlDisconnect](); - process.nextTick(() => { + nextTick(() => { target.channel = null; core.close(ipc); target.emit("disconnect"); diff --git a/ext/node/polyfills/internal/crypto/_randomInt.ts b/ext/node/polyfills/internal/crypto/_randomInt.ts index 7f4d703ad4..e08b3e9639 100644 --- a/ext/node/polyfills/internal/crypto/_randomInt.ts +++ b/ext/node/polyfills/internal/crypto/_randomInt.ts @@ -1,9 +1,15 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
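A usage sketch for the new `node:inspector` polyfill above (hedged: it assumes an inspector back-end is available in the running build; `Runtime.evaluate` is a standard Chrome DevTools Protocol method, not Deno-specific):

import { Session } from "node:inspector/promises";

const session = new Session();
session.connect();
// With the promisified `post` from inspector/promises.js above,
// CDP calls can be awaited instead of taking callbacks.
const { result } = await session.post("Runtime.evaluate", {
  expression: "1 + 2",
});
console.log(result.value); // 3
session.disconnect();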
-// TODO(petamoriken): enable prefer-primordials for node polyfills -// deno-lint-ignore-file prefer-primordials - import { op_node_random_int } from "ext:core/ops"; +import { primordials } from "ext:core/mod.js"; +const { + Error, + MathCeil, + MathFloor, + MathPow, + NumberIsSafeInteger, + RangeError, +} = primordials; export default function randomInt(max: number): number; export default function randomInt(min: number, max: number): number; @@ -23,7 +29,9 @@ export default function randomInt( cb?: (err: Error | null, n?: number) => void, ): number | void { if (typeof max === "number" && typeof min === "number") { - [max, min] = [min, max]; + const temp = max; + max = min; + min = temp; } if (min === undefined) min = 0; else if (typeof min === "function") { @@ -32,13 +40,13 @@ export default function randomInt( } if ( - !Number.isSafeInteger(min) || - typeof max === "number" && !Number.isSafeInteger(max) + !NumberIsSafeInteger(min) || + typeof max === "number" && !NumberIsSafeInteger(max) ) { throw new Error("max or min is not a Safe Number"); } - if (max - min > Math.pow(2, 48)) { + if (max - min > MathPow(2, 48)) { throw new RangeError("max - min should be less than 2^48!"); } @@ -46,8 +54,8 @@ export default function randomInt( throw new Error("Min is bigger than Max!"); } - min = Math.ceil(min); - max = Math.floor(max); + min = MathCeil(min); + max = MathFloor(max); const result = op_node_random_int(min, max); if (cb) { diff --git a/ext/node/polyfills/internal/crypto/keygen.ts b/ext/node/polyfills/internal/crypto/keygen.ts index a40c76c0d7..44bfd83277 100644 --- a/ext/node/polyfills/internal/crypto/keygen.ts +++ b/ext/node/polyfills/internal/crypto/keygen.ts @@ -29,6 +29,7 @@ import { } from "ext:deno_node/internal/validators.mjs"; import { Buffer } from "node:buffer"; import { KeyFormat, KeyType } from "ext:deno_node/internal/crypto/types.ts"; +import process from "node:process"; import { op_node_generate_dh_group_key, diff --git a/ext/node/polyfills/internal/crypto/random.ts b/ext/node/polyfills/internal/crypto/random.ts index 4219414dc1..a41b868190 100644 --- a/ext/node/polyfills/internal/crypto/random.ts +++ b/ext/node/polyfills/internal/crypto/random.ts @@ -38,6 +38,7 @@ import { ERR_INVALID_ARG_TYPE, ERR_OUT_OF_RANGE, } from "ext:deno_node/internal/errors.ts"; +import { Buffer } from "node:buffer"; export { default as randomBytes } from "ext:deno_node/internal/crypto/_randomBytes.ts"; export { diff --git a/ext/node/polyfills/internal/dns/utils.ts b/ext/node/polyfills/internal/dns/utils.ts index 1e0c3d9ed6..226fce93dd 100644 --- a/ext/node/polyfills/internal/dns/utils.ts +++ b/ext/node/polyfills/internal/dns/utils.ts @@ -416,10 +416,20 @@ export function emitInvalidHostnameWarning(hostname: string) { ); } -let dnsOrder = getOptionValue("--dns-result-order") || "verbatim"; +let dnsOrder = getOptionValue("--dns-result-order") || "ipv4first"; export function getDefaultVerbatim() { - return dnsOrder !== "ipv4first"; + switch (dnsOrder) { + case "verbatim": { + return true; + } + case "ipv4first": { + return false; + } + default: { + return false; + } + } } /** diff --git a/ext/node/polyfills/internal/errors.ts b/ext/node/polyfills/internal/errors.ts index 51bd7a0250..5a3d4437a1 100644 --- a/ext/node/polyfills/internal/errors.ts +++ b/ext/node/polyfills/internal/errors.ts @@ -2558,19 +2558,6 @@ export class ERR_FS_RMDIR_ENOTDIR extends NodeSystemError { } } -export class ERR_OS_NO_HOMEDIR extends NodeSystemError { - constructor() { - const code = isWindows ? 
"ENOENT" : "ENOTDIR"; - const ctx: NodeSystemErrorCtx = { - message: "not a directory", - syscall: "home", - code, - errno: isWindows ? osConstants.errno.ENOENT : osConstants.errno.ENOTDIR, - }; - super(code, ctx, "Path is not a directory"); - } -} - export class ERR_HTTP_SOCKET_ASSIGNED extends NodeError { constructor() { super( diff --git a/ext/node/polyfills/internal/util/inspect.mjs b/ext/node/polyfills/internal/util/inspect.mjs index 3a61c387c0..ae797449bf 100644 --- a/ext/node/polyfills/internal/util/inspect.mjs +++ b/ext/node/polyfills/internal/util/inspect.mjs @@ -565,6 +565,19 @@ export function stripVTControlCharacters(str) { export function styleText(format, text) { validateString(text, "text"); + + if (Array.isArray(format)) { + for (let i = 0; i < format.length; i++) { + const item = format[i]; + const formatCodes = inspect.colors[item]; + if (formatCodes == null) { + validateOneOf(item, "format", Object.keys(inspect.colors)); + } + text = `\u001b[${formatCodes[0]}m${text}\u001b[${formatCodes[1]}m`; + } + return text; + } + const formatCodes = inspect.colors[format]; if (formatCodes == null) { validateOneOf(format, "format", Object.keys(inspect.colors)); diff --git a/ext/node/polyfills/internal_binding/cares_wrap.ts b/ext/node/polyfills/internal_binding/cares_wrap.ts index cbfea40b22..6feb7faf0d 100644 --- a/ext/node/polyfills/internal_binding/cares_wrap.ts +++ b/ext/node/polyfills/internal_binding/cares_wrap.ts @@ -75,18 +75,11 @@ export function getaddrinfo( const recordTypes: ("A" | "AAAA")[] = []; - if (family === 6) { + if (family === 0 || family === 4) { + recordTypes.push("A"); + } + if (family === 0 || family === 6) { recordTypes.push("AAAA"); - } else if (family === 4) { - recordTypes.push("A"); - } else if (family === 0 && hostname === "localhost") { - // Ipv6 is preferred over Ipv4 for localhost - recordTypes.push("AAAA"); - recordTypes.push("A"); - } else if (family === 0) { - // Only get Ipv4 addresses for the other hostnames - // This simulates what `getaddrinfo` does when the family is not specified - recordTypes.push("A"); } (async () => { diff --git a/ext/node/polyfills/internal_binding/http_parser.ts b/ext/node/polyfills/internal_binding/http_parser.ts index ca4f896e20..bad10d9851 100644 --- a/ext/node/polyfills/internal_binding/http_parser.ts +++ b/ext/node/polyfills/internal_binding/http_parser.ts @@ -126,6 +126,7 @@ ObjectSetPrototypeOf(HTTPParser.prototype, AsyncWrap.prototype); function defineProps(obj: object, props: Record) { for (const entry of new SafeArrayIterator(ObjectEntries(props))) { ObjectDefineProperty(obj, entry[0], { + __proto__: null, value: entry[1], enumerable: true, writable: true, diff --git a/ext/node/polyfills/net.ts b/ext/node/polyfills/net.ts index 35d273be93..48e1d0de87 100644 --- a/ext/node/polyfills/net.ts +++ b/ext/node/polyfills/net.ts @@ -1871,13 +1871,23 @@ function _setupListenHandle( // Try to bind to the unspecified IPv6 address, see if IPv6 is available if (!address && typeof fd !== "number") { - if (isWindows) { - address = DEFAULT_IPV4_ADDR; - addressType = 4; - } else { - address = DEFAULT_IPV6_ADDR; - addressType = 6; - } + // TODO(@bartlomieju): differs from Node which tries to bind to IPv6 first + // when no address is provided. + // + // Forcing IPv4 as a workaround for Deno not aligning with Node on + // implicit binding on Windows. 
+ // + // REF: https://github.com/denoland/deno/issues/10762 + // rval = _createServerHandle(DEFAULT_IPV6_ADDR, port, 6, fd, flags); + + // if (typeof rval === "number") { + // rval = null; + address = DEFAULT_IPV4_ADDR; + addressType = 4; + // } else { + // address = DEFAULT_IPV6_ADDR; + // addressType = 6; + // } } if (rval === null) { diff --git a/ext/node/polyfills/os.ts b/ext/node/polyfills/os.ts index e47e8679ec..edc89ed2c3 100644 --- a/ext/node/polyfills/os.ts +++ b/ext/node/polyfills/os.ts @@ -28,16 +28,17 @@ import { op_homedir, op_node_os_get_priority, op_node_os_set_priority, - op_node_os_username, + op_node_os_user_info, } from "ext:core/ops"; import { validateIntegerRange } from "ext:deno_node/_utils.ts"; import process from "node:process"; import { isWindows } from "ext:deno_node/_util/os.ts"; -import { ERR_OS_NO_HOMEDIR } from "ext:deno_node/internal/errors.ts"; import { os } from "ext:deno_node/internal_binding/constants.ts"; import { osUptime } from "ext:runtime/30_os.js"; import { Buffer } from "ext:deno_node/internal/buffer.mjs"; +import { primordials } from "ext:core/mod.js"; +const { StringPrototypeEndsWith, StringPrototypeSlice } = primordials; export const constants = os; @@ -136,6 +137,8 @@ export function arch(): string { (uptime as any)[Symbol.toPrimitive] = (): number => uptime(); // deno-lint-ignore no-explicit-any (machine as any)[Symbol.toPrimitive] = (): string => machine(); +// deno-lint-ignore no-explicit-any +(tmpdir as any)[Symbol.toPrimitive] = (): string | null => tmpdir(); export function cpus(): CPUCoreInfo[] { return op_cpus(); @@ -268,26 +271,27 @@ export function setPriority(pid: number, priority?: number) { export function tmpdir(): string | null { /* This follows the node js implementation, but has a few differences: - * On windows, if none of the environment variables are defined, - we return null. - * On unix we use a plain Deno.env.get, instead of safeGetenv, + * We use a plain Deno.env.get, instead of safeGetenv, which special cases setuid binaries. - * Node removes a single trailing / or \, we remove all. */ if (isWindows) { - const temp = Deno.env.get("TEMP") || Deno.env.get("TMP"); - if (temp) { - return temp.replace(/(? 1 && StringPrototypeEndsWith(temp, "\\") && + !StringPrototypeEndsWith(temp, ":\\") + ) { + temp = StringPrototypeSlice(temp, 0, -1); } - const base = Deno.env.get("SYSTEMROOT") || Deno.env.get("WINDIR"); - if (base) { - return base + "\\temp"; - } - return null; + + return temp; } else { // !isWindows - const temp = Deno.env.get("TMPDIR") || Deno.env.get("TMP") || + let temp = Deno.env.get("TMPDIR") || Deno.env.get("TMP") || Deno.env.get("TEMP") || "/tmp"; - return temp.replace(/(? 1 && StringPrototypeEndsWith(temp, "/")) { + temp = StringPrototypeSlice(temp, 0, -1); + } + return temp; } } @@ -320,7 +324,6 @@ export function uptime(): number { return osUptime(); } -/** Not yet implemented */ export function userInfo( options: UserInfoOptions = { encoding: "utf-8" }, ): UserInfo { @@ -331,20 +334,10 @@ export function userInfo( uid = -1; gid = -1; } - - // TODO(@crowlKats): figure out how to do this correctly: - // The value of homedir returned by os.userInfo() is provided by the operating system. - // This differs from the result of os.homedir(), which queries environment - // variables for the home directory before falling back to the operating system response. - let _homedir = homedir(); - if (!_homedir) { - throw new ERR_OS_NO_HOMEDIR(); - } - let shell = isWindows ? 
null : (Deno.env.get("SHELL") || null); - let username = op_node_os_username(); + let { username, homedir, shell } = op_node_os_user_info(uid); if (options?.encoding === "buffer") { - _homedir = _homedir ? Buffer.from(_homedir) : _homedir; + homedir = homedir ? Buffer.from(homedir) : homedir; shell = shell ? Buffer.from(shell) : shell; username = Buffer.from(username); } @@ -352,7 +345,7 @@ export function userInfo( return { uid, gid, - homedir: _homedir, + homedir, shell, username, }; diff --git a/ext/node/polyfills/vm.js b/ext/node/polyfills/vm.js index 183ddad2f4..b64c847c58 100644 --- a/ext/node/polyfills/vm.js +++ b/ext/node/polyfills/vm.js @@ -182,6 +182,7 @@ function getContextOptions(options) { let defaultContextNameIndex = 1; export function createContext( + // deno-lint-ignore prefer-primordials contextObject = {}, options = { __proto__: null }, ) { diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml index b060f28298..943fc8413e 100644 --- a/ext/tls/Cargo.toml +++ b/ext/tls/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_tls" -version = "0.160.0" +version = "0.164.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/url/Cargo.toml b/ext/url/Cargo.toml index ceef18f45a..557a4669e6 100644 --- a/ext/url/Cargo.toml +++ b/ext/url/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_url" -version = "0.173.0" +version = "0.177.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/ext/web/02_timers.js b/ext/web/02_timers.js index 89acaca42b..6058febd59 100644 --- a/ext/web/02_timers.js +++ b/ext/web/02_timers.js @@ -1,12 +1,9 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. import { core, primordials } from "ext:core/mod.js"; -import { op_defer, op_now } from "ext:core/ops"; +import { op_defer } from "ext:core/ops"; const { - Uint8Array, - Uint32Array, PromisePrototypeThen, - TypedArrayPrototypeGetBuffer, TypeError, indirectEval, ReflectApply, @@ -18,13 +15,6 @@ const { import * as webidl from "ext:deno_webidl/00_webidl.js"; -const hrU8 = new Uint8Array(8); -const hr = new Uint32Array(TypedArrayPrototypeGetBuffer(hrU8)); -function opNow() { - op_now(hrU8); - return (hr[0] * 1000 + hr[1] / 1e6); -} - // --------------------------------------------------------------------------- function checkThis(thisArg) { @@ -151,7 +141,6 @@ export { clearInterval, clearTimeout, defer, - opNow, refTimer, setImmediate, setInterval, diff --git a/ext/web/15_performance.js b/ext/web/15_performance.js index ea55572781..9e0e310a57 100644 --- a/ext/web/15_performance.js +++ b/ext/web/15_performance.js @@ -1,6 +1,7 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
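The timer plumbing that follows moves the high-resolution clock read into 15_performance.js: the op writes two native-endian u32s (whole seconds, then sub-second nanoseconds) into an 8-byte buffer shared with JS, which folds them into a float millisecond value. For intuition, a self-contained sketch of the decode (values made up; the real code reads the buffer via primordials rather than a bare `.buffer` access):

const hrU8 = new Uint8Array(8);
const hr = new Uint32Array(hrU8.buffer);
// Pretend the op wrote 5 seconds and 250ms worth of nanoseconds.
hr[0] = 5;
hr[1] = 250_000_000;
const milliseconds = hr[0] * 1000 + hr[1] / 1e6; // 5250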
 import { primordials } from "ext:core/mod.js";
+import { op_now, op_time_origin } from "ext:core/ops";
 const {
   ArrayPrototypeFilter,
   ArrayPrototypePush,
@@ -10,19 +11,34 @@ const {
   Symbol,
   SymbolFor,
   TypeError,
+  TypedArrayPrototypeGetBuffer,
+  Uint8Array,
+  Uint32Array,
 } = primordials;
 
 import * as webidl from "ext:deno_webidl/00_webidl.js";
 import { structuredClone } from "./02_structured_clone.js";
 import { createFilteredInspectProxy } from "ext:deno_console/01_console.js";
 import { EventTarget } from "./02_event.js";
-import { opNow } from "./02_timers.js";
 import { DOMException } from "./01_dom_exception.js";
 
 const illegalConstructorKey = Symbol("illegalConstructorKey");
 let performanceEntries = [];
 let timeOrigin;
 
+const hrU8 = new Uint8Array(8);
+const hr = new Uint32Array(TypedArrayPrototypeGetBuffer(hrU8));
+
+function setTimeOrigin() {
+  op_time_origin(hrU8);
+  timeOrigin = hr[0] * 1000 + hr[1] / 1e6;
+}
+
+function now() {
+  op_now(hrU8);
+  return hr[0] * 1000 + hr[1] / 1e6;
+}
+
 webidl.converters["PerformanceMarkOptions"] = webidl
   .createDictionaryConverter(
     "PerformanceMarkOptions",
@@ -90,10 +106,6 @@ webidl.converters["DOMString or PerformanceMeasureOptions"] = (
   return webidl.converters.DOMString(V, prefix, context, opts);
 };
 
-function setTimeOrigin(origin) {
-  timeOrigin = origin;
-}
-
 function findMostRecent(
   name,
   type,
@@ -135,8 +147,6 @@ function filterByNameType(
   );
 }
 
-const now = opNow;
-
 const _name = Symbol("[[name]]");
 const _entryType = Symbol("[[entryType]]");
 const _startTime = Symbol("[[startTime]]");
diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml
index 395ea182f9..db28d0e578 100644
--- a/ext/web/Cargo.toml
+++ b/ext/web/Cargo.toml
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_web"
-version = "0.204.0"
+version = "0.208.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
diff --git a/ext/web/lib.rs b/ext/web/lib.rs
index 4935af5bd3..af0fc2c276 100644
--- a/ext/web/lib.rs
+++ b/ext/web/lib.rs
@@ -52,7 +52,8 @@ pub use crate::message_port::Transferable;
 
 use crate::timers::op_defer;
 use crate::timers::op_now;
-use crate::timers::StartTime;
+use crate::timers::op_time_origin;
+pub use crate::timers::StartTime;
 pub use crate::timers::TimersPermission;
 
 deno_core::extension!(deno_web,
@@ -84,6 +85,7 @@ deno_core::extension!(deno_web,
     compression::op_compression_write,
     compression::op_compression_finish,
     op_now<P>,
+    op_time_origin<P>,
     op_defer,
     stream_resource::op_readable_stream_resource_allocate,
     stream_resource::op_readable_stream_resource_allocate_sized,
@@ -123,7 +125,7 @@ deno_core::extension!(deno_web,
     if let Some(location) = options.maybe_location {
       state.put(Location(location));
    }
-    state.put(StartTime::now());
+    state.put(StartTime::default());
  }
);
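The timers hunk below moves `performance.timeOrigin` from a value cached at startup on the JS side to a dedicated `op_time_origin` op computed on demand. The arithmetic is small enough to model in isolation; this is a standalone sketch using only std, not the extension's actual types:

// Sketch of the time-origin estimate used below, per the hr-time definition
// of the "estimated monotonic time of the Unix epoch"
// (https://w3c.github.io/hr-time/#dfn-estimated-monotonic-time-of-the-unix-epoch).
// `start` plays the role of the runtime's StartTime.
use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};

fn estimated_time_origin(start: Instant) -> Duration {
    // Wall-clock "now" minus how long the runtime has been alive yields a
    // stable epoch offset for the moment the runtime started.
    let wall_time = SystemTime::now();
    let monotonic_elapsed = start.elapsed();
    wall_time.duration_since(UNIX_EPOCH).unwrap() - monotonic_elapsed
}

fn main() {
    let start = Instant::now();
    let origin = estimated_time_origin(start);
    // The JS side derives performance.timeOrigin in milliseconds the same way.
    println!("timeOrigin (ms): {}", origin.as_millis());
}

Because the subtraction uses the monotonic clock for elapsed time, the estimate does not drift as `now()` readings accumulate.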
diff --git a/ext/web/timers.rs b/ext/web/timers.rs
index a9ab7c97e4..06444ed34f 100644
--- a/ext/web/timers.rs
+++ b/ext/web/timers.rs
@@ -4,7 +4,10 @@
 
 use deno_core::op2;
 use deno_core::OpState;
+use std::time::Duration;
 use std::time::Instant;
+use std::time::SystemTime;
+use std::time::UNIX_EPOCH;
 
 pub trait TimersPermission {
   fn allow_hrtime(&mut self) -> bool;
@@ -17,21 +20,28 @@ impl TimersPermission for deno_permissions::PermissionsContainer {
   }
 }
 
-pub type StartTime = Instant;
+pub struct StartTime(Instant);
 
-// Returns a milliseconds and nanoseconds subsec
-// since the start time of the deno runtime.
-// If the High precision flag is not set, the
-// nanoseconds are rounded on 2ms.
-#[op2(fast)]
-pub fn op_now<TP>(state: &mut OpState, #[buffer] buf: &mut [u8])
+impl Default for StartTime {
+  fn default() -> Self {
+    Self(Instant::now())
+  }
+}
+
+impl std::ops::Deref for StartTime {
+  type Target = Instant;
+
+  fn deref(&self) -> &Self::Target {
+    &self.0
+  }
+}
+
+fn expose_time<TP>(state: &mut OpState, duration: Duration, out: &mut [u8])
 where
   TP: TimersPermission + 'static,
 {
-  let start_time = state.borrow::<StartTime>();
-  let elapsed = start_time.elapsed();
-  let seconds = elapsed.as_secs();
-  let mut subsec_nanos = elapsed.subsec_nanos();
+  let seconds = duration.as_secs() as u32;
+  let mut subsec_nanos = duration.subsec_nanos();
 
   // If the permission is not enabled
   // Round the nano result on 2 milliseconds
@@ -40,14 +50,33 @@ where
     let reduced_time_precision = 2_000_000; // 2ms in nanoseconds
     subsec_nanos -= subsec_nanos % reduced_time_precision;
   }
-  if buf.len() < 8 {
-    return;
+
+  if out.len() >= 8 {
+    out[0..4].copy_from_slice(&seconds.to_ne_bytes());
+    out[4..8].copy_from_slice(&subsec_nanos.to_ne_bytes());
   }
-  let buf: &mut [u32] =
-    // SAFETY: buffer is at least 8 bytes long.
-    unsafe { std::slice::from_raw_parts_mut(buf.as_mut_ptr() as _, 2) };
-  buf[0] = seconds as u32;
-  buf[1] = subsec_nanos;
+}
+
+#[op2(fast)]
+pub fn op_now<TP>(state: &mut OpState, #[buffer] buf: &mut [u8])
+where
+  TP: TimersPermission + 'static,
+{
+  let start_time = state.borrow::<StartTime>();
+  let elapsed = start_time.elapsed();
+  expose_time::<TP>(state, elapsed, buf);
+}
+
+#[op2(fast)]
+pub fn op_time_origin<TP>(state: &mut OpState, #[buffer] buf: &mut [u8])
+where
+  TP: TimersPermission + 'static,
+{
+  // https://w3c.github.io/hr-time/#dfn-estimated-monotonic-time-of-the-unix-epoch
+  let wall_time = SystemTime::now();
+  let monotonic_time = state.borrow::<StartTime>().elapsed();
+  let epoch = wall_time.duration_since(UNIX_EPOCH).unwrap() - monotonic_time;
+  expose_time::<TP>(state, epoch, buf);
 }
 
 #[allow(clippy::unused_async)]
diff --git a/ext/webgpu/Cargo.toml b/ext/webgpu/Cargo.toml
index 7354919d4a..f23bb8371e 100644
--- a/ext/webgpu/Cargo.toml
+++ b/ext/webgpu/Cargo.toml
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_webgpu"
-version = "0.140.0"
+version = "0.144.0"
 authors = ["the Deno authors"]
 edition.workspace = true
 license = "MIT"
diff --git a/ext/webidl/Cargo.toml b/ext/webidl/Cargo.toml
index d069b32125..8c3f6f6128 100644
--- a/ext/webidl/Cargo.toml
+++ b/ext/webidl/Cargo.toml
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_webidl"
-version = "0.173.0"
+version = "0.177.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml
index ec359100d4..61f1f5959c 100644
--- a/ext/websocket/Cargo.toml
+++ b/ext/websocket/Cargo.toml
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_websocket"
-version = "0.178.0"
+version = "0.182.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs
index 2a67ac5a17..a5734271cf 100644
--- a/ext/websocket/lib.rs
+++ b/ext/websocket/lib.rs
@@ -50,6 +50,7 @@
 use tokio::io::ReadHalf;
 use tokio::io::WriteHalf;
 use tokio::net::TcpStream;
 
+use deno_permissions::PermissionCheckError;
 use fastwebsockets::CloseCode;
 use fastwebsockets::FragmentCollectorRead;
 use fastwebsockets::Frame;
@@ -75,7 +76,7 @@ pub enum WebsocketError {
   #[error(transparent)]
   Url(url::ParseError),
   #[error(transparent)]
-  Permission(deno_core::error::AnyError),
+  Permission(#[from] PermissionCheckError),
   #[error(transparent)]
   Resource(deno_core::error::AnyError),
   #[error(transparent)]
@@ -112,7 +113,7 @@ pub trait WebSocketPermissions {
     &mut self,
     _url: &url::Url,
     _api_name: &str,
-  ) -> Result<(), deno_core::error::AnyError>;
+  ) -> Result<(), PermissionCheckError>;
 }
 
 impl WebSocketPermissions for deno_permissions::PermissionsContainer {
@@ -121,7 +122,7 @@ impl WebSocketPermissions for deno_permissions::PermissionsContainer {
     &mut self,
     url: &url::Url,
     api_name: &str,
-  ) -> Result<(), deno_core::error::AnyError> {
+  ) -> Result<(), PermissionCheckError> {
     deno_permissions::PermissionsContainer::check_net_url(self, url, api_name)
   }
 }
@@ -158,13 +159,10 @@ pub fn op_ws_check_permission_and_cancel_handle<WP>(
 where
   WP: WebSocketPermissions + 'static,
 {
-  state
-    .borrow_mut::<WP>()
-    .check_net_url(
-      &url::Url::parse(&url).map_err(WebsocketError::Url)?,
-      &api_name,
-    )
-    .map_err(WebsocketError::Permission)?;
+  state.borrow_mut::<WP>().check_net_url(
+    &url::Url::parse(&url).map_err(WebsocketError::Url)?,
+    &api_name,
+  )?;
 
   if cancel_handle {
     let rid = state
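The websocket change above swaps an untyped `AnyError` for a typed `PermissionCheckError` and relies on thiserror's `#[from]` so that `?` performs the conversion that `.map_err(WebsocketError::Permission)` used to do by hand. A minimal self-contained sketch of that pattern; `PermissionDenied` and `check_net` are illustrative stand-ins, not the real deno_permissions API:

use thiserror::Error;

#[derive(Debug, Error)]
#[error("permission denied: {0}")]
pub struct PermissionDenied(String);

#[derive(Debug, Error)]
pub enum WsError {
    #[error(transparent)]
    Permission(#[from] PermissionDenied), // generates From<PermissionDenied>
    #[error(transparent)]
    Url(#[from] url::ParseError),
}

fn check_net(url: &url::Url) -> Result<(), PermissionDenied> {
    if url.scheme() == "wss" {
        Ok(())
    } else {
        Err(PermissionDenied(url.to_string()))
    }
}

fn connect(raw: &str) -> Result<(), WsError> {
    let url = url::Url::parse(raw)?; // converted via From<url::ParseError>
    check_net(&url)?; // converted via From<PermissionDenied>
    Ok(())
}

fn main() {
    assert!(connect("wss://example.com").is_ok());
    assert!(connect("ws://example.com").is_err());
}

A typed variant also lets callers such as the runtime's error-class mapping match on the concrete failure instead of downcasting.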
diff --git a/ext/webstorage/Cargo.toml b/ext/webstorage/Cargo.toml
index 0d6fcd6a0d..01e23ab839 100644
--- a/ext/webstorage/Cargo.toml
+++ b/ext/webstorage/Cargo.toml
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_webstorage"
-version = "0.168.0"
+version = "0.172.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
diff --git a/resolvers/deno/Cargo.toml b/resolvers/deno/Cargo.toml
index e75a9a1014..24d50587b3 100644
--- a/resolvers/deno/Cargo.toml
+++ b/resolvers/deno/Cargo.toml
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_resolver"
-version = "0.5.0"
+version = "0.9.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
diff --git a/resolvers/deno/fs.rs b/resolvers/deno/fs.rs
index b08be37982..44495fa7c2 100644
--- a/resolvers/deno/fs.rs
+++ b/resolvers/deno/fs.rs
@@ -15,13 +15,3 @@ pub trait DenoResolverFs {
   fn is_dir_sync(&self, path: &Path) -> bool;
   fn read_dir_sync(&self, dir_path: &Path) -> std::io::Result<Vec<DirEntry>>;
 }
-
-pub(crate) struct DenoPkgJsonFsAdapter<'a, Fs: DenoResolverFs>(pub &'a Fs);
-
-impl<'a, Fs: DenoResolverFs> deno_package_json::fs::DenoPkgJsonFs
-  for DenoPkgJsonFsAdapter<'a, Fs>
-{
-  fn read_to_string_lossy(&self, path: &Path) -> std::io::Result<String> {
-    self.0.read_to_string_lossy(path)
-  }
-}
diff --git a/resolvers/deno/npm/byonm.rs b/resolvers/deno/npm/byonm.rs
index 3394b3e501..b85117052c 100644
--- a/resolvers/deno/npm/byonm.rs
+++ b/resolvers/deno/npm/byonm.rs
@@ -10,16 +10,17 @@ use deno_package_json::PackageJsonDepValue;
 use deno_path_util::url_to_file_path;
 use deno_semver::package::PackageReq;
 use deno_semver::Version;
+use node_resolver::env::NodeResolverEnv;
 use node_resolver::errors::PackageFolderResolveError;
 use node_resolver::errors::PackageFolderResolveIoError;
 use node_resolver::errors::PackageJsonLoadError;
 use node_resolver::errors::PackageNotFoundError;
-use node_resolver::load_pkg_json;
+use node_resolver::InNpmPackageChecker;
 use node_resolver::NpmResolver;
+use node_resolver::PackageJsonResolverRc;
 use thiserror::Error;
 use url::Url;
 
-use crate::fs::DenoPkgJsonFsAdapter;
 use crate::fs::DenoResolverFs;
 
 use super::local::normalize_pkg_name_for_node_modules_deno_folder;
@@ -36,32 +37,41 @@ pub enum ByonmResolvePkgFolderFromDenoReqError {
   Io(#[from] std::io::Error),
 }
 
-pub struct ByonmNpmResolverCreateOptions<Fs: DenoResolverFs> {
-  pub fs: Fs,
+pub struct ByonmNpmResolverCreateOptions<
+  Fs: DenoResolverFs,
+  TEnv: NodeResolverEnv,
+> {
   // todo(dsherret): investigate removing this
   pub root_node_modules_dir: Option<PathBuf>,
+  pub fs: Fs,
+  pub pkg_json_resolver: PackageJsonResolverRc<TEnv>,
 }
 
 #[derive(Debug)]
-pub struct ByonmNpmResolver<Fs: DenoResolverFs> {
+pub struct ByonmNpmResolver<Fs: DenoResolverFs, TEnv: NodeResolverEnv> {
   fs: Fs,
+  pkg_json_resolver: PackageJsonResolverRc<TEnv>,
   root_node_modules_dir: Option<PathBuf>,
 }
 
-impl<Fs: Clone + DenoResolverFs> Clone for ByonmNpmResolver<Fs> {
+impl<Fs: Clone + DenoResolverFs, TEnv: NodeResolverEnv> Clone
+  for ByonmNpmResolver<Fs, TEnv>
+{
   fn clone(&self) -> Self {
     Self {
       fs: self.fs.clone(),
+      pkg_json_resolver: self.pkg_json_resolver.clone(),
       root_node_modules_dir: self.root_node_modules_dir.clone(),
     }
   }
 }
 
-impl<Fs: DenoResolverFs> ByonmNpmResolver<Fs> {
-  pub fn new(options: ByonmNpmResolverCreateOptions<Fs>) -> Self {
+impl<Fs: DenoResolverFs, TEnv: NodeResolverEnv> ByonmNpmResolver<Fs, TEnv> {
+  pub fn new(options: ByonmNpmResolverCreateOptions<Fs, TEnv>) -> Self {
     Self {
-      fs: options.fs,
       root_node_modules_dir: options.root_node_modules_dir,
+      fs: options.fs,
+      pkg_json_resolver: options.pkg_json_resolver,
     }
   }
 
@@ -73,7 +83,7 @@ impl<Fs: DenoResolverFs> ByonmNpmResolver<Fs> {
     &self,
     path: &Path,
   ) -> Result<Option<PackageJsonRc>, PackageJsonLoadError> {
-    load_pkg_json(&DenoPkgJsonFsAdapter(&self.fs), path)
+    self.pkg_json_resolver.load_package_json(path)
   }
 
   /// Finds the ancestor package.json that contains the specified dependency.
@@ -290,8 +300,10 @@ impl<Fs: DenoResolverFs> ByonmNpmResolver<Fs> {
     }
   }
 }
 
-impl<Fs: DenoResolverFs + Send + Sync + std::fmt::Debug> NpmResolver
-  for ByonmNpmResolver<Fs>
+impl<
+    Fs: DenoResolverFs + Send + Sync + std::fmt::Debug,
+    TEnv: NodeResolverEnv,
+  > NpmResolver for ByonmNpmResolver<Fs, TEnv>
 {
   fn resolve_package_folder_from_package(
     &self,
@@ -342,7 +354,12 @@ impl<Fs: DenoResolverFs + Send + Sync + std::fmt::Debug> NpmResolver
       .into()
     })
   }
+}
 
+#[derive(Debug)]
+pub struct ByonmInNpmPackageChecker;
+
+impl InNpmPackageChecker for ByonmInNpmPackageChecker {
   fn in_npm_package(&self, specifier: &Url) -> bool {
     specifier.scheme() == "file"
       && specifier
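The byonm hunks above split the "is this specifier inside an npm package?" question out of `NpmResolver` into a separate `InNpmPackageChecker` object, for which BYONM ("bring your own node_modules") only needs a stateless path test. A self-contained model of that split; the names below are illustrative, not the crate's exact definitions:

use url::Url;

pub trait InNpmPackageChecker {
    fn in_npm_package(&self, specifier: &Url) -> bool;
}

// BYONM: a plain node_modules path test is sufficient, so the checker
// carries no state at all.
#[derive(Debug)]
pub struct ByonmChecker;

impl InNpmPackageChecker for ByonmChecker {
    fn in_npm_package(&self, specifier: &Url) -> bool {
        specifier.scheme() == "file"
            && specifier.path().contains("/node_modules/")
    }
}

fn main() {
    let checker = ByonmChecker;
    let url = Url::parse("file:///app/node_modules/chalk/index.js").unwrap();
    assert!(checker.in_npm_package(&url));
}

Splitting the trait lets callers that only need the check (for example the CJS analyzer later in this diff) depend on the small interface instead of the full resolver.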
diff --git a/resolvers/deno/npm/mod.rs b/resolvers/deno/npm/mod.rs
index 9d885cad31..45e2341c78 100644
--- a/resolvers/deno/npm/mod.rs
+++ b/resolvers/deno/npm/mod.rs
@@ -3,6 +3,7 @@
 mod byonm;
 mod local;
 
+pub use byonm::ByonmInNpmPackageChecker;
 pub use byonm::ByonmNpmResolver;
 pub use byonm::ByonmNpmResolverCreateOptions;
 pub use byonm::ByonmResolvePkgFolderFromDenoReqError;
diff --git a/resolvers/deno/sloppy_imports.rs b/resolvers/deno/sloppy_imports.rs
index e215e87686..7aba5b771a 100644
--- a/resolvers/deno/sloppy_imports.rs
+++ b/resolvers/deno/sloppy_imports.rs
@@ -232,7 +232,7 @@ impl<Fs: DenoResolverFs> SloppyImportsResolver<Fs> {
         | MediaType::Tsx
         | MediaType::Json
         | MediaType::Wasm
-        | MediaType::TsBuildInfo
+        | MediaType::Css
        | MediaType::SourceMap => {
           return None;
         }
diff --git a/resolvers/node/Cargo.toml b/resolvers/node/Cargo.toml
index 44549bec19..6c2407ad9f 100644
--- a/resolvers/node/Cargo.toml
+++ b/resolvers/node/Cargo.toml
@@ -2,7 +2,7 @@
 
 [package]
 name = "node_resolver"
-version = "0.12.0"
+version = "0.16.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
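The sloppy_imports hunk above adjusts which media types are excluded from fix-ups: for specifiers that already resolve to a known asset type, the resolver returns None and no alternative path is probed. A loose, simplified model of the probing that happens in the remaining cases, under stated assumptions (std only; the real resolver consults its DenoResolverFs abstraction and more candidates):

use std::path::{Path, PathBuf};

// Probe sibling paths with likely source extensions when the specifier
// itself does not exist. Illustrative only.
fn probe_sloppy(path: &Path, exists: impl Fn(&Path) -> bool) -> Option<PathBuf> {
    if exists(path) {
        return None; // exact hit needs no correction
    }
    for ext in ["ts", "tsx", "js", "mjs", "jsx"] {
        let candidate = path.with_extension(ext);
        if exists(&candidate) {
            return Some(candidate);
        }
    }
    None
}

fn main() {
    let fake_fs = |p: &Path| p == Path::new("src/util.ts");
    let fixed = probe_sloppy(Path::new("src/util"), fake_fs);
    assert_eq!(fixed, Some(PathBuf::from("src/util.ts")));
}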
diff --git a/resolvers/node/analyze.rs b/resolvers/node/analyze.rs
index 009296006a..c7415933d7 100644
--- a/resolvers/node/analyze.rs
+++ b/resolvers/node/analyze.rs
@@ -19,18 +19,19 @@ use anyhow::Error as AnyError;
 use url::Url;
 
 use crate::env::NodeResolverEnv;
-use crate::package_json::load_pkg_json;
+use crate::npm::InNpmPackageCheckerRc;
 use crate::resolution::NodeResolverRc;
 use crate::NodeModuleKind;
 use crate::NodeResolutionMode;
 use crate::NpmResolverRc;
+use crate::PackageJsonResolverRc;
 use crate::PathClean;
 
 #[derive(Debug, Clone)]
-pub enum CjsAnalysis {
+pub enum CjsAnalysis<'a> {
   /// File was found to be an ES module and the translator should
   /// load the code as ESM.
-  Esm(String),
+  Esm(Cow<'a, str>),
   Cjs(CjsAnalysisExports),
 }
 
@@ -50,11 +51,11 @@ pub trait CjsCodeAnalyzer {
   /// already has it. If the source is needed by the implementation,
   /// then it can use the provided source, or otherwise load it if
   /// necessary.
-  async fn analyze_cjs(
+  async fn analyze_cjs<'a>(
     &self,
     specifier: &Url,
-    maybe_source: Option<String>,
-  ) -> Result<CjsAnalysis, AnyError>;
+    maybe_source: Option<Cow<'a, str>>,
+  ) -> Result<CjsAnalysis<'a>, AnyError>;
 }
 
 pub struct NodeCodeTranslator<
@@ -63,8 +64,10 @@ pub struct NodeCodeTranslator<
 > {
   cjs_code_analyzer: TCjsCodeAnalyzer,
   env: TNodeResolverEnv,
+  in_npm_pkg_checker: InNpmPackageCheckerRc,
   node_resolver: NodeResolverRc<TNodeResolverEnv>,
   npm_resolver: NpmResolverRc,
+  pkg_json_resolver: PackageJsonResolverRc<TNodeResolverEnv>,
 }
 
 impl<TCjsCodeAnalyzer: CjsCodeAnalyzer, TNodeResolverEnv: NodeResolverEnv>
@@ -73,14 +76,18 @@
   pub fn new(
     cjs_code_analyzer: TCjsCodeAnalyzer,
     env: TNodeResolverEnv,
+    in_npm_pkg_checker: InNpmPackageCheckerRc,
     node_resolver: NodeResolverRc<TNodeResolverEnv>,
     npm_resolver: NpmResolverRc,
+    pkg_json_resolver: PackageJsonResolverRc<TNodeResolverEnv>,
   ) -> Self {
     Self {
       cjs_code_analyzer,
       env,
+      in_npm_pkg_checker,
       node_resolver,
       npm_resolver,
+      pkg_json_resolver,
     }
   }
 
@@ -90,11 +97,11 @@
   /// For all discovered reexports the analysis will be performed recursively.
   ///
   /// If successful a source code for equivalent ES module is returned.
-  pub async fn translate_cjs_to_esm(
+  pub async fn translate_cjs_to_esm<'a>(
     &self,
     entry_specifier: &Url,
-    source: Option<String>,
-  ) -> Result<String, AnyError> {
+    source: Option<Cow<'a, str>>,
+  ) -> Result<Cow<'a, str>, AnyError> {
     let mut temp_var_count = 0;
 
     let analysis = self
@@ -108,7 +115,7 @@
     };
 
     let mut source = vec![
-      r#"import {createRequire as __internalCreateRequire} from "node:module";
+      r#"import {createRequire as __internalCreateRequire, Module as __internalModule } from "node:module";
      const require = __internalCreateRequire(import.meta.url);"#
        .to_string(),
     ];
@@ -135,7 +142,12 @@
     }
 
     source.push(format!(
-      "const mod = require(\"{}\");",
+      r#"let mod;
+      if (import.meta.main) {{
+        mod = __internalModule._load("{0}", null, true)
+      }} else {{
+        mod = require("{0}");
+      }}"#,
       url_to_file_path(entry_specifier)
         .unwrap()
        .to_str()
@@ -159,7 +171,7 @@
     source.push("export default mod;".to_string());
 
     let translated_source = source.join("\n");
-    Ok(translated_source)
+    Ok(Cow::Owned(translated_source))
   }
 
   async fn analyze_reexports<'a>(
@@ -174,7 +186,7 @@
     struct Analysis {
       reexport_specifier: url::Url,
       referrer: url::Url,
-      analysis: CjsAnalysis,
+      analysis: CjsAnalysis<'static>,
     }
 
     type AnalysisFuture<'a> = LocalBoxFuture<'a, Result<Analysis, AnyError>>;
@@ -329,8 +341,9 @@
     }?;
 
     let package_json_path = module_dir.join("package.json");
-    let maybe_package_json =
-      load_pkg_json(self.env.pkg_json_fs(), &package_json_path)?;
+    let maybe_package_json = self
+      .pkg_json_resolver
+      .load_package_json(&package_json_path)?;
     if let Some(package_json) = maybe_package_json {
       if let Some(exports) = &package_json.exports {
         return Some(
@@ -356,8 +369,9 @@
       if self.env.is_dir_sync(&d) {
         // subdir might have a package.json that specifies the entrypoint
         let package_json_path = d.join("package.json");
-        let maybe_package_json =
-          load_pkg_json(self.env.pkg_json_fs(), &package_json_path)?;
+        let maybe_package_json = self
+          .pkg_json_resolver
+          .load_package_json(&package_json_path)?;
         if let Some(package_json) = maybe_package_json {
          if let Some(main) = package_json.main(NodeModuleKind::Cjs) {
             return Ok(Some(url_from_file_path(&d.join(main).clean())?));
@@ -382,7 +396,7 @@
     // as a fallback, attempt to resolve it via the ancestor directories
     let mut last = referrer_path.as_path();
     while let Some(parent) = last.parent() {
-      if !self.npm_resolver.in_npm_package_at_dir_path(parent) {
+      if !self.in_npm_pkg_checker.in_npm_package_at_dir_path(parent) {
         break;
       }
 
       let path = if parent.ends_with("node_modules") {
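The translator change above alters the ESM shim emitted for a CJS entry point: when the module is the process entry (`import.meta.main`), it is loaded through `Module._load(..., null, true)` so that `require.main` gets set, otherwise a plain `require` is used. A standalone sketch that assembles the same wrapper outside the real translator; the real code additionally rewrites exports that are not valid identifiers, which this sketch skips:

// Build the ESM wrapper text for a CJS entry, mirroring the strings in the
// hunk above. `file_path` and `exports` come from the caller.
fn cjs_wrapper(file_path: &str, exports: &[&str]) -> String {
    let mut source = vec![
        r#"import {createRequire as __internalCreateRequire, Module as __internalModule } from "node:module";
const require = __internalCreateRequire(import.meta.url);"#
            .to_string(),
    ];
    source.push(format!(
        r#"let mod;
if (import.meta.main) {{
  mod = __internalModule._load("{0}", null, true)
}} else {{
  mod = require("{0}");
}}"#,
        file_path
    ));
    for export in exports {
        // re-export each analyzed CJS export as a named ESM export
        source.push(format!("export const {0} = mod.{0};", export));
    }
    source.push("export default mod;".to_string());
    source.join("\n")
}

fn main() {
    println!("{}", cjs_wrapper("/app/entry.cjs", &["parse"]));
}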
diff --git a/resolvers/node/errors.rs b/resolvers/node/errors.rs
index 4ba829eda5..aacbecefb4 100644
--- a/resolvers/node/errors.rs
+++ b/resolvers/node/errors.rs
@@ -81,29 +81,6 @@ pub trait NodeJsErrorCoded {
   fn code(&self) -> NodeJsErrorCode;
 }
 
-kinded_err!(
-  ResolvePkgSubpathFromDenoModuleError,
-  ResolvePkgSubpathFromDenoModuleErrorKind
-);
-
-impl NodeJsErrorCoded for ResolvePkgSubpathFromDenoModuleError {
-  fn code(&self) -> NodeJsErrorCode {
-    use ResolvePkgSubpathFromDenoModuleErrorKind::*;
-    match self.as_kind() {
-      PackageSubpathResolve(e) => e.code(),
-      UrlToNodeResolution(e) => e.code(),
-    }
-  }
-}
-
-#[derive(Debug, Error)]
-pub enum ResolvePkgSubpathFromDenoModuleErrorKind {
-  #[error(transparent)]
-  PackageSubpathResolve(#[from] PackageSubpathResolveError),
-  #[error(transparent)]
-  UrlToNodeResolution(#[from] UrlToNodeResolutionError),
-}
-
 // todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError
 #[derive(Debug, Clone, Error)]
 #[error(
@@ -394,37 +371,6 @@ impl NodeJsErrorCoded for CanonicalizingPkgJsonDirError {
   }
 }
 
-#[derive(Debug, Error)]
-#[error("TypeScript files are not supported in npm packages: {specifier}")]
-pub struct TypeScriptNotSupportedInNpmError {
-  pub specifier: Url,
-}
-
-impl NodeJsErrorCoded for TypeScriptNotSupportedInNpmError {
-  fn code(&self) -> NodeJsErrorCode {
-    NodeJsErrorCode::ERR_UNKNOWN_FILE_EXTENSION
-  }
-}
-
-kinded_err!(UrlToNodeResolutionError, UrlToNodeResolutionErrorKind);
-
-impl NodeJsErrorCoded for UrlToNodeResolutionError {
-  fn code(&self) -> NodeJsErrorCode {
-    match self.as_kind() {
-      UrlToNodeResolutionErrorKind::TypeScriptNotSupported(e) => e.code(),
-      UrlToNodeResolutionErrorKind::ClosestPkgJson(e) => e.code(),
-    }
-  }
-}
-
-#[derive(Debug, Error)]
-pub enum UrlToNodeResolutionErrorKind {
-  #[error(transparent)]
-  TypeScriptNotSupported(#[from] TypeScriptNotSupportedInNpmError),
-  #[error(transparent)]
-  ClosestPkgJson(#[from] ClosestPkgJsonError),
-}
-
 // todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError
 #[derive(Debug, Error)]
 #[error(
@@ -533,8 +479,6 @@ pub enum NodeResolveErrorKind {
   TypesNotFound(#[from] TypesNotFoundError),
   #[error(transparent)]
   FinalizeResolution(#[from] FinalizeResolutionError),
-  #[error(transparent)]
-  UrlToNodeResolution(#[from] UrlToNodeResolutionError),
 }
 
 kinded_err!(FinalizeResolutionError, FinalizeResolutionErrorKind);
@@ -728,8 +672,6 @@ pub enum ResolvePkgJsonBinExportError {
   MissingPkgJson { pkg_json_path: PathBuf },
   #[error("Failed resolving binary export. {message}")]
   InvalidBinProperty { message: String },
-  #[error(transparent)]
-  UrlToNodeResolution(#[from] UrlToNodeResolutionError),
 }
 
 #[derive(Debug, Error)]
diff --git a/resolvers/node/lib.rs b/resolvers/node/lib.rs
index f03f770486..18b0a85363 100644
--- a/resolvers/node/lib.rs
+++ b/resolvers/node/lib.rs
@@ -13,9 +13,12 @@ mod resolution;
 mod sync;
 
 pub use deno_package_json::PackageJson;
+pub use npm::InNpmPackageChecker;
+pub use npm::InNpmPackageCheckerRc;
 pub use npm::NpmResolver;
 pub use npm::NpmResolverRc;
-pub use package_json::load_pkg_json;
+pub use package_json::PackageJsonResolver;
+pub use package_json::PackageJsonResolverRc;
 pub use package_json::PackageJsonThreadLocalCache;
 pub use path::PathClean;
 pub use resolution::parse_npm_pkg_name;
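The lib.rs exports above replace the free `load_pkg_json` helper with shared `*Rc` handles. Those aliases are built on the crate's `sync::MaybeArc`, which swaps `Rc` for `Arc` depending on a feature flag. A self-contained model of that idea, not the crate's code:

// Under a "sync" feature the alias points at Arc; otherwise at Rc, so
// single-threaded builds avoid atomic refcounting.
#[cfg(feature = "sync")]
pub type MaybeArc<T> = std::sync::Arc<T>;
#[cfg(not(feature = "sync"))]
pub type MaybeArc<T> = std::rc::Rc<T>;

#[derive(Debug, Default)]
pub struct PackageJsonResolver;

pub type PackageJsonResolverRc = MaybeArc<PackageJsonResolver>;

fn main() {
    // One resolver instance, cheaply shared among the services that need it
    // (node resolver, byonm resolver, CJS translator in this diff).
    let pkg_json_resolver: PackageJsonResolverRc = MaybeArc::new(PackageJsonResolver);
    let for_node_resolver = pkg_json_resolver.clone();
    let for_translator = pkg_json_resolver.clone();
    drop((for_node_resolver, for_translator));
}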
diff --git a/resolvers/node/npm.rs b/resolvers/node/npm.rs
index 6b5f21db62..2132f0b545 100644
--- a/resolvers/node/npm.rs
+++ b/resolvers/node/npm.rs
@@ -22,7 +22,13 @@ pub trait NpmResolver: std::fmt::Debug + MaybeSend + MaybeSync {
     specifier: &str,
     referrer: &Url,
   ) -> Result<PathBuf, PackageFolderResolveError>;
+}
 
+#[allow(clippy::disallowed_types)]
+pub type InNpmPackageCheckerRc = crate::sync::MaybeArc<dyn InNpmPackageChecker>;
+
+/// Checks if a provided specifier is in an npm package.
+pub trait InNpmPackageChecker: std::fmt::Debug + MaybeSend + MaybeSync {
   fn in_npm_package(&self, specifier: &Url) -> bool;
 
   fn in_npm_package_at_dir_path(&self, path: &Path) -> bool {
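The package.json resolver that follows keeps a thread-local cache so each worker thread has its own map from path to parsed file and no locking is needed. A minimal sketch of that caching pattern with std only, using a `String` stand-in for the parsed `PackageJson` value:

use std::cell::RefCell;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::rc::Rc;

thread_local! {
    // Per-thread cache: workers never contend on a shared lock.
    static CACHE: RefCell<HashMap<PathBuf, Rc<String>>> =
        RefCell::new(HashMap::new());
}

fn load_cached(path: &Path, load: impl Fn(&Path) -> String) -> Rc<String> {
    CACHE.with(|cache| {
        if let Some(hit) = cache.borrow().get(path) {
            return hit.clone();
        }
        let parsed = Rc::new(load(path));
        cache.borrow_mut().insert(path.to_path_buf(), parsed.clone());
        parsed
    })
}

fn main() {
    let loaded = load_cached(Path::new("package.json"), |_| "{}".to_string());
    assert_eq!(*loaded, "{}");
}

As the todo in the hunk below notes, a per-worker cache passed in as a constructor argument would express the same ownership more directly than a thread local.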
diff --git a/resolvers/node/package_json.rs b/resolvers/node/package_json.rs
index de750f1d7e..6967779e5d 100644
--- a/resolvers/node/package_json.rs
+++ b/resolvers/node/package_json.rs
@@ -2,15 +2,21 @@
 
 use deno_package_json::PackageJson;
 use deno_package_json::PackageJsonRc;
+use deno_path_util::strip_unc_prefix;
 use std::cell::RefCell;
 use std::collections::HashMap;
 use std::io::ErrorKind;
 use std::path::Path;
 use std::path::PathBuf;
+use url::Url;
 
+use crate::env::NodeResolverEnv;
+use crate::errors::CanonicalizingPkgJsonDirError;
+use crate::errors::ClosestPkgJsonError;
 use crate::errors::PackageJsonLoadError;
 
-// use a thread local cache so that workers have their own distinct cache
+// todo(dsherret): this isn't exactly correct and we should change it to instead
+// be created per worker and passed down as a ctor arg to the pkg json resolver
 thread_local! {
   static CACHE: RefCell<HashMap<PathBuf, PackageJsonRc>> = RefCell::new(HashMap::new());
 }
 
@@ -33,21 +39,91 @@ impl deno_package_json::PackageJsonCache for PackageJsonThreadLocalCache {
   }
 }
 
-/// Helper to load a package.json file using the thread local cache
-/// in node_resolver.
-pub fn load_pkg_json(
-  fs: &dyn deno_package_json::fs::DenoPkgJsonFs,
-  path: &Path,
-) -> Result<Option<PackageJsonRc>, PackageJsonLoadError> {
-  let result =
-    PackageJson::load_from_path(path, fs, Some(&PackageJsonThreadLocalCache));
-  match result {
-    Ok(pkg_json) => Ok(Some(pkg_json)),
-    Err(deno_package_json::PackageJsonLoadError::Io { source, .. })
-      if source.kind() == ErrorKind::NotFound =>
-    {
+#[allow(clippy::disallowed_types)]
+pub type PackageJsonResolverRc<TEnv> =
+  crate::sync::MaybeArc<PackageJsonResolver<TEnv>>;
+
+#[derive(Debug)]
+pub struct PackageJsonResolver<TEnv: NodeResolverEnv> {
+  env: TEnv,
+}
+
+impl<TEnv: NodeResolverEnv> PackageJsonResolver<TEnv> {
+  pub fn new(env: TEnv) -> Self {
+    Self { env }
+  }
+
+  pub fn get_closest_package_json(
+    &self,
+    url: &Url,
+  ) -> Result<Option<PackageJsonRc>, ClosestPkgJsonError> {
+    let Ok(file_path) = deno_path_util::url_to_file_path(url) else {
+      return Ok(None);
+    };
+    self.get_closest_package_json_from_path(&file_path)
+  }
+
+  pub fn get_closest_package_json_from_path(
+    &self,
+    file_path: &Path,
+  ) -> Result<Option<PackageJsonRc>, ClosestPkgJsonError> {
+    // we use this for deno compile using byonm because the script paths
+    // won't be in virtual file system, but the package.json paths will be
+    fn canonicalize_first_ancestor_exists<TEnv: NodeResolverEnv>(
+      dir_path: &Path,
+      env: &TEnv,
+    ) -> Result<Option<PathBuf>, std::io::Error> {
+      for ancestor in dir_path.ancestors() {
+        match env.realpath_sync(ancestor) {
+          Ok(dir_path) => return Ok(Some(dir_path)),
+          Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
+            // keep searching
+          }
+          Err(err) => return Err(err),
+        }
+      }
       Ok(None)
     }
-    Err(err) => Err(PackageJsonLoadError(err)),
+
+    let parent_dir = file_path.parent().unwrap();
+    let Some(start_dir) = canonicalize_first_ancestor_exists(
+      parent_dir, &self.env,
+    )
+    .map_err(|source| CanonicalizingPkgJsonDirError {
+      dir_path: parent_dir.to_path_buf(),
+      source,
+    })?
+    else {
+      return Ok(None);
+    };
+    let start_dir = strip_unc_prefix(start_dir);
+    for current_dir in start_dir.ancestors() {
+      let package_json_path = current_dir.join("package.json");
+      if let Some(pkg_json) = self.load_package_json(&package_json_path)? {
+        return Ok(Some(pkg_json));
+      }
+    }
+
+    Ok(None)
+  }
+
+  pub fn load_package_json(
+    &self,
+    path: &Path,
+  ) -> Result<Option<PackageJsonRc>, PackageJsonLoadError> {
+    let result = PackageJson::load_from_path(
+      path,
+      self.env.pkg_json_fs(),
+      Some(&PackageJsonThreadLocalCache),
+    );
+    match result {
+      Ok(pkg_json) => Ok(Some(pkg_json)),
+      Err(deno_package_json::PackageJsonLoadError::Io { source, .. })
+        if source.kind() == ErrorKind::NotFound =>
+      {
+        Ok(None)
+      }
+      Err(err) => Err(PackageJsonLoadError(err)),
+    }
   }
 }
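The core of `get_closest_package_json_from_path` above is an ancestor walk: starting from the file's directory, probe each parent for a package.json and stop at the first hit. Reduced to std (the real code goes through `NodeResolverEnv` and canonicalizes the start directory first):

use std::path::{Path, PathBuf};

fn closest_package_json(file_path: &Path) -> Option<PathBuf> {
    let parent = file_path.parent()?;
    // `ancestors()` yields parent, grandparent, ... up to the filesystem root.
    for dir in parent.ancestors() {
        let candidate = dir.join("package.json");
        if candidate.is_file() {
            return Some(candidate);
        }
    }
    None
}

fn main() {
    if let Some(p) = closest_package_json(Path::new("src/lib/util.ts")) {
        println!("closest package.json: {}", p.display());
    }
}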
diff --git a/resolvers/node/resolution.rs b/resolvers/node/resolution.rs
index 811583a5ee..d44539e978 100644
--- a/resolvers/node/resolution.rs
+++ b/resolvers/node/resolution.rs
@@ -6,9 +6,6 @@ use std::path::PathBuf;
 
 use anyhow::bail;
 use anyhow::Error as AnyError;
-use deno_media_type::MediaType;
-use deno_package_json::PackageJsonRc;
-use deno_path_util::strip_unc_prefix;
 use deno_path_util::url_from_file_path;
 use serde_json::Map;
 use serde_json::Value;
@@ -16,8 +13,6 @@ use url::Url;
 
 use crate::env::NodeResolverEnv;
 use crate::errors;
-use crate::errors::CanonicalizingPkgJsonDirError;
-use crate::errors::ClosestPkgJsonError;
 use crate::errors::DataUrlReferrerError;
 use crate::errors::FinalizeResolutionError;
 use crate::errors::InvalidModuleSpecifierError;
@@ -32,7 +27,6 @@ use crate::errors::PackageExportsResolveError;
 use crate::errors::PackageImportNotDefinedError;
 use crate::errors::PackageImportsResolveError;
 use crate::errors::PackageImportsResolveErrorKind;
-use crate::errors::PackageJsonLoadError;
 use crate::errors::PackagePathNotExportedError;
 use crate::errors::PackageResolveError;
 use crate::errors::PackageSubpathResolveError;
@@ -42,14 +36,13 @@ use crate::errors::PackageTargetResolveError;
 use crate::errors::PackageTargetResolveErrorKind;
 use crate::errors::ResolveBinaryCommandsError;
 use crate::errors::ResolvePkgJsonBinExportError;
-use crate::errors::ResolvePkgSubpathFromDenoModuleError;
-use crate::errors::TypeScriptNotSupportedInNpmError;
 use crate::errors::TypesNotFoundError;
 use crate::errors::TypesNotFoundErrorData;
 use crate::errors::UnsupportedDirImportError;
 use crate::errors::UnsupportedEsmUrlSchemeError;
-use crate::errors::UrlToNodeResolutionError;
+use crate::npm::InNpmPackageCheckerRc;
 use crate::NpmResolverRc;
+use crate::PackageJsonResolverRc;
 use crate::PathClean;
 use deno_package_json::PackageJson;
 
@@ -73,16 +66,14 @@ impl NodeResolutionMode {
 
 #[derive(Debug)]
 pub enum NodeResolution {
-  Esm(Url),
-  CommonJs(Url),
+  Module(Url),
  BuiltIn(String),
 }
 
 impl NodeResolution {
   pub fn into_url(self) -> Url {
     match self {
-      Self::Esm(u) => u,
-      Self::CommonJs(u) => u,
+      Self::Module(u) => u,
       Self::BuiltIn(specifier) => {
         if specifier.starts_with("node:") {
           Url::parse(&specifier).unwrap()
@@ -92,42 +83,6 @@ impl NodeResolution {
       }
     }
   }
-
-  pub fn into_specifier_and_media_type(
-    resolution: Option<NodeResolution>,
-  ) -> (Url, MediaType) {
-    match resolution {
-      Some(NodeResolution::CommonJs(specifier)) => {
-        let media_type = MediaType::from_specifier(&specifier);
-        (
-          specifier,
-          match media_type {
-            MediaType::JavaScript | MediaType::Jsx => MediaType::Cjs,
-            MediaType::TypeScript | MediaType::Tsx => MediaType::Cts,
-            MediaType::Dts => MediaType::Dcts,
-            _ => media_type,
-          },
-        )
-      }
-      Some(NodeResolution::Esm(specifier)) => {
-        let media_type = MediaType::from_specifier(&specifier);
-        (
-          specifier,
-          match media_type {
-            MediaType::JavaScript | MediaType::Jsx => MediaType::Mjs,
-            MediaType::TypeScript | MediaType::Tsx => MediaType::Mts,
-            MediaType::Dts => MediaType::Dmts,
-            _ => media_type,
-          },
-        )
-      }
-      Some(resolution) => (resolution.into_url(), MediaType::Dts),
-      None => (
-        Url::parse("internal:///missing_dependency.d.ts").unwrap(),
-        MediaType::Dts,
-      ),
-    }
-  }
 }
 
 #[allow(clippy::disallowed_types)]
@@ -136,16 +91,28 @@
 pub type NodeResolverRc<TEnv> = crate::sync::MaybeArc<NodeResolver<TEnv>>;
 
 #[derive(Debug)]
 pub struct NodeResolver<TEnv: NodeResolverEnv> {
   env: TEnv,
+  in_npm_pkg_checker: InNpmPackageCheckerRc,
   npm_resolver: NpmResolverRc,
+  pkg_json_resolver: PackageJsonResolverRc<TEnv>,
 }
 
 impl<TEnv: NodeResolverEnv> NodeResolver<TEnv> {
-  pub fn new(env: TEnv, npm_resolver: NpmResolverRc) -> Self {
-    Self { env, npm_resolver }
+  pub fn new(
+    env: TEnv,
+    in_npm_pkg_checker: InNpmPackageCheckerRc,
+    npm_resolver: NpmResolverRc,
+    pkg_json_resolver: PackageJsonResolverRc<TEnv>,
+  ) -> Self {
+    Self {
+      env,
+      in_npm_pkg_checker,
+      npm_resolver,
+      pkg_json_resolver,
+    }
   }
 
   pub fn in_npm_package(&self, specifier: &Url) -> bool {
-    self.npm_resolver.in_npm_package(specifier)
+    self.in_npm_pkg_checker.in_npm_package(specifier)
   }
 
   /// This function is an implementation of `defaultResolve` in
@@ -166,7 +133,7 @@
 
     if let Ok(url) = Url::parse(specifier) {
       if url.scheme() == "data" {
-        return Ok(NodeResolution::Esm(url));
+        return Ok(NodeResolution::Module(url));
       }
 
       if let Some(module_name) =
@@ -191,7 +158,7 @@
       let url = referrer
         .join(specifier)
         .map_err(|source| DataUrlReferrerError { source })?;
-      return Ok(NodeResolution::Esm(url));
+      return Ok(NodeResolution::Module(url));
     }
   }
 
@@ -212,7 +179,7 @@
     };
 
     let url = self.finalize_resolution(url, Some(referrer))?;
-    let resolve_response = self.url_to_node_resolution(url)?;
+    let resolve_response = NodeResolution::Module(url);
     // TODO(bartlomieju): skipped checking errors for commonJS resolution and
     // "preserveSymlinksMain"/"preserveSymlinks" options.
     Ok(resolve_response)
@@ -236,6 +203,7 @@
       })?)
     } else if specifier.starts_with('#') {
       let pkg_config = self
+        .pkg_json_resolver
         .get_closest_package_json(referrer)
         .map_err(PackageImportsResolveErrorKind::ClosestPkgJson)
        .map_err(|err| PackageImportsResolveError(Box::new(err)))?;
@@ -332,7 +300,7 @@
     package_subpath: Option<&str>,
     maybe_referrer: Option<&Url>,
     mode: NodeResolutionMode,
-  ) -> Result<NodeResolution, ResolvePkgSubpathFromDenoModuleError> {
+  ) -> Result<Url, PackageSubpathResolveError> {
     let node_module_kind = NodeModuleKind::Esm;
     let package_subpath = package_subpath
       .map(|s| format!("./{s}"))
@@ -345,10 +313,9 @@
       DEFAULT_CONDITIONS,
       mode,
     )?;
-    let resolve_response = self.url_to_node_resolution(resolved_url)?;
     // TODO(bartlomieju): skipped checking errors for commonJS resolution and
     // "preserveSymlinksMain"/"preserveSymlinks" options.
-    Ok(resolve_response)
+    Ok(resolved_url)
   }
 
   pub fn resolve_binary_commands(
@@ -356,7 +323,9 @@
     package_folder: &Path,
   ) -> Result<Vec<String>, ResolveBinaryCommandsError> {
     let pkg_json_path = package_folder.join("package.json");
-    let Some(package_json) = self.load_package_json(&pkg_json_path)? else {
+    let Some(package_json) =
+      self.pkg_json_resolver.load_package_json(&pkg_json_path)?
+    else {
       return Ok(Vec::new());
     };
 
@@ -381,9 +350,11 @@
     &self,
     package_folder: &Path,
     sub_path: Option<&str>,
-  ) -> Result<NodeResolution, ResolvePkgJsonBinExportError> {
+  ) -> Result<Url, ResolvePkgJsonBinExportError> {
     let pkg_json_path = package_folder.join("package.json");
-    let Some(package_json) = self.load_package_json(&pkg_json_path)? else {
+    let Some(package_json) =
+      self.pkg_json_resolver.load_package_json(&pkg_json_path)?
+    else {
      return Err(ResolvePkgJsonBinExportError::MissingPkgJson {
        pkg_json_path,
      });
@@ -396,37 +367,9 @@
     })?;
 
     let url = url_from_file_path(&package_folder.join(bin_entry)).unwrap();
-    let resolve_response = self.url_to_node_resolution(url)?;
     // TODO(bartlomieju): skipped checking errors for commonJS resolution and
     // "preserveSymlinksMain"/"preserveSymlinks" options.
-    Ok(resolve_response)
-  }
-
-  pub fn url_to_node_resolution(
-    &self,
-    url: Url,
-  ) -> Result<NodeResolution, UrlToNodeResolutionError> {
-    let url_str = url.as_str().to_lowercase();
-    if url_str.starts_with("http") || url_str.ends_with(".json") {
-      Ok(NodeResolution::Esm(url))
-    } else if url_str.ends_with(".js") || url_str.ends_with(".d.ts") {
-      let maybe_package_config = self.get_closest_package_json(&url)?;
-      match maybe_package_config {
-        Some(c) if c.typ == "module" => Ok(NodeResolution::Esm(url)),
-        Some(_) => Ok(NodeResolution::CommonJs(url)),
-        None => Ok(NodeResolution::Esm(url)),
-      }
-    } else if url_str.ends_with(".mjs") || url_str.ends_with(".d.mts") {
-      Ok(NodeResolution::Esm(url))
-    } else if url_str.ends_with(".ts") || url_str.ends_with(".mts") {
-      if self.in_npm_package(&url) {
-        Err(TypeScriptNotSupportedInNpmError { specifier: url }.into())
-      } else {
-        Ok(NodeResolution::Esm(url))
-      }
-    } else {
-      Ok(NodeResolution::CommonJs(url))
-    }
+    Ok(url)
   }
 
   /// Checks if the resolved file has a corresponding declaration file.
@@ -1101,7 +1044,9 @@
     let (package_name, package_subpath, _is_scoped) =
       parse_npm_pkg_name(specifier, referrer)?;
 
-    if let Some(package_config) = self.get_closest_package_json(referrer)? {
+    if let Some(package_config) =
+      self.pkg_json_resolver.get_closest_package_json(referrer)?
+    {
       // ResolveSelf
       if package_config.name.as_ref() == Some(&package_name) {
         if let Some(exports) = &package_config.exports {
@@ -1216,7 +1161,10 @@
     mode: NodeResolutionMode,
   ) -> Result<Url, PackageSubpathResolveError> {
     let package_json_path = package_dir_path.join("package.json");
-    match self.load_package_json(&package_json_path)? {
+    match self
+      .pkg_json_resolver
+      .load_package_json(&package_json_path)?
+    {
       Some(pkg_json) => self.resolve_package_subpath(
         &pkg_json,
         package_subpath,
@@ -1337,70 +1285,6 @@
     }
   }
 
-  pub fn get_closest_package_json(
-    &self,
-    url: &Url,
-  ) -> Result<Option<PackageJsonRc>, ClosestPkgJsonError> {
-    let Ok(file_path) = deno_path_util::url_to_file_path(url) else {
-      return Ok(None);
-    };
-    self.get_closest_package_json_from_path(&file_path)
-  }
-
-  pub fn get_closest_package_json_from_path(
-    &self,
-    file_path: &Path,
-  ) -> Result<Option<PackageJsonRc>, ClosestPkgJsonError> {
-    // we use this for deno compile using byonm because the script paths
-    // won't be in virtual file system, but the package.json paths will be
-    fn canonicalize_first_ancestor_exists(
-      dir_path: &Path,
-      env: &dyn NodeResolverEnv,
-    ) -> Result<Option<PathBuf>, std::io::Error> {
-      for ancestor in dir_path.ancestors() {
-        match env.realpath_sync(ancestor) {
-          Ok(dir_path) => return Ok(Some(dir_path)),
-          Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
-            // keep searching
-          }
-          Err(err) => return Err(err),
-        }
-      }
-      Ok(None)
-    }
-
-    let parent_dir = file_path.parent().unwrap();
-    let Some(start_dir) = canonicalize_first_ancestor_exists(
-      parent_dir, &self.env,
-    )
-    .map_err(|source| CanonicalizingPkgJsonDirError {
-      dir_path: parent_dir.to_path_buf(),
-      source,
-    })?
-    else {
-      return Ok(None);
-    };
-    let start_dir = strip_unc_prefix(start_dir);
-    for current_dir in start_dir.ancestors() {
-      let package_json_path = current_dir.join("package.json");
-      if let Some(pkg_json) = self.load_package_json(&package_json_path)? {
-        return Ok(Some(pkg_json));
-      }
-    }
-
-    Ok(None)
-  }
-
-  pub fn load_package_json(
-    &self,
-    package_json_path: &Path,
-  ) -> Result<Option<PackageJsonRc>, PackageJsonLoadError> {
-    crate::package_json::load_pkg_json(
-      self.env.pkg_json_fs(),
-      package_json_path,
-    )
-  }
-
   pub(super) fn legacy_main_resolve(
     &self,
     package_json: &PackageJson,
diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml
index dcb6b1ade6..ba236de149 100644
--- a/runtime/Cargo.toml
+++ b/runtime/Cargo.toml
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_runtime"
-version = "0.182.0"
+version = "0.186.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -117,10 +117,12 @@
 once_cell.workspace = true
 percent-encoding.workspace = true
 regex.workspace = true
 rustyline = { workspace = true, features = ["custom-bindings"] }
+same-file = "1.0.6"
 serde.workspace = true
 signal-hook = "0.3.17"
 signal-hook-registry = "1.4.0"
 tempfile.workspace = true
+thiserror.workspace = true
 tokio.workspace = true
 tokio-metrics.workspace = true
 twox-hash.workspace = true
diff --git a/runtime/errors.rs b/runtime/errors.rs
index 9fc84407a9..0c26e0e47a 100644
--- a/runtime/errors.rs
+++ b/runtime/errors.rs
@@ -9,6 +9,16 @@
 //! Diagnostics are compile-time type errors, whereas JsErrors are runtime
 //! exceptions.
 
+use crate::ops::fs_events::FsEventsError;
+use crate::ops::http::HttpStartError;
+use crate::ops::os::OsError;
+use crate::ops::permissions::PermissionError;
+use crate::ops::process::CheckRunPermissionError;
+use crate::ops::process::ProcessError;
+use crate::ops::signal::SignalError;
+use crate::ops::tty::TtyError;
+use crate::ops::web_worker::SyncFetchError;
+use crate::ops::worker_host::CreateWorkerError;
 use deno_broadcast_channel::BroadcastChannelError;
 use deno_cache::CacheError;
 use deno_canvas::CanvasError;
@@ -40,6 +50,12 @@
 use deno_kv::KvError;
 use deno_kv::KvMutationError;
 use deno_napi::NApiError;
 use deno_net::ops::NetError;
+use deno_permissions::ChildPermissionError;
+use deno_permissions::NetDescriptorFromUrlParseError;
+use deno_permissions::PathResolveError;
+use deno_permissions::PermissionCheckError;
+use deno_permissions::RunDescriptorParseError;
+use deno_permissions::SysDescriptorParseError;
 use deno_tls::TlsError;
 use deno_web::BlobError;
 use deno_web::CompressionError;
@@ -49,11 +65,60 @@
 use deno_web::WebError;
 use deno_websocket::HandshakeError;
 use deno_websocket::WebsocketError;
 use deno_webstorage::WebStorageError;
+use rustyline::error::ReadlineError;
 use std::env;
 use std::error::Error;
 use std::io;
 use std::sync::Arc;
 
+fn get_run_descriptor_parse_error(e: &RunDescriptorParseError) -> &'static str {
+  match e {
+    RunDescriptorParseError::Which(_) => "Error",
+    RunDescriptorParseError::PathResolve(e) => get_path_resolve_error(e),
+    RunDescriptorParseError::EmptyRunQuery => "Error",
+  }
+}
+
+fn get_sys_descriptor_parse_error(e: &SysDescriptorParseError) -> &'static str {
+  match e {
+    SysDescriptorParseError::InvalidKind(_) => "TypeError",
+    SysDescriptorParseError::Empty => "Error",
+  }
+}
+
+fn get_path_resolve_error(e: &PathResolveError) -> &'static str {
+  match e {
+    PathResolveError::CwdResolve(e) => get_io_error_class(e),
+    PathResolveError::EmptyPath => "Error",
+  }
+}
+
+fn get_permission_error_class(e: &PermissionError) -> &'static str {
match e { + PermissionError::InvalidPermissionName(_) => "ReferenceError", + PermissionError::PathResolve(e) => get_path_resolve_error(e), + PermissionError::NetDescriptorParse(_) => "URIError", + PermissionError::SysDescriptorParse(e) => get_sys_descriptor_parse_error(e), + PermissionError::RunDescriptorParse(e) => get_run_descriptor_parse_error(e), + } +} + +fn get_permission_check_error_class(e: &PermissionCheckError) -> &'static str { + match e { + PermissionCheckError::PermissionDenied(_) => "NotCapable", + PermissionCheckError::InvalidFilePath(_) => "URIError", + PermissionCheckError::NetDescriptorForUrlParse(e) => match e { + NetDescriptorFromUrlParseError::MissingHost(_) => "TypeError", + NetDescriptorFromUrlParseError::Host(_) => "URIError", + }, + PermissionCheckError::SysDescriptorParse(e) => { + get_sys_descriptor_parse_error(e) + } + PermissionCheckError::PathResolve(e) => get_path_resolve_error(e), + PermissionCheckError::HostParse(_) => "URIError", + } +} + fn get_dlopen_error_class(error: &dlopen2::Error) -> &'static str { use dlopen2::Error::*; match error { @@ -436,7 +501,7 @@ fn get_napi_error_class(e: &NApiError) -> &'static str { NApiError::InvalidPath | NApiError::LibLoading(_) | NApiError::ModuleNotFound(_) => "TypeError", - NApiError::Permission(e) => get_error_class_name(e).unwrap_or("Error"), + NApiError::Permission(e) => get_permission_check_error_class(e), } } @@ -514,7 +579,7 @@ fn get_ffi_repr_error_class(e: &ReprError) -> &'static str { ReprError::InvalidF32 => "TypeError", ReprError::InvalidF64 => "TypeError", ReprError::InvalidPointer => "TypeError", - ReprError::Permission(e) => get_error_class_name(e).unwrap_or("Error"), + ReprError::Permission(e) => get_permission_check_error_class(e), } } @@ -522,7 +587,7 @@ fn get_ffi_dlfcn_error_class(e: &DlfcnError) -> &'static str { match e { DlfcnError::RegisterSymbol { .. 
} => "Error", DlfcnError::Dlopen(_) => "Error", - DlfcnError::Permission(e) => get_error_class_name(e).unwrap_or("Error"), + DlfcnError::Permission(e) => get_permission_check_error_class(e), DlfcnError::Other(e) => get_error_class_name(e).unwrap_or("Error"), } } @@ -540,7 +605,7 @@ fn get_ffi_callback_error_class(e: &CallbackError) -> &'static str { match e { CallbackError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), CallbackError::Other(e) => get_error_class_name(e).unwrap_or("Error"), - CallbackError::Permission(e) => get_error_class_name(e).unwrap_or("Error"), + CallbackError::Permission(e) => get_permission_check_error_class(e), } } @@ -549,15 +614,16 @@ fn get_ffi_call_error_class(e: &CallError) -> &'static str { CallError::IR(_) => "TypeError", CallError::NonblockingCallFailure(_) => "Error", CallError::InvalidSymbol(_) => "TypeError", - CallError::Permission(e) => get_error_class_name(e).unwrap_or("Error"), + CallError::Permission(e) => get_permission_check_error_class(e), CallError::Callback(e) => get_ffi_callback_error_class(e), + CallError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), } } fn get_webstorage_class_name(e: &WebStorageError) -> &'static str { match e { WebStorageError::ContextNotSupported => "DOMExceptionNotSupportedError", - WebStorageError::Sqlite(_) => todo!(), + WebStorageError::Sqlite(_) => "Error", WebStorageError::Io(e) => get_io_error_class(e), WebStorageError::StorageExceeded => "DOMExceptionQuotaExceededError", } @@ -624,9 +690,8 @@ fn get_broadcast_channel_error(error: &BroadcastChannelError) -> &'static str { fn get_fetch_error(error: &FetchError) -> &'static str { match error { - FetchError::Resource(e) | FetchError::Permission(e) => { - get_error_class_name(e).unwrap_or("Error") - } + FetchError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), + FetchError::Permission(e) => get_permission_check_error_class(e), FetchError::NetworkError => "TypeError", FetchError::FsNotGet(_) => "TypeError", FetchError::InvalidUrl(_) => "TypeError", @@ -660,9 +725,8 @@ fn get_http_client_create_error(error: &HttpClientCreateError) -> &'static str { fn get_websocket_error(error: &WebsocketError) -> &'static str { match error { - WebsocketError::Permission(e) | WebsocketError::Resource(e) => { - get_error_class_name(e).unwrap_or("Error") - } + WebsocketError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), + WebsocketError::Permission(e) => get_permission_check_error_class(e), WebsocketError::Url(e) => get_url_parse_error_class(e), WebsocketError::Io(e) => get_io_error_class(e), WebsocketError::WebSocket(_) => "TypeError", @@ -695,18 +759,14 @@ fn get_websocket_handshake_error(error: &HandshakeError) -> &'static str { } } -fn get_fs_error(error: &FsOpsError) -> &'static str { +fn get_fs_ops_error(error: &FsOpsError) -> &'static str { match error { FsOpsError::Io(e) => get_io_error_class(e), - FsOpsError::OperationError(e) => match &e.err { - FsError::Io(e) => get_io_error_class(e), - FsError::FileBusy => "Busy", - FsError::NotSupported => "NotSupported", - FsError::NotCapable(_) => "NotCapable", - }, - FsOpsError::Permission(e) - | FsOpsError::Resource(e) - | FsOpsError::Other(e) => get_error_class_name(e).unwrap_or("Error"), + FsOpsError::OperationError(e) => get_fs_error(&e.err), + FsOpsError::Permission(e) => get_permission_check_error_class(e), + FsOpsError::Resource(e) | FsOpsError::Other(e) => { + get_error_class_name(e).unwrap_or("Error") + } FsOpsError::InvalidUtf8(_) => "InvalidData", FsOpsError::StripPrefix(_) => 
"Error", FsOpsError::Canceled(e) => { @@ -773,9 +833,10 @@ fn get_net_error(error: &NetError) -> &'static str { NetError::SocketBusy => "Busy", NetError::Io(e) => get_io_error_class(e), NetError::AcceptTaskOngoing => "Busy", - NetError::RootCertStore(e) - | NetError::Permission(e) - | NetError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), + NetError::RootCertStore(e) | NetError::Resource(e) => { + get_error_class_name(e).unwrap_or("Error") + } + NetError::Permission(e) => get_permission_check_error_class(e), NetError::NoResolvedAddress => "Error", NetError::AddrParse(_) => "Error", NetError::Map(e) => get_net_map_error(e), @@ -806,6 +867,121 @@ fn get_net_map_error(error: &deno_net::io::MapError) -> &'static str { } } +fn get_child_permission_error(e: &ChildPermissionError) -> &'static str { + match e { + ChildPermissionError::Escalation => "NotCapable", + ChildPermissionError::PathResolve(e) => get_path_resolve_error(e), + ChildPermissionError::NetDescriptorParse(_) => "URIError", + ChildPermissionError::EnvDescriptorParse(_) => "Error", + ChildPermissionError::SysDescriptorParse(e) => { + get_sys_descriptor_parse_error(e) + } + ChildPermissionError::RunDescriptorParse(e) => { + get_run_descriptor_parse_error(e) + } + } +} + +fn get_create_worker_error(error: &CreateWorkerError) -> &'static str { + match error { + CreateWorkerError::ClassicWorkers => "DOMExceptionNotSupportedError", + CreateWorkerError::Permission(e) => get_child_permission_error(e), + CreateWorkerError::ModuleResolution(e) => { + get_module_resolution_error_class(e) + } + CreateWorkerError::Io(e) => get_io_error_class(e), + CreateWorkerError::MessagePort(e) => get_web_message_port_error_class(e), + } +} + +fn get_tty_error(error: &TtyError) -> &'static str { + match error { + TtyError::Resource(e) | TtyError::Other(e) => { + get_error_class_name(e).unwrap_or("Error") + } + TtyError::Io(e) => get_io_error_class(e), + #[cfg(unix)] + TtyError::Nix(e) => get_nix_error_class(e), + } +} + +fn get_readline_error(error: &ReadlineError) -> &'static str { + match error { + ReadlineError::Io(e) => get_io_error_class(e), + ReadlineError::Eof => "Error", + ReadlineError::Interrupted => "Error", + #[cfg(unix)] + ReadlineError::Errno(e) => get_nix_error_class(e), + ReadlineError::WindowResized => "Error", + #[cfg(windows)] + ReadlineError::Decode(_) => "Error", + #[cfg(windows)] + ReadlineError::SystemError(_) => "Error", + _ => "Error", + } +} + +fn get_signal_error(error: &SignalError) -> &'static str { + match error { + SignalError::InvalidSignalStr(_) => "TypeError", + SignalError::InvalidSignalInt(_) => "TypeError", + SignalError::SignalNotAllowed(_) => "TypeError", + SignalError::Io(e) => get_io_error_class(e), + } +} + +fn get_fs_events_error(error: &FsEventsError) -> &'static str { + match error { + FsEventsError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), + FsEventsError::Permission(e) => get_permission_check_error_class(e), + FsEventsError::Notify(e) => get_notify_error_class(e), + FsEventsError::Canceled(e) => { + let io_err: io::Error = e.to_owned().into(); + get_io_error_class(&io_err) + } + } +} + +fn get_http_start_error(error: &HttpStartError) -> &'static str { + match error { + HttpStartError::TcpStreamInUse => "Busy", + HttpStartError::TlsStreamInUse => "Busy", + HttpStartError::UnixSocketInUse => "Busy", + HttpStartError::ReuniteTcp(_) => "Error", + #[cfg(unix)] + HttpStartError::ReuniteUnix(_) => "Error", + HttpStartError::Io(e) => get_io_error_class(e), + HttpStartError::Other(e) => 
get_error_class_name(e).unwrap_or("Error"), + } +} + +fn get_process_error(error: &ProcessError) -> &'static str { + match error { + ProcessError::SpawnFailed { error, .. } => get_process_error(error), + ProcessError::FailedResolvingCwd(e) | ProcessError::Io(e) => { + get_io_error_class(e) + } + ProcessError::Permission(e) => get_permission_check_error_class(e), + ProcessError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), + ProcessError::BorrowMut(_) => "Error", + ProcessError::Which(_) => "Error", + ProcessError::ChildProcessAlreadyTerminated => "TypeError", + ProcessError::Signal(e) => get_signal_error(e), + ProcessError::MissingCmd => "Error", + ProcessError::InvalidPid => "TypeError", + #[cfg(unix)] + ProcessError::Nix(e) => get_nix_error_class(e), + ProcessError::RunPermission(e) => match e { + CheckRunPermissionError::Permission(e) => { + get_permission_check_error_class(e) + } + CheckRunPermissionError::Other(e) => { + get_error_class_name(e).unwrap_or("Error") + } + }, + } +} + fn get_http_error(error: &HttpError) -> &'static str { match error { HttpError::Canceled(e) => { @@ -859,10 +1035,712 @@ fn get_websocket_upgrade_error(error: &WebSocketUpgradeError) -> &'static str { } } +fn get_fs_error(e: &FsError) -> &'static str { + match &e { + FsError::Io(e) => get_io_error_class(e), + FsError::FileBusy => "Busy", + FsError::NotSupported => "NotSupported", + FsError::NotCapable(_) => "NotCapable", + } +} + +mod node { + use super::get_error_class_name; + use super::get_io_error_class; + use super::get_permission_check_error_class; + use super::get_serde_json_error_class; + use super::get_url_parse_error_class; + pub use deno_node::ops::blocklist::BlocklistError; + pub use deno_node::ops::crypto::cipher::CipherContextError; + pub use deno_node::ops::crypto::cipher::CipherError; + pub use deno_node::ops::crypto::cipher::DecipherContextError; + pub use deno_node::ops::crypto::cipher::DecipherError; + pub use deno_node::ops::crypto::digest::HashError; + pub use deno_node::ops::crypto::keys::AsymmetricPrivateKeyDerError; + pub use deno_node::ops::crypto::keys::AsymmetricPrivateKeyError; + pub use deno_node::ops::crypto::keys::AsymmetricPublicKeyDerError; + pub use deno_node::ops::crypto::keys::AsymmetricPublicKeyError; + pub use deno_node::ops::crypto::keys::AsymmetricPublicKeyJwkError; + pub use deno_node::ops::crypto::keys::EcJwkError; + pub use deno_node::ops::crypto::keys::EdRawError; + pub use deno_node::ops::crypto::keys::ExportPrivateKeyPemError; + pub use deno_node::ops::crypto::keys::ExportPublicKeyPemError; + pub use deno_node::ops::crypto::keys::GenerateRsaPssError; + pub use deno_node::ops::crypto::keys::RsaJwkError; + pub use deno_node::ops::crypto::keys::RsaPssParamsParseError; + pub use deno_node::ops::crypto::keys::X509PublicKeyError; + pub use deno_node::ops::crypto::sign::KeyObjectHandlePrehashedSignAndVerifyError; + pub use deno_node::ops::crypto::x509::X509Error; + pub use deno_node::ops::crypto::DiffieHellmanError; + pub use deno_node::ops::crypto::EcdhEncodePubKey; + pub use deno_node::ops::crypto::HkdfError; + pub use deno_node::ops::crypto::Pbkdf2Error; + pub use deno_node::ops::crypto::PrivateEncryptDecryptError; + pub use deno_node::ops::crypto::ScryptAsyncError; + pub use deno_node::ops::crypto::SignEd25519Error; + pub use deno_node::ops::crypto::VerifyEd25519Error; + pub use deno_node::ops::fs::FsError; + pub use deno_node::ops::http2::Http2Error; + pub use deno_node::ops::idna::IdnaError; + pub use deno_node::ops::ipc::IpcError; + pub use 
deno_node::ops::ipc::IpcJsonStreamError; + use deno_node::ops::os::priority::PriorityError; + pub use deno_node::ops::os::OsError; + pub use deno_node::ops::require::RequireError; + pub use deno_node::ops::worker_threads::WorkerThreadsFilenameError; + pub use deno_node::ops::zlib::brotli::BrotliError; + pub use deno_node::ops::zlib::mode::ModeError; + pub use deno_node::ops::zlib::ZlibError; + + pub fn get_blocklist_error(error: &BlocklistError) -> &'static str { + match error { + BlocklistError::AddrParse(_) => "Error", + BlocklistError::IpNetwork(_) => "Error", + BlocklistError::InvalidAddress => "Error", + BlocklistError::IpVersionMismatch => "Error", + } + } + + pub fn get_fs_error(error: &FsError) -> &'static str { + match error { + FsError::Permission(e) => get_permission_check_error_class(e), + FsError::Io(e) => get_io_error_class(e), + #[cfg(windows)] + FsError::PathHasNoRoot => "Error", + #[cfg(not(any(unix, windows)))] + FsError::UnsupportedPlatform => "Error", + FsError::Fs(e) => super::get_fs_error(e), + } + } + + pub fn get_idna_error(error: &IdnaError) -> &'static str { + match error { + IdnaError::InvalidInput => "RangeError", + IdnaError::InputTooLong => "Error", + IdnaError::IllegalInput => "RangeError", + } + } + + pub fn get_ipc_json_stream_error(error: &IpcJsonStreamError) -> &'static str { + match error { + IpcJsonStreamError::Io(e) => get_io_error_class(e), + IpcJsonStreamError::SimdJson(_) => "Error", + } + } + + pub fn get_ipc_error(error: &IpcError) -> &'static str { + match error { + IpcError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), + IpcError::IpcJsonStream(e) => get_ipc_json_stream_error(e), + IpcError::Canceled(e) => { + let io_err: std::io::Error = e.to_owned().into(); + get_io_error_class(&io_err) + } + IpcError::SerdeJson(e) => get_serde_json_error_class(e), + } + } + + pub fn get_worker_threads_filename_error( + error: &WorkerThreadsFilenameError, + ) -> &'static str { + match error { + WorkerThreadsFilenameError::Permission(e) => { + get_error_class_name(e).unwrap_or("Error") + } + WorkerThreadsFilenameError::UrlParse(e) => get_url_parse_error_class(e), + WorkerThreadsFilenameError::InvalidRelativeUrl => "Error", + WorkerThreadsFilenameError::UrlFromPathString => "Error", + WorkerThreadsFilenameError::UrlToPathString => "Error", + WorkerThreadsFilenameError::UrlToPath => "Error", + WorkerThreadsFilenameError::FileNotFound(_) => "Error", + WorkerThreadsFilenameError::Fs(e) => super::get_fs_error(e), + } + } + + pub fn get_require_error(error: &RequireError) -> &'static str { + match error { + RequireError::UrlParse(e) => get_url_parse_error_class(e), + RequireError::Permission(e) => get_error_class_name(e).unwrap_or("Error"), + RequireError::PackageExportsResolve(_) + | RequireError::PackageJsonLoad(_) + | RequireError::ClosestPkgJson(_) + | RequireError::FilePathConversion(_) + | RequireError::UrlConversion(_) + | RequireError::ReadModule(_) + | RequireError::PackageImportsResolve(_) => "Error", + RequireError::Fs(e) | RequireError::UnableToGetCwd(e) => { + super::get_fs_error(e) + } + } + } + + pub fn get_http2_error(error: &Http2Error) -> &'static str { + match error { + Http2Error::Resource(e) => get_error_class_name(e).unwrap_or("Error"), + Http2Error::UrlParse(e) => get_url_parse_error_class(e), + Http2Error::H2(_) => "Error", + } + } + + pub fn get_os_error(error: &OsError) -> &'static str { + match error { + OsError::Priority(e) => match e { + PriorityError::Io(e) => get_io_error_class(e), + #[cfg(windows)] + 
PriorityError::InvalidPriority => "TypeError", + }, + OsError::Permission(e) => get_permission_check_error_class(e), + OsError::FailedToGetCpuInfo => "TypeError", + OsError::FailedToGetUserInfo(e) => get_io_error_class(e), + } + } + + pub fn get_brotli_error(error: &BrotliError) -> &'static str { + match error { + BrotliError::InvalidEncoderMode => "TypeError", + BrotliError::CompressFailed => "TypeError", + BrotliError::DecompressFailed => "TypeError", + BrotliError::Join(_) => "Error", + BrotliError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), + BrotliError::Io(e) => get_io_error_class(e), + } + } + + pub fn get_mode_error(_: &ModeError) -> &'static str { + "Error" + } + + pub fn get_zlib_error(e: &ZlibError) -> &'static str { + match e { + ZlibError::NotInitialized => "TypeError", + ZlibError::Mode(e) => get_mode_error(e), + ZlibError::Other(e) => get_error_class_name(e).unwrap_or("Error"), + } + } + + pub fn get_crypto_cipher_context_error( + e: &CipherContextError, + ) -> &'static str { + match e { + CipherContextError::ContextInUse => "TypeError", + CipherContextError::Cipher(e) => get_crypto_cipher_error(e), + CipherContextError::Resource(e) => { + get_error_class_name(e).unwrap_or("Error") + } + } + } + + pub fn get_crypto_cipher_error(e: &CipherError) -> &'static str { + match e { + CipherError::InvalidIvLength => "TypeError", + CipherError::InvalidKeyLength => "RangeError", + CipherError::InvalidInitializationVector => "TypeError", + CipherError::CannotPadInputData => "TypeError", + CipherError::UnknownCipher(_) => "TypeError", + } + } + + pub fn get_crypto_decipher_context_error( + e: &DecipherContextError, + ) -> &'static str { + match e { + DecipherContextError::ContextInUse => "TypeError", + DecipherContextError::Decipher(e) => get_crypto_decipher_error(e), + DecipherContextError::Resource(e) => { + get_error_class_name(e).unwrap_or("Error") + } + } + } + + pub fn get_crypto_decipher_error(e: &DecipherError) -> &'static str { + match e { + DecipherError::InvalidIvLength => "TypeError", + DecipherError::InvalidKeyLength => "RangeError", + DecipherError::InvalidInitializationVector => "TypeError", + DecipherError::CannotUnpadInputData => "TypeError", + DecipherError::DataAuthenticationFailed => "TypeError", + DecipherError::SetAutoPaddingFalseAes128GcmUnsupported => "TypeError", + DecipherError::SetAutoPaddingFalseAes256GcmUnsupported => "TypeError", + DecipherError::UnknownCipher(_) => "TypeError", + } + } + + pub fn get_x509_error(_: &X509Error) -> &'static str { + "Error" + } + + pub fn get_crypto_key_object_handle_prehashed_sign_and_verify_error( + e: &KeyObjectHandlePrehashedSignAndVerifyError, + ) -> &'static str { + match e { + KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignatureEncoding => "TypeError", + KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPrivate => "TypeError", + KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(_) => "TypeError", + KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsa => "Error", + KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaPssSignature(_) => "TypeError", + KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsaPss => "Error", + KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithDsa => "TypeError", + KeyObjectHandlePrehashedSignAndVerifyError::RsaPssHashAlgorithmUnsupported => "TypeError", + KeyObjectHandlePrehashedSignAndVerifyError::PrivateKeyDisallowsUsage { .. 
+      KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest => "TypeError",
+      KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForSigning => "TypeError",
+      KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedSigning => "TypeError",
+      KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForSigning => "TypeError",
+      KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPublicOrPrivate => "TypeError",
+      KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignature => "TypeError",
+      KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForVerification => "TypeError",
+      KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedVerification => "TypeError",
+      KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForVerification => "TypeError",
+    }
+  }
+
+  pub fn get_crypto_hash_error(_: &HashError) -> &'static str {
+    "Error"
+  }
+
+  pub fn get_asymmetric_public_key_jwk_error(
+    e: &AsymmetricPublicKeyJwkError,
+  ) -> &'static str {
+    match e {
+      AsymmetricPublicKeyJwkError::UnsupportedJwkEcCurveP224 => "TypeError",
+      AsymmetricPublicKeyJwkError::JwkExportNotImplementedForKeyType => {
+        "TypeError"
+      }
+      AsymmetricPublicKeyJwkError::KeyIsNotAsymmetricPublicKey => "TypeError",
+    }
+  }
+
+  pub fn get_generate_rsa_pss_error(_: &GenerateRsaPssError) -> &'static str {
+    "TypeError"
+  }
+
+  pub fn get_asymmetric_private_key_der_error(
+    e: &AsymmetricPrivateKeyDerError,
+  ) -> &'static str {
+    match e {
+      AsymmetricPrivateKeyDerError::KeyIsNotAsymmetricPrivateKey => "TypeError",
+      AsymmetricPrivateKeyDerError::InvalidRsaPrivateKey => "TypeError",
+      AsymmetricPrivateKeyDerError::ExportingNonRsaPrivateKeyAsPkcs1Unsupported => "TypeError",
+      AsymmetricPrivateKeyDerError::InvalidEcPrivateKey => "TypeError",
+      AsymmetricPrivateKeyDerError::ExportingNonEcPrivateKeyAsSec1Unsupported => "TypeError",
+      AsymmetricPrivateKeyDerError::ExportingNonRsaPssPrivateKeyAsPkcs8Unsupported => "Error",
+      AsymmetricPrivateKeyDerError::InvalidDsaPrivateKey => "TypeError",
+      AsymmetricPrivateKeyDerError::InvalidX25519PrivateKey => "TypeError",
+      AsymmetricPrivateKeyDerError::InvalidEd25519PrivateKey => "TypeError",
+      AsymmetricPrivateKeyDerError::InvalidDhPrivateKey => "TypeError",
+      AsymmetricPrivateKeyDerError::UnsupportedKeyType(_) => "TypeError",
+    }
+  }
+
+  pub fn get_asymmetric_public_key_der_error(
+    _: &AsymmetricPublicKeyDerError,
+  ) -> &'static str {
+    "TypeError"
+  }
+
+  pub fn get_export_public_key_pem_error(
+    e: &ExportPublicKeyPemError,
+  ) -> &'static str {
+    match e {
+      ExportPublicKeyPemError::AsymmetricPublicKeyDer(e) => {
+        get_asymmetric_public_key_der_error(e)
+      }
+      ExportPublicKeyPemError::VeryLargeData => "TypeError",
+      ExportPublicKeyPemError::Der(_) => "Error",
+    }
+  }
+
+  pub fn get_export_private_key_pem_error(
+    e: &ExportPrivateKeyPemError,
+  ) -> &'static str {
+    match e {
+      ExportPrivateKeyPemError::AsymmetricPublicKeyDer(e) => {
+        get_asymmetric_private_key_der_error(e)
+      }
+      ExportPrivateKeyPemError::VeryLargeData => "TypeError",
+      ExportPrivateKeyPemError::Der(_) => "Error",
+    }
+  }
+
+  pub fn get_x509_public_key_error(e: &X509PublicKeyError) -> &'static str {
+    match e {
+      X509PublicKeyError::X509(_) => "Error",
+      X509PublicKeyError::Rsa(_) => "Error",
+      X509PublicKeyError::Asn1(_) => "Error",
+      X509PublicKeyError::Ec(_) => "Error",
+      X509PublicKeyError::UnsupportedEcNamedCurve => "TypeError",
+      X509PublicKeyError::MissingEcParameters => "TypeError",
+      X509PublicKeyError::MalformedDssPublicKey => "TypeError",
+      X509PublicKeyError::UnsupportedX509KeyType => "TypeError",
+    }
+  }
+
+  pub fn get_rsa_jwk_error(e: &RsaJwkError) -> &'static str {
+    match e {
+      RsaJwkError::Base64(_) => "Error",
+      RsaJwkError::Rsa(_) => "Error",
+      RsaJwkError::MissingRsaPrivateComponent => "TypeError",
+    }
+  }
+
+  pub fn get_ec_jwk_error(e: &EcJwkError) -> &'static str {
+    match e {
+      EcJwkError::Ec(_) => "Error",
+      EcJwkError::UnsupportedCurve(_) => "TypeError",
+    }
+  }
+
+  pub fn get_ed_raw_error(e: &EdRawError) -> &'static str {
+    match e {
+      EdRawError::Ed25519Signature(_) => "Error",
+      EdRawError::InvalidEd25519Key => "TypeError",
+      EdRawError::UnsupportedCurve => "TypeError",
+    }
+  }
+
+  pub fn get_pbkdf2_error(e: &Pbkdf2Error) -> &'static str {
+    match e {
+      Pbkdf2Error::UnsupportedDigest(_) => "TypeError",
+      Pbkdf2Error::Join(_) => "Error",
+    }
+  }
+
+  pub fn get_scrypt_async_error(e: &ScryptAsyncError) -> &'static str {
+    match e {
+      ScryptAsyncError::Join(_) => "Error",
+      ScryptAsyncError::Other(e) => get_error_class_name(e).unwrap_or("Error"),
+    }
+  }
+
+  pub fn get_hkdf_error_error(e: &HkdfError) -> &'static str {
+    match e {
+      HkdfError::ExpectedSecretKey => "TypeError",
+      HkdfError::HkdfExpandFailed => "TypeError",
+      HkdfError::UnsupportedDigest(_) => "TypeError",
+      HkdfError::Join(_) => "Error",
+    }
+  }
+
+  pub fn get_rsa_pss_params_parse_error(
+    _: &RsaPssParamsParseError,
+  ) -> &'static str {
+    "TypeError"
+  }
+
+  pub fn get_asymmetric_private_key_error(
+    e: &AsymmetricPrivateKeyError,
+  ) -> &'static str {
+    match e {
+      AsymmetricPrivateKeyError::InvalidPemPrivateKeyInvalidUtf8(_) => "TypeError",
+      AsymmetricPrivateKeyError::InvalidEncryptedPemPrivateKey => "TypeError",
+      AsymmetricPrivateKeyError::InvalidPemPrivateKey => "TypeError",
+      AsymmetricPrivateKeyError::EncryptedPrivateKeyRequiresPassphraseToDecrypt => "TypeError",
+      AsymmetricPrivateKeyError::InvalidPkcs1PrivateKey => "TypeError",
+      AsymmetricPrivateKeyError::InvalidSec1PrivateKey => "TypeError",
+      AsymmetricPrivateKeyError::UnsupportedPemLabel(_) => "TypeError",
+      AsymmetricPrivateKeyError::RsaPssParamsParse(e) => get_rsa_pss_params_parse_error(e),
+      AsymmetricPrivateKeyError::InvalidEncryptedPkcs8PrivateKey => "TypeError",
+      AsymmetricPrivateKeyError::InvalidPkcs8PrivateKey => "TypeError",
+      AsymmetricPrivateKeyError::Pkcs1PrivateKeyDoesNotSupportEncryptionWithPassphrase => "TypeError",
+      AsymmetricPrivateKeyError::Sec1PrivateKeyDoesNotSupportEncryptionWithPassphrase => "TypeError",
+      AsymmetricPrivateKeyError::UnsupportedEcNamedCurve => "TypeError",
+      AsymmetricPrivateKeyError::InvalidPrivateKey => "TypeError",
+      AsymmetricPrivateKeyError::InvalidDsaPrivateKey => "TypeError",
+      AsymmetricPrivateKeyError::MalformedOrMissingNamedCurveInEcParameters => "TypeError",
+      AsymmetricPrivateKeyError::UnsupportedKeyType(_) => "TypeError",
+      AsymmetricPrivateKeyError::UnsupportedKeyFormat(_) => "TypeError",
+      AsymmetricPrivateKeyError::InvalidX25519PrivateKey => "TypeError",
+      AsymmetricPrivateKeyError::X25519PrivateKeyIsWrongLength => "TypeError",
+      AsymmetricPrivateKeyError::InvalidEd25519PrivateKey => "TypeError",
+      AsymmetricPrivateKeyError::MissingDhParameters => "TypeError",
+      AsymmetricPrivateKeyError::UnsupportedPrivateKeyOid => "TypeError",
+    }
+  }
+
+  pub fn get_asymmetric_public_key_error(
+    e: &AsymmetricPublicKeyError,
+  ) -> &'static str {
+    match e {
+      AsymmetricPublicKeyError::InvalidPemPrivateKeyInvalidUtf8(_) => {
+        "TypeError"
+      }
+      AsymmetricPublicKeyError::InvalidPemPublicKey => "TypeError",
+      AsymmetricPublicKeyError::InvalidPkcs1PublicKey => "TypeError",
+      AsymmetricPublicKeyError::AsymmetricPrivateKey(e) => {
+        get_asymmetric_private_key_error(e)
+      }
+      AsymmetricPublicKeyError::InvalidX509Certificate => "TypeError",
+      AsymmetricPublicKeyError::X509(_) => "Error",
+      AsymmetricPublicKeyError::X509PublicKey(e) => {
+        get_x509_public_key_error(e)
+      }
+      AsymmetricPublicKeyError::UnsupportedPemLabel(_) => "TypeError",
+      AsymmetricPublicKeyError::InvalidSpkiPublicKey => "TypeError",
+      AsymmetricPublicKeyError::UnsupportedKeyType(_) => "TypeError",
+      AsymmetricPublicKeyError::UnsupportedKeyFormat(_) => "TypeError",
+      AsymmetricPublicKeyError::Spki(_) => "Error",
+      AsymmetricPublicKeyError::Pkcs1(_) => "Error",
+      AsymmetricPublicKeyError::RsaPssParamsParse(_) => "TypeError",
+      AsymmetricPublicKeyError::MalformedDssPublicKey => "TypeError",
+      AsymmetricPublicKeyError::MalformedOrMissingNamedCurveInEcParameters => {
+        "TypeError"
+      }
+      AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInEcSpki => {
+        "TypeError"
+      }
+      AsymmetricPublicKeyError::Ec(_) => "Error",
+      AsymmetricPublicKeyError::UnsupportedEcNamedCurve => "TypeError",
+      AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInX25519Spki => {
+        "TypeError"
+      }
+      AsymmetricPublicKeyError::X25519PublicKeyIsTooShort => "TypeError",
+      AsymmetricPublicKeyError::InvalidEd25519PublicKey => "TypeError",
+      AsymmetricPublicKeyError::MissingDhParameters => "TypeError",
+      AsymmetricPublicKeyError::MalformedDhParameters => "TypeError",
+      AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInDhSpki => {
+        "TypeError"
+      }
+      AsymmetricPublicKeyError::UnsupportedPrivateKeyOid => "TypeError",
+    }
+  }
+
+  pub fn get_private_encrypt_decrypt_error(
+    e: &PrivateEncryptDecryptError,
+  ) -> &'static str {
+    match e {
+      PrivateEncryptDecryptError::Pkcs8(_) => "Error",
+      PrivateEncryptDecryptError::Spki(_) => "Error",
+      PrivateEncryptDecryptError::Utf8(_) => "Error",
+      PrivateEncryptDecryptError::Rsa(_) => "Error",
+      PrivateEncryptDecryptError::UnknownPadding => "TypeError",
+    }
+  }
+
+  pub fn get_ecdh_encode_pub_key_error(e: &EcdhEncodePubKey) -> &'static str {
+    match e {
+      EcdhEncodePubKey::InvalidPublicKey => "TypeError",
+      EcdhEncodePubKey::UnsupportedCurve => "TypeError",
+      EcdhEncodePubKey::Sec1(_) => "Error",
+    }
+  }
+
+  pub fn get_diffie_hellman_error(_: &DiffieHellmanError) -> &'static str {
+    "TypeError"
+  }
+
+  pub fn get_sign_ed25519_error(_: &SignEd25519Error) -> &'static str {
+    "TypeError"
+  }
+
+  pub fn get_verify_ed25519_error(_: &VerifyEd25519Error) -> &'static str {
+    "TypeError"
+  }
+}
+
+fn get_os_error(error: &OsError) -> &'static str {
+  match error {
+    OsError::Permission(e) => get_permission_check_error_class(e),
+    OsError::InvalidUtf8(_) => "InvalidData",
+    OsError::EnvEmptyKey => "TypeError",
+    OsError::EnvInvalidKey(_) => "TypeError",
+    OsError::EnvInvalidValue(_) => "TypeError",
+    OsError::Io(e) => get_io_error_class(e),
+    OsError::Var(e) => get_env_var_error_class(e),
+  }
+}
+
+fn get_sync_fetch_error(error: &SyncFetchError) -> &'static str {
+  match error {
+    SyncFetchError::BlobUrlsNotSupportedInContext => "TypeError",
+    SyncFetchError::Io(e) => get_io_error_class(e),
+    SyncFetchError::InvalidScriptUrl => "TypeError",
+    SyncFetchError::InvalidStatusCode(_) => "TypeError",
+    SyncFetchError::ClassicScriptSchemeUnsupportedInWorkers(_) => "TypeError",
+    SyncFetchError::InvalidUri(_) => "Error",
+    SyncFetchError::InvalidMimeType(_) => "DOMExceptionNetworkError",
"DOMExceptionNetworkError", + SyncFetchError::MissingMimeType => "DOMExceptionNetworkError", + SyncFetchError::Fetch(e) => get_fetch_error(e), + SyncFetchError::Join(_) => "Error", + SyncFetchError::Other(e) => get_error_class_name(e).unwrap_or("Error"), + } +} + pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> { deno_core::error::get_custom_error_class(e) + .or_else(|| { + e.downcast_ref::() + .map(get_child_permission_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(get_permission_check_error_class) + }) + .or_else(|| { + e.downcast_ref::() + .map(get_permission_error_class) + }) + .or_else(|| e.downcast_ref::().map(get_fs_error)) + .or_else(|| { + e.downcast_ref::() + .map(node::get_blocklist_error) + }) + .or_else(|| e.downcast_ref::().map(node::get_fs_error)) + .or_else(|| { + e.downcast_ref::() + .map(node::get_idna_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_ipc_json_stream_error) + }) + .or_else(|| e.downcast_ref::().map(node::get_ipc_error)) + .or_else(|| { + e.downcast_ref::() + .map(node::get_worker_threads_filename_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_require_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_http2_error) + }) + .or_else(|| e.downcast_ref::().map(node::get_os_error)) + .or_else(|| { + e.downcast_ref::() + .map(node::get_brotli_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_mode_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_zlib_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_crypto_cipher_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_crypto_cipher_context_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_crypto_decipher_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_crypto_decipher_context_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_x509_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_crypto_key_object_handle_prehashed_sign_and_verify_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_crypto_hash_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_asymmetric_public_key_jwk_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_generate_rsa_pss_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_asymmetric_private_key_der_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_asymmetric_public_key_der_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_export_public_key_pem_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_export_private_key_pem_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_rsa_jwk_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_ec_jwk_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_ed_raw_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_pbkdf2_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_scrypt_async_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_hkdf_error_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_rsa_pss_params_parse_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_asymmetric_private_key_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_asymmetric_public_key_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_private_encrypt_decrypt_error) + }) + .or_else(|| { + e.downcast_ref::() + .map(node::get_ecdh_encode_pub_key_error) + }) + 
+    .or_else(|| {
+      e.downcast_ref::<node::DiffieHellmanError>()
+        .map(node::get_diffie_hellman_error)
+    })
+    .or_else(|| {
+      e.downcast_ref::<node::SignEd25519Error>()
+        .map(node::get_sign_ed25519_error)
+    })
+    .or_else(|| {
+      e.downcast_ref::<node::VerifyEd25519Error>()
+        .map(node::get_verify_ed25519_error)
+    })
     .or_else(|| e.downcast_ref::<NApiError>().map(get_napi_error_class))
     .or_else(|| e.downcast_ref::<WebError>().map(get_web_error_class))
+    .or_else(|| {
+      e.downcast_ref::<CreateWorkerError>()
+        .map(get_create_worker_error)
+    })
+    .or_else(|| e.downcast_ref::<TtyError>().map(get_tty_error))
+    .or_else(|| e.downcast_ref::<ReadlineError>().map(get_readline_error))
+    .or_else(|| e.downcast_ref::<SignalError>().map(get_signal_error))
+    .or_else(|| e.downcast_ref::<FsEventsError>().map(get_fs_events_error))
+    .or_else(|| e.downcast_ref::<HttpStartError>().map(get_http_start_error))
+    .or_else(|| e.downcast_ref::<ProcessError>().map(get_process_error))
+    .or_else(|| e.downcast_ref::<OsError>().map(get_os_error))
+    .or_else(|| e.downcast_ref::<SyncFetchError>().map(get_sync_fetch_error))
     .or_else(|| {
       e.downcast_ref::<CompressionError>()
         .map(get_web_compression_error_class)
     })
@@ -884,7 +1762,7 @@ pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> {
       e.downcast_ref::<WebSocketUpgradeError>()
         .map(get_websocket_upgrade_error)
     })
-    .or_else(|| e.downcast_ref::<FsOpsError>().map(get_fs_error))
+    .or_else(|| e.downcast_ref::<FsOpsError>().map(get_fs_ops_error))
     .or_else(|| {
       e.downcast_ref::<DlfcnError>()
         .map(get_ffi_dlfcn_error_class)
diff --git a/runtime/fmt_errors.rs b/runtime/fmt_errors.rs
index 8476aab28c..4cd8a06345 100644
--- a/runtime/fmt_errors.rs
+++ b/runtime/fmt_errors.rs
@@ -1,12 +1,10 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 //! This mod provides DenoError to unify errors across Deno.
+use color_print::cformat;
 use color_print::cstr;
 use deno_core::error::format_frame;
 use deno_core::error::JsError;
-use deno_terminal::colors::cyan;
-use deno_terminal::colors::italic_bold;
-use deno_terminal::colors::red;
-use deno_terminal::colors::yellow;
+use deno_terminal::colors;
 use std::fmt::Write as _;

 #[derive(Debug, Clone)]
@@ -25,6 +23,7 @@ struct IndexedErrorReference<'a> {
 enum FixSuggestionKind {
   Info,
   Hint,
+  Docs,
 }

 #[derive(Debug)]
@@ -67,6 +66,13 @@ impl<'a> FixSuggestion<'a> {
       message: FixSuggestionMessage::Multiline(messages),
     }
   }
+
+  pub fn docs(url: &'a str) -> Self {
+    Self {
+      kind: FixSuggestionKind::Docs,
+      message: FixSuggestionMessage::Single(url),
+    }
+  }
 }

 struct AnsiColors;
@@ -79,10 +85,10 @@ impl deno_core::error::ErrorFormat for AnsiColors {
     use deno_core::error::ErrorElement::*;
     match element {
       Anonymous | NativeFrame | FileName | EvalOrigin => {
-        cyan(s).to_string().into()
+        colors::cyan(s).to_string().into()
       }
-      LineNumber | ColumnNumber => yellow(s).to_string().into(),
-      FunctionName | PromiseAll => italic_bold(s).to_string().into(),
+      LineNumber | ColumnNumber => colors::yellow(s).to_string().into(),
+      FunctionName | PromiseAll => colors::italic_bold(s).to_string().into(),
     }
   }
 }
@@ -115,7 +121,7 @@ fn format_maybe_source_line(
   if column_number as usize > source_line.len() {
     return format!(
       "\n{} Couldn't format source line: Column {} is out of bounds (source may have changed at runtime)",
-      yellow("Warning"), column_number,
+      colors::yellow("Warning"), column_number,
    );
   }

@@ -128,9 +134,9 @@ fn format_maybe_source_line(
   }
   s.push('^');
   let color_underline = if is_error {
-    red(&s).to_string()
+    colors::red(&s).to_string()
   } else {
-    cyan(&s).to_string()
+    colors::cyan(&s).to_string()
   };

   let indent = format!("{:indent$}", "", indent = level);
@@ -201,7 +207,8 @@ fn format_js_error_inner(

   if let Some(circular) = &circular {
     if js_error.is_same_error(circular.reference.to) {
-      write!(s, " {}", cyan(format!("<ref *{}>", circular.index))).unwrap();
+      write!(s, " {}", colors::cyan(format!("<ref *{}>", circular.index)))
+        .unwrap();
     }
   }

@@ -239,7 +246,8 @@ fn format_js_error_inner(
       .unwrap_or(false);

     let error_string = if is_caused_by_circular {
-      cyan(format!("[Circular *{}]", circular.unwrap().index)).to_string()
+      colors::cyan(format!("[Circular *{}]", circular.unwrap().index))
+        .to_string()
     } else {
       format_js_error_inner(cause, circular, false, vec![])
     };
@@ -256,12 +264,23 @@ fn format_js_error_inner(
     for (index, suggestion) in suggestions.iter().enumerate() {
       write!(s, " ").unwrap();
       match suggestion.kind {
-        FixSuggestionKind::Hint => write!(s, "{} ", cyan("hint:")).unwrap(),
-        FixSuggestionKind::Info => write!(s, "{} ", yellow("info:")).unwrap(),
+        FixSuggestionKind::Hint => {
+          write!(s, "{} ", colors::cyan("hint:")).unwrap()
+        }
+        FixSuggestionKind::Info => {
+          write!(s, "{} ", colors::yellow("info:")).unwrap()
+        }
+        FixSuggestionKind::Docs => {
+          write!(s, "{} ", colors::green("docs:")).unwrap()
+        }
       };
       match suggestion.message {
         FixSuggestionMessage::Single(msg) => {
-          write!(s, "{}", msg).unwrap();
+          if matches!(suggestion.kind, FixSuggestionKind::Docs) {
+            write!(s, "{}", cformat!("<u>{}</u>", msg)).unwrap();
+          } else {
+            write!(s, "{}", msg).unwrap();
+          }
         }
         FixSuggestionMessage::Multiline(messages) => {
           for (idx, message) in messages.iter().enumerate() {
@@ -300,7 +319,49 @@ fn get_suggestions_for_terminal_errors(e: &JsError) -> Vec<FixSuggestion> {
         cstr!("or add package.json next to the file with \"type\": \"commonjs\" option"),
         cstr!("and pass --unstable-detect-cjs flag."),
       ]),
-      FixSuggestion::hint("See https://docs.deno.com/go/commonjs for details"),
+      FixSuggestion::docs("https://docs.deno.com/go/commonjs"),
     ];
+  } else if msg.contains("__filename is not defined") {
+    return vec![
+      FixSuggestion::info(cstr!(
+        "__filename global is not available in ES modules."
+      )),
+      FixSuggestion::hint(cstr!("Use import.meta.filename instead.")),
+    ];
+  } else if msg.contains("__dirname is not defined") {
+    return vec![
+      FixSuggestion::info(cstr!(
+        "__dirname global is not available in ES modules."
+      )),
+      FixSuggestion::hint(cstr!("Use import.meta.dirname instead.")),
+    ];
+  } else if msg.contains("Buffer is not defined") {
+    return vec![
+      FixSuggestion::info(cstr!(
+        "Buffer is not available in the global scope in Deno."
+      )),
+      FixSuggestion::hint(cstr!("Import it explicitly with import { Buffer } from \"node:buffer\";.")),
+    ];
+  } else if msg.contains("clearImmediate is not defined") {
+    return vec![
+      FixSuggestion::info(cstr!(
+        "clearImmediate is not available in the global scope in Deno."
+      )),
+      FixSuggestion::hint(cstr!("Import it explicitly with import { clearImmediate } from \"node:timers\";.")),
+    ];
+  } else if msg.contains("setImmediate is not defined") {
+    return vec![
+      FixSuggestion::info(cstr!(
+        "setImmediate is not available in the global scope in Deno."
+      )),
+      FixSuggestion::hint(cstr!("Import it explicitly with import { setImmediate } from \"node:timers\";.")),
+    ];
+  } else if msg.contains("global is not defined") {
+    return vec![
+      FixSuggestion::info(cstr!(
+        "global is not available in the global scope in Deno."
+      )),
+      FixSuggestion::hint(cstr!("Use globalThis instead, or assign globalThis.global = globalThis.")),
     ];
   } else if msg.contains("openKv is not a function") {
     return vec![
diff --git a/runtime/inspector_server.rs b/runtime/inspector_server.rs
index 33b2ab8727..a789dd3dca 100644
--- a/runtime/inspector_server.rs
+++ b/runtime/inspector_server.rs
@@ -19,11 +19,12 @@ use deno_core::serde_json::Value;
 use deno_core::unsync::spawn;
 use deno_core::url::Url;
 use deno_core::InspectorMsg;
+use deno_core::InspectorSessionKind;
+use deno_core::InspectorSessionOptions;
 use deno_core::InspectorSessionProxy;
 use deno_core::JsRuntime;
 use fastwebsockets::Frame;
 use fastwebsockets::OpCode;
-use fastwebsockets::Payload;
 use fastwebsockets::WebSocket;
 use hyper::body::Bytes;
 use hyper_util::rt::TokioIo;
@@ -34,7 +35,6 @@ use std::pin::pin;
 use std::process;
 use std::rc::Rc;
 use std::thread;
-use std::time::Duration;
 use tokio::net::TcpListener;
 use tokio::sync::broadcast;
 use uuid::Uuid;
@@ -194,6 +194,11 @@ fn handle_ws_request(
   let inspector_session_proxy = InspectorSessionProxy {
     tx: outbound_tx,
     rx: inbound_rx,
+    options: InspectorSessionOptions {
+      kind: InspectorSessionKind::NonBlocking {
+        wait_for_disconnect: true,
+      },
+    },
   };

   log::info!("Debugger session started.");
@@ -395,13 +400,8 @@ async fn pump_websocket_messages(
   inbound_tx: UnboundedSender<String>,
   mut outbound_rx: UnboundedReceiver<InspectorMsg>,
 ) {
-  let mut ticker = tokio::time::interval(Duration::from_secs(30));
-
   'pump: loop {
     tokio::select! {
-      _ = ticker.tick() => {
-        let _ = websocket.write_frame(Frame::new(true, OpCode::Ping, None, Payload::Borrowed(&[]))).await;
-      }
       Some(msg) = outbound_rx.next() => {
         let msg = Frame::text(msg.content.into_bytes().into());
         let _ = websocket.write_frame(msg).await;
diff --git a/runtime/js/40_fs_events.js b/runtime/js/40_fs_events.js
index ec2474c0ad..322ee6b3ca 100644
--- a/runtime/js/40_fs_events.js
+++ b/runtime/js/40_fs_events.js
@@ -21,7 +21,7 @@ class FsWatcher {
   constructor(paths, options) {
     const { recursive } = options;

-    this.#rid = op_fs_events_open({ recursive, paths });
+    this.#rid = op_fs_events_open(recursive, paths);
   }

   unref() {
diff --git a/runtime/js/99_main.js b/runtime/js/99_main.js
index 56a5b411bb..6ddaa1335e 100644
--- a/runtime/js/99_main.js
+++ b/runtime/js/99_main.js
@@ -27,7 +27,6 @@ const {
   ArrayPrototypeForEach,
   ArrayPrototypeIncludes,
   ArrayPrototypeMap,
-  DateNow,
   Error,
   ErrorPrototype,
   FunctionPrototypeBind,
@@ -642,7 +641,7 @@ function bootstrapMainRuntime(runtimeOptions, warmup = false) {

   removeImportedOps();

-  performance.setTimeOrigin(DateNow());
+  performance.setTimeOrigin();
   globalThis_ = globalThis;

   // Remove bootstrapping data from the global scope
@@ -696,6 +695,7 @@ function bootstrapMainRuntime(runtimeOptions, warmup = false) {
   // are lost.
   let jupyterNs = undefined;
   ObjectDefineProperty(finalDenoNs, "jupyter", {
+    __proto__: null,
     get() {
       if (jupyterNs) {
         return jupyterNs;
@@ -857,7 +857,7 @@ function bootstrapWorkerRuntime(
     7: nodeDebug,
   } = runtimeOptions;

-  performance.setTimeOrigin(DateNow());
+  performance.setTimeOrigin();
   globalThis_ = globalThis;

   // Remove bootstrapping data from the global scope
diff --git a/runtime/ops/fs_events.rs b/runtime/ops/fs_events.rs
index d88a32d917..c8e0228bc0 100644
--- a/runtime/ops/fs_events.rs
+++ b/runtime/ops/fs_events.rs
@@ -1,6 +1,5 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::AnyError;
 use deno_core::parking_lot::Mutex;
 use deno_core::AsyncRefCell;
 use deno_core::CancelFuture;
@@ -20,13 +19,14 @@ use notify::EventKind;
 use notify::RecommendedWatcher;
 use notify::RecursiveMode;
 use notify::Watcher;
-use serde::Deserialize;
 use serde::Serialize;
 use std::borrow::Cow;
 use std::cell::RefCell;
 use std::convert::From;
+use std::path::Path;
 use std::path::PathBuf;
 use std::rc::Rc;
+use std::sync::Arc;
 use tokio::sync::mpsc;

 deno_core::extension!(
@@ -35,9 +35,7 @@ deno_core::extension!(
 );

 struct FsEventsResource {
-  #[allow(unused)]
-  watcher: RecommendedWatcher,
-  receiver: AsyncRefCell<mpsc::Receiver<Result<FsEvent, AnyError>>>,
+  receiver: AsyncRefCell<mpsc::Receiver<Result<FsEvent, FsEventsError>>>,
   cancel: CancelHandle,
 }

@@ -59,7 +57,7 @@ impl Resource for FsEventsResource {
 ///
 /// Feel free to expand this struct as long as you can add tests to demonstrate
 /// the complexity.
-#[derive(Serialize, Debug)]
+#[derive(Serialize, Debug, Clone)]
 struct FsEvent {
   kind: &'static str,
   paths: Vec<PathBuf>,
@@ -93,43 +91,102 @@ impl From<NotifyEvent> for FsEvent {
   }
 }

-#[derive(Deserialize)]
-pub struct OpenArgs {
-  recursive: bool,
+type WatchSender = (Vec<String>, mpsc::Sender<Result<FsEvent, FsEventsError>>);
+
+struct WatcherState {
+  senders: Arc<Mutex<Vec<WatchSender>>>,
+  watcher: RecommendedWatcher,
+}
+
+fn starts_with_canonicalized(path: &Path, prefix: &str) -> bool {
+  #[allow(clippy::disallowed_methods)]
+  let path = path.canonicalize().ok();
+  #[allow(clippy::disallowed_methods)]
+  let prefix = std::fs::canonicalize(prefix).ok();
+  match (path, prefix) {
+    (Some(path), Some(prefix)) => path.starts_with(prefix),
+    _ => false,
+  }
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum FsEventsError {
+  #[error(transparent)]
+  Resource(deno_core::error::AnyError),
+  #[error(transparent)]
+  Permission(#[from] deno_permissions::PermissionCheckError),
+  #[error(transparent)]
+  Notify(#[from] NotifyError),
+  #[error(transparent)]
+  Canceled(#[from] deno_core::Canceled),
+}
+
+fn start_watcher(
+  state: &mut OpState,
   paths: Vec<String>,
+  sender: mpsc::Sender<Result<FsEvent, FsEventsError>>,
+) -> Result<(), FsEventsError> {
+  if let Some(watcher) = state.try_borrow_mut::<WatcherState>() {
+    watcher.senders.lock().push((paths, sender));
+    return Ok(());
+  }
+
+  let senders = Arc::new(Mutex::new(vec![(paths, sender)]));
+
+  let sender_clone = senders.clone();
+  let watcher: RecommendedWatcher = Watcher::new(
+    move |res: Result<NotifyEvent, NotifyError>| {
+      let res2 = res.map(FsEvent::from).map_err(FsEventsError::Notify);
+      for (paths, sender) in sender_clone.lock().iter() {
+        // Ignore result, if send failed it means that watcher was already closed,
+        // but not all messages have been flushed.
+
+        // Only send the event if the path matches one of the paths that the user is watching
+        if let Ok(event) = &res2 {
+          if paths.iter().any(|path| {
+            event.paths.iter().any(|event_path| {
+              same_file::is_same_file(event_path, path).unwrap_or(false)
+                || starts_with_canonicalized(event_path, path)
+            })
+          }) {
+            let _ = sender.try_send(Ok(event.clone()));
+          }
+        }
+      }
+    },
+    Default::default(),
+  )?;
+
+  state.put::<WatcherState>(WatcherState { watcher, senders });
+
+  Ok(())
 }

 #[op2]
 #[smi]
 fn op_fs_events_open(
   state: &mut OpState,
-  #[serde] args: OpenArgs,
-) -> Result<ResourceId, AnyError> {
-  let (sender, receiver) = mpsc::channel::<Result<FsEvent, AnyError>>(16);
-  let sender = Mutex::new(sender);
-  let mut watcher: RecommendedWatcher = Watcher::new(
-    move |res: Result<NotifyEvent, NotifyError>| {
-      let res2 = res.map(FsEvent::from).map_err(AnyError::from);
-      let sender = sender.lock();
-      // Ignore result, if send failed it means that watcher was already closed,
-      // but not all messages have been flushed.
-      let _ = sender.try_send(res2);
-    },
-    Default::default(),
-  )?;
-  let recursive_mode = if args.recursive {
+  recursive: bool,
+  #[serde] paths: Vec<String>,
+) -> Result<ResourceId, FsEventsError> {
+  let (sender, receiver) = mpsc::channel::<Result<FsEvent, FsEventsError>>(16);
+
+  start_watcher(state, paths.clone(), sender)?;
+
+  let recursive_mode = if recursive {
     RecursiveMode::Recursive
   } else {
     RecursiveMode::NonRecursive
   };
-  for path in &args.paths {
+  for path in &paths {
     let path = state
       .borrow_mut::<PermissionsContainer>()
       .check_read(path, "Deno.watchFs()")?;
-    watcher.watch(&path, recursive_mode)?;
+
+    let watcher = state.borrow_mut::<WatcherState>();
+    watcher.watcher.watch(&path, recursive_mode)?;
   }
   let resource = FsEventsResource {
-    watcher,
     receiver: AsyncRefCell::new(receiver),
     cancel: Default::default(),
   };
@@ -142,14 +199,18 @@ fn op_fs_events_open(
 async fn op_fs_events_poll(
   state: Rc<RefCell<OpState>>,
   #[smi] rid: ResourceId,
-) -> Result<Option<FsEvent>, AnyError> {
-  let resource = state.borrow().resource_table.get::<FsEventsResource>(rid)?;
+) -> Result<Option<FsEvent>, FsEventsError> {
+  let resource = state
+    .borrow()
+    .resource_table
+    .get::<FsEventsResource>(rid)
+    .map_err(FsEventsError::Resource)?;
   let mut receiver = RcRef::map(&resource, |r| &r.receiver).borrow_mut().await;
   let cancel = RcRef::map(resource, |r| &r.cancel);
   let maybe_result = receiver.recv().or_cancel(cancel).await?;
   match maybe_result {
     Some(Ok(value)) => Ok(Some(value)),
-    Some(Err(err)) => Err(err),
+    Some(Err(err)) => Err(FsEventsError::Notify(err)),
     None => Ok(None),
   }
 }
diff --git a/runtime/ops/http.rs b/runtime/ops/http.rs
index cbabbe22c5..6e31576686 100644
--- a/runtime/ops/http.rs
+++ b/runtime/ops/http.rs
@@ -2,9 +2,6 @@

 use std::rc::Rc;

-use deno_core::error::bad_resource_id;
-use deno_core::error::custom_error;
-use deno_core::error::AnyError;
 use deno_core::op2;
 use deno_core::OpState;
 use deno_core::ResourceId;
@@ -16,12 +13,31 @@ pub const UNSTABLE_FEATURE_NAME: &str = "http";

 deno_core::extension!(deno_http_runtime, ops = [op_http_start],);

+#[derive(Debug, thiserror::Error)]
+pub enum HttpStartError {
+  #[error("TCP stream is currently in use")]
+  TcpStreamInUse,
+  #[error("TLS stream is currently in use")]
+  TlsStreamInUse,
+  #[error("Unix socket is currently in use")]
+  UnixSocketInUse,
+  #[error(transparent)]
+  ReuniteTcp(#[from] tokio::net::tcp::ReuniteError),
+  #[cfg(unix)]
+  #[error(transparent)]
+  ReuniteUnix(#[from] tokio::net::unix::ReuniteError),
+  #[error("{0}")]
+  Io(#[from] std::io::Error),
+  #[error(transparent)]
+  Other(deno_core::error::AnyError),
+}
+
 #[op2(fast)]
 #[smi]
 fn op_http_start(
   state: &mut OpState,
   #[smi] tcp_stream_rid: ResourceId,
-) -> Result<ResourceId, AnyError> {
+) -> Result<ResourceId, HttpStartError> {
   if let Ok(resource_rc) = state
     .resource_table
     .take::<TcpStreamResource>(tcp_stream_rid)
@@ -30,7 +46,7 @@ fn op_http_start(
     // process of starting a HTTP server on top of this TCP connection, so we just return a Busy error.
     // See also: https://github.com/denoland/deno/pull/16242
     let resource = Rc::try_unwrap(resource_rc)
-      .map_err(|_| custom_error("Busy", "TCP stream is currently in use"))?;
+      .map_err(|_| HttpStartError::TcpStreamInUse)?;
     let (read_half, write_half) = resource.into_inner();
     let tcp_stream = read_half.reunite(write_half)?;
     let addr = tcp_stream.local_addr()?;
@@ -45,7 +61,7 @@ fn op_http_start(
     // process of starting a HTTP server on top of this TLS connection, so we just return a Busy error.
     // See also: https://github.com/denoland/deno/pull/16242
     let resource = Rc::try_unwrap(resource_rc)
-      .map_err(|_| custom_error("Busy", "TLS stream is currently in use"))?;
+      .map_err(|_| HttpStartError::TlsStreamInUse)?;
     let (read_half, write_half) = resource.into_inner();
     let tls_stream = read_half.unsplit(write_half);
     let addr = tls_stream.local_addr()?;
@@ -61,7 +77,7 @@ fn op_http_start(
     // process of starting a HTTP server on top of this UNIX socket, so we just return a Busy error.
     // See also: https://github.com/denoland/deno/pull/16242
     let resource = Rc::try_unwrap(resource_rc)
-      .map_err(|_| custom_error("Busy", "Unix socket is currently in use"))?;
+      .map_err(|_| HttpStartError::UnixSocketInUse)?;
     let (read_half, write_half) = resource.into_inner();
     let unix_stream = read_half.reunite(write_half)?;
     let addr = unix_stream.local_addr()?;
@@ -73,5 +89,5 @@ fn op_http_start(
     ));
   }

-  Err(bad_resource_id())
+  Err(HttpStartError::Other(deno_core::error::bad_resource_id()))
 }
diff --git a/runtime/ops/mod.rs b/runtime/ops/mod.rs
index feed5052bb..67065b901b 100644
--- a/runtime/ops/mod.rs
+++ b/runtime/ops/mod.rs
@@ -9,7 +9,6 @@ pub mod process;
 pub mod runtime;
 pub mod signal;
 pub mod tty;
-mod utils;
 pub mod web_worker;
 pub mod worker_host;
diff --git a/runtime/ops/os/mod.rs b/runtime/ops/os/mod.rs
index bd9260e97e..9bee9d8234 100644
--- a/runtime/ops/os/mod.rs
+++ b/runtime/ops/os/mod.rs
@@ -1,9 +1,6 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

-use super::utils::into_string;
 use crate::worker::ExitCode;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
 use deno_core::op2;
 use deno_core::v8;
 use deno_core::OpState;
@@ -73,9 +70,27 @@ deno_core::extension!(
   },
 );

+#[derive(Debug, thiserror::Error)]
+pub enum OsError {
+  #[error(transparent)]
+  Permission(#[from] deno_permissions::PermissionCheckError),
+  #[error("File name or path {0:?} is not valid UTF-8")]
+  InvalidUtf8(std::ffi::OsString),
+  #[error("Key is an empty string.")]
+  EnvEmptyKey,
+  #[error("Key contains invalid characters: {0:?}")]
+  EnvInvalidKey(String),
+  #[error("Value contains invalid characters: {0:?}")]
+  EnvInvalidValue(String),
+  #[error(transparent)]
+  Var(#[from] env::VarError),
+  #[error("{0}")]
+  Io(#[from] std::io::Error),
+}
+
 #[op2]
 #[string]
-fn op_exec_path(state: &mut OpState) -> Result<String, AnyError> {
+fn op_exec_path(state: &mut OpState) -> Result<String, OsError> {
   let current_exe = env::current_exe().unwrap();
   state
     .borrow_mut::<PermissionsContainer>()
@@ -83,7 +98,10 @@ fn op_exec_path(state: &mut OpState) -> Result<String, AnyError> {
   // normalize path so it doesn't include '.' or '..' components
   let path = normalize_path(current_exe);
-  into_string(path.into_os_string())
+  path
+    .into_os_string()
+    .into_string()
+    .map_err(OsError::InvalidUtf8)
 }

 #[op2(fast)]
@@ -91,20 +109,16 @@ fn op_set_env(
   state: &mut OpState,
   #[string] key: &str,
   #[string] value: &str,
-) -> Result<(), AnyError> {
+) -> Result<(), OsError> {
   state.borrow_mut::<PermissionsContainer>().check_env(key)?;
   if key.is_empty() {
-    return Err(type_error("Key is an empty string."));
+    return Err(OsError::EnvEmptyKey);
   }
   if key.contains(&['=', '\0'] as &[char]) {
-    return Err(type_error(format!(
-      "Key contains invalid characters: {key:?}"
-    )));
+    return Err(OsError::EnvInvalidKey(key.to_string()));
   }
   if value.contains('\0') {
-    return Err(type_error(format!(
-      "Value contains invalid characters: {value:?}"
-    )));
+    return Err(OsError::EnvInvalidValue(value.to_string()));
   }
   env::set_var(key, value);
   Ok(())
@@ -112,7 +126,9 @@ fn op_set_env(

 #[op2]
 #[serde]
-fn op_env(state: &mut OpState) -> Result<HashMap<String, String>, AnyError> {
+fn op_env(
+  state: &mut OpState,
+) -> Result<HashMap<String, String>, deno_core::error::AnyError> {
   state.borrow_mut::<PermissionsContainer>().check_env_all()?;
   Ok(env::vars().collect())
 }
@@ -122,7 +138,7 @@ fn op_env(
 fn op_get_env(
   state: &mut OpState,
   #[string] key: String,
-) -> Result<Option<String>, AnyError> {
+) -> Result<Option<String>, OsError> {
   let skip_permission_check = NODE_ENV_VAR_ALLOWLIST.contains(&key);

   if !skip_permission_check {
@@ -130,13 +146,11 @@ fn op_get_env(
   }

   if key.is_empty() {
-    return Err(type_error("Key is an empty string."));
+    return Err(OsError::EnvEmptyKey);
   }

   if key.contains(&['=', '\0'] as &[char]) {
-    return Err(type_error(format!(
-      "Key contains invalid characters: {key:?}"
-    )));
+    return Err(OsError::EnvInvalidKey(key.to_string()));
   }

   let r = match env::var(key) {
@@ -150,10 +164,10 @@ fn op_get_env(
 fn op_delete_env(
   state: &mut OpState,
   #[string] key: String,
-) -> Result<(), AnyError> {
+) -> Result<(), OsError> {
   state.borrow_mut::<PermissionsContainer>().check_env(&key)?;
   if key.is_empty() || key.contains(&['=', '\0'] as &[char]) {
-    return Err(type_error("Key contains invalid characters."));
+    return Err(OsError::EnvInvalidKey(key.to_string()));
   }
   env::remove_var(key);
   Ok(())
@@ -178,7 +192,9 @@ fn op_exit(state: &mut OpState) {

 #[op2]
 #[serde]
-fn op_loadavg(state: &mut OpState) -> Result<(f64, f64, f64), AnyError> {
+fn op_loadavg(
+  state: &mut OpState,
+) -> Result<(f64, f64, f64), deno_core::error::AnyError> {
   state
     .borrow_mut::<PermissionsContainer>()
     .check_sys("loadavg", "Deno.loadavg()")?;
@@ -187,7 +203,9 @@ fn op_loadavg(

 #[op2]
 #[string]
-fn op_hostname(state: &mut OpState) -> Result<String, AnyError> {
+fn op_hostname(
+  state: &mut OpState,
+) -> Result<String, deno_core::error::AnyError> {
   state
     .borrow_mut::<PermissionsContainer>()
     .check_sys("hostname", "Deno.hostname()")?;
@@ -196,7 +214,9 @@ fn op_hostname(

 #[op2]
 #[string]
-fn op_os_release(state: &mut OpState) -> Result<String, AnyError> {
+fn op_os_release(
+  state: &mut OpState,
+) -> Result<String, deno_core::error::AnyError> {
   state
     .borrow_mut::<PermissionsContainer>()
     .check_sys("osRelease", "Deno.osRelease()")?;
@@ -207,7 +227,7 @@ fn op_os_release(
 #[serde]
 fn op_network_interfaces(
   state: &mut OpState,
-) -> Result<Vec<NetworkInterface>, AnyError> {
+) -> Result<Vec<NetworkInterface>, OsError> {
   state
     .borrow_mut::<PermissionsContainer>()
     .check_sys("networkInterfaces", "Deno.networkInterfaces()")?;
@@ -259,7 +279,7 @@ impl From<netif::Interface> for NetworkInterface {
 #[serde]
 fn op_system_memory_info(
   state: &mut OpState,
-) -> Result<Option<sys_info::MemInfo>, AnyError> {
+) -> Result<Option<sys_info::MemInfo>, deno_core::error::AnyError> {
   state
     .borrow_mut::<PermissionsContainer>()
     .check_sys("systemMemoryInfo", "Deno.systemMemoryInfo()")?;
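
The os/mod.rs hunks above are the template the whole diff follows: each op module gets its own `thiserror` enum (here `OsError`), the `#[op2]` functions return that enum instead of `AnyError`, and `get_error_class_name` maps it onto a JS error class by downcasting. A minimal, self-contained sketch of that pattern follows; `DemoError`, its mapper, and `error_class` are hypothetical stand-ins rather than Deno code, and it assumes only the `thiserror` crate plus `anyhow` (which `deno_core::error::AnyError` aliases):

```rust
// Sketch of the per-module typed-error pattern applied throughout this diff.
// `DemoError` and `get_demo_error_class` are illustrative, not Deno's API.
use anyhow::Error as AnyError;
use thiserror::Error;

#[derive(Debug, Error)]
pub enum DemoError {
  #[error("Key is an empty string.")]
  EmptyKey,
  #[error("Key contains invalid characters: {0:?}")]
  InvalidKey(String),
  #[error(transparent)]
  Io(#[from] std::io::Error),
}

// Maps a typed op error onto the JS error class thrown to user code.
fn get_demo_error_class(e: &DemoError) -> &'static str {
  match e {
    DemoError::EmptyKey | DemoError::InvalidKey(_) => "TypeError",
    DemoError::Io(_) => "Error",
  }
}

// Mirrors get_error_class_name: try each known error type via downcast_ref,
// returning None for errors no mapper recognizes.
pub fn error_class(e: &AnyError) -> Option<&'static str> {
  e.downcast_ref::<DemoError>().map(get_demo_error_class)
}

fn main() {
  let err: AnyError = DemoError::EmptyKey.into();
  assert_eq!(error_class(&err), Some("TypeError"));
  println!("class: {:?}", error_class(&err));
}
```

The design payoff is that the error-to-class decision is made once, in one match per module, instead of being scattered through `custom_error("TypeError", ...)` call sites.
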
@@ -269,7 +289,9 @@ fn op_system_memory_info(

 #[cfg(not(windows))]
 #[op2]
 #[smi]
-fn op_gid(state: &mut OpState) -> Result<Option<u32>, AnyError> {
+fn op_gid(
+  state: &mut OpState,
+) -> Result<Option<u32>, deno_core::error::AnyError> {
   state
     .borrow_mut::<PermissionsContainer>()
     .check_sys("gid", "Deno.gid()")?;
@@ -283,7 +305,9 @@ fn op_gid(
 #[cfg(windows)]
 #[op2]
 #[smi]
-fn op_gid(state: &mut OpState) -> Result<Option<u32>, AnyError> {
+fn op_gid(
+  state: &mut OpState,
+) -> Result<Option<u32>, deno_core::error::AnyError> {
   state
     .borrow_mut::<PermissionsContainer>()
     .check_sys("gid", "Deno.gid()")?;
@@ -293,7 +317,9 @@ fn op_gid(
 #[cfg(not(windows))]
 #[op2]
 #[smi]
-fn op_uid(state: &mut OpState) -> Result<Option<u32>, AnyError> {
+fn op_uid(
+  state: &mut OpState,
+) -> Result<Option<u32>, deno_core::error::AnyError> {
   state
     .borrow_mut::<PermissionsContainer>()
     .check_sys("uid", "Deno.uid()")?;
@@ -307,7 +333,9 @@ fn op_uid(
 #[cfg(windows)]
 #[op2]
 #[smi]
-fn op_uid(state: &mut OpState) -> Result<Option<u32>, AnyError> {
+fn op_uid(
+  state: &mut OpState,
+) -> Result<Option<u32>, deno_core::error::AnyError> {
   state
     .borrow_mut::<PermissionsContainer>()
     .check_sys("uid", "Deno.uid()")?;
@@ -485,7 +513,7 @@ fn rss() -> usize {
   }
 }

-fn os_uptime(state: &mut OpState) -> Result<u64, AnyError> {
+fn os_uptime(state: &mut OpState) -> Result<u64, deno_core::error::AnyError> {
   state
     .borrow_mut::<PermissionsContainer>()
     .check_sys("osUptime", "Deno.osUptime()")?;
@@ -494,6 +522,8 @@ fn os_uptime(

 #[op2(fast)]
 #[number]
-fn op_os_uptime(state: &mut OpState) -> Result<u64, AnyError> {
+fn op_os_uptime(
+  state: &mut OpState,
+) -> Result<u64, deno_core::error::AnyError> {
   os_uptime(state)
 }
diff --git a/runtime/ops/permissions.rs b/runtime/ops/permissions.rs
index 1dbc852596..9ad963f3bc 100644
--- a/runtime/ops/permissions.rs
+++ b/runtime/ops/permissions.rs
@@ -2,8 +2,6 @@

 use ::deno_permissions::PermissionState;
 use ::deno_permissions::PermissionsContainer;
-use deno_core::error::custom_error;
-use deno_core::error::AnyError;
 use deno_core::op2;
 use deno_core::OpState;
 use serde::Deserialize;
@@ -47,12 +45,26 @@ impl From<PermissionState> for PermissionStatus {
   }
 }

+#[derive(Debug, thiserror::Error)]
+pub enum PermissionError {
+  #[error("No such permission name: {0}")]
+  InvalidPermissionName(String),
+  #[error("{0}")]
+  PathResolve(#[from] ::deno_permissions::PathResolveError),
+  #[error("{0}")]
+  NetDescriptorParse(#[from] ::deno_permissions::NetDescriptorParseError),
+  #[error("{0}")]
+  SysDescriptorParse(#[from] ::deno_permissions::SysDescriptorParseError),
+  #[error("{0}")]
+  RunDescriptorParse(#[from] ::deno_permissions::RunDescriptorParseError),
+}
+
 #[op2]
 #[serde]
 pub fn op_query_permission(
   state: &mut OpState,
   #[serde] args: PermissionArgs,
-) -> Result<PermissionStatus, AnyError> {
+) -> Result<PermissionStatus, PermissionError> {
   let permissions = state.borrow::<PermissionsContainer>();
   let perm = match args.name.as_ref() {
     "read" => permissions.query_read(args.path.as_deref())?,
@@ -62,12 +74,7 @@ pub fn op_query_permission(
     "sys" => permissions.query_sys(args.kind.as_deref())?,
     "run" => permissions.query_run(args.command.as_deref())?,
     "ffi" => permissions.query_ffi(args.path.as_deref())?,
-    n => {
-      return Err(custom_error(
-        "ReferenceError",
-        format!("No such permission name: {n}"),
-      ))
-    }
+    _ => return Err(PermissionError::InvalidPermissionName(args.name)),
   };
   Ok(PermissionStatus::from(perm))
 }
@@ -77,7 +84,7 @@
 pub fn op_revoke_permission(
   state: &mut OpState,
   #[serde] args: PermissionArgs,
-) -> Result<PermissionStatus, AnyError> {
+) -> Result<PermissionStatus, PermissionError> {
   let permissions = state.borrow::<PermissionsContainer>();
   let perm = match args.name.as_ref() {
     "read" => permissions.revoke_read(args.path.as_deref())?,
@@ -87,12 +94,7 @@ pub fn op_revoke_permission(
     "sys" => permissions.revoke_sys(args.kind.as_deref())?,
     "run" => permissions.revoke_run(args.command.as_deref())?,
     "ffi" => permissions.revoke_ffi(args.path.as_deref())?,
-    n => {
-      return Err(custom_error(
-        "ReferenceError",
-        format!("No such permission name: {n}"),
-      ))
-    }
+    _ => return Err(PermissionError::InvalidPermissionName(args.name)),
   };
   Ok(PermissionStatus::from(perm))
 }
@@ -102,7 +104,7 @@
 pub fn op_request_permission(
   state: &mut OpState,
   #[serde] args: PermissionArgs,
-) -> Result<PermissionStatus, AnyError> {
+) -> Result<PermissionStatus, PermissionError> {
   let permissions = state.borrow::<PermissionsContainer>();
   let perm = match args.name.as_ref() {
     "read" => permissions.request_read(args.path.as_deref())?,
@@ -112,12 +114,7 @@ pub fn op_request_permission(
     "sys" => permissions.request_sys(args.kind.as_deref())?,
     "run" => permissions.request_run(args.command.as_deref())?,
     "ffi" => permissions.request_ffi(args.path.as_deref())?,
-    n => {
-      return Err(custom_error(
-        "ReferenceError",
-        format!("No such permission name: {n}"),
-      ))
-    }
+    _ => return Err(PermissionError::InvalidPermissionName(args.name)),
   };
   Ok(PermissionStatus::from(perm))
 }
diff --git a/runtime/ops/process.rs b/runtime/ops/process.rs
index f6555e9324..de3141f1f9 100644
--- a/runtime/ops/process.rs
+++ b/runtime/ops/process.rs
@@ -1,8 +1,5 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

-use deno_core::anyhow::Context;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
 use deno_core::op2;
 use deno_core::serde_json;
 use deno_core::AsyncMutFuture;
@@ -35,6 +32,7 @@ use tokio::process::Command;
 #[cfg(windows)]
 use std::os::windows::process::CommandExt;

+use crate::ops::signal::SignalError;
 #[cfg(unix)]
 use std::os::unix::prelude::ExitStatusExt;
 #[cfg(unix)]
@@ -105,11 +103,12 @@ impl StdioOrRid {
   pub fn as_stdio(
     &self,
     state: &mut OpState,
-  ) -> Result<std::process::Stdio, AnyError> {
+  ) -> Result<std::process::Stdio, ProcessError> {
     match &self {
       StdioOrRid::Stdio(val) => Ok(val.as_stdio()),
       StdioOrRid::Rid(rid) => {
         FileResource::with_file(state, *rid, |file| Ok(file.as_stdio()?))
+          .map_err(ProcessError::Resource)
       }
     }
   }
@@ -191,6 +190,41 @@ pub struct SpawnArgs {
   needs_npm_process_state: bool,
 }

+#[derive(Debug, thiserror::Error)]
+pub enum ProcessError {
+  #[error("Failed to spawn '{command}': {error}")]
+  SpawnFailed {
+    command: String,
+    #[source]
+    error: Box<ProcessError>,
+  },
+  #[error("{0}")]
+  Io(#[from] std::io::Error),
+  #[cfg(unix)]
+  #[error(transparent)]
+  Nix(nix::Error),
+  #[error("failed resolving cwd: {0}")]
+  FailedResolvingCwd(#[source] std::io::Error),
+  #[error(transparent)]
+  Permission(#[from] deno_permissions::PermissionCheckError),
+  #[error(transparent)]
+  RunPermission(#[from] CheckRunPermissionError),
+  #[error(transparent)]
+  Resource(deno_core::error::AnyError),
+  #[error(transparent)]
+  BorrowMut(std::cell::BorrowMutError),
+  #[error(transparent)]
+  Which(which::Error),
+  #[error("Child process has already terminated.")]
+  ChildProcessAlreadyTerminated,
+  #[error("Invalid pid")]
+  InvalidPid,
+  #[error(transparent)]
+  Signal(#[from] SignalError),
+  #[error("Missing cmd")]
+  MissingCmd, // only for Deno.run
+}
+
 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct ChildStdio {
@@ -208,7 +242,7 @@ pub struct ChildStatus {
 }

 impl TryFrom<ExitStatus> for ChildStatus {
-  type Error = AnyError;
+  type Error = SignalError;

   fn try_from(status: ExitStatus) -> Result<Self, Self::Error> {
     let code = status.code();
@@ -259,7 +293,7 @@ type CreateCommand = (
 pub fn npm_process_state_tempfile(
   contents: &[u8],
-) -> Result<RawIoHandle, AnyError> {
+) -> Result<RawIoHandle, std::io::Error> {
   let mut temp_file = tempfile::tempfile()?;
   temp_file.write_all(contents)?;
   let handle = temp_file.into_raw_io_handle();
@@ -301,7 +335,7 @@ fn create_command(
   state: &mut OpState,
   mut args: SpawnArgs,
   api_name: &str,
-) -> Result<CreateCommand, AnyError> {
+) -> Result<CreateCommand, ProcessError> {
   let maybe_npm_process_state = if args.needs_npm_process_state {
     let provider = state.borrow::<NpmProcessStateProviderRc>();
     let process_state = provider.get_npm_process_state();
@@ -505,7 +539,7 @@ fn spawn_child(
   ipc_pipe_rid: Option<ResourceId>,
   extra_pipe_rids: Vec<Option<ResourceId>>,
   detached: bool,
-) -> Result<Child, AnyError> {
+) -> Result<Child, ProcessError> {
   let mut command = tokio::process::Command::from(command);
   // TODO(@crowlkats): allow detaching processes.
   // currently deno will orphan a process when exiting with an error or Deno.exit()
@@ -554,10 +588,10 @@ fn spawn_child(
     }
   }

-  return Err(AnyError::from(err).context(format!(
-    "Failed to spawn '{}'",
-    command.get_program().to_string_lossy()
-  )));
+  return Err(ProcessError::SpawnFailed {
+    command: command.get_program().to_string_lossy().to_string(),
+    error: Box::new(err.into()),
+  });
     }
   };
@@ -600,11 +634,19 @@ fn compute_run_cmd_and_check_permissions(
   arg_clear_env: bool,
   state: &mut OpState,
   api_name: &str,
-) -> Result<(PathBuf, RunEnv), AnyError> {
-  let run_env = compute_run_env(arg_cwd, arg_envs, arg_clear_env)
-    .with_context(|| format!("Failed to spawn '{}'", arg_cmd))?;
-  let cmd = resolve_cmd(arg_cmd, &run_env)
-    .with_context(|| format!("Failed to spawn '{}'", arg_cmd))?;
+) -> Result<(PathBuf, RunEnv), ProcessError> {
+  let run_env =
+    compute_run_env(arg_cwd, arg_envs, arg_clear_env).map_err(|e| {
+      ProcessError::SpawnFailed {
+        command: arg_cmd.to_string(),
+        error: Box::new(e),
+      }
+    })?;
+  let cmd =
+    resolve_cmd(arg_cmd, &run_env).map_err(|e| ProcessError::SpawnFailed {
+      command: arg_cmd.to_string(),
+      error: Box::new(e),
+    })?;
   check_run_permission(
     state,
     &RunQueryDescriptor::Path {
@@ -631,9 +673,10 @@ fn compute_run_env(
   arg_cwd: Option<&str>,
   arg_envs: &[(String, String)],
   arg_clear_env: bool,
-) -> Result<RunEnv, AnyError> {
+) -> Result<RunEnv, ProcessError> {
   #[allow(clippy::disallowed_methods)]
-  let cwd = std::env::current_dir().context("failed resolving cwd")?;
+  let cwd =
+    std::env::current_dir().map_err(ProcessError::FailedResolvingCwd)?;
   let cwd = arg_cwd
     .map(|cwd_arg| resolve_path(cwd_arg, &cwd))
     .unwrap_or(cwd);
@@ -670,7 +713,7 @@ fn compute_run_env(
   Ok(RunEnv { envs, cwd })
 }

-fn resolve_cmd(cmd: &str, env: &RunEnv) -> Result<PathBuf, AnyError> {
+fn resolve_cmd(cmd: &str, env: &RunEnv) -> Result<PathBuf, ProcessError> {
   let is_path = cmd.contains('/');
   #[cfg(windows)]
   let is_path = is_path || cmd.contains('\\') || Path::new(&cmd).is_absolute();
@@ -683,7 +726,7 @@ fn resolve_cmd(
       Err(which::Error::CannotFindBinaryPath) => {
         Err(std::io::Error::from(std::io::ErrorKind::NotFound).into())
       }
-      Err(err) => Err(err.into()),
+      Err(err) => Err(ProcessError::Which(err)),
     }
   }
 }
@@ -692,12 +735,20 @@ fn resolve_path(path: &str, cwd: &Path) -> PathBuf {
   deno_path_util::normalize_path(cwd.join(path))
 }

+#[derive(Debug, thiserror::Error)]
+pub enum CheckRunPermissionError {
+  #[error(transparent)]
+  Permission(#[from] deno_permissions::PermissionCheckError),
+  #[error("{0}")]
+  Other(deno_core::error::AnyError),
+}
+
 fn check_run_permission(
   state: &mut OpState,
   cmd: &RunQueryDescriptor,
   run_env: &RunEnv,
   api_name: &str,
-) -> Result<(), AnyError> {
+) -> Result<(), CheckRunPermissionError> {
   let permissions = state.borrow_mut::<PermissionsContainer>();
   if !permissions.query_run_all(api_name) {
     // error the same on all platforms
     if !env_var_names.is_empty() {
      // we don't allow users to launch subprocesses with any LD_ or DYLD_*
      // env vars set because this allows executing code (ex. LD_PRELOAD)
-      return Err(deno_core::error::custom_error(
+      return Err(CheckRunPermissionError::Other(deno_core::error::custom_error(
        "NotCapable",
        format!(
          "Requires --allow-all permissions to spawn subprocess with {} environment variable{}.",
          env_var_names.join(", "),
          if env_var_names.len() != 1 { "s" } else { "" }
        )
-      ));
+      )));
    }
    permissions.check_run(cmd, api_name)?;
  }
@@ -754,7 +805,7 @@ fn op_spawn_child(
   state: &mut OpState,
   #[serde] args: SpawnArgs,
   #[string] api_name: String,
-) -> Result<Child, AnyError> {
+) -> Result<Child, ProcessError> {
   let detached = args.detached;
   let (command, pipe_rid, extra_pipe_rids, handles_to_close) =
     create_command(state, args, &api_name)?;
@@ -771,16 +822,23 @@ async fn op_spawn_wait(
   state: Rc<RefCell<OpState>>,
   #[smi] rid: ResourceId,
-) -> Result<ChildStatus, AnyError> {
+) -> Result<ChildStatus, ProcessError> {
   let resource = state
     .borrow_mut()
     .resource_table
-    .get::<ChildResource>(rid)?;
-  let result = resource.0.try_borrow_mut()?.wait().await?.try_into();
+    .get::<ChildResource>(rid)
+    .map_err(ProcessError::Resource)?;
+  let result = resource
+    .0
+    .try_borrow_mut()
+    .map_err(ProcessError::BorrowMut)?
+    .wait()
+    .await?
+    .try_into()?;
   if let Ok(resource) = state.borrow_mut().resource_table.take_any(rid) {
     resource.close();
   }
-  result
+  Ok(result)
 }

 #[op2]
@@ -788,16 +846,14 @@ async fn op_spawn_wait(
 fn op_spawn_sync(
   state: &mut OpState,
   #[serde] args: SpawnArgs,
-) -> Result<SpawnOutput, AnyError> {
+) -> Result<SpawnOutput, ProcessError> {
   let stdout = matches!(args.stdio.stdout, StdioOrRid::Stdio(Stdio::Piped));
   let stderr = matches!(args.stdio.stderr, StdioOrRid::Stdio(Stdio::Piped));
   let (mut command, _, _, _) =
     create_command(state, args, "Deno.Command().outputSync()")?;
-  let output = command.output().with_context(|| {
-    format!(
-      "Failed to spawn '{}'",
-      command.get_program().to_string_lossy()
-    )
+  let output = command.output().map_err(|e| ProcessError::SpawnFailed {
+    command: command.get_program().to_string_lossy().to_string(),
+    error: Box::new(e.into()),
   })?;

   Ok(SpawnOutput {
@@ -820,17 +876,15 @@ fn op_spawn_kill(
   state: &mut OpState,
   #[smi] rid: ResourceId,
   #[string] signal: String,
-) -> Result<(), AnyError> {
+) -> Result<(), ProcessError> {
   if let Ok(child_resource) = state.resource_table.get::<ChildResource>(rid) {
     deprecated::kill(child_resource.1 as i32, &signal)?;
     return Ok(());
   }

-  Err(type_error("Child process has already terminated."))
+  Err(ProcessError::ChildProcessAlreadyTerminated)
 }

 mod deprecated {
-  use deno_core::anyhow;
-
   use super::*;

   #[derive(Deserialize)]
@@ -876,9 +930,9 @@ mod deprecated {
   pub fn op_run(
     state: &mut OpState,
     #[serde] run_args: RunArgs,
-  ) -> Result<RunInfo, AnyError> {
+  ) -> Result<RunInfo, ProcessError> {
     let args = run_args.cmd;
-    let cmd = args.first().ok_or_else(|| anyhow::anyhow!("Missing cmd"))?;
+    let cmd = args.first().ok_or(ProcessError::MissingCmd)?;
     let (cmd, run_env) = compute_run_cmd_and_check_permissions(
       cmd,
       run_args.cwd.as_deref(),
@@ -990,11 +1044,12 @@ mod deprecated {
   pub async fn op_run_status(
     state: Rc<RefCell<OpState>>,
     #[smi] rid: ResourceId,
-  ) -> Result<ProcessStatus, AnyError> {
+  ) -> Result<ProcessStatus, ProcessError> {
     let resource = state
       .borrow_mut()
       .resource_table
-      .get::<ChildResource>(rid)?;
+      .get::<ChildResource>(rid)
+      .map_err(ProcessError::Resource)?;
     let mut child = resource.borrow_mut().await;
     let run_status = child.wait().await?;
     let code = run_status.code();
@@ -1017,17 +1072,17 @@ mod deprecated {
   }

   #[cfg(unix)]
-  pub fn kill(pid: i32, signal: &str) -> Result<(), AnyError> {
+  pub fn kill(pid: i32, signal: &str) -> Result<(), ProcessError> {
     let signo = super::super::signal::signal_str_to_int(signal)?;
     use nix::sys::signal::kill as unix_kill;
     use nix::sys::signal::Signal;
     use nix::unistd::Pid;
-    let sig = Signal::try_from(signo)?;
-    unix_kill(Pid::from_raw(pid), Option::Some(sig)).map_err(AnyError::from)
+    let sig = Signal::try_from(signo).map_err(ProcessError::Nix)?;
+    unix_kill(Pid::from_raw(pid), Some(sig)).map_err(ProcessError::Nix)
   }

   #[cfg(not(unix))]
-  pub fn kill(pid: i32, signal: &str) -> Result<(), AnyError> {
+  pub fn kill(pid: i32, signal: &str) -> Result<(), ProcessError> {
     use std::io::Error;
     use std::io::ErrorKind::NotFound;
     use winapi::shared::minwindef::DWORD;
@@ -1041,9 +1096,9 @@ mod deprecated {
     use winapi::um::winnt::PROCESS_TERMINATE;

     if !matches!(signal, "SIGKILL" | "SIGTERM") {
-      Err(type_error(format!("Invalid signal: {signal}")))
+      Err(SignalError::InvalidSignalStr(signal.to_string()).into())
     } else if pid <= 0 {
-      Err(type_error("Invalid pid"))
+      Err(ProcessError::InvalidPid)
     } else {
       let handle =
       // SAFETY: winapi call
@@ -1077,11 +1132,10 @@ mod deprecated {
     #[smi] pid: i32,
     #[string] signal: String,
     #[string] api_name: String,
-  ) -> Result<(), AnyError> {
+  ) -> Result<(), ProcessError> {
     state
       .borrow_mut::<PermissionsContainer>()
       .check_run_all(&api_name)?;
-    kill(pid, &signal)?;
-    Ok(())
+    kill(pid, &signal)
   }
 }
diff --git a/runtime/ops/runtime.rs b/runtime/ops/runtime.rs
index 419274ebd9..8d54783fc9 100644
--- a/runtime/ops/runtime.rs
+++ b/runtime/ops/runtime.rs
@@ -1,6 +1,5 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

-use deno_core::error::AnyError;
 use deno_core::op2;
 use deno_core::ModuleSpecifier;
 use deno_core::OpState;
@@ -16,10 +15,9 @@ deno_core::extension!(

 #[op2]
 #[string]
-fn op_main_module(state: &mut OpState) -> Result<String, AnyError> {
+fn op_main_module(state: &mut OpState) -> String {
   let main_url = state.borrow::<ModuleSpecifier>();
-  let main_path = main_url.to_string();
-  Ok(main_path)
+  main_url.to_string()
 }

 /// This is an op instead of being done at initialization time because
diff --git a/runtime/ops/signal.rs b/runtime/ops/signal.rs
index 3bf43d6e15..e1e4ab68bc 100644
--- a/runtime/ops/signal.rs
+++ b/runtime/ops/signal.rs
@@ -1,6 +1,4 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
 use deno_core::op2;
 use deno_core::AsyncRefCell;
 use deno_core::CancelFuture;
@@ -46,6 +44,42 @@ deno_core::extension!(
   }
 );

+#[derive(Debug, thiserror::Error)]
+pub enum SignalError {
+  #[cfg(any(
+    target_os = "android",
+    target_os = "linux",
+    target_os = "openbsd",
+    target_os = "openbsd",
+    target_os = "macos",
+    target_os = "solaris",
+    target_os = "illumos"
+  ))]
+  #[error("Invalid signal: {0}")]
+  InvalidSignalStr(String),
+  #[cfg(any(
+    target_os = "android",
+    target_os = "linux",
+    target_os = "openbsd",
+    target_os = "openbsd",
+    target_os = "macos",
+    target_os = "solaris",
+    target_os = "illumos"
+  ))]
+  #[error("Invalid signal: {0}")]
+  InvalidSignalInt(libc::c_int),
+  #[cfg(target_os = "windows")]
+  #[error("Windows only supports ctrl-c (SIGINT) and ctrl-break (SIGBREAK), but got {0}")]
+  InvalidSignalStr(String),
+  #[cfg(target_os = "windows")]
+  #[error("Windows only supports ctrl-c (SIGINT) and ctrl-break (SIGBREAK), but got {0}")]
+  InvalidSignalInt(libc::c_int),
+  #[error("Binding to signal '{0}' is not allowed")]
+  SignalNotAllowed(String),
+  #[error("{0}")]
+  Io(#[from] std::io::Error),
+}
+
 #[cfg(unix)]
 #[derive(Default)]
 struct SignalState {
@@ -153,18 +187,18 @@ macro_rules! first_literal {
   };
 }
 macro_rules! signal_dict {
-  ($error_msg:expr, $(($number:literal, $($name:literal)|+)),*) => {
-    pub fn signal_str_to_int(s: &str) -> Result<libc::c_int, AnyError> {
+  ($(($number:literal, $($name:literal)|+)),*) => {
+    pub fn signal_str_to_int(s: &str) -> Result<libc::c_int, SignalError> {
       match s {
         $($($name)|* => Ok($number),)*
-        _ => Err(type_error($error_msg(s))),
+        _ => Err(SignalError::InvalidSignalStr(s.to_string())),
       }
     }

-    pub fn signal_int_to_str(s: libc::c_int) -> Result<&'static str, AnyError> {
+    pub fn signal_int_to_str(s: libc::c_int) -> Result<&'static str, SignalError> {
       match s {
         $($number => Ok(first_literal!($($name),+)),)*
-        _ => Err(type_error($error_msg(s))),
+        _ => Err(SignalError::InvalidSignalInt(s)),
       }
     }
   }
@@ -172,7 +206,6 @@ macro_rules! signal_dict {

 #[cfg(target_os = "freebsd")]
 signal_dict!(
-  |s| { format!("Invalid signal : {}", s) },
   (1, "SIGHUP"),
   (2, "SIGINT"),
   (3, "SIGQUIT"),
@@ -210,7 +243,6 @@ signal_dict!(

 #[cfg(target_os = "openbsd")]
 signal_dict!(
-  |s| { format!("Invalid signal : {}", s) },
   (1, "SIGHUP"),
   (2, "SIGINT"),
   (3, "SIGQUIT"),
@@ -246,7 +278,6 @@ signal_dict!(

 #[cfg(any(target_os = "android", target_os = "linux"))]
 signal_dict!(
-  |s| { format!("Invalid signal : {s}") },
   (1, "SIGHUP"),
   (2, "SIGINT"),
   (3, "SIGQUIT"),
@@ -282,7 +313,6 @@ signal_dict!(

 #[cfg(target_os = "macos")]
 signal_dict!(
-  |s| { format!("Invalid signal : {s}") },
   (1, "SIGHUP"),
   (2, "SIGINT"),
   (3, "SIGQUIT"),
@@ -318,7 +348,6 @@ signal_dict!(

 #[cfg(any(target_os = "solaris", target_os = "illumos"))]
 signal_dict!(
-  |s| { format!("Invalid signal : {s}") },
   (1, "SIGHUP"),
   (2, "SIGINT"),
   (3, "SIGQUIT"),
@@ -362,11 +391,7 @@ signal_dict!(
 );

 #[cfg(target_os = "windows")]
-signal_dict!(
-  |_| { "Windows only supports ctrl-c (SIGINT) and ctrl-break (SIGBREAK)." },
-  (2, "SIGINT"),
-  (21, "SIGBREAK")
-);
+signal_dict!((2, "SIGINT"), (21, "SIGBREAK"));

 #[cfg(unix)]
 #[op2(fast)]
@@ -374,12 +399,10 @@ signal_dict!(
 fn op_signal_bind(
   state: &mut OpState,
   #[string] sig: &str,
-) -> Result<ResourceId, AnyError> {
+) -> Result<ResourceId, SignalError> {
   let signo = signal_str_to_int(sig)?;
   if signal_hook_registry::FORBIDDEN.contains(&signo) {
-    return Err(type_error(format!(
-      "Binding to signal '{sig}' is not allowed",
-    )));
+    return Err(SignalError::SignalNotAllowed(sig.to_string()));
   }

   let signal = AsyncRefCell::new(signal(SignalKind::from_raw(signo))?);
@@ -413,7 +436,7 @@ fn op_signal_bind(
 fn op_signal_bind(
   state: &mut OpState,
   #[string] sig: &str,
-) -> Result<ResourceId, AnyError> {
+) -> Result<ResourceId, SignalError> {
   let signo = signal_str_to_int(sig)?;
   let resource = SignalStreamResource {
     signal: AsyncRefCell::new(match signo {
@@ -437,7 +460,7 @@ fn op_signal_bind(
 async fn op_signal_poll(
   state: Rc<RefCell<OpState>>,
   #[smi] rid: ResourceId,
-) -> Result<bool, AnyError> {
+) -> Result<bool, SignalError> {
   let resource = state
     .borrow_mut()
     .resource_table
@@ -456,7 +479,7 @@ async fn op_signal_poll(
 pub fn op_signal_unbind(
   state: &mut OpState,
   #[smi] rid: ResourceId,
-) -> Result<(), AnyError> {
+) -> Result<(), deno_core::error::AnyError> {
   let resource = state.resource_table.take::<SignalStreamResource>(rid)?;

   #[cfg(unix)]
diff --git a/runtime/ops/tty.rs b/runtime/ops/tty.rs
index 77e1330b51..7849185faa 100644
--- a/runtime/ops/tty.rs
+++ b/runtime/ops/tty.rs
@@ -2,7 +2,6 @@

 use std::io::Error;

-use deno_core::error::AnyError;
 use deno_core::op2;
 use deno_core::OpState;
 use rustyline::config::Configurer;
@@ -64,6 +63,19 @@ deno_core::extension!(
   },
 );

+#[derive(Debug, thiserror::Error)]
+pub enum TtyError {
+  #[error(transparent)]
+  Resource(deno_core::error::AnyError),
+  #[error("{0}")]
+  Io(#[from] std::io::Error),
+  #[cfg(unix)]
+  #[error(transparent)]
+  Nix(nix::Error),
 // ref:
 #[cfg(windows)]
 const COOKED_MODE: DWORD =
@@ -90,8 +102,11 @@ fn op_set_raw(
   rid: u32,
   is_raw: bool,
   cbreak: bool,
-) -> Result<(), AnyError> {
-  let handle_or_fd = state.resource_table.get_fd(rid)?;
+) -> Result<(), TtyError> {
+  let handle_or_fd = state
+    .resource_table
+    .get_fd(rid)
+    .map_err(TtyError::Resource)?;
 
   // From https://github.com/kkawakam/rustyline/blob/master/src/tty/windows.rs
   // and https://github.com/kkawakam/rustyline/blob/master/src/tty/unix.rs
@@ -107,7 +122,7 @@ fn op_set_raw(
     let handle = handle_or_fd;
 
     if cbreak {
-      return Err(deno_core::error::not_supported());
+      return Err(TtyError::Other(deno_core::error::not_supported()));
     }
 
     let mut original_mode: DWORD = 0;
@@ -115,7 +130,7 @@ fn op_set_raw(
     if unsafe { consoleapi::GetConsoleMode(handle, &mut original_mode) }
       == FALSE
     {
-      return Err(Error::last_os_error().into());
+      return Err(TtyError::Io(Error::last_os_error()));
     }
 
     let new_mode = if is_raw {
@@ -185,7 +200,7 @@ fn op_set_raw(
         winapi::um::wincon::WriteConsoleInputW(handle, &record, 1, &mut 0)
       } == FALSE
       {
-        return Err(Error::last_os_error().into());
+        return Err(TtyError::Io(Error::last_os_error()));
       }
 
       /* Wait for read thread to acknowledge the cancellation to ensure that nothing
@@ -199,7 +214,7 @@ fn op_set_raw(
 
     // SAFETY: winapi call
     if unsafe { consoleapi::SetConsoleMode(handle, new_mode) } == FALSE {
-      return Err(Error::last_os_error().into());
+      return Err(TtyError::Io(Error::last_os_error()));
    }
 
     Ok(())
@@ -252,7 +267,8 @@ fn op_set_raw(
       Some(mode) => mode,
       None => {
         // Save original mode.
-        let original_mode = termios::tcgetattr(raw_fd)?;
+        let original_mode =
+          termios::tcgetattr(raw_fd).map_err(TtyError::Nix)?;
         tty_mode_store.set(rid, original_mode.clone());
         original_mode
       }
@@ -274,11 +290,13 @@ fn op_set_raw(
     }
     raw.control_chars[termios::SpecialCharacterIndices::VMIN as usize] = 1;
     raw.control_chars[termios::SpecialCharacterIndices::VTIME as usize] = 0;
-    termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &raw)?;
+    termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &raw)
+      .map_err(TtyError::Nix)?;
   } else {
     // Try restore saved mode.
     if let Some(mode) = tty_mode_store.take(rid) {
-      termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &mode)?;
+      termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &mode)
+        .map_err(TtyError::Nix)?;
     }
   }
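// [editor's note] Sketch, not part of the patch: with a concrete error enum,
// foreign error types that previously rode along on `?` into `AnyError` now
// need an explicit variant. The `map_err(TtyError::Nix)` calls above follow
// this shape (hypothetical, self-contained version assuming the `nix` crate
// with its termios feature):

#[derive(Debug, thiserror::Error)]
enum DemoTtyError {
  #[error(transparent)]
  Nix(nix::Error),
}

fn demo_tcgetattr<F: std::os::fd::AsFd>(
  fd: F,
) -> Result<nix::sys::termios::Termios, DemoTtyError> {
  // No `From<nix::Error>` is derived on purpose: the explicit `map_err`
  // keeps the conversion visible at the call site.
  nix::sys::termios::tcgetattr(fd).map_err(DemoTtyError::Nix)
}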
@@ -290,13 +308,16 @@ fn op_console_size(
   state: &mut OpState,
   #[buffer] result: &mut [u32],
-) -> Result<(), AnyError> {
+) -> Result<(), TtyError> {
   fn check_console_size(
     state: &mut OpState,
     result: &mut [u32],
     rid: u32,
-  ) -> Result<(), AnyError> {
-    let fd = state.resource_table.get_fd(rid)?;
+  ) -> Result<(), TtyError> {
+    let fd = state
+      .resource_table
+      .get_fd(rid)
+      .map_err(TtyError::Resource)?;
     let size = console_size_from_fd(fd)?;
     result[0] = size.cols;
     result[1] = size.rows;
@@ -419,7 +440,7 @@ mod tests {
 pub fn op_read_line_prompt(
   #[string] prompt_text: &str,
   #[string] default_value: &str,
-) -> Result<Option<String>, AnyError> {
+) -> Result<Option<String>, ReadlineError> {
   let mut editor = Editor::<(), rustyline::history::DefaultHistory>::new()
     .expect("Failed to create editor.");
 
@@ -439,6 +460,6 @@ pub fn op_read_line_prompt(
       Ok(None)
     }
     Err(ReadlineError::Eof) => Ok(None),
-    Err(err) => Err(err.into()),
+    Err(err) => Err(err),
   }
 }
diff --git a/runtime/ops/utils.rs b/runtime/ops/utils.rs
deleted file mode 100644
index d5ce61c1fa..0000000000
--- a/runtime/ops/utils.rs
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-
-use deno_core::error::custom_error;
-use deno_core::error::AnyError;
-
-/// A utility function to map OsStrings to Strings
-pub fn into_string(s: std::ffi::OsString) -> Result<String, AnyError> {
-  s.into_string().map_err(|s| {
-    let message = format!("File name or path {s:?} is not valid UTF-8");
-    custom_error("InvalidData", message)
-  })
-}
diff --git a/runtime/ops/web_worker.rs b/runtime/ops/web_worker.rs
index e28bf2192f..d0c3eea668 100644
--- a/runtime/ops/web_worker.rs
+++ b/runtime/ops/web_worker.rs
@@ -4,15 +4,16 @@ mod sync_fetch;
 
 use crate::web_worker::WebWorkerInternalHandle;
 use crate::web_worker::WebWorkerType;
-use deno_core::error::AnyError;
 use deno_core::op2;
 use deno_core::CancelFuture;
 use deno_core::OpState;
 use deno_web::JsMessageData;
+use deno_web::MessagePortError;
 use std::cell::RefCell;
 use std::rc::Rc;
 
 use self::sync_fetch::op_worker_sync_fetch;
+pub use sync_fetch::SyncFetchError;
 
 deno_core::extension!(
   deno_web_worker,
@@ -30,17 +31,16 @@ deno_core::extension!(
 fn op_worker_post_message(
   state: &mut OpState,
   #[serde] data: JsMessageData,
-) -> Result<(), AnyError> {
+) -> Result<(), MessagePortError> {
   let handle = state.borrow::<WebWorkerInternalHandle>().clone();
-  handle.port.send(state, data)?;
-  Ok(())
+  handle.port.send(state, data)
 }
 
 #[op2(async(lazy), fast)]
 #[serde]
 async fn op_worker_recv_message(
   state: Rc<RefCell<OpState>>,
-) -> Result<Option<JsMessageData>, AnyError> {
+) -> Result<Option<JsMessageData>, MessagePortError> {
   let handle = {
     let state = state.borrow();
     state.borrow::<WebWorkerInternalHandle>().clone()
@@ -50,7 +50,6 @@ async fn op_worker_recv_message(
     .recv(state.clone())
     .or_cancel(handle.cancel)
     .await?
-    .map_err(|e| e.into())
 }
 
 #[op2(fast)]
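// [editor's note] Sketch, not part of the patch: dropping the trailing
// `.map_err(|e| e.into())` in `op_worker_recv_message` works because the op
// now returns `MessagePortError` directly, and `.or_cancel(...).await?` only
// needs `MessagePortError: From<deno_core::Canceled>` (an assumption here; a
// `#[from]` variant on the enum would provide it). Reduced shape:

async fn demo_recv<T>(
  fut: impl std::future::Future<Output = Result<Option<T>, MessagePortError>>,
  cancel: std::rc::Rc<deno_core::CancelHandle>,
) -> Result<Option<T>, MessagePortError> {
  use deno_core::CancelFuture;
  // `or_cancel` resolves to Err(Canceled) if the handle fires first; the
  // outer `?` converts that into the op's error type.
  fut.or_cancel(cancel).await?
}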
diff --git a/runtime/ops/web_worker/sync_fetch.rs b/runtime/ops/web_worker/sync_fetch.rs
index bd55a5fc8c..d1f133d3d2 100644
--- a/runtime/ops/web_worker/sync_fetch.rs
+++ b/runtime/ops/web_worker/sync_fetch.rs
@@ -4,14 +4,12 @@ use std::sync::Arc;
 
 use crate::web_worker::WebWorkerInternalHandle;
 use crate::web_worker::WebWorkerType;
-use deno_core::error::custom_error;
-use deno_core::error::type_error;
-use deno_core::error::AnyError;
 use deno_core::futures::StreamExt;
 use deno_core::op2;
 use deno_core::url::Url;
 use deno_core::OpState;
 use deno_fetch::data_url::DataUrl;
+use deno_fetch::FetchError;
 use deno_web::BlobStore;
 use http_body_util::BodyExt;
 use hyper::body::Bytes;
@@ -27,6 +25,32 @@ fn mime_type_essence(mime_type: &str) -> String {
   essence.trim().to_ascii_lowercase()
 }
 
+#[derive(Debug, thiserror::Error)]
+pub enum SyncFetchError {
+  #[error("Blob URLs are not supported in this context.")]
+  BlobUrlsNotSupportedInContext,
+  #[error("{0}")]
+  Io(#[from] std::io::Error),
+  #[error("Invalid script URL")]
+  InvalidScriptUrl,
+  #[error("http status error: {0}")]
+  InvalidStatusCode(http::StatusCode),
+  #[error("Classic scripts with scheme {0}: are not supported in workers")]
+  ClassicScriptSchemeUnsupportedInWorkers(String),
+  #[error("{0}")]
+  InvalidUri(#[from] http::uri::InvalidUri),
+  #[error("Invalid MIME type {0:?}.")]
+  InvalidMimeType(String),
+  #[error("Missing MIME type.")]
+  MissingMimeType,
+  #[error(transparent)]
+  Fetch(#[from] FetchError),
+  #[error(transparent)]
+  Join(#[from] tokio::task::JoinError),
+  #[error(transparent)]
+  Other(deno_core::error::AnyError),
+}
+
 #[derive(Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct SyncFetchScript {
@@ -40,21 +64,22 @@ pub fn op_worker_sync_fetch(
   state: &mut OpState,
   #[serde] scripts: Vec<String>,
   loose_mime_checks: bool,
-) -> Result<Vec<SyncFetchScript>, AnyError> {
+) -> Result<Vec<SyncFetchScript>, SyncFetchError> {
   let handle = state.borrow::<WebWorkerInternalHandle>().clone();
   assert_eq!(handle.worker_type, WebWorkerType::Classic);
 
   // it's not safe to share a client across tokio runtimes, so create a fresh one
   // https://github.com/seanmonstar/reqwest/issues/1148#issuecomment-910868788
   let options = state.borrow::<deno_fetch::Options>().clone();
-  let client = deno_fetch::create_client_from_options(&options)?;
+  let client = deno_fetch::create_client_from_options(&options)
+    .map_err(FetchError::ClientCreate)?;
 
   // TODO(andreubotella) It's not good to throw an exception related to blob
   // URLs when none of the script URLs use the blob scheme.
   // Also, in which contexts are blob URLs not supported?
   let blob_store = state
     .try_borrow::<Arc<BlobStore>>()
-    .ok_or_else(|| type_error("Blob URLs are not supported in this context."))?
+    .ok_or(SyncFetchError::BlobUrlsNotSupportedInContext)?
.clone(); // TODO(andreubotella): make the below thread into a resource that can be @@ -74,7 +99,7 @@ pub fn op_worker_sync_fetch( let blob_store = blob_store.clone(); deno_core::unsync::spawn(async move { let script_url = Url::parse(&script) - .map_err(|_| type_error("Invalid script URL"))?; + .map_err(|_| SyncFetchError::InvalidScriptUrl)?; let mut loose_mime_checks = loose_mime_checks; let (body, mime_type, res_url) = match script_url.scheme() { @@ -86,15 +111,13 @@ pub fn op_worker_sync_fetch( ); *req.uri_mut() = script_url.as_str().parse()?; - let resp = client.send(req).await?; + let resp = + client.send(req).await.map_err(FetchError::ClientSend)?; if resp.status().is_client_error() || resp.status().is_server_error() { - return Err(type_error(format!( - "http status error: {}", - resp.status() - ))); + return Err(SyncFetchError::InvalidStatusCode(resp.status())); } // TODO(andreubotella) Properly run fetch's "extract a MIME type". @@ -107,30 +130,32 @@ pub fn op_worker_sync_fetch( // Always check the MIME type with HTTP(S). loose_mime_checks = false; - let body = resp.collect().await?.to_bytes(); + let body = resp + .collect() + .await + .map_err(SyncFetchError::Other)? + .to_bytes(); (body, mime_type, script) } "data" => { - let data_url = DataUrl::process(&script) - .map_err(|e| type_error(format!("{e:?}")))?; + let data_url = + DataUrl::process(&script).map_err(FetchError::DataUrl)?; let mime_type = { let mime = data_url.mime_type(); format!("{}/{}", mime.type_, mime.subtype) }; - let (body, _) = data_url - .decode_to_vec() - .map_err(|e| type_error(format!("{e:?}")))?; + let (body, _) = + data_url.decode_to_vec().map_err(FetchError::Base64)?; (Bytes::from(body), Some(mime_type), script) } "blob" => { - let blob = - blob_store.get_object_url(script_url).ok_or_else(|| { - type_error("Blob for the given URL not found.") - })?; + let blob = blob_store + .get_object_url(script_url) + .ok_or(FetchError::BlobNotFound)?; let mime_type = mime_type_essence(&blob.media_type); @@ -139,10 +164,11 @@ pub fn op_worker_sync_fetch( (Bytes::from(body), Some(mime_type), script) } _ => { - return Err(type_error(format!( - "Classic scripts with scheme {}: are not supported in workers.", - script_url.scheme() - ))) + return Err( + SyncFetchError::ClassicScriptSchemeUnsupportedInWorkers( + script_url.scheme().to_string(), + ), + ) } }; @@ -151,17 +177,11 @@ pub fn op_worker_sync_fetch( match mime_type.as_deref() { Some("application/javascript" | "text/javascript") => {} Some(mime_type) => { - return Err(custom_error( - "DOMExceptionNetworkError", - format!("Invalid MIME type {mime_type:?}."), - )) - } - None => { - return Err(custom_error( - "DOMExceptionNetworkError", - "Missing MIME type.", + return Err(SyncFetchError::InvalidMimeType( + mime_type.to_string(), )) } + None => return Err(SyncFetchError::MissingMimeType), } } diff --git a/runtime/ops/worker_host.rs b/runtime/ops/worker_host.rs index d85541d51d..521284a6a0 100644 --- a/runtime/ops/worker_host.rs +++ b/runtime/ops/worker_host.rs @@ -10,8 +10,6 @@ use crate::web_worker::WorkerControlEvent; use crate::web_worker::WorkerId; use crate::web_worker::WorkerMetadata; use crate::worker::FormatJsErrorFn; -use deno_core::error::custom_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::serde::Deserialize; use deno_core::CancelFuture; @@ -22,6 +20,7 @@ use deno_permissions::ChildPermissionsArg; use deno_permissions::PermissionsContainer; use deno_web::deserialize_js_transferables; use deno_web::JsMessageData; +use 
deno_web::MessagePortError;
 use log::debug;
 use std::cell::RefCell;
 use std::collections::HashMap;
@@ -119,6 +118,20 @@ pub struct CreateWorkerArgs {
   close_on_idle: bool,
 }
 
+#[derive(Debug, thiserror::Error)]
+pub enum CreateWorkerError {
+  #[error("Classic workers are not supported.")]
+  ClassicWorkers,
+  #[error(transparent)]
+  Permission(deno_permissions::ChildPermissionError),
+  #[error(transparent)]
+  ModuleResolution(#[from] deno_core::ModuleResolutionError),
+  #[error(transparent)]
+  MessagePort(#[from] MessagePortError),
+  #[error("{0}")]
+  Io(#[from] std::io::Error),
+}
+
 /// Create worker as the host
 #[op2]
 #[serde]
@@ -126,7 +139,7 @@ fn op_create_worker(
   state: &mut OpState,
   #[serde] args: CreateWorkerArgs,
   #[serde] maybe_worker_metadata: Option<WorkerMetadata>,
-) -> Result<WorkerId, AnyError> {
+) -> Result<WorkerId, CreateWorkerError> {
   let specifier = args.specifier.clone();
   let maybe_source_code = if args.has_source_code {
     Some(args.source_code.clone())
@@ -137,10 +150,7 @@ fn op_create_worker(
   let worker_type = args.worker_type;
   if let WebWorkerType::Classic = worker_type {
     if let TestingFeaturesEnabled(false) = state.borrow() {
-      return Err(custom_error(
-        "DOMExceptionNotSupportedError",
-        "Classic workers are not supported.",
-      ));
+      return Err(CreateWorkerError::ClassicWorkers);
     }
   }
 
@@ -154,7 +164,9 @@ fn op_create_worker(
   let parent_permissions = state.borrow_mut::<PermissionsContainer>();
   let worker_permissions =
     if let Some(child_permissions_arg) = args.permissions {
-      parent_permissions.create_child_permissions(child_permissions_arg)?
+      parent_permissions
+        .create_child_permissions(child_permissions_arg)
+        .map_err(CreateWorkerError::Permission)?
     } else {
       parent_permissions.clone()
     };
@@ -166,9 +178,8 @@ fn op_create_worker(
   let module_specifier = deno_core::resolve_url(&specifier)?;
   let worker_name = args_name.unwrap_or_default();
 
-  let (handle_sender, handle_receiver) = std::sync::mpsc::sync_channel::<
-    Result<SendableWebWorkerHandle, AnyError>,
-  >(1);
+  let (handle_sender, handle_receiver) =
+    std::sync::mpsc::sync_channel::<SendableWebWorkerHandle>(1);
 
   // Setup new thread
   let thread_builder = std::thread::Builder::new().name(format!("{worker_id}"));
@@ -202,7 +213,7 @@ fn op_create_worker(
     });
 
     // Send thread safe handle from newly created worker to host thread
-    handle_sender.send(Ok(external_handle)).unwrap();
+    handle_sender.send(external_handle).unwrap();
     drop(handle_sender);
 
     // At this point the only method of communication with host
@@ -218,7 +229,7 @@ fn op_create_worker(
   })?;
 
   // Receive WebWorkerHandle from newly created worker
-  let worker_handle = handle_receiver.recv().unwrap()?;
+  let worker_handle = handle_receiver.recv().unwrap();
 
   let worker_thread = WorkerThread {
     worker_handle: worker_handle.into(),
@@ -291,7 +302,7 @@ fn close_channel(
 async fn op_host_recv_ctrl(
   state: Rc<RefCell<OpState>>,
   #[serde] id: WorkerId,
-) -> Result<WorkerControlEvent, AnyError> {
+) -> WorkerControlEvent {
   let (worker_handle, cancel_handle) = {
     let state = state.borrow();
     let workers_table = state.borrow::<WorkersTable>();
     let maybe_handle = workers_table.get(&id);
     if let Some(handle) = maybe_handle {
       (handle.worker_handle.clone(), handle.cancel_handle.clone())
     } else {
       // If handle was not found it means worker has already shutdown
-      return Ok(WorkerControlEvent::Close);
+      return WorkerControlEvent::Close;
     }
   };
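// [editor's note] Aside, not part of the patch: because the handle channel no
// longer carries `Result` (see `sync_channel::<SendableWebWorkerHandle>`
// above), the nested `Ok(Ok(..))` matching below flattens to `Ok(..)`, and
// `op_host_recv_ctrl` becomes infallible. Reduced, hypothetical shape:

fn demo_flatten(maybe_event: Result<Option<&'static str>, ()>) -> &'static str {
  match maybe_event {
    Ok(Some(event)) => event, // forward the worker's control event
    Ok(None) => "close",      // channel drained: report Close, not an error
    Err(_) => "close",        // cancelled/terminated: also Close
  }
}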
@@ -309,22 +320,21 @@ async fn op_host_recv_ctrl(
     .or_cancel(cancel_handle)
     .await;
   match maybe_event {
-    Ok(Ok(Some(event))) => {
+    Ok(Some(event)) => {
       // Terminal error means that worker should be removed from worker table.
       if let WorkerControlEvent::TerminalError(_) = &event {
         close_channel(state, id, WorkerChannel::Ctrl);
       }
-      Ok(event)
+      event
     }
-    Ok(Ok(None)) => {
+    Ok(None) => {
       // If there was no event from worker it means it has already been closed.
       close_channel(state, id, WorkerChannel::Ctrl);
-      Ok(WorkerControlEvent::Close)
+      WorkerControlEvent::Close
    }
-    Ok(Err(err)) => Err(err),
     Err(_) => {
       // The worker was terminated.
-      Ok(WorkerControlEvent::Close)
+      WorkerControlEvent::Close
     }
   }
 }
@@ -334,7 +344,7 @@ async fn op_host_recv_ctrl(
 async fn op_host_recv_message(
   state: Rc<RefCell<OpState>>,
   #[serde] id: WorkerId,
-) -> Result<Option<JsMessageData>, AnyError> {
+) -> Result<Option<JsMessageData>, MessagePortError> {
   let (worker_handle, cancel_handle) = {
     let s = state.borrow();
     let workers_table = s.borrow::<WorkersTable>();
@@ -359,7 +369,7 @@ async fn op_host_recv_message(
       }
       Ok(ret)
     }
-    Ok(Err(err)) => Err(err.into()),
+    Ok(Err(err)) => Err(err),
     Err(_) => {
       // The worker was terminated.
       Ok(None)
@@ -373,7 +383,7 @@ fn op_host_post_message(
   state: &mut OpState,
   #[serde] id: WorkerId,
   #[serde] data: JsMessageData,
-) -> Result<(), AnyError> {
+) -> Result<(), MessagePortError> {
   if let Some(worker_thread) = state.borrow::<WorkersTable>().get(&id) {
     debug!("post message to worker {}", id);
     let worker_handle = worker_thread.worker_handle.clone();
diff --git a/runtime/permissions.rs b/runtime/permissions.rs
index fa62227e0d..e8460e03f8 100644
--- a/runtime/permissions.rs
+++ b/runtime/permissions.rs
@@ -3,9 +3,6 @@
 use std::path::Path;
 use std::path::PathBuf;
 
-use deno_core::anyhow::bail;
-use deno_core::anyhow::Context;
-use deno_core::error::AnyError;
 use deno_path_util::normalize_path;
 use deno_permissions::AllowRunDescriptor;
 use deno_permissions::AllowRunDescriptorParseResult;
@@ -15,9 +12,12 @@ use deno_permissions::FfiDescriptor;
 use deno_permissions::ImportDescriptor;
 use deno_permissions::NetDescriptor;
 use deno_permissions::PathQueryDescriptor;
+use deno_permissions::PathResolveError;
 use deno_permissions::ReadDescriptor;
+use deno_permissions::RunDescriptorParseError;
 use deno_permissions::RunQueryDescriptor;
 use deno_permissions::SysDescriptor;
+use deno_permissions::SysDescriptorParseError;
 use deno_permissions::WriteDescriptor;
 
 #[derive(Debug)]
@@ -30,9 +30,9 @@ impl RuntimePermissionDescriptorParser {
     Self { fs }
   }
 
-  fn resolve_from_cwd(&self, path: &str) -> Result<PathBuf, AnyError> {
+  fn resolve_from_cwd(&self, path: &str) -> Result<PathBuf, PathResolveError> {
     if path.is_empty() {
-      bail!("Empty path is not allowed");
+      return Err(PathResolveError::EmptyPath);
     }
     let path = Path::new(path);
     if path.is_absolute() {
@@ -43,12 +43,11 @@ impl RuntimePermissionDescriptorParser {
     }
   }
 
-  fn resolve_cwd(&self) -> Result<PathBuf, AnyError> {
+  fn resolve_cwd(&self) -> Result<PathBuf, PathResolveError> {
     self
       .fs
       .cwd()
-      .map_err(|e| e.into_io_error())
-      .context("failed resolving cwd")
+      .map_err(|e| PathResolveError::CwdResolve(e.into_io_error()))
   }
 }
 
@@ -58,37 +57,37 @@ impl deno_permissions::PermissionDescriptorParser
   fn parse_read_descriptor(
     &self,
     text: &str,
-  ) -> Result<ReadDescriptor, AnyError> {
+  ) -> Result<ReadDescriptor, PathResolveError> {
     Ok(ReadDescriptor(self.resolve_from_cwd(text)?))
   }
 
   fn parse_write_descriptor(
     &self,
     text: &str,
-  ) -> Result<WriteDescriptor, AnyError> {
+  ) -> Result<WriteDescriptor, PathResolveError> {
     Ok(WriteDescriptor(self.resolve_from_cwd(text)?))
   }
 
   fn parse_net_descriptor(
     &self,
     text: &str,
-  ) -> Result<NetDescriptor, AnyError> {
+  ) -> Result<NetDescriptor, deno_permissions::NetDescriptorParseError> {
     NetDescriptor::parse(text)
   }
 
   fn parse_import_descriptor(
     &self,
     text: &str,
-  ) -> Result<ImportDescriptor, AnyError> {
+  ) -> Result<ImportDescriptor, deno_permissions::NetDescriptorParseError> {
     ImportDescriptor::parse(text)
   }
 
   fn parse_env_descriptor(
     &self,
     text: &str,
-  ) -> Result<EnvDescriptor, AnyError> {
+  ) -> Result<EnvDescriptor, deno_permissions::EnvDescriptorParseError> {
     if text.is_empty() {
-      Err(AnyError::msg("Empty env not allowed"))
+      Err(deno_permissions::EnvDescriptorParseError)
     } else {
       Ok(EnvDescriptor::new(text))
     }
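// [editor's note] Aside, not part of the patch: when there is exactly one
// failure mode, a field-less unit struct is enough, as with
// `deno_permissions::EnvDescriptorParseError` used above. Self-contained
// version of the pattern (names hypothetical):

#[derive(Debug, thiserror::Error)]
#[error("Empty env not allowed")]
pub struct DemoEnvParseError;

fn demo_parse_env(text: &str) -> Result<String, DemoEnvParseError> {
  if text.is_empty() {
    Err(DemoEnvParseError)
  } else {
    Ok(text.to_string())
  }
}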
@@ -97,9 +96,9 @@ impl deno_permissions::PermissionDescriptorParser
   fn parse_sys_descriptor(
     &self,
     text: &str,
-  ) -> Result<SysDescriptor, AnyError> {
+  ) -> Result<SysDescriptor, SysDescriptorParseError> {
     if text.is_empty() {
-      Err(AnyError::msg("Empty sys not allowed"))
+      Err(SysDescriptorParseError::Empty)
     } else {
       Ok(SysDescriptor::parse(text.to_string())?)
     }
@@ -108,21 +107,21 @@ impl deno_permissions::PermissionDescriptorParser
   fn parse_allow_run_descriptor(
     &self,
     text: &str,
-  ) -> Result<AllowRunDescriptorParseResult, AnyError> {
+  ) -> Result<AllowRunDescriptorParseResult, RunDescriptorParseError> {
     Ok(AllowRunDescriptor::parse(text, &self.resolve_cwd()?)?)
   }
 
   fn parse_deny_run_descriptor(
     &self,
     text: &str,
-  ) -> Result<DenyRunDescriptor, AnyError> {
+  ) -> Result<DenyRunDescriptor, PathResolveError> {
     Ok(DenyRunDescriptor::parse(text, &self.resolve_cwd()?))
   }
 
   fn parse_ffi_descriptor(
     &self,
     text: &str,
-  ) -> Result<FfiDescriptor, AnyError> {
+  ) -> Result<FfiDescriptor, PathResolveError> {
     Ok(FfiDescriptor(self.resolve_from_cwd(text)?))
   }
 
@@ -131,7 +130,7 @@ impl deno_permissions::PermissionDescriptorParser
   fn parse_path_query(
     &self,
     path: &str,
-  ) -> Result<PathQueryDescriptor, AnyError> {
+  ) -> Result<PathQueryDescriptor, PathResolveError> {
     Ok(PathQueryDescriptor {
       resolved: self.resolve_from_cwd(path)?,
       requested: path.to_string(),
@@ -141,11 +140,12 @@ impl deno_permissions::PermissionDescriptorParser
   fn parse_run_query(
     &self,
     requested: &str,
-  ) -> Result<RunQueryDescriptor, AnyError> {
+  ) -> Result<RunQueryDescriptor, RunDescriptorParseError> {
     if requested.is_empty() {
-      bail!("Empty run query is not allowed");
+      return Err(RunDescriptorParseError::EmptyRunQuery);
     }
     RunQueryDescriptor::parse(requested)
+      .map_err(RunDescriptorParseError::PathResolve)
   }
 }
diff --git a/runtime/permissions/Cargo.toml b/runtime/permissions/Cargo.toml
index d7f2364a56..e088eb8ce5 100644
--- a/runtime/permissions/Cargo.toml
+++ b/runtime/permissions/Cargo.toml
@@ -2,7 +2,7 @@
 
 [package]
 name = "deno_permissions"
-version = "0.33.0"
+version = "0.37.0"
 authors.workspace = true
 edition.workspace = true
 license.workspace = true
@@ -23,6 +23,7 @@ log.workspace = true
 once_cell.workspace = true
 percent-encoding = { version = "2.3.1", features = [] }
 serde.workspace = true
+thiserror.workspace = true
 which.workspace = true
 
 [target.'cfg(windows)'.dependencies]
diff --git a/runtime/permissions/lib.rs b/runtime/permissions/lib.rs
index 1e1321bb2f..6480f4bf58 100644
--- a/runtime/permissions/lib.rs
+++ b/runtime/permissions/lib.rs
@@ -1,11 +1,5 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
-use deno_core::anyhow::bail;
-use deno_core::anyhow::Context;
-use deno_core::error::custom_error;
-use deno_core::error::type_error;
-use deno_core::error::uri_error;
-use deno_core::error::AnyError;
 use deno_core::parking_lot::Mutex;
 use deno_core::serde::de;
 use deno_core::serde::Deserialize;
@@ -43,6 +37,21 @@ pub use prompter::PermissionPrompter;
 pub use prompter::PromptCallback;
 pub use prompter::PromptResponse;
 
+#[derive(Debug, thiserror::Error)]
+#[error("Requires {access}, {}", format_permission_error(.name))]
+pub struct PermissionDeniedError {
+  access: String,
+  name: &'static str,
+}
+
+fn format_permission_error(name: &'static str) -> String {
+  if is_standalone() {
+    format!("specify the required permissions during compilation using `deno compile --allow-{name}`")
+  } else {
+    format!("run again with the --allow-{name} flag")
+  }
+}
+
 /// Fast exit from permission check routines if this permission
 /// is in the "fully-granted" state.
 macro_rules!
skip_check_if_is_permission_fully_granted { @@ -104,7 +113,10 @@ impl From for AllowPartial { impl PermissionState { #[inline(always)] - fn log_perm_access(name: &str, info: impl FnOnce() -> Option) { + fn log_perm_access( + name: &'static str, + info: impl FnOnce() -> Option, + ) { // Eliminates log overhead (when logging is disabled), // log_enabled!(Debug) check in a hot path still has overhead // TODO(AaronO): generalize or upstream this optimization @@ -120,53 +132,47 @@ impl PermissionState { } } - fn fmt_access(name: &str, info: impl FnOnce() -> Option) -> String { + fn fmt_access( + name: &'static str, + info: impl FnOnce() -> Option, + ) -> String { format!( "{} access{}", name, - info() - .map(|info| { format!(" to {info}") }) - .unwrap_or_default(), + info().map(|info| format!(" to {info}")).unwrap_or_default(), ) } - fn error(name: &str, info: impl FnOnce() -> Option) -> AnyError { - let msg = if is_standalone() { - format!( - "Requires {}, specify the required permissions during compilation using `deno compile --allow-{}`", - Self::fmt_access(name, info), - name - ) - } else { - format!( - "Requires {}, run again with the --allow-{} flag", - Self::fmt_access(name, info), - name - ) - }; - custom_error("NotCapable", msg) + fn error( + name: &'static str, + info: impl FnOnce() -> Option, + ) -> PermissionDeniedError { + PermissionDeniedError { + access: Self::fmt_access(name, info), + name, + } } /// Check the permission state. bool is whether a prompt was issued. #[inline] fn check( self, - name: &str, + name: &'static str, api_name: Option<&str>, info: Option<&str>, prompt: bool, - ) -> (Result<(), AnyError>, bool, bool) { + ) -> (Result<(), PermissionDeniedError>, bool, bool) { self.check2(name, api_name, || info.map(|s| s.to_string()), prompt) } #[inline] fn check2( self, - name: &str, + name: &'static str, api_name: Option<&str>, info: impl Fn() -> Option, prompt: bool, - ) -> (Result<(), AnyError>, bool, bool) { + ) -> (Result<(), PermissionDeniedError>, bool, bool) { match self { PermissionState::Granted => { Self::log_perm_access(name, info); @@ -246,7 +252,7 @@ impl UnitPermission { self.state } - pub fn check(&mut self) -> Result<(), AnyError> { + pub fn check(&mut self) -> Result<(), PermissionDeniedError> { let (result, prompted, _is_allow_all) = self.state.check(self.name, None, None, self.prompt); if prompted { @@ -262,7 +268,7 @@ impl UnitPermission { fn create_child_permissions( &mut self, flag: ChildUnitPermissionArg, - ) -> Result { + ) -> Result { let mut perm = self.clone(); match flag { ChildUnitPermissionArg::Inherit => { @@ -270,7 +276,7 @@ impl UnitPermission { } ChildUnitPermissionArg::Granted => { if self.check().is_err() { - return Err(escalation_error()); + return Err(ChildPermissionError::Escalation); } perm.state = PermissionState::Granted; } @@ -327,7 +333,7 @@ pub trait QueryDescriptor: Debug { &self, perm: &mut UnaryPermission, api_name: Option<&str>, - ) -> Result<(), AnyError>; + ) -> Result<(), PermissionDeniedError>; fn matches_allow(&self, other: &Self::AllowDesc) -> bool; fn matches_deny(&self, other: &Self::DenyDesc) -> bool; @@ -402,7 +408,7 @@ impl UnaryPermission { pub fn check_all_api( &mut self, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(None, false, api_name) } @@ -412,7 +418,7 @@ impl UnaryPermission { desc: Option<&TQuery>, assert_non_partial: bool, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> 
Result<(), PermissionDeniedError> { let (result, prompted, is_allow_all) = self .query_desc(desc, AllowPartial::from(!assert_non_partial)) .check2( @@ -599,11 +605,14 @@ impl UnaryPermission { } } - fn create_child_permissions( + fn create_child_permissions( &mut self, flag: ChildUnaryPermissionArg, - parse: impl Fn(&str) -> Result, AnyError>, - ) -> Result, AnyError> { + parse: impl Fn(&str) -> Result, E>, + ) -> Result, ChildPermissionError> + where + ChildPermissionError: From, + { let mut perms = Self::default(); match flag { @@ -612,7 +621,7 @@ impl UnaryPermission { } ChildUnaryPermissionArg::Granted => { if self.check_all_api(None).is_err() { - return Err(escalation_error()); + return Err(ChildPermissionError::Escalation); } perms.granted_global = true; } @@ -621,13 +630,13 @@ impl UnaryPermission { perms.granted_list = granted_list .iter() .filter_map(|i| parse(i).transpose()) - .collect::>()?; + .collect::>()?; if !perms.granted_list.iter().all(|desc| { TQuery::from_allow(desc) .check_in_permission(self, None) .is_ok() }) { - return Err(escalation_error()); + return Err(ChildPermissionError::Escalation); } } } @@ -698,7 +707,7 @@ impl QueryDescriptor for ReadQueryDescriptor { &self, perm: &mut UnaryPermission, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(perm); perm.check_desc(Some(self), true, api_name) } @@ -761,7 +770,7 @@ impl QueryDescriptor for WriteQueryDescriptor { &self, perm: &mut UnaryPermission, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(perm); perm.check_desc(Some(self), true, api_name) } @@ -796,22 +805,37 @@ pub enum Host { Ip(IpAddr), } +#[derive(Debug, thiserror::Error)] +pub enum HostParseError { + #[error("invalid IPv6 address: '{0}'")] + InvalidIpv6(String), + #[error("invalid host: '{0}'")] + InvalidHost(String), + #[error("invalid empty host: '{0}'")] + InvalidEmptyHost(String), + #[error("invalid host '{host}': {error}")] + Fqdn { + #[source] + error: fqdn::Error, + host: String, + }, +} + impl Host { - // TODO(bartlomieju): rewrite to not use `AnyError` but a specific error implementations - fn parse(s: &str) -> Result { + fn parse(s: &str) -> Result { if s.starts_with('[') && s.ends_with(']') { let ip = s[1..s.len() - 1] .parse::() - .map_err(|_| uri_error(format!("invalid IPv6 address: '{s}'")))?; + .map_err(|_| HostParseError::InvalidIpv6(s.to_string()))?; return Ok(Host::Ip(IpAddr::V6(ip))); } let (without_trailing_dot, has_trailing_dot) = s.strip_suffix('.').map_or((s, false), |s| (s, true)); if let Ok(ip) = without_trailing_dot.parse::() { if has_trailing_dot { - return Err(uri_error(format!( - "invalid host: '{without_trailing_dot}'" - ))); + return Err(HostParseError::InvalidHost( + without_trailing_dot.to_string(), + )); } Ok(Host::Ip(ip)) } else { @@ -822,11 +846,13 @@ impl Host { }; let fqdn = { use std::str::FromStr; - FQDN::from_str(&lower) - .with_context(|| format!("invalid host: '{s}'"))? + FQDN::from_str(&lower).map_err(|e| HostParseError::Fqdn { + error: e, + host: s.to_string(), + })? 
}; if fqdn.is_root() { - return Err(uri_error(format!("invalid empty host: '{s}'"))); + return Err(HostParseError::InvalidEmptyHost(s.to_string())); } Ok(Host::Fqdn(fqdn)) } @@ -870,7 +896,7 @@ impl QueryDescriptor for NetDescriptor { &self, perm: &mut UnaryPermission, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(perm); perm.check_desc(Some(self), false, api_name) } @@ -896,39 +922,72 @@ impl QueryDescriptor for NetDescriptor { } } -// TODO(bartlomieju): rewrite to not use `AnyError` but a specific error implementations +#[derive(Debug, thiserror::Error)] +pub enum NetDescriptorParseError { + #[error("invalid value '{0}': URLs are not supported, only domains and ips")] + Url(String), + #[error("invalid IPv6 address in '{hostname}': '{ip}'")] + InvalidIpv6 { hostname: String, ip: String }, + #[error("invalid port in '{hostname}': '{port}'")] + InvalidPort { hostname: String, port: String }, + #[error("invalid host: '{0}'")] + InvalidHost(String), + #[error("invalid empty port in '{0}'")] + EmptyPort(String), + #[error("ipv6 addresses must be enclosed in square brackets: '{0}'")] + Ipv6MissingSquareBrackets(String), + #[error("{0}")] + Host(#[from] HostParseError), +} + +#[derive(Debug, thiserror::Error)] +pub enum NetDescriptorFromUrlParseError { + #[error("Missing host in url: '{0}'")] + MissingHost(Url), + #[error("{0}")] + Host(#[from] HostParseError), +} + impl NetDescriptor { - pub fn parse(hostname: &str) -> Result { + pub fn parse(hostname: &str) -> Result { if hostname.starts_with("http://") || hostname.starts_with("https://") { - return Err(uri_error(format!("invalid value '{hostname}': URLs are not supported, only domains and ips"))); + return Err(NetDescriptorParseError::Url(hostname.to_string())); } // If this is a IPv6 address enclosed in square brackets, parse it as such. if hostname.starts_with('[') { if let Some((ip, after)) = hostname.split_once(']') { let ip = ip[1..].parse::().map_err(|_| { - uri_error(format!("invalid IPv6 address in '{hostname}': '{ip}'")) + NetDescriptorParseError::InvalidIpv6 { + hostname: hostname.to_string(), + ip: ip.to_string(), + } })?; let port = if let Some(port) = after.strip_prefix(':') { let port = port.parse::().map_err(|_| { - uri_error(format!("invalid port in '{hostname}': '{port}'")) + NetDescriptorParseError::InvalidPort { + hostname: hostname.to_string(), + port: port.to_string(), + } })?; Some(port) } else if after.is_empty() { None } else { - return Err(uri_error(format!("invalid host: '{hostname}'"))); + return Err(NetDescriptorParseError::InvalidHost( + hostname.to_string(), + )); }; return Ok(NetDescriptor(Host::Ip(IpAddr::V6(ip)), port)); } else { - return Err(uri_error(format!("invalid host: '{hostname}'"))); + return Err(NetDescriptorParseError::InvalidHost(hostname.to_string())); } } // Otherwise it is an IPv4 address or a FQDN with an optional port. let (host, port) = match hostname.split_once(':') { Some((_, "")) => { - return Err(uri_error(format!("invalid empty port in '{hostname}'"))); + return Err(NetDescriptorParseError::EmptyPort(hostname.to_string())); } Some((host, port)) => (host, port), None => (hostname, ""), @@ -943,11 +1002,14 @@ impl NetDescriptor { // should give them a hint. There are always at least two colons in an // IPv6 address, so this heuristic finds likely a bare IPv6 address. 
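// [editor's note] Aside, not part of the patch: examples of how the typed
// parser above behaves, assuming the error variants defined earlier:
//   NetDescriptor::parse("example.com")     -> Ok(Host::Fqdn(..), None)
//   NetDescriptor::parse("example.com:443") -> Ok(Host::Fqdn(..), Some(443))
//   NetDescriptor::parse("[::1]:8080")      -> Ok(Host::Ip(V6), Some(8080))
//   NetDescriptor::parse("https://a.com")   -> Err(NetDescriptorParseError::Url(..))
// Callers can now match on these variants instead of string-matching a
// `uri_error` message.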
if port.contains(':') { - uri_error(format!( - "ipv6 addresses must be enclosed in square brackets: '{hostname}'" - )) + NetDescriptorParseError::Ipv6MissingSquareBrackets( + hostname.to_string(), + ) } else { - uri_error(format!("invalid port in '{hostname}': '{port}'")) + NetDescriptorParseError::InvalidPort { + hostname: hostname.to_string(), + port: port.to_string(), + } } })?; Some(port) @@ -956,10 +1018,10 @@ impl NetDescriptor { Ok(NetDescriptor(host, port)) } - pub fn from_url(url: &Url) -> Result { - let host = url - .host_str() - .ok_or_else(|| type_error(format!("Missing host in url: '{}'", url)))?; + pub fn from_url(url: &Url) -> Result { + let host = url.host_str().ok_or_else(|| { + NetDescriptorFromUrlParseError::MissingHost(url.clone()) + })?; let host = Host::parse(host)?; let port = url.port_or_known_default(); Ok(NetDescriptor(host, port)) @@ -1011,7 +1073,7 @@ impl QueryDescriptor for ImportDescriptor { &self, perm: &mut UnaryPermission, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(perm); perm.check_desc(Some(self), false, api_name) } @@ -1038,15 +1100,19 @@ impl QueryDescriptor for ImportDescriptor { } impl ImportDescriptor { - pub fn parse(specifier: &str) -> Result { + pub fn parse(specifier: &str) -> Result { Ok(ImportDescriptor(NetDescriptor::parse(specifier)?)) } - pub fn from_url(url: &Url) -> Result { + pub fn from_url(url: &Url) -> Result { Ok(ImportDescriptor(NetDescriptor::from_url(url)?)) } } +#[derive(Debug, thiserror::Error)] +#[error("Empty env not allowed")] +pub struct EnvDescriptorParseError; + #[derive(Clone, Eq, PartialEq, Hash, Debug)] pub struct EnvDescriptor(EnvVarName); @@ -1084,7 +1150,7 @@ impl QueryDescriptor for EnvDescriptor { &self, perm: &mut UnaryPermission, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(perm); perm.check_desc(Some(self), false, api_name) } @@ -1131,14 +1197,25 @@ pub enum RunQueryDescriptor { Name(String), } +#[derive(Debug, thiserror::Error)] +pub enum PathResolveError { + #[error("failed resolving cwd: {0}")] + CwdResolve(#[source] std::io::Error), + #[error("Empty path is not allowed")] + EmptyPath, +} + impl RunQueryDescriptor { - pub fn parse(requested: &str) -> Result { + pub fn parse( + requested: &str, + ) -> Result { if is_path(requested) { let path = PathBuf::from(requested); let resolved = if path.is_absolute() { normalize_path(path) } else { - let cwd = std::env::current_dir().context("failed resolving cwd")?; + let cwd = + std::env::current_dir().map_err(PathResolveError::CwdResolve)?; normalize_path(cwd.join(path)) }; Ok(RunQueryDescriptor::Path { @@ -1210,7 +1287,7 @@ impl QueryDescriptor for RunQueryDescriptor { &self, perm: &mut UnaryPermission, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(perm); perm.check_desc(Some(self), false, api_name) } @@ -1280,6 +1357,16 @@ pub enum AllowRunDescriptorParseResult { Descriptor(AllowRunDescriptor), } +#[derive(Debug, thiserror::Error)] +pub enum RunDescriptorParseError { + #[error("{0}")] + Which(#[from] which::Error), + #[error("{0}")] + PathResolve(#[from] PathResolveError), + #[error("Empty run query is not allowed")] + EmptyRunQuery, +} + #[derive(Debug, Clone, Hash, Eq, PartialEq)] pub struct AllowRunDescriptor(pub PathBuf); @@ -1360,17 +1447,29 @@ fn denies_run_name(name: &str, 
cmd_path: &Path) -> bool { suffix.is_empty() || suffix.starts_with('.') } +#[derive(Debug, thiserror::Error)] +pub enum SysDescriptorParseError { + #[error("unknown system info kind \"{0}\"")] + InvalidKind(String), // TypeError + #[error("Empty sys not allowed")] + Empty, // Error +} + #[derive(Clone, Eq, PartialEq, Hash, Debug)] pub struct SysDescriptor(String); impl SysDescriptor { - pub fn parse(kind: String) -> Result { + pub fn parse(kind: String) -> Result { match kind.as_str() { "hostname" | "osRelease" | "osUptime" | "loadavg" | "networkInterfaces" | "systemMemoryInfo" | "uid" | "gid" | "cpus" - | "homedir" | "getegid" | "username" | "statfs" | "getPriority" - | "setPriority" => Ok(Self(kind)), - _ => Err(type_error(format!("unknown system info kind \"{kind}\""))), + | "homedir" | "getegid" | "statfs" | "getPriority" | "setPriority" + | "userInfo" => Ok(Self(kind)), + + // the underlying permission check changed to `userInfo` to better match the API, + // alias this to avoid breaking existing projects with `--allow-sys=username` + "username" => Ok(Self("userInfo".into())), + _ => Err(SysDescriptorParseError::InvalidKind(kind)), } } @@ -1407,7 +1506,7 @@ impl QueryDescriptor for SysDescriptor { &self, perm: &mut UnaryPermission, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(perm); perm.check_desc(Some(self), false, api_name) } @@ -1468,7 +1567,7 @@ impl QueryDescriptor for FfiQueryDescriptor { &self, perm: &mut UnaryPermission, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(perm); perm.check_desc(Some(self), true, api_name) } @@ -1520,7 +1619,7 @@ impl UnaryPermission { &mut self, desc: &ReadQueryDescriptor, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(Some(desc), true, api_name) } @@ -1530,12 +1629,15 @@ impl UnaryPermission { &mut self, desc: &ReadQueryDescriptor, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(Some(desc), false, api_name) } - pub fn check_all(&mut self, api_name: Option<&str>) -> Result<(), AnyError> { + pub fn check_all( + &mut self, + api_name: Option<&str>, + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(None, false, api_name) } @@ -1564,7 +1666,7 @@ impl UnaryPermission { &mut self, path: &WriteQueryDescriptor, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(Some(path), true, api_name) } @@ -1574,12 +1676,15 @@ impl UnaryPermission { &mut self, path: &WriteQueryDescriptor, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(Some(path), false, api_name) } - pub fn check_all(&mut self, api_name: Option<&str>) -> Result<(), AnyError> { + pub fn check_all( + &mut self, + api_name: Option<&str>, + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(None, false, api_name) } @@ -1602,12 +1707,12 @@ impl UnaryPermission { &mut self, host: &NetDescriptor, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> 
Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(Some(host), false, api_name) } - pub fn check_all(&mut self) -> Result<(), AnyError> { + pub fn check_all(&mut self) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(None, false, None) } @@ -1633,12 +1738,12 @@ impl UnaryPermission { &mut self, host: &ImportDescriptor, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(Some(host), false, api_name) } - pub fn check_all(&mut self) -> Result<(), AnyError> { + pub fn check_all(&mut self) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(None, false, None) } @@ -1664,12 +1769,12 @@ impl UnaryPermission { &mut self, env: &str, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(Some(&EnvDescriptor::new(env)), false, api_name) } - pub fn check_all(&mut self) -> Result<(), AnyError> { + pub fn check_all(&mut self) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(None, false, None) } @@ -1692,12 +1797,12 @@ impl UnaryPermission { &mut self, kind: &SysDescriptor, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(Some(kind), false, api_name) } - pub fn check_all(&mut self) -> Result<(), AnyError> { + pub fn check_all(&mut self) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(None, false, None) } @@ -1726,11 +1831,14 @@ impl UnaryPermission { &mut self, cmd: &RunQueryDescriptor, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { self.check_desc(Some(cmd), false, api_name) } - pub fn check_all(&mut self, api_name: Option<&str>) -> Result<(), AnyError> { + pub fn check_all( + &mut self, + api_name: Option<&str>, + ) -> Result<(), PermissionDeniedError> { self.check_desc(None, false, api_name) } @@ -1773,7 +1881,7 @@ impl UnaryPermission { &mut self, path: &FfiQueryDescriptor, api_name: Option<&str>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(Some(path), true, api_name) } @@ -1781,12 +1889,12 @@ impl UnaryPermission { pub fn check_partial( &mut self, path: Option<&FfiQueryDescriptor>, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(path, false, None) } - pub fn check_all(&mut self) -> Result<(), AnyError> { + pub fn check_all(&mut self) -> Result<(), PermissionDeniedError> { skip_check_if_is_permission_fully_granted!(self); self.check_desc(None, false, Some("all")) } @@ -1826,23 +1934,39 @@ pub struct PermissionsOptions { pub prompt: bool, } +#[derive(Debug, thiserror::Error)] +pub enum PermissionsFromOptionsError { + #[error("{0}")] + PathResolve(#[from] PathResolveError), + #[error("{0}")] + SysDescriptorParse(#[from] SysDescriptorParseError), + #[error("{0}")] + NetDescriptorParse(#[from] NetDescriptorParseError), + #[error("{0}")] + EnvDescriptorParse(#[from] EnvDescriptorParseError), + #[error("{0}")] + RunDescriptorParse(#[from] RunDescriptorParseError), + #[error("Empty 
command name not allowed in --allow-run=...")] + RunEmptyCommandName, +} + impl Permissions { pub fn new_unary( allow_list: Option>, deny_list: Option>, prompt: bool, - ) -> Result, AnyError> + ) -> UnaryPermission where TQuery: QueryDescriptor, { - Ok(UnaryPermission:: { + UnaryPermission:: { granted_global: global_from_option(allow_list.as_ref()), granted_list: allow_list.unwrap_or_default(), flag_denied_global: global_from_option(deny_list.as_ref()), flag_denied_list: deny_list.unwrap_or_default(), prompt, ..Default::default() - }) + } } pub const fn new_all(allow_state: bool) -> UnitPermission { @@ -1858,15 +1982,15 @@ impl Permissions { pub fn from_options( parser: &dyn PermissionDescriptorParser, opts: &PermissionsOptions, - ) -> Result { + ) -> Result { fn resolve_allow_run( parser: &dyn PermissionDescriptorParser, allow_run: &[String], - ) -> Result, AnyError> { + ) -> Result, PermissionsFromOptionsError> { let mut new_allow_run = HashSet::with_capacity(allow_run.len()); for unresolved in allow_run { if unresolved.is_empty() { - bail!("Empty command name not allowed in --allow-run=...") + return Err(PermissionsFromOptionsError::RunEmptyCommandName); } match parser.parse_allow_run_descriptor(unresolved)? { AllowRunDescriptorParseResult::Descriptor(descriptor) => { @@ -1885,10 +2009,13 @@ impl Permissions { Ok(new_allow_run) } - fn parse_maybe_vec( + fn parse_maybe_vec( items: Option<&[String]>, - parse: impl Fn(&str) -> Result, - ) -> Result>, AnyError> { + parse: impl Fn(&str) -> Result, + ) -> Result>, PermissionsFromOptionsError> + where + PermissionsFromOptionsError: From, + { match items { Some(items) => Ok(Some( items @@ -1940,14 +2067,14 @@ impl Permissions { parser.parse_read_descriptor(item) })?, opts.prompt, - )?, + ), write: Permissions::new_unary( parse_maybe_vec(opts.allow_write.as_deref(), |item| { parser.parse_write_descriptor(item) })?, deny_write, opts.prompt, - )?, + ), net: Permissions::new_unary( parse_maybe_vec(opts.allow_net.as_deref(), |item| { parser.parse_net_descriptor(item) @@ -1956,7 +2083,7 @@ impl Permissions { parser.parse_net_descriptor(item) })?, opts.prompt, - )?, + ), env: Permissions::new_unary( parse_maybe_vec(opts.allow_env.as_deref(), |item| { parser.parse_env_descriptor(item) @@ -1965,7 +2092,7 @@ impl Permissions { parser.parse_env_descriptor(text) })?, opts.prompt, - )?, + ), sys: Permissions::new_unary( parse_maybe_vec(opts.allow_sys.as_deref(), |text| { parser.parse_sys_descriptor(text) @@ -1974,14 +2101,14 @@ impl Permissions { parser.parse_sys_descriptor(text) })?, opts.prompt, - )?, + ), run: Permissions::new_unary( allow_run, parse_maybe_vec(opts.deny_run.as_deref(), |text| { parser.parse_deny_run_descriptor(text) })?, opts.prompt, - )?, + ), ffi: Permissions::new_unary( parse_maybe_vec(opts.allow_ffi.as_deref(), |text| { parser.parse_ffi_descriptor(text) @@ -1990,14 +2117,14 @@ impl Permissions { parser.parse_ffi_descriptor(text) })?, opts.prompt, - )?, + ), import: Permissions::new_unary( parse_maybe_vec(opts.allow_import.as_deref(), |item| { parser.parse_import_descriptor(item) })?, None, opts.prompt, - )?, + ), all: Permissions::new_all(opts.allow_all), }) } @@ -2029,14 +2156,14 @@ impl Permissions { fn none(prompt: bool) -> Self { Self { - read: Permissions::new_unary(None, None, prompt).unwrap(), - write: Permissions::new_unary(None, None, prompt).unwrap(), - net: Permissions::new_unary(None, None, prompt).unwrap(), - env: Permissions::new_unary(None, None, prompt).unwrap(), - sys: Permissions::new_unary(None, None, 
prompt).unwrap(), - run: Permissions::new_unary(None, None, prompt).unwrap(), - ffi: Permissions::new_unary(None, None, prompt).unwrap(), - import: Permissions::new_unary(None, None, prompt).unwrap(), + read: Permissions::new_unary(None, None, prompt), + write: Permissions::new_unary(None, None, prompt), + net: Permissions::new_unary(None, None, prompt), + env: Permissions::new_unary(None, None, prompt), + sys: Permissions::new_unary(None, None, prompt), + run: Permissions::new_unary(None, None, prompt), + ffi: Permissions::new_unary(None, None, prompt), + import: Permissions::new_unary(None, None, prompt), all: Permissions::new_all(false), } } @@ -2048,6 +2175,38 @@ pub enum CheckSpecifierKind { Dynamic, } +#[derive(Debug, thiserror::Error)] +pub enum ChildPermissionError { + #[error("Can't escalate parent thread permissions")] + Escalation, + #[error("{0}")] + PathResolve(#[from] PathResolveError), + #[error("{0}")] + NetDescriptorParse(#[from] NetDescriptorParseError), + #[error("{0}")] + EnvDescriptorParse(#[from] EnvDescriptorParseError), + #[error("{0}")] + SysDescriptorParse(#[from] SysDescriptorParseError), + #[error("{0}")] + RunDescriptorParse(#[from] RunDescriptorParseError), +} + +#[derive(Debug, thiserror::Error)] +pub enum PermissionCheckError { + #[error(transparent)] + PermissionDenied(#[from] PermissionDeniedError), + #[error("Invalid file path.\n Specifier: {0}")] + InvalidFilePath(Url), + #[error(transparent)] + NetDescriptorForUrlParse(#[from] NetDescriptorFromUrlParseError), + #[error(transparent)] + SysDescriptorParse(#[from] SysDescriptorParseError), + #[error(transparent)] + PathResolve(#[from] PathResolveError), + #[error(transparent)] + HostParse(#[from] HostParseError), +} + /// Wrapper struct for `Permissions` that can be shared across threads. 
/// /// We need a way to have internal mutability for permissions as they might get @@ -2080,7 +2239,7 @@ impl PermissionsContainer { pub fn create_child_permissions( &self, child_permissions_arg: ChildPermissionsArg, - ) -> Result { + ) -> Result { fn is_granted_unary(arg: &ChildUnaryPermissionArg) -> bool { match arg { ChildUnaryPermissionArg::Inherit | ChildUnaryPermissionArg::Granted => { @@ -2118,48 +2277,71 @@ impl PermissionsContainer { // WARNING: When adding a permission here, ensure it is handled // in the worker_perms.all block above - worker_perms.read = inner - .read - .create_child_permissions(child_permissions_arg.read, |text| { - Ok(Some(self.descriptor_parser.parse_read_descriptor(text)?)) - })?; - worker_perms.write = inner - .write - .create_child_permissions(child_permissions_arg.write, |text| { - Ok(Some(self.descriptor_parser.parse_write_descriptor(text)?)) - })?; - worker_perms.import = inner - .import - .create_child_permissions(child_permissions_arg.import, |text| { - Ok(Some(self.descriptor_parser.parse_import_descriptor(text)?)) - })?; - worker_perms.net = inner - .net - .create_child_permissions(child_permissions_arg.net, |text| { - Ok(Some(self.descriptor_parser.parse_net_descriptor(text)?)) - })?; - worker_perms.env = inner - .env - .create_child_permissions(child_permissions_arg.env, |text| { - Ok(Some(self.descriptor_parser.parse_env_descriptor(text)?)) - })?; - worker_perms.sys = inner - .sys - .create_child_permissions(child_permissions_arg.sys, |text| { - Ok(Some(self.descriptor_parser.parse_sys_descriptor(text)?)) - })?; + worker_perms.read = inner.read.create_child_permissions( + child_permissions_arg.read, + |text| { + Ok::<_, PathResolveError>(Some( + self.descriptor_parser.parse_read_descriptor(text)?, + )) + }, + )?; + worker_perms.write = inner.write.create_child_permissions( + child_permissions_arg.write, + |text| { + Ok::<_, PathResolveError>(Some( + self.descriptor_parser.parse_write_descriptor(text)?, + )) + }, + )?; + worker_perms.import = inner.import.create_child_permissions( + child_permissions_arg.import, + |text| { + Ok::<_, NetDescriptorParseError>(Some( + self.descriptor_parser.parse_import_descriptor(text)?, + )) + }, + )?; + worker_perms.net = inner.net.create_child_permissions( + child_permissions_arg.net, + |text| { + Ok::<_, NetDescriptorParseError>(Some( + self.descriptor_parser.parse_net_descriptor(text)?, + )) + }, + )?; + worker_perms.env = inner.env.create_child_permissions( + child_permissions_arg.env, + |text| { + Ok::<_, EnvDescriptorParseError>(Some( + self.descriptor_parser.parse_env_descriptor(text)?, + )) + }, + )?; + worker_perms.sys = inner.sys.create_child_permissions( + child_permissions_arg.sys, + |text| { + Ok::<_, SysDescriptorParseError>(Some( + self.descriptor_parser.parse_sys_descriptor(text)?, + )) + }, + )?; worker_perms.run = inner.run.create_child_permissions( child_permissions_arg.run, |text| match self.descriptor_parser.parse_allow_run_descriptor(text)? 
{ - AllowRunDescriptorParseResult::Unresolved(_) => Ok(None), + AllowRunDescriptorParseResult::Unresolved(_) => { + Ok::<_, RunDescriptorParseError>(None) + } AllowRunDescriptorParseResult::Descriptor(desc) => Ok(Some(desc)), }, )?; - worker_perms.ffi = inner - .ffi - .create_child_permissions(child_permissions_arg.ffi, |text| { - Ok(Some(self.descriptor_parser.parse_ffi_descriptor(text)?)) - })?; + worker_perms.ffi = inner.ffi.create_child_permissions( + child_permissions_arg.ffi, + |text| { + Ok::<_, PathResolveError>(Some( + self.descriptor_parser.parse_ffi_descriptor(text)?, + )) + }, + )?; Ok(PermissionsContainer::new( self.descriptor_parser.clone(), @@ -2172,7 +2354,7 @@ impl PermissionsContainer { &self, specifier: &ModuleSpecifier, kind: CheckSpecifierKind, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionCheckError> { let mut inner = self.inner.lock(); match specifier.scheme() { "file" => { @@ -2181,17 +2363,20 @@ impl PermissionsContainer { } match url_to_file_path(specifier) { - Ok(path) => inner.read.check( - &PathQueryDescriptor { - requested: path.to_string_lossy().into_owned(), - resolved: path, - } - .into_read(), - Some("import()"), - ), - Err(_) => Err(uri_error(format!( - "Invalid file path.\n Specifier: {specifier}" - ))), + Ok(path) => inner + .read + .check( + &PathQueryDescriptor { + requested: path.to_string_lossy().into_owned(), + resolved: path, + } + .into_read(), + Some("import()"), + ) + .map_err(PermissionCheckError::PermissionDenied), + Err(_) => { + Err(PermissionCheckError::InvalidFilePath(specifier.clone())) + } } } "data" => Ok(()), @@ -2216,7 +2401,7 @@ impl PermissionsContainer { &self, path: &str, api_name: &str, - ) -> Result { + ) -> Result { self.check_read_with_api_name(path, Some(api_name)) } @@ -2226,7 +2411,7 @@ impl PermissionsContainer { &self, path: &str, api_name: Option<&str>, - ) -> Result { + ) -> Result { let mut inner = self.inner.lock(); let inner = &mut inner.read; if inner.is_allow_all() { @@ -2244,7 +2429,7 @@ impl PermissionsContainer { &self, path: &'a Path, api_name: Option<&str>, - ) -> Result, AnyError> { + ) -> Result, PermissionCheckError> { let mut inner = self.inner.lock(); let inner = &mut inner.read; if inner.is_allow_all() { @@ -2268,7 +2453,7 @@ impl PermissionsContainer { path: &Path, display: &str, api_name: &str, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionCheckError> { let mut inner = self.inner.lock(); let inner = &mut inner.read; skip_check_if_is_permission_fully_granted!(inner); @@ -2279,12 +2464,17 @@ impl PermissionsContainer { } .into_read(), Some(api_name), - ) + )?; + Ok(()) } #[inline(always)] - pub fn check_read_all(&self, api_name: &str) -> Result<(), AnyError> { - self.inner.lock().read.check_all(Some(api_name)) + pub fn check_read_all( + &self, + api_name: &str, + ) -> Result<(), PermissionCheckError> { + self.inner.lock().read.check_all(Some(api_name))?; + Ok(()) } #[inline(always)] @@ -2298,7 +2488,7 @@ impl PermissionsContainer { &self, path: &str, api_name: &str, - ) -> Result { + ) -> Result { self.check_write_with_api_name(path, Some(api_name)) } @@ -2308,7 +2498,7 @@ impl PermissionsContainer { &self, path: &str, api_name: Option<&str>, - ) -> Result { + ) -> Result { let mut inner = self.inner.lock(); let inner = &mut inner.write; if inner.is_allow_all() { @@ -2326,7 +2516,7 @@ impl PermissionsContainer { &self, path: &'a Path, api_name: &str, - ) -> Result, AnyError> { + ) -> Result, PermissionCheckError> { let mut inner = self.inner.lock(); let inner = &mut inner.write; 
if inner.is_allow_all() { @@ -2343,8 +2533,12 @@ impl PermissionsContainer { } #[inline(always)] - pub fn check_write_all(&self, api_name: &str) -> Result<(), AnyError> { - self.inner.lock().write.check_all(Some(api_name)) + pub fn check_write_all( + &self, + api_name: &str, + ) -> Result<(), PermissionCheckError> { + self.inner.lock().write.check_all(Some(api_name))?; + Ok(()) } /// As `check_write()`, but permission error messages will anonymize the path @@ -2355,7 +2549,7 @@ impl PermissionsContainer { path: &Path, display: &str, api_name: &str, - ) -> Result<(), AnyError> { + ) -> Result<(), PermissionCheckError> { let mut inner = self.inner.lock(); let inner = &mut inner.write; skip_check_if_is_permission_fully_granted!(inner); @@ -2366,7 +2560,8 @@ impl PermissionsContainer { } .into_write(), Some(api_name), - ) + )?; + Ok(()) } #[inline(always)] @@ -2374,7 +2569,7 @@ impl PermissionsContainer { &mut self, path: &str, api_name: &str, - ) -> Result { + ) -> Result { let mut inner = self.inner.lock(); let inner = &mut inner.write; if inner.is_allow_all() { @@ -2391,13 +2586,18 @@ impl PermissionsContainer { &mut self, cmd: &RunQueryDescriptor, api_name: &str, - ) -> Result<(), AnyError> { - self.inner.lock().run.check(cmd, Some(api_name)) + ) -> Result<(), PermissionCheckError> { + self.inner.lock().run.check(cmd, Some(api_name))?; + Ok(()) } #[inline(always)] - pub fn check_run_all(&mut self, api_name: &str) -> Result<(), AnyError> { - self.inner.lock().run.check_all(Some(api_name)) + pub fn check_run_all( + &mut self, + api_name: &str, + ) -> Result<(), PermissionCheckError> { + self.inner.lock().run.check_all(Some(api_name))?; + Ok(()) } #[inline(always)] @@ -2406,38 +2606,50 @@ impl PermissionsContainer { } #[inline(always)] - pub fn check_sys(&self, kind: &str, api_name: &str) -> Result<(), AnyError> { + pub fn check_sys( + &self, + kind: &str, + api_name: &str, + ) -> Result<(), PermissionCheckError> { self.inner.lock().sys.check( &self.descriptor_parser.parse_sys_descriptor(kind)?, Some(api_name), - ) + )?; + Ok(()) } #[inline(always)] - pub fn check_env(&mut self, var: &str) -> Result<(), AnyError> { - self.inner.lock().env.check(var, None) + pub fn check_env(&mut self, var: &str) -> Result<(), PermissionCheckError> { + self.inner.lock().env.check(var, None)?; + Ok(()) } #[inline(always)] - pub fn check_env_all(&mut self) -> Result<(), AnyError> { - self.inner.lock().env.check_all() + pub fn check_env_all(&mut self) -> Result<(), PermissionCheckError> { + self.inner.lock().env.check_all()?; + Ok(()) } #[inline(always)] - pub fn check_sys_all(&mut self) -> Result<(), AnyError> { - self.inner.lock().sys.check_all() + pub fn check_sys_all(&mut self) -> Result<(), PermissionCheckError> { + self.inner.lock().sys.check_all()?; + Ok(()) } #[inline(always)] - pub fn check_ffi_all(&mut self) -> Result<(), AnyError> { - self.inner.lock().ffi.check_all() + pub fn check_ffi_all(&mut self) -> Result<(), PermissionCheckError> { + self.inner.lock().ffi.check_all()?; + Ok(()) } /// This checks to see if the allow-all flag was passed, not whether all /// permissions are enabled! 
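// [editor's note] Aside, not part of the patch: the recurring
// `...check(...)?; Ok(())` shape above widens the narrow
// `PermissionDeniedError` returned by the inner checks into the public
// `PermissionCheckError` via that enum's `#[from] PermissionDeniedError`
// variant, e.g. (hypothetical reduced form):
//
//   fn demo_check_all(
//     inner: &mut UnitPermission,
//   ) -> Result<(), PermissionCheckError> {
//     inner.check()?; // `?` applies From<PermissionDeniedError>
//     Ok(())
//   }
//
// Returning `inner.check()` directly would not compile, since the error types
// differ; the `?` operator performs the `From` conversion.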
   /// This checks to see if the allow-all flag was passed, not whether all
   /// permissions are enabled!
   #[inline(always)]
-  pub fn check_was_allow_all_flag_passed(&mut self) -> Result<(), AnyError> {
-    self.inner.lock().all.check()
+  pub fn check_was_allow_all_flag_passed(
+    &mut self,
+  ) -> Result<(), PermissionCheckError> {
+    self.inner.lock().all.check()?;
+    Ok(())
   }
 
   /// Checks special file access, returning the failed permission type if
@@ -2549,13 +2761,14 @@ impl PermissionsContainer {
     &mut self,
     url: &Url,
     api_name: &str,
-  ) -> Result<(), AnyError> {
+  ) -> Result<(), PermissionCheckError> {
     let mut inner = self.inner.lock();
     if inner.net.is_allow_all() {
       return Ok(());
     }
     let desc = self.descriptor_parser.parse_net_descriptor_from_url(url)?;
-    inner.net.check(&desc, Some(api_name))
+    inner.net.check(&desc, Some(api_name))?;
+    Ok(())
   }
 
   #[inline(always)]
@@ -2563,17 +2776,21 @@ impl PermissionsContainer {
     &mut self,
     host: &(T, Option<u16>),
     api_name: &str,
-  ) -> Result<(), AnyError> {
+  ) -> Result<(), PermissionCheckError> {
     let mut inner = self.inner.lock();
     let inner = &mut inner.net;
     skip_check_if_is_permission_fully_granted!(inner);
     let hostname = Host::parse(host.0.as_ref())?;
     let descriptor = NetDescriptor(hostname, host.1);
-    inner.check(&descriptor, Some(api_name))
+    inner.check(&descriptor, Some(api_name))?;
+    Ok(())
   }
 
   #[inline(always)]
-  pub fn check_ffi(&mut self, path: &str) -> Result<PathBuf, AnyError> {
+  pub fn check_ffi(
+    &mut self,
+    path: &str,
+  ) -> Result<PathBuf, PermissionCheckError> {
     let mut inner = self.inner.lock();
     let inner = &mut inner.ffi;
     if inner.is_allow_all() {
@@ -2587,14 +2804,15 @@ impl PermissionsContainer {
 
   #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
   #[inline(always)]
-  pub fn check_ffi_partial_no_path(&mut self) -> Result<(), AnyError> {
+  pub fn check_ffi_partial_no_path(
+    &mut self,
+  ) -> Result<(), PermissionCheckError> {
     let mut inner = self.inner.lock();
     let inner = &mut inner.ffi;
-    if inner.is_allow_all() {
-      Ok(())
-    } else {
-      inner.check_partial(None)
+    if !inner.is_allow_all() {
+      inner.check_partial(None)?;
     }
+    Ok(())
   }
 
   #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"]
@@ -2602,7 +2820,7 @@ impl PermissionsContainer {
   pub fn check_ffi_partial_with_path(
     &mut self,
     path: &str,
-  ) -> Result<PathBuf, AnyError> {
+  ) -> Result<PathBuf, PermissionCheckError> {
     let mut inner = self.inner.lock();
     let inner = &mut inner.ffi;
     if inner.is_allow_all() {
@@ -2620,7 +2838,7 @@ impl PermissionsContainer {
   pub fn query_read(
     &self,
     path: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, PathResolveError> {
     let inner = self.inner.lock();
     let permission = &inner.read;
     if permission.is_allow_all() {
@@ -2630,7 +2848,7 @@ impl PermissionsContainer {
     permission.query(
       path
         .map(|path| {
-          Result::<_, AnyError>::Ok(
+          Ok::<_, PathResolveError>(
             self.descriptor_parser.parse_path_query(path)?.into_read(),
           )
         })
@@ -2644,7 +2862,7 @@ impl PermissionsContainer {
   pub fn query_write(
     &self,
     path: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, PathResolveError> {
     let inner = self.inner.lock();
     let permission = &inner.write;
     if permission.is_allow_all() {
@@ -2654,7 +2872,7 @@ impl PermissionsContainer {
     permission.query(
       path
         .map(|path| {
-          Result::<_, AnyError>::Ok(
+          Ok::<_, PathResolveError>(
             self.descriptor_parser.parse_path_query(path)?.into_write(),
           )
         })
@@ -2668,7 +2886,7 @@ impl PermissionsContainer {
   pub fn query_net(
     &self,
     host: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, NetDescriptorParseError> {
     let inner = self.inner.lock();
     let permission = &inner.net;
     if permission.is_allow_all() {
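
`check_ffi_partial_no_path` above trades an `if/else` over `is_allow_all()` for a guard clause with a single `Ok(())` exit, which is what lets the `?` conversion slot in. A sketch of the rewritten shape, with hypothetical types:

```rust
#[derive(Debug)]
struct DeniedError;

struct UnaryPermission {
  allow_all: bool,
  granted_partial: bool,
}

impl UnaryPermission {
  fn check_partial(&self) -> Result<(), DeniedError> {
    if self.granted_partial {
      Ok(())
    } else {
      Err(DeniedError)
    }
  }
}

// Guard-style rewrite, as in `check_ffi_partial_no_path`: skip the real
// check entirely when the allow-all fast path applies, then fall through
// to a single success exit.
fn check_partial_no_path(p: &UnaryPermission) -> Result<(), DeniedError> {
  if !p.allow_all {
    p.check_partial()?;
  }
  Ok(())
}

fn main() {
  let p = UnaryPermission { allow_all: true, granted_partial: false };
  assert!(check_partial_no_path(&p).is_ok());
}
```
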
@@ -2699,7 +2917,7 @@ impl PermissionsContainer {
   pub fn query_sys(
     &self,
     kind: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, SysDescriptorParseError> {
     let inner = self.inner.lock();
     let permission = &inner.sys;
     if permission.is_allow_all() {
@@ -2719,7 +2937,7 @@ impl PermissionsContainer {
   pub fn query_run(
     &self,
     cmd: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, RunDescriptorParseError> {
     let inner = self.inner.lock();
     let permission = &inner.run;
     if permission.is_allow_all() {
@@ -2739,7 +2957,7 @@ impl PermissionsContainer {
   pub fn query_ffi(
     &self,
     path: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, PathResolveError> {
     let inner = self.inner.lock();
     let permission = &inner.ffi;
     if permission.is_allow_all() {
@@ -2749,7 +2967,7 @@ impl PermissionsContainer {
     permission.query(
       path
         .map(|path| {
-          Result::<_, AnyError>::Ok(
+          Ok::<_, PathResolveError>(
             self.descriptor_parser.parse_path_query(path)?.into_ffi(),
           )
         })
@@ -2765,12 +2983,12 @@ impl PermissionsContainer {
   pub fn revoke_read(
     &self,
     path: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, PathResolveError> {
     Ok(
       self.inner.lock().read.revoke(
         path
           .map(|path| {
-            Result::<_, AnyError>::Ok(
+            Ok::<_, PathResolveError>(
               self.descriptor_parser.parse_path_query(path)?.into_read(),
             )
           })
@@ -2784,12 +3002,12 @@ impl PermissionsContainer {
   pub fn revoke_write(
     &self,
     path: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, PathResolveError> {
     Ok(
       self.inner.lock().write.revoke(
         path
           .map(|path| {
-            Result::<_, AnyError>::Ok(
+            Ok::<_, PathResolveError>(
               self.descriptor_parser.parse_path_query(path)?.into_write(),
             )
           })
@@ -2803,7 +3021,7 @@ impl PermissionsContainer {
   pub fn revoke_net(
     &self,
     host: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, NetDescriptorParseError> {
     Ok(
       self.inner.lock().net.revoke(
         match host {
@@ -2824,7 +3042,7 @@ impl PermissionsContainer {
   pub fn revoke_sys(
     &self,
     kind: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, SysDescriptorParseError> {
     Ok(
       self.inner.lock().sys.revoke(
         kind
@@ -2839,7 +3057,7 @@ impl PermissionsContainer {
   pub fn revoke_run(
     &self,
     cmd: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, RunDescriptorParseError> {
     Ok(
       self.inner.lock().run.revoke(
         cmd
@@ -2854,12 +3072,12 @@ impl PermissionsContainer {
   pub fn revoke_ffi(
     &self,
     path: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, PathResolveError> {
     Ok(
       self.inner.lock().ffi.revoke(
         path
           .map(|path| {
-            Result::<_, AnyError>::Ok(
+            Ok::<_, PathResolveError>(
               self.descriptor_parser.parse_path_query(path)?.into_ffi(),
             )
           })
@@ -2875,12 +3093,12 @@ impl PermissionsContainer {
   pub fn request_read(
     &self,
     path: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, PathResolveError> {
     Ok(
       self.inner.lock().read.request(
         path
           .map(|path| {
-            Result::<_, AnyError>::Ok(
+            Ok::<_, PathResolveError>(
               self.descriptor_parser.parse_path_query(path)?.into_read(),
             )
           })
@@ -2894,12 +3112,12 @@ impl PermissionsContainer {
   pub fn request_write(
     &self,
     path: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, PathResolveError> {
     Ok(
       self.inner.lock().write.request(
         path
           .map(|path| {
-            Result::<_, AnyError>::Ok(
+            Ok::<_, PathResolveError>(
               self.descriptor_parser.parse_path_query(path)?.into_write(),
             )
           })
@@ -2913,7 +3131,7 @@ impl PermissionsContainer {
   pub fn request_net(
     &self,
     host: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, NetDescriptorParseError> {
     Ok(
       self.inner.lock().net.request(
         match host {
@@ -2934,7 +3152,7 @@ impl PermissionsContainer {
   pub fn request_sys(
     &self,
     kind: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, SysDescriptorParseError> {
     Ok(
       self.inner.lock().sys.request(
         kind
@@ -2949,7 +3167,7 @@ impl PermissionsContainer {
   pub fn request_run(
     &self,
     cmd: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, RunDescriptorParseError> {
     Ok(
       self.inner.lock().run.request(
         cmd
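
The `query_*`, `revoke_*`, and `request_*` families above all follow one shape: parse the optional descriptor string with a typed error, then query the unary permission. A sketch of that shape under assumed names; the surrounding `.transpose()?` step is elided in this patch, so treat it as an assumption about the context:

```rust
use std::path::PathBuf;

#[derive(Debug)]
struct PathResolveError(String);

#[derive(Debug, PartialEq)]
enum PermissionState {
  Granted,
  Prompt,
}

fn parse_path_query(path: &str) -> Result<PathBuf, PathResolveError> {
  if path.is_empty() {
    return Err(PathResolveError("empty path".into()));
  }
  Ok(PathBuf::from(path))
}

// Mirrors the `query_*` shape: an optional descriptor string is parsed
// first (with a typed error), then the permission itself is queried.
fn query_read(
  path: Option<&str>,
) -> Result<PermissionState, PathResolveError> {
  let desc = path
    .map(|path| Ok::<_, PathResolveError>(parse_path_query(path)?))
    .transpose()?;
  Ok(if desc.is_some() {
    PermissionState::Prompt
  } else {
    PermissionState::Granted
  })
}

fn main() {
  assert_eq!(query_read(None).unwrap(), PermissionState::Granted);
  assert!(query_read(Some("")).is_err());
}
```
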
@@ -2964,12 +3182,12 @@ impl PermissionsContainer {
   pub fn request_ffi(
     &self,
     path: Option<&str>,
-  ) -> Result<PermissionState, AnyError> {
+  ) -> Result<PermissionState, PathResolveError> {
     Ok(
       self.inner.lock().ffi.request(
         path
           .map(|path| {
-            Result::<_, AnyError>::Ok(
+            Ok::<_, PathResolveError>(
               self.descriptor_parser.parse_path_query(path)?.into_ffi(),
             )
           })
@@ -3005,10 +3223,6 @@ fn global_from_option<T>(flag: Option<&HashSet<T>>) -> bool {
   matches!(flag, Some(v) if v.is_empty())
 }
 
-fn escalation_error() -> AnyError {
-  custom_error("NotCapable", "Can't escalate parent thread permissions")
-}
-
 #[derive(Debug, Eq, PartialEq)]
 pub enum ChildUnitPermissionArg {
   Inherit,
@@ -3269,65 +3483,73 @@ pub trait PermissionDescriptorParser: Debug + Send + Sync {
   fn parse_read_descriptor(
     &self,
     text: &str,
-  ) -> Result<ReadDescriptor, AnyError>;
+  ) -> Result<ReadDescriptor, PathResolveError>;
 
   fn parse_write_descriptor(
     &self,
     text: &str,
-  ) -> Result<WriteDescriptor, AnyError>;
+  ) -> Result<WriteDescriptor, PathResolveError>;
 
-  fn parse_net_descriptor(&self, text: &str)
-    -> Result<NetDescriptor, AnyError>;
+  fn parse_net_descriptor(
+    &self,
+    text: &str,
+  ) -> Result<NetDescriptor, NetDescriptorParseError>;
 
   fn parse_net_descriptor_from_url(
     &self,
     url: &Url,
-  ) -> Result<NetDescriptor, AnyError> {
+  ) -> Result<NetDescriptor, NetDescriptorParseError> {
     NetDescriptor::from_url(url)
   }
 
   fn parse_import_descriptor(
     &self,
     text: &str,
-  ) -> Result<ImportDescriptor, AnyError>;
+  ) -> Result<ImportDescriptor, NetDescriptorParseError>;
 
   fn parse_import_descriptor_from_url(
     &self,
     url: &Url,
-  ) -> Result<ImportDescriptor, AnyError> {
+  ) -> Result<ImportDescriptor, NetDescriptorParseError> {
     ImportDescriptor::from_url(url)
   }
 
-  fn parse_env_descriptor(&self, text: &str)
-    -> Result<EnvDescriptor, AnyError>;
+  fn parse_env_descriptor(
+    &self,
+    text: &str,
+  ) -> Result<EnvDescriptor, EnvDescriptorParseError>;
 
-  fn parse_sys_descriptor(&self, text: &str)
-    -> Result<SysDescriptor, AnyError>;
+  fn parse_sys_descriptor(
+    &self,
+    text: &str,
+  ) -> Result<SysDescriptor, SysDescriptorParseError>;
 
   fn parse_allow_run_descriptor(
     &self,
     text: &str,
-  ) -> Result<AllowRunDescriptorParseResult, AnyError>;
+  ) -> Result<AllowRunDescriptorParseResult, RunDescriptorParseError>;
 
   fn parse_deny_run_descriptor(
     &self,
     text: &str,
-  ) -> Result<DenyRunDescriptor, AnyError>;
+  ) -> Result<DenyRunDescriptor, PathResolveError>;
 
-  fn parse_ffi_descriptor(&self, text: &str)
-    -> Result<FfiDescriptor, AnyError>;
+  fn parse_ffi_descriptor(
+    &self,
+    text: &str,
+  ) -> Result<FfiDescriptor, PathResolveError>;
 
   // queries
   fn parse_path_query(
     &self,
     path: &str,
-  ) -> Result<PathQueryDescriptor, AnyError>;
+  ) -> Result<PathQueryDescriptor, PathResolveError>;
 
   fn parse_run_query(
     &self,
     requested: &str,
-  ) -> Result<RunQueryDescriptor, AnyError>;
+  ) -> Result<RunQueryDescriptor, RunDescriptorParseError>;
 }
 
 static IS_STANDALONE: AtomicFlag = AtomicFlag::lowered();
@@ -3370,49 +3592,49 @@ mod tests {
     fn parse_read_descriptor(
       &self,
       text: &str,
-    ) -> Result<ReadDescriptor, AnyError> {
+    ) -> Result<ReadDescriptor, PathResolveError> {
       Ok(ReadDescriptor(self.join_path_with_root(text)))
     }
 
     fn parse_write_descriptor(
       &self,
       text: &str,
-    ) -> Result<WriteDescriptor, AnyError> {
+    ) -> Result<WriteDescriptor, PathResolveError> {
       Ok(WriteDescriptor(self.join_path_with_root(text)))
     }
 
     fn parse_net_descriptor(
       &self,
       text: &str,
-    ) -> Result<NetDescriptor, AnyError> {
+    ) -> Result<NetDescriptor, NetDescriptorParseError> {
       NetDescriptor::parse(text)
     }
 
     fn parse_import_descriptor(
       &self,
       text: &str,
-    ) -> Result<ImportDescriptor, AnyError> {
+    ) -> Result<ImportDescriptor, NetDescriptorParseError> {
       ImportDescriptor::parse(text)
     }
 
     fn parse_env_descriptor(
       &self,
       text: &str,
-    ) -> Result<EnvDescriptor, AnyError> {
+    ) -> Result<EnvDescriptor, EnvDescriptorParseError> {
       Ok(EnvDescriptor::new(text))
     }
 
     fn parse_sys_descriptor(
       &self,
       text: &str,
-    ) -> Result<SysDescriptor, AnyError> {
+    ) -> Result<SysDescriptor, SysDescriptorParseError> {
       SysDescriptor::parse(text.to_string())
     }
 
     fn parse_allow_run_descriptor(
       &self,
       text: &str,
-    ) -> Result<AllowRunDescriptorParseResult, AnyError> {
+    ) -> Result<AllowRunDescriptorParseResult, RunDescriptorParseError> {
       Ok(AllowRunDescriptorParseResult::Descriptor(
         AllowRunDescriptor(self.join_path_with_root(text)),
       ))
@@ -3421,7 +3643,7 @@ mod tests {
     fn parse_deny_run_descriptor(
       &self,
       text: &str,
-    ) -> Result<DenyRunDescriptor, AnyError> {
+    ) -> Result<DenyRunDescriptor, PathResolveError> {
       if text.contains("/") {
         Ok(DenyRunDescriptor::Path(self.join_path_with_root(text)))
       } else {
@@ -3432,14 +3654,14 @@ mod tests {
     fn parse_ffi_descriptor(
       &self,
       text: &str,
-    ) -> Result<FfiDescriptor, AnyError> {
+    ) -> Result<FfiDescriptor, PathResolveError> {
       Ok(FfiDescriptor(self.join_path_with_root(text)))
     }
 
     fn parse_path_query(
       &self,
       path: &str,
-    ) -> Result<PathQueryDescriptor, AnyError> {
+    ) -> Result<PathQueryDescriptor, PathResolveError> {
       Ok(PathQueryDescriptor {
         resolved: self.join_path_with_root(path),
         requested: path.to_string(),
@@ -3449,8 +3671,8 @@ mod tests {
     fn parse_run_query(
       &self,
       requested: &str,
-    ) -> Result<RunQueryDescriptor, AnyError> {
-      RunQueryDescriptor::parse(requested)
+    ) -> Result<RunQueryDescriptor, RunDescriptorParseError> {
+      RunQueryDescriptor::parse(requested).map_err(Into::into)
     }
   }
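
`parse_run_query` above keeps the underlying call but widens its error with `map_err(Into::into)`, which reads as a terse bridge between two error types. The same bridge in isolation, all names hypothetical:

```rust
#[derive(Debug)]
struct WhichError(String);

#[derive(Debug)]
enum RunDescriptorParseError {
  Which(WhichError),
}

impl From<WhichError> for RunDescriptorParseError {
  fn from(e: WhichError) -> Self {
    RunDescriptorParseError::Which(e)
  }
}

fn resolve_in_path(cmd: &str) -> Result<String, WhichError> {
  if cmd.is_empty() {
    return Err(WhichError("empty command".into()));
  }
  Ok(format!("/usr/bin/{cmd}"))
}

// `map_err(Into::into)` converts the error without a closure spelling the
// target variant out, exactly as in `parse_run_query`.
fn parse_run_query(requested: &str) -> Result<String, RunDescriptorParseError> {
  resolve_in_path(requested).map_err(Into::into)
}

fn main() {
  assert!(parse_run_query("deno").is_ok());
  assert!(parse_run_query("").is_err());
}
```
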
@@ -4331,7 +4553,6 @@ mod tests {
         None,
         false,
       )
-      .unwrap()
     };
 
     prompt_value.set(true);
@@ -4558,13 +4779,12 @@ mod tests {
         .lock()
         .clone(),
       Permissions {
-        env: Permissions::new_unary(Some(HashSet::new()), None, false).unwrap(),
+        env: Permissions::new_unary(Some(HashSet::new()), None, false),
         net: Permissions::new_unary(
           Some(HashSet::from([NetDescriptor::parse("foo").unwrap()])),
           None,
           false
-        )
-        .unwrap(),
+        ),
         ..Permissions::none_without_prompt()
       }
     );
diff --git a/runtime/permissions/prompter.rs b/runtime/permissions/prompter.rs
index 316911edc1..168a845a29 100644
--- a/runtime/permissions/prompter.rs
+++ b/runtime/permissions/prompter.rs
@@ -1,6 +1,5 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
-use deno_core::error::AnyError;
 use deno_core::parking_lot::Mutex;
 use deno_terminal::colors;
 use once_cell::sync::Lazy;
@@ -101,8 +100,7 @@ pub struct TtyPrompter;
 fn clear_stdin(
   _stdin_lock: &mut StdinLock,
   _stderr_lock: &mut StderrLock,
-) -> Result<(), AnyError> {
-  use deno_core::anyhow::bail;
+) -> Result<(), std::io::Error> {
   use std::mem::MaybeUninit;
 
   const STDIN_FD: i32 = 0;
@@ -117,7 +115,10 @@ fn clear_stdin(
   loop {
     let r = libc::tcflush(STDIN_FD, libc::TCIFLUSH);
     if r != 0 {
-      bail!("clear_stdin failed (tcflush)");
+      return Err(std::io::Error::new(
+        std::io::ErrorKind::Other,
+        "clear_stdin failed (tcflush)",
+      ));
     }
 
     // Initialize timeout for select to be 100ms
@@ -137,7 +138,10 @@ fn clear_stdin(
     // Check if select returned an error
     if r < 0 {
-      bail!("clear_stdin failed (select)");
+      return Err(std::io::Error::new(
+        std::io::ErrorKind::Other,
+        "clear_stdin failed (select)",
+      ));
     }
 
     // Check if select returned due to timeout (stdin is quiescent)
@@ -156,8 +160,7 @@ fn clear_stdin(
   stdin_lock: &mut StdinLock,
   stderr_lock: &mut StderrLock,
-) -> Result<(), AnyError> {
-  use deno_core::anyhow::bail;
+) -> Result<(), std::io::Error> {
   use winapi::shared::minwindef::TRUE;
   use winapi::shared::minwindef::UINT;
   use winapi::shared::minwindef::WORD;
@@ -194,18 +197,23 @@ fn clear_stdin(
   return Ok(());
 
-  unsafe fn flush_input_buffer(stdin: HANDLE) -> Result<(), AnyError> {
+  unsafe fn flush_input_buffer(stdin: HANDLE) -> Result<(), std::io::Error> {
     let success = FlushConsoleInputBuffer(stdin);
     if success != TRUE {
-      bail!(
-        "Could not flush the console input buffer: {}",
-        std::io::Error::last_os_error()
-      )
+      return Err(std::io::Error::new(
+        std::io::ErrorKind::Other,
+        format!(
+          "Could not flush the console input buffer: {}",
+          std::io::Error::last_os_error()
+        ),
+      ));
     }
     Ok(())
   }
 
-  unsafe fn emulate_enter_key_press(stdin: HANDLE) -> Result<(), AnyError> {
+  unsafe fn emulate_enter_key_press(
+    stdin: HANDLE,
+  ) -> Result<(), std::io::Error> {
     // https://github.com/libuv/libuv/blob/a39009a5a9252a566ca0704d02df8dabc4ce328f/src/win/tty.c#L1121-L1131
     let mut input_record: INPUT_RECORD = std::mem::zeroed();
     input_record.EventType = KEY_EVENT;
@@ -220,34 +228,43 @@ fn clear_stdin(
     let success =
       WriteConsoleInputW(stdin, &input_record, 1, &mut record_written);
     if success != TRUE {
-      bail!(
-        "Could not emulate enter key press: {}",
-        std::io::Error::last_os_error()
-      );
+      return Err(std::io::Error::new(
+        std::io::ErrorKind::Other,
+        format!(
+          "Could not emulate enter key press: {}",
+          std::io::Error::last_os_error()
+        ),
+      ));
     }
     Ok(())
   }
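
prompter.rs drops `anyhow::bail!` in favor of a concrete `std::io::Error` return type, so every early exit becomes an explicit constructor call. A minimal sketch of the mechanical translation; the OS call is stubbed out:

```rust
use std::io;

// What a `bail!("...: {}", err)` site becomes once the function returns
// `io::Error` instead of an `anyhow`-style error.
fn flush_input() -> Result<(), io::Error> {
  let success = false; // stand-in for an OS call result
  if !success {
    return Err(io::Error::new(
      io::ErrorKind::Other,
      format!(
        "Could not flush the console input buffer: {}",
        io::Error::last_os_error()
      ),
    ));
  }
  Ok(())
}

fn main() {
  assert!(flush_input().is_err());
}
```
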
-  unsafe fn is_input_buffer_empty(stdin: HANDLE) -> Result<bool, AnyError> {
+  unsafe fn is_input_buffer_empty(
+    stdin: HANDLE,
+  ) -> Result<bool, std::io::Error> {
     let mut buffer = Vec::with_capacity(1);
     let mut events_read = 0;
     let success =
       PeekConsoleInputW(stdin, buffer.as_mut_ptr(), 1, &mut events_read);
     if success != TRUE {
-      bail!(
-        "Could not peek the console input buffer: {}",
-        std::io::Error::last_os_error()
-      )
+      return Err(std::io::Error::new(
+        std::io::ErrorKind::Other,
+        format!(
+          "Could not peek the console input buffer: {}",
+          std::io::Error::last_os_error()
+        ),
+      ));
     }
     Ok(events_read == 0)
   }
 
-  fn move_cursor_up(stderr_lock: &mut StderrLock) -> Result<(), AnyError> {
-    write!(stderr_lock, "\x1B[1A")?;
-    Ok(())
+  fn move_cursor_up(
+    stderr_lock: &mut StderrLock,
+  ) -> Result<(), std::io::Error> {
+    write!(stderr_lock, "\x1B[1A")
   }
 
-  fn read_stdin_line(stdin_lock: &mut StdinLock) -> Result<(), AnyError> {
+  fn read_stdin_line(stdin_lock: &mut StdinLock) -> Result<(), std::io::Error> {
     let mut input = String::new();
     stdin_lock.read_line(&mut input)?;
     Ok(())
@@ -269,7 +286,7 @@ fn get_stdin_metadata() -> std::io::Result<Metadata> {
   unsafe {
     let stdin = std::fs::File::from_raw_fd(0);
     let metadata = stdin.metadata().unwrap();
-    stdin.into_raw_fd();
+    let _ = stdin.into_raw_fd();
     Ok(metadata)
   }
 }
diff --git a/runtime/shared.rs b/runtime/shared.rs
index 02dfd18719..f7d76f67a7 100644
--- a/runtime/shared.rs
+++ b/runtime/shared.rs
@@ -98,6 +98,7 @@ pub fn maybe_transpile_source(
       imports_not_used_as_values: deno_ast::ImportsNotUsedAsValues::Remove,
       ..Default::default()
     },
+    &deno_ast::TranspileModuleOptions::default(),
    &deno_ast::EmitOptions {
       source_map: if cfg!(debug_assertions) {
         SourceMapOption::Separate
@@ -109,9 +110,9 @@ pub fn maybe_transpile_source(
   )?
   .into_source();
 
-  let maybe_source_map: Option<SourceMapData> =
-    transpiled_source.source_map.map(|sm| sm.into());
-  let source_text = String::from_utf8(transpiled_source.source)?;
-
+  let maybe_source_map: Option<SourceMapData> = transpiled_source
+    .source_map
+    .map(|sm| sm.into_bytes().into());
+  let source_text = transpiled_source.text;
   Ok((source_text.into(), maybe_source_map))
 }
diff --git a/runtime/snapshot.rs b/runtime/snapshot.rs
index 041132f971..251ee5f41c 100644
--- a/runtime/snapshot.rs
+++ b/runtime/snapshot.rs
@@ -5,12 +5,12 @@ use crate::ops::bootstrap::SnapshotOptions;
 use crate::shared::maybe_transpile_source;
 use crate::shared::runtime;
 use deno_cache::SqliteBackedCache;
-use deno_core::error::AnyError;
 use deno_core::snapshot::*;
 use deno_core::v8;
 use deno_core::Extension;
 use deno_http::DefaultHttpPropertyExtractor;
 use deno_io::fs::FsError;
+use deno_permissions::PermissionCheckError;
 use std::borrow::Cow;
 use std::io::Write;
 use std::path::Path;
@@ -26,7 +26,7 @@ impl deno_websocket::WebSocketPermissions for Permissions {
     &mut self,
     _url: &deno_core::url::Url,
     _api_name: &str,
-  ) -> Result<(), deno_core::error::AnyError> {
+  ) -> Result<(), PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 }
@@ -42,7 +42,7 @@ impl deno_fetch::FetchPermissions for Permissions {
     &mut self,
     _url: &deno_core::url::Url,
     _api_name: &str,
-  ) -> Result<(), deno_core::error::AnyError> {
+  ) -> Result<(), PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 
@@ -50,28 +50,26 @@ impl deno_fetch::FetchPermissions for Permissions {
     &mut self,
     _p: &'a Path,
     _api_name: &str,
-  ) -> Result<Cow<'a, Path>, AnyError> {
+  ) -> Result<Cow<'a, Path>, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 }
 
 impl deno_ffi::FfiPermissions for Permissions {
-  fn check_partial_no_path(
-    &mut self,
-  ) -> Result<(), deno_core::error::AnyError> {
+  fn check_partial_no_path(&mut self) -> Result<(), PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 
   fn check_partial_with_path(
     &mut self,
     _path: &str,
-  ) -> Result<PathBuf, deno_core::error::AnyError> {
+  ) -> Result<PathBuf, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 }
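
The `shared.rs` hunk above switches the emitted source map from a `String` to raw bytes. Assuming `SourceMapData` is a byte `Cow`, which is what the restored `Option<SourceMapData>` annotation suggests, the conversion is just `into_bytes()` followed by an `Into` into the `Cow`; a self-contained sketch:

```rust
use std::borrow::Cow;

// Assumption: `SourceMapData` is a `Cow<'static, [u8]>`-like alias.
type SourceMapData = Cow<'static, [u8]>;

// The new emitter hands back an owned `String`, so the map is converted
// to bytes and then wrapped into the `Cow` via `From<Vec<u8>>`.
fn to_source_map_data(source_map: Option<String>) -> Option<SourceMapData> {
  source_map.map(|sm| sm.into_bytes().into())
}

fn main() {
  let data = to_source_map_data(Some("{\"version\":3}".to_string()));
  assert_eq!(data.unwrap().as_ref(), b"{\"version\":3}");
}
```
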
 impl deno_napi::NapiPermissions for Permissions {
-  fn check(&mut self, _path: &str) -> std::result::Result<PathBuf, AnyError> {
+  fn check(&mut self, _path: &str) -> Result<PathBuf, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 }
 
@@ -81,20 +79,27 @@ impl deno_node::NodePermissions for Permissions {
     &mut self,
     _url: &deno_core::url::Url,
     _api_name: &str,
-  ) -> Result<(), deno_core::error::AnyError> {
+  ) -> Result<(), PermissionCheckError> {
+    unreachable!("snapshotting!")
+  }
+  fn check_net(
+    &mut self,
+    _host: (&str, Option<u16>),
+    _api_name: &str,
+  ) -> Result<(), PermissionCheckError> {
     unreachable!("snapshotting!")
   }
   fn check_read_path<'a>(
     &mut self,
     _path: &'a Path,
-  ) -> Result<Cow<'a, Path>, AnyError> {
+  ) -> Result<Cow<'a, Path>, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
   fn check_read_with_api_name(
     &mut self,
     _p: &str,
     _api_name: Option<&str>,
-  ) -> Result<PathBuf, AnyError> {
+  ) -> Result<PathBuf, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
   fn query_read_all(&mut self) -> bool {
@@ -104,14 +109,14 @@ impl deno_node::NodePermissions for Permissions {
     &mut self,
     _p: &str,
     _api_name: Option<&str>,
-  ) -> Result<PathBuf, AnyError> {
+  ) -> Result<PathBuf, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
   fn check_sys(
     &mut self,
     _kind: &str,
     _api_name: &str,
-  ) -> Result<(), deno_core::error::AnyError> {
+  ) -> Result<(), PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 }
 
@@ -121,7 +126,7 @@ impl deno_net::NetPermissions for Permissions {
     &mut self,
     _host: &(T, Option<u16>),
     _api_name: &str,
-  ) -> Result<(), deno_core::error::AnyError> {
+  ) -> Result<(), PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 
@@ -129,7 +134,7 @@ impl deno_net::NetPermissions for Permissions {
     &mut self,
     _p: &str,
     _api_name: &str,
-  ) -> Result<PathBuf, AnyError> {
+  ) -> Result<PathBuf, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 
@@ -137,7 +142,7 @@ impl deno_net::NetPermissions for Permissions {
     &mut self,
     _p: &str,
     _api_name: &str,
-  ) -> Result<PathBuf, AnyError> {
+  ) -> Result<PathBuf, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 
@@ -145,7 +150,7 @@ impl deno_net::NetPermissions for Permissions {
     &mut self,
     _p: &'a Path,
     _api_name: &str,
-  ) -> Result<Cow<'a, Path>, AnyError> {
+  ) -> Result<Cow<'a, Path>, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 }
 
@@ -158,7 +163,7 @@ impl deno_fs::FsPermissions for Permissions {
     _write: bool,
     _path: &'a Path,
     _api_name: &str,
-  ) -> Result<std::borrow::Cow<'a, Path>, FsError> {
+  ) -> Result<Cow<'a, Path>, FsError> {
     unreachable!("snapshotting!")
   }
 
@@ -166,11 +171,14 @@ impl deno_fs::FsPermissions for Permissions {
     &mut self,
     _path: &str,
     _api_name: &str,
-  ) -> Result<PathBuf, AnyError> {
+  ) -> Result<PathBuf, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 
-  fn check_read_all(&mut self, _api_name: &str) -> Result<(), AnyError> {
+  fn check_read_all(
+    &mut self,
+    _api_name: &str,
+  ) -> Result<(), PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 
@@ -179,7 +187,7 @@ impl deno_fs::FsPermissions for Permissions {
     _path: &Path,
     _display: &str,
     _api_name: &str,
-  ) -> Result<(), AnyError> {
+  ) -> Result<(), PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 
@@ -187,7 +195,7 @@ impl deno_fs::FsPermissions for Permissions {
     &mut self,
     _path: &str,
     _api_name: &str,
-  ) -> Result<PathBuf, AnyError> {
+  ) -> Result<PathBuf, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 
@@ -195,11 +203,14 @@ impl deno_fs::FsPermissions for Permissions {
     &mut self,
     _path: &str,
     _api_name: &str,
-  ) -> Result<PathBuf, AnyError> {
+  ) -> Result<PathBuf, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 
-  fn check_write_all(&mut self, _api_name: &str) -> Result<(), AnyError> {
+  fn check_write_all(
+    &mut self,
+    _api_name: &str,
+  ) -> Result<(), PermissionCheckError> {
     unreachable!("snapshotting!")
   }
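
All of these `snapshot.rs` impls intentionally panic: at snapshot build time no user code executes, so a permission check ever firing would be a bug worth crashing on. A minimal sketch of the pattern with hypothetical trait names:

```rust
#[derive(Debug)]
struct PermissionCheckError;

trait NetPermissions {
  fn check_net_url(&mut self, url: &str) -> Result<(), PermissionCheckError>;
}

// During snapshotting no JS runs, so every permission hook is
// deliberately unreachable rather than silently permissive.
struct SnapshotPermissions;

impl NetPermissions for SnapshotPermissions {
  fn check_net_url(
    &mut self,
    _url: &str,
  ) -> Result<(), PermissionCheckError> {
    unreachable!("snapshotting!")
  }
}

fn main() {
  let _p = SnapshotPermissions; // satisfies the trait bound, never checked
}
```
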
@@ -208,7 +219,7 @@ impl deno_fs::FsPermissions for Permissions {
     _path: &Path,
     _display: &str,
     _api_name: &str,
-  ) -> Result<(), AnyError> {
+  ) -> Result<(), PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 
@@ -216,7 +227,7 @@ impl deno_fs::FsPermissions for Permissions {
     &mut self,
     _path: &'a Path,
     _api_name: &str,
-  ) -> Result<Cow<'a, Path>, AnyError> {
+  ) -> Result<Cow<'a, Path>, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 
@@ -224,7 +235,7 @@ impl deno_fs::FsPermissions for Permissions {
     &mut self,
     _path: &'a Path,
     _api_name: &str,
-  ) -> Result<Cow<'a, Path>, AnyError> {
+  ) -> Result<Cow<'a, Path>, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 }
 
@@ -234,7 +245,7 @@ impl deno_kv::sqlite::SqliteDbHandlerPermissions for Permissions {
     &mut self,
     _path: &str,
     _api_name: &str,
-  ) -> Result<PathBuf, AnyError> {
+  ) -> Result<PathBuf, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 
@@ -242,7 +253,7 @@ impl deno_kv::sqlite::SqliteDbHandlerPermissions for Permissions {
     &mut self,
     _path: &'a Path,
     _api_name: &str,
-  ) -> Result<Cow<'a, Path>, AnyError> {
+  ) -> Result<Cow<'a, Path>, PermissionCheckError> {
     unreachable!("snapshotting!")
   }
 }
diff --git a/runtime/tokio_util.rs b/runtime/tokio_util.rs
index 0d81f6e235..aa0282ece8 100644
--- a/runtime/tokio_util.rs
+++ b/runtime/tokio_util.rs
@@ -43,7 +43,15 @@ pub fn create_basic_runtime() -> tokio::runtime::Runtime {
     // parallel for deno fmt.
     // The default value is 512, which is an unhelpfully large thread pool. We
     // don't ever want to have more than a couple dozen threads.
-    .max_blocking_threads(32)
+    .max_blocking_threads(if cfg!(windows) {
+      // on windows, tokio uses blocking tasks for child process IO, make sure
+      // we have enough available threads for other tasks to run
+      4 * std::thread::available_parallelism()
+        .map(|n| n.get())
+        .unwrap_or(8)
+    } else {
+      32
+    })
     .build()
     .unwrap()
 }
diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs
index f560ce17ec..61e5c77029 100644
--- a/runtime/web_worker.rs
+++ b/runtime/web_worker.rs
@@ -166,7 +166,10 @@ pub struct WebWorkerInternalHandle {
 impl WebWorkerInternalHandle {
   /// Post WorkerEvent to parent as a worker
-  pub fn post_event(&self, event: WorkerControlEvent) -> Result<(), AnyError> {
+  pub fn post_event(
+    &self,
+    event: WorkerControlEvent,
+  ) -> Result<(), mpsc::TrySendError<WorkerControlEvent>> {
     let mut sender = self.sender.clone();
     // If the channel is closed,
     // the worker must have terminated but the termination message has not yet been received.
@@ -176,8 +179,7 @@ impl WebWorkerInternalHandle {
       self.has_terminated.store(true, Ordering::SeqCst);
       return Ok(());
     }
-    sender.try_send(event)?;
-    Ok(())
+    sender.try_send(event)
   }
 
   /// Check if this worker is terminated or being terminated
@@ -263,11 +265,9 @@ impl WebWorkerHandle {
   /// Get the WorkerEvent with lock
   /// Return error if more than one listener tries to get event
   #[allow(clippy::await_holding_refcell_ref)] // TODO(ry) remove!
-  pub async fn get_control_event(
-    &self,
-  ) -> Result<Option<WorkerControlEvent>, AnyError> {
+  pub async fn get_control_event(&self) -> Option<WorkerControlEvent> {
     let mut receiver = self.receiver.borrow_mut();
-    Ok(receiver.next().await)
+    receiver.next().await
   }
 
   /// Terminate the worker
@@ -562,7 +562,7 @@ impl WebWorker {
       extension_transpiler: Some(Rc::new(|specifier, source| {
         maybe_transpile_source(specifier, source)
       })),
-      inspector: services.maybe_inspector_server.is_some(),
+      inspector: true,
       feature_checker: Some(services.feature_checker),
       op_metrics_factory_fn,
       import_meta_resolve_callback: Some(Box::new(
@@ -579,18 +579,18 @@ impl WebWorker {
       js_runtime.op_state().borrow_mut().put(op_summary_metrics);
     }
 
+    // Put inspector handle into the op state so we can put a breakpoint when
+    // executing a CJS entrypoint.
+ let op_state = js_runtime.op_state(); + let inspector = js_runtime.inspector(); + op_state.borrow_mut().put(inspector); + if let Some(server) = services.maybe_inspector_server { server.register_inspector( options.main_module.to_string(), &mut js_runtime, false, ); - - // Put inspector handle into the op state so we can put a breakpoint when - // executing a CJS entrypoint. - let op_state = js_runtime.op_state(); - let inspector = js_runtime.inspector(); - op_state.borrow_mut().put(inspector); } let (internal_handle, external_handle) = { diff --git a/runtime/worker.rs b/runtime/worker.rs index 477d3b880c..88a61fa938 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -20,6 +20,8 @@ use deno_core::CompiledWasmModuleStore; use deno_core::Extension; use deno_core::FeatureChecker; use deno_core::GetErrorClassFn; +use deno_core::InspectorSessionKind; +use deno_core::InspectorSessionOptions; use deno_core::JsRuntime; use deno_core::LocalInspectorSession; use deno_core::ModuleCodeString; @@ -486,7 +488,7 @@ impl MainWorker { extension_transpiler: Some(Rc::new(|specifier, source| { maybe_transpile_source(specifier, source) })), - inspector: options.maybe_inspector_server.is_some(), + inspector: true, is_main: true, feature_checker: Some(services.feature_checker.clone()), op_metrics_factory_fn, @@ -544,6 +546,12 @@ impl MainWorker { js_runtime.op_state().borrow_mut().put(op_summary_metrics); } + // Put inspector handle into the op state so we can put a breakpoint when + // executing a CJS entrypoint. + let op_state = js_runtime.op_state(); + let inspector = js_runtime.inspector(); + op_state.borrow_mut().put(inspector); + if let Some(server) = options.maybe_inspector_server.clone() { server.register_inspector( main_module.to_string(), @@ -551,13 +559,8 @@ impl MainWorker { options.should_break_on_first_statement || options.should_wait_for_inspector_session, ); - - // Put inspector handle into the op state so we can put a breakpoint when - // executing a CJS entrypoint. - let op_state = js_runtime.op_state(); - let inspector = js_runtime.inspector(); - op_state.borrow_mut().put(inspector); } + let ( bootstrap_fn_global, dispatch_load_event_fn_global, @@ -792,7 +795,11 @@ impl MainWorker { /// was not configured to create inspector. 
pub fn create_inspector_session(&mut self) -> LocalInspectorSession { self.js_runtime.maybe_init_inspector(); - self.js_runtime.inspector().borrow().create_local_session() + self.js_runtime.inspector().borrow().create_local_session( + InspectorSessionOptions { + kind: InspectorSessionKind::Blocking, + }, + ) } pub async fn run_event_loop( diff --git a/rust-toolchain.toml b/rust-toolchain.toml index f19c7df470..3d572e0d69 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "1.81.0" +channel = "1.82.0" components = ["rustfmt", "clippy"] diff --git a/tests/Cargo.toml b/tests/Cargo.toml index bb84c22496..ede509e380 100644 --- a/tests/Cargo.toml +++ b/tests/Cargo.toml @@ -47,6 +47,8 @@ deno_tls.workspace = true fastwebsockets = { workspace = true, features = ["upgrade", "unstable-split"] } file_test_runner = "0.7.2" flaky_test = "=0.2.2" +hickory-client = "=0.24" +hickory-server = "=0.24" http.workspace = true http-body-util.workspace = true hyper.workspace = true @@ -60,8 +62,6 @@ serde.workspace = true test_util.workspace = true tokio.workspace = true tower-lsp.workspace = true -trust-dns-client = "=0.23.2" -trust-dns-server = "=0.23.2" url.workspace = true uuid = { workspace = true, features = ["serde"] } zeromq.workspace = true diff --git a/tests/ffi/tests/integration_tests.rs b/tests/ffi/tests/integration_tests.rs index c84a1b820d..dbc0036bc2 100644 --- a/tests/ffi/tests/integration_tests.rs +++ b/tests/ffi/tests/integration_tests.rs @@ -95,10 +95,10 @@ fn basic() { 579.912\n\ 579.912\n\ 579\n\ - 8589934590\n\ - -8589934590\n\ - 8589934590\n\ - -8589934590\n\ + 8589934590n\n\ + -8589934590n\n\ + 8589934590n\n\ + -8589934590n\n\ 9007199254740992n\n\ 9007199254740992n\n\ -9007199254740992n\n\ diff --git a/tests/integration/flags_tests.rs b/tests/integration/flags_tests.rs index 455507b9fa..663da363da 100644 --- a/tests/integration/flags_tests.rs +++ b/tests/integration/flags_tests.rs @@ -18,8 +18,8 @@ fn help_output() { "Start an interactive Read-Eval-Print Loop (REPL) for Deno", "Evaluate a script from the command line", "Add dependencies", - "Install script as an executable", - "Uninstall a script previously installed with deno install", + "Installs dependencies either in the local project or globally to a bin directory", + "Uninstalls a dependency or an executable script in the installation root's bin directory", "Run benchmarks", "Type-check the dependencies", "Compile the script into a self contained executable", diff --git a/tests/integration/lsp_tests.rs b/tests/integration/lsp_tests.rs index 85e02041ed..af5f9de23e 100644 --- a/tests/integration/lsp_tests.rs +++ b/tests/integration/lsp_tests.rs @@ -1827,15 +1827,41 @@ fn lsp_hover_disabled() { fn lsp_inlay_hints() { let context = TestContextBuilder::new().use_temp_cwd().build(); let mut client = context.new_lsp_command().build(); - client.initialize(|builder| { - builder.enable_inlay_hints(); - }); + client.initialize_default(); + client.change_configuration(json!({ + "deno": { + "enable": true, + }, + "typescript": { + "inlayHints": { + "parameterNames": { + "enabled": "all", + }, + "parameterTypes": { + "enabled": true, + }, + "variableTypes": { + "enabled": true, + }, + "propertyDeclarationTypes": { + "enabled": true, + }, + "functionLikeReturnTypes": { + "enabled": true, + }, + "enumMemberValues": { + "enabled": true, + }, + }, + }, + })); client.did_open(json!({ "textDocument": { "uri": "file:///a/file.ts", "languageId": "typescript", "version": 1, - "text": r#"function a(b: string) { 
+ "text": r#" + function a(b: string) { return b; } @@ -1854,8 +1880,19 @@ fn lsp_inlay_hints() { } ["a"].map((v) => v + v); - "# - } + + interface Bar { + someField: string; + } + function getBar(): Bar { + return { someField: "foo" }; + } + // This shouldn't have a type hint because the variable name makes it + // redundant. + const bar = getBar(); + const someValue = getBar(); + "#, + }, })); let res = client.write_request( "textDocument/inlayHint", @@ -1864,65 +1901,130 @@ fn lsp_inlay_hints() { "uri": "file:///a/file.ts", }, "range": { - "start": { "line": 0, "character": 0 }, - "end": { "line": 19, "character": 0, } - } + "start": { "line": 1, "character": 0 }, + "end": { "line": 31, "character": 0, }, + }, }), ); assert_eq!( res, json!([ { - "position": { "line": 0, "character": 21 }, - "label": ": string", + "position": { "line": 1, "character": 29 }, + "label": [{ "value": ": " }, { "value": "string" }], "kind": 1, - "paddingLeft": true + "paddingLeft": true, }, { - "position": { "line": 4, "character": 10 }, - "label": "b:", + "position": { "line": 5, "character": 10 }, + "label": [ + { + "value": "b", + "location": { + "uri": "file:///a/file.ts", + "range": { + "start": { "line": 1, "character": 19 }, + "end": { "line": 1, "character": 20 }, + }, + }, + }, + { "value": ":" }, + ], "kind": 2, - "paddingRight": true + "paddingRight": true, }, { - "position": { "line": 7, "character": 11 }, + "position": { "line": 8, "character": 11 }, "label": "= 0", - "paddingLeft": true + "paddingLeft": true, }, { - "position": { "line": 10, "character": 17 }, - "label": "string:", + "position": { "line": 11, "character": 17 }, + "label": [ + { + "value": "string", + "location": { + "uri": "deno:/asset/lib.es5.d.ts", + "range": { + "start": { "line": 41, "character": 26 }, + "end": { "line": 41, "character": 32 }, + }, + }, + }, + { "value": ":" }, + ], "kind": 2, - "paddingRight": true + "paddingRight": true, }, { - "position": { "line": 10, "character": 24 }, - "label": "radix:", + "position": { "line": 11, "character": 24 }, + "label": [ + { + "value": "radix", + "location": { + "uri": "deno:/asset/lib.es5.d.ts", + "range": { + "start": { "line": 41, "character": 42 }, + "end": { "line": 41, "character": 47 }, + }, + }, + }, + { "value": ":" }, + ], "kind": 2, - "paddingRight": true + "paddingRight": true, }, { - "position": { "line": 12, "character": 15 }, - "label": ": number", + "position": { "line": 13, "character": 15 }, + "label": [{ "value": ": " }, { "value": "number" }], "kind": 1, - "paddingLeft": true + "paddingLeft": true, }, { - "position": { "line": 15, "character": 11 }, - "label": ": number", + "position": { "line": 16, "character": 11 }, + "label": [{ "value": ": " }, { "value": "number" }], "kind": 1, - "paddingLeft": true + "paddingLeft": true, }, { - "position": { "line": 18, "character": 18 }, - "label": "callbackfn:", + "position": { "line": 19, "character": 18 }, + "label": [ + { + "value": "callbackfn", + "location": { + "uri": "deno:/asset/lib.es5.d.ts", + "range": { + "start": { "line": 1462, "character": 11 }, + "end": { "line": 1462, "character": 21 }, + }, + }, + }, + { "value": ":" }, + ], "kind": 2, - "paddingRight": true + "paddingRight": true, }, { - "position": { "line": 18, "character": 20 }, - "label": ": string", + "position": { "line": 19, "character": 20 }, + "label": [{ "value": ": " }, { "value": "string" }], "kind": 1, - "paddingLeft": true + "paddingLeft": true, }, { - "position": { "line": 18, "character": 21 }, - "label": ": string", + "position": 
{ "line": 19, "character": 21 }, + "label": [{ "value": ": " }, { "value": "string" }], "kind": 1, - "paddingLeft": true - } - ]) + "paddingLeft": true, + }, { + "position": { "line": 30, "character": 23 }, + "label": [ + { "value": ": " }, + { + "value": "Bar", + "location": { + "uri": "file:///a/file.ts", + "range": { + "start": { "line": 21, "character": 18 }, + "end": { "line": 21, "character": 21 }, + }, + }, + }, + ], + "kind": 1, + "paddingLeft": true, + }, + ]), ); client.shutdown(); } @@ -5853,7 +5955,7 @@ fn lsp_jsr_code_action_missing_declaration() { "character": 6, }, }, - "newText": "import type { ReturnType } from \"jsr:@denotest/types-file/types\";\n", + "newText": "import { ReturnType } from \"jsr:@denotest/types-file/types\";\n", }, { "range": { @@ -6294,6 +6396,45 @@ fn lsp_cache_on_save() { client.shutdown(); } +// Regression test for https://github.com/denoland/deno/issues/25999. +#[test] +fn lsp_asset_document_dom_code_action() { + let context = TestContextBuilder::new().use_temp_cwd().build(); + let temp_dir = context.temp_dir(); + temp_dir.write( + "deno.json", + json!({ + "compilerOptions": { + "lib": ["deno.window", "dom"], + }, + }) + .to_string(), + ); + let mut client = context.new_lsp_command().build(); + client.initialize_default(); + client.did_open(json!({ + "textDocument": { + "uri": temp_dir.url().join("file.ts").unwrap(), + "languageId": "typescript", + "version": 1, + "text": r#""#, + }, + })); + let res = client.write_request( + "textDocument/codeAction", + json!({ + "textDocument": { "uri": "asset:///lib.dom.d.ts" }, + "range": { + "start": { "line": 0, "character": 0 }, + "end": { "line": 0, "character": 0 }, + }, + "context": { "diagnostics": [], "only": ["quickfix"] }, + }), + ); + assert_eq!(res, json!(null)); + client.shutdown(); +} + // Regression test for https://github.com/denoland/deno/issues/22122. #[test] fn lsp_cache_then_definition() { @@ -6367,6 +6508,16 @@ fn lsp_code_actions_imports() { let context = TestContextBuilder::new().use_temp_cwd().build(); let mut client = context.new_lsp_command().build(); client.initialize_default(); + client.change_configuration(json!({ + "deno": { + "enable": true, + }, + "typescript": { + "preferences": { + "preferTypeOnlyAutoImports": true, + }, + }, + })); client.did_open(json!({ "textDocument": { "uri": "file:///a/file00.ts", @@ -6477,6 +6628,23 @@ export class DuckConfig { }] }] } + }, { + "title": "Add all missing imports", + "kind": "quickfix", + "diagnostics": [{ + "range": { + "start": { "line": 0, "character": 50 }, + "end": { "line": 0, "character": 67 } + }, + "severity": 1, + "code": 2304, + "source": "deno-ts", + "message": "Cannot find name 'DuckConfigOptions'." + }], + "data": { + "specifier": "file:///a/file00.ts", + "fixId": "fixMissingImport" + } }, { "title": "Add import from \"./file01.ts\"", "kind": "quickfix", @@ -6505,23 +6673,6 @@ export class DuckConfig { }] }] } - }, { - "title": "Add all missing imports", - "kind": "quickfix", - "diagnostics": [{ - "range": { - "start": { "line": 0, "character": 50 }, - "end": { "line": 0, "character": 67 } - }, - "severity": 1, - "code": 2304, - "source": "deno-ts", - "message": "Cannot find name 'DuckConfigOptions'." 
- }], - "data": { - "specifier": "file:///a/file00.ts", - "fixId": "fixMissingImport" - } }]) ); let res = client.write_request( @@ -6666,7 +6817,7 @@ fn lsp_code_actions_imports_dts() { "start": { "line": 0, "character": 0 }, "end": { "line": 0, "character": 0 }, }, - "newText": "import type { SomeType } from \"./decl.d.ts\";\n", + "newText": "import { SomeType } from \"./decl.d.ts\";\n", }], }], }, @@ -6675,6 +6826,117 @@ fn lsp_code_actions_imports_dts() { client.shutdown(); } +#[test] +fn lsp_code_actions_import_map_remap() { + let context = TestContextBuilder::new().use_temp_cwd().build(); + let temp_dir = context.temp_dir(); + temp_dir.write( + "deno.json", + json!({ + "imports": { + "foo": "./foo.ts", + "bar": "./bar.ts", + }, + }) + .to_string(), + ); + temp_dir.write("foo.ts", ""); + temp_dir.write("bar.ts", ""); + let mut client = context.new_lsp_command().build(); + client.initialize_default(); + let diagnostics = client.did_open(json!({ + "textDocument": { + "uri": temp_dir.url().join("file.ts").unwrap(), + "languageId": "typescript", + "version": 1, + "text": r#" + import "./foo.ts"; + import type {} from "./bar.ts"; + "#, + } + })); + let res = client.write_request( + "textDocument/codeAction", + json!({ + "textDocument": { "uri": temp_dir.url().join("file.ts").unwrap() }, + "range": { + "start": { "line": 0, "character": 0 }, + "end": { "line": 3, "character": 0 }, + }, + "context": { + "diagnostics": diagnostics.all(), + "only": ["quickfix"], + }, + }), + ); + assert_eq!( + res, + json!([ + { + "title": "Update \"./foo.ts\" to \"foo\" to use import map.", + "kind": "quickfix", + "diagnostics": [ + { + "range": { + "start": { "line": 1, "character": 15 }, + "end": { "line": 1, "character": 25 }, + }, + "severity": 4, + "code": "import-map-remap", + "source": "deno", + "message": "The import specifier can be remapped to \"foo\" which will resolve it via the active import map.", + "data": { "from": "./foo.ts", "to": "foo" }, + }, + ], + "edit": { + "changes": { + temp_dir.url().join("file.ts").unwrap(): [ + { + "range": { + "start": { "line": 1, "character": 15 }, + "end": { "line": 1, "character": 25 }, + }, + "newText": "\"foo\"", + }, + ], + }, + }, + }, + { + "title": "Update \"./bar.ts\" to \"bar\" to use import map.", + "kind": "quickfix", + "diagnostics": [ + { + "range": { + "start": { "line": 2, "character": 28 }, + "end": { "line": 2, "character": 38 }, + }, + "severity": 4, + "code": "import-map-remap", + "source": "deno", + "message": "The import specifier can be remapped to \"bar\" which will resolve it via the active import map.", + "data": { "from": "./bar.ts", "to": "bar" }, + }, + ], + "edit": { + "changes": { + temp_dir.url().join("file.ts").unwrap(): [ + { + "range": { + "start": { "line": 2, "character": 28 }, + "end": { "line": 2, "character": 38 }, + }, + "newText": "\"bar\"", + }, + ], + }, + }, + }, + ]), + ); + client.shutdown(); +} + #[test] fn lsp_code_actions_refactor() { let context = TestContextBuilder::new().use_temp_cwd().build(); @@ -6986,7 +7248,7 @@ fn lsp_code_actions_imports_respects_fmt_config() { "start": { "line": 0, "character": 0 }, "end": { "line": 0, "character": 0 } }, - "newText": "import type { DuckConfigOptions } from './file01.ts'\n" + "newText": "import { DuckConfigOptions } from './file01.ts'\n" }] }] } @@ -7039,7 +7301,7 @@ fn lsp_code_actions_imports_respects_fmt_config() { "start": { "line": 0, "character": 0 }, "end": { "line": 0, "character": 0 } }, - "newText": "import type { DuckConfigOptions } from './file01.ts'\n" + 
"newText": "import { DuckConfigOptions } from './file01.ts'\n" }] }] }, @@ -7139,7 +7401,7 @@ fn lsp_quote_style_from_workspace_settings() { "start": { "line": 0, "character": 0 }, "end": { "line": 0, "character": 0 }, }, - "newText": "import type { DuckConfigOptions } from './file01.ts';\n", + "newText": "import { DuckConfigOptions } from './file01.ts';\n", }], }], }, @@ -7183,7 +7445,7 @@ fn lsp_quote_style_from_workspace_settings() { "start": { "line": 0, "character": 0 }, "end": { "line": 0, "character": 0 }, }, - "newText": "import type { DuckConfigOptions } from \"./file01.ts\";\n", + "newText": "import { DuckConfigOptions } from \"./file01.ts\";\n", }], }], }, @@ -7863,6 +8125,151 @@ fn lsp_npm_completions_auto_import_and_quick_fix_no_import_map() { client.shutdown(); } +#[test] +fn lsp_npm_auto_import_and_quick_fix_byonm() { + let context = TestContextBuilder::new() + .use_http_server() + .use_temp_cwd() + .add_npm_env_vars() + .build(); + let temp_dir = context.temp_dir(); + temp_dir.write("deno.json", json!({}).to_string()); + temp_dir.write( + "package.json", + json!({ + "dependencies": { + "cowsay": "*", + }, + }) + .to_string(), + ); + context + .new_command() + .args("install") + .run() + .skip_output_check(); + temp_dir.write("other.ts", "import \"cowsay\";\n"); + let mut client = context.new_lsp_command().build(); + client.initialize_default(); + let diagnostics = client.did_open(json!({ + "textDocument": { + "uri": temp_dir.url().join("file.ts").unwrap(), + "languageId": "typescript", + "version": 1, + "text": "think({ text: \"foo\" });\n", + }, + })); + let list = client.get_completion_list( + temp_dir.url().join("file.ts").unwrap(), + (0, 5), + json!({ "triggerKind": 1 }), + ); + assert!(!list.is_incomplete); + let item = list + .items + .iter() + .find(|item| item.label == "think") + .unwrap(); + let res = client.write_request("completionItem/resolve", item); + assert_eq!( + res, + json!({ + "label": "think", + "labelDetails": { + "description": "cowsay", + }, + "kind": 3, + "detail": "function think(options: IOptions): string", + "documentation": { + "kind": "markdown", + "value": "\n\n*@param* \noptions ## Face :\nEither choose a mode (set the value as true) **_or_**\nset your own defined eyes and tongue to `e` and `T`.\n- ### `e` : eyes\n- ### `T` : tongue\n\n## Cow :\nEither specify a cow name (e.g. 
\"fox\") **_or_**\nset the value of `r` to true which selects a random cow.\n- ### `r` : random selection\n- ### `f` : cow name - from `cows` folder\n\n## Modes :\nModes are just ready-to-use faces, here's their list:\n- #### `b` : borg\n- #### `d` : dead \n- #### `g` : greedy\n- #### `p` : paranoia\n- #### `s` : stoned\n- #### `t` : tired\n- #### `w` : youthful\n- #### `y` : wired \n\n*@example* \n```\n// custom cow and face\ncowsay.think({\n text: 'Hello world!',\n e: '^^', // eyes\n T: 'U ', // tongue\n f: 'USA' // name of the cow from `cows` folder\n})\n\n// using a random cow\ncowsay.think({\n text: 'Hello world!',\n e: 'xx', // eyes\n r: true, // random mode - use a random cow.\n})\n\n// using a mode\ncowsay.think({\n text: 'Hello world!',\n y: true, // using y mode - youthful mode\n})\n```", + }, + "sortText": "￿16_0", + "additionalTextEdits": [ + { + "range": { + "start": { "line": 0, "character": 0 }, + "end": { "line": 0, "character": 0 }, + }, + "newText": "import { think } from \"cowsay\";\n\n", + }, + ], + }), + ); + let diagnostics = diagnostics + .messages_with_file_and_source( + temp_dir.url().join("file.ts").unwrap().as_str(), + "deno-ts", + ) + .diagnostics; + let res = client.write_request( + "textDocument/codeAction", + json!(json!({ + "textDocument": { + "uri": temp_dir.url().join("file.ts").unwrap(), + }, + "range": { + "start": { "line": 0, "character": 0 }, + "end": { "line": 0, "character": 5 }, + }, + "context": { + "diagnostics": &diagnostics, + "only": ["quickfix"], + }, + })), + ); + assert_eq!( + res, + json!([ + { + "title": "Add import from \"cowsay\"", + "kind": "quickfix", + "diagnostics": &diagnostics, + "edit": { + "documentChanges": [{ + "textDocument": { + "uri": temp_dir.url().join("file.ts").unwrap(), + "version": 1, + }, + "edits": [{ + "range": { + "start": { "line": 0, "character": 0 }, + "end": { "line": 0, "character": 0 }, + }, + "newText": "import { think } from \"cowsay\";\n\n", + }], + }], + }, + }, + { + "title": "Add missing function declaration 'think'", + "kind": "quickfix", + "diagnostics": &diagnostics, + "edit": { + "documentChanges": [ + { + "textDocument": { + "uri": temp_dir.url().join("file.ts").unwrap(), + "version": 1, + }, + "edits": [ + { + "range": { + "start": { "line": 1, "character": 0 }, + "end": { "line": 1, "character": 0 }, + }, + "newText": "\nfunction think(arg0: { text: string; }) {\n throw new Error(\"Function not implemented.\");\n}\n", + }, + ], + }, + ], + }, + }, + ]), + ); + client.shutdown(); +} + #[test] fn lsp_completions_node_specifier() { let context = TestContextBuilder::new().use_temp_cwd().build(); @@ -7975,8 +8382,8 @@ fn lsp_infer_return_type() { let context = TestContextBuilder::new().use_temp_cwd().build(); let temp_dir = context.temp_dir(); temp_dir.write("deno.json", json!({}).to_string()); - let types_file = source_file( - temp_dir.path().join("types.d.ts"), + temp_dir.write( + "types.d.ts", r#" export interface SomeInterface { someField: number; @@ -8057,7 +8464,7 @@ fn lsp_infer_return_type() { "start": { "line": 1, "character": 20 }, "end": { "line": 1, "character": 20 }, }, - "newText": format!(": import(\"{}\").SomeInterface", types_file.url()), + "newText": ": import(\"./types.d.ts\").SomeInterface", }, ], }, @@ -15261,25 +15668,23 @@ fn lsp_sloppy_imports() { fn lsp_sloppy_imports_prefers_dts() { let context = TestContextBuilder::new().use_temp_cwd().build(); let temp_dir = context.temp_dir(); - let temp_dir = temp_dir.path(); - - temp_dir - .join("deno.json") - .write(r#"{ "unstable": 
["sloppy-imports"] }"#); - - let mut client: LspClient = context - .new_lsp_command() - .set_root_dir(temp_dir.clone()) - .build(); - client.initialize_default(); - - temp_dir.join("a.js").write("export const foo: number;"); - - let a_dts = source_file(temp_dir.join("a.d.ts"), "export const foo = 3;"); + temp_dir.write("deno.json", json!({}).to_string()); + temp_dir.write("a.js", "export const foo: number;"); + let a_dts = + source_file(temp_dir.path().join("a.d.ts"), "export const foo = 3;"); let file = source_file( - temp_dir.join("file.ts"), + temp_dir.path().join("file.ts"), "import { foo } from './a.js';\nconsole.log(foo);", ); + let mut client: LspClient = context.new_lsp_command().build(); + client.initialize_default(); + client.change_configuration(json!({ + "deno": { + "enable": true, + "unstable": ["sloppy-imports"], + }, + })); + let diagnostics = client.did_open_file(&file); // no other warnings because "a.js" exists assert_eq!( diff --git a/tests/integration/node_unit_tests.rs b/tests/integration/node_unit_tests.rs index d66db5a40e..40bd7b2fbb 100644 --- a/tests/integration/node_unit_tests.rs +++ b/tests/integration/node_unit_tests.rs @@ -212,3 +212,7 @@ itest!(unhandled_rejection_web_process { envs: env_vars_for_npm_tests(), http_server: true, }); + +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +// The itest macro is deprecated. Please move your new test to ~/tests/specs. +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! diff --git a/tests/integration/run_tests.rs b/tests/integration/run_tests.rs index db9f79556e..e29ecc486c 100644 --- a/tests/integration/run_tests.rs +++ b/tests/integration/run_tests.rs @@ -16,280 +16,19 @@ use deno_tls::rustls; use deno_tls::rustls::ClientConnection; use deno_tls::rustls_pemfile; use deno_tls::TlsStream; +use hickory_client::serialize::txt::Parser; use pretty_assertions::assert_eq; use test_util as util; use test_util::itest; use test_util::TempDir; -use trust_dns_client::serialize::txt::Lexer; -use trust_dns_client::serialize::txt::Parser; use util::assert_contains; use util::assert_not_contains; -use util::env_vars_for_npm_tests; use util::PathRef; use util::TestContext; use util::TestContextBuilder; const CODE_CACHE_DB_FILE_NAME: &str = "v8_code_cache_v2"; -itest!(stdout_write_all { - args: "run --quiet run/stdout_write_all.ts", - output: "run/stdout_write_all.out", -}); - -itest!(stdin_read_all { - args: "run --quiet run/stdin_read_all.ts", - output: "run/stdin_read_all.out", - input: Some("01234567890123456789012345678901234567890123456789"), -}); - -itest!(stdout_write_sync_async { - args: "run --quiet run/stdout_write_sync_async.ts", - output: "run/stdout_write_sync_async.out", -}); - -itest!(_001_hello { - args: "run --reload run/001_hello.js", - output: "run/001_hello.js.out", -}); - -itest!(_002_hello { - args: "run --quiet --reload run/002_hello.ts", - output: "run/002_hello.ts.out", -}); - -itest!(_003_relative_import { - args: "run --quiet --reload run/003_relative_import.ts", - output: "run/003_relative_import.ts.out", -}); - -itest!(_004_set_timeout { - args: "run --quiet --reload run/004_set_timeout.ts", - output: "run/004_set_timeout.ts.out", -}); - -itest!(_005_more_imports { - args: "run --quiet --reload run/005_more_imports.ts", - output: "run/005_more_imports.ts.out", -}); - -itest!(_006_url_imports { - args: "run --quiet --reload --allow-import run/006_url_imports.ts", - output: "run/006_url_imports.ts.out", - http_server: true, -}); - -itest!(_012_async { - 
args: "run --quiet --reload run/012_async.ts", - output: "run/012_async.ts.out", -}); - -itest!(_013_dynamic_import { - args: "run --quiet --reload --allow-read run/013_dynamic_import.ts", - output: "run/013_dynamic_import.ts.out", -}); - -itest!(_014_duplicate_import { - args: "run --quiet --reload --allow-read run/014_duplicate_import.ts ", - output: "run/014_duplicate_import.ts.out", -}); - -itest!(_015_duplicate_parallel_import { - args: - "run --quiet --reload --allow-read run/015_duplicate_parallel_import.js", - output: "run/015_duplicate_parallel_import.js.out", -}); - -itest!(_016_double_await { - args: "run --quiet --allow-read --reload run/016_double_await.ts", - output: "run/016_double_await.ts.out", -}); - -itest!(_017_import_redirect { - args: "run --quiet --allow-import --reload run/017_import_redirect.ts", - output: "run/017_import_redirect.ts.out", -}); - -itest!(_017_import_redirect_check { - args: - "run --quiet --allow-import --reload --check run/017_import_redirect.ts", - output: "run/017_import_redirect.ts.out", -}); - -itest!(_017_import_redirect_vendor_dir { - args: - "run --quiet --allow-import --reload --vendor --check $TESTDATA/run/017_import_redirect.ts", - output: "run/017_import_redirect.ts.out", - temp_cwd: true, -}); - -itest!(_017_import_redirect_info { - args: "info --quiet --allow-import --reload run/017_import_redirect.ts", - output: "run/017_import_redirect_info.out", -}); - -itest!(_018_async_catch { - args: "run --quiet --reload run/018_async_catch.ts", - output: "run/018_async_catch.ts.out", -}); - -itest!(_019_media_types { - args: "run --reload --allow-import run/019_media_types.ts", - output: "run/019_media_types.ts.out", - http_server: true, -}); - -itest!(_020_json_modules { - args: "run --reload run/020_json_modules.ts", - output: "run/020_json_modules.ts.out", - exit_code: 1, -}); - -itest!(_021_mjs_modules { - args: "run --quiet --reload run/021_mjs_modules.ts", - output: "run/021_mjs_modules.ts.out", -}); - -itest!(_025_reload_js_type_error { - args: "run --quiet --reload run/025_reload_js_type_error.js", - output: "run/025_reload_js_type_error.js.out", -}); - -itest!(_027_redirect_typescript { - args: "run --quiet --reload --allow-import run/027_redirect_typescript.ts", - output: "run/027_redirect_typescript.ts.out", - http_server: true, -}); - -itest!(_027_redirect_typescript_vendor_dir { - args: - "run --quiet --reload --vendor --allow-import $TESTDATA/run/027_redirect_typescript.ts", - output: "run/027_redirect_typescript.ts.out", - http_server: true, - temp_cwd: true, -}); - -itest!(_028_args { - args: - "run --quiet --reload run/028_args.ts --arg1 val1 --arg2=val2 -- arg3 arg4", - output: "run/028_args.ts.out", -}); - -itest!(_033_import_map_remote { - args: - "run --quiet --reload --allow-import --import-map=http://127.0.0.1:4545/import_maps/import_map_remote.json import_maps/test_remote.ts", - output: "run/033_import_map_remote.out", - http_server: true, -}); - -itest!(_033_import_map_vendor_dir_remote { - args: - "run --quiet --reload --allow-import --import-map=http://127.0.0.1:4545/import_maps/import_map_remote.json --vendor $TESTDATA/import_maps/test_remote.ts", - output: "run/033_import_map_remote.out", - http_server: true, - temp_cwd: true, -}); - -itest!(_033_import_map_data_uri { - args: - "run --quiet --reload --allow-import --import-map=data:application/json;charset=utf-8;base64,ewogICJpbXBvcnRzIjogewogICAgInRlc3Rfc2VydmVyLyI6ICJodHRwOi8vbG9jYWxob3N0OjQ1NDUvIgogIH0KfQ== run/import_maps/test_data.ts", - output: 
"run/import_maps/test_data.ts.out", - http_server: true, -}); - -itest!(onload { - args: "run --quiet --reload --config ../config/deno.json run/onload/main.ts", - output: "run/onload/main.out", -}); - -itest!(_035_cached_only_flag { - args: "run --reload --check --allow-import --cached-only http://127.0.0.1:4545/run/019_media_types.ts", - output: "run/035_cached_only_flag.out", - exit_code: 1, - http_server: true, -}); - -itest!(_038_checkjs { - // checking if JS file is run through TS compiler - args: - "run --reload --config run/checkjs.tsconfig.json --check run/038_checkjs.js", - exit_code: 1, - output: "run/038_checkjs.js.out", -}); - -itest!(_042_dyn_import_evalcontext { - args: "run --quiet --allow-read --reload run/042_dyn_import_evalcontext.ts", - output: "run/042_dyn_import_evalcontext.ts.out", -}); - -itest!(_044_bad_resource { - args: "run --quiet --reload --allow-read run/044_bad_resource.ts", - output: "run/044_bad_resource.ts.out", - exit_code: 1, -}); - -itest!(_046_tsx { - args: "run --quiet --reload run/046_jsx_test.tsx", - output: "run/046_jsx_test.tsx.out", -}); - -itest!(_047_jsx { - args: "run --quiet --reload run/047_jsx_test.jsx", - output: "run/047_jsx_test.jsx.out", -}); - -itest!(_048_media_types_jsx { - args: "run --reload --allow-import run/048_media_types_jsx.ts", - output: "run/048_media_types_jsx.ts.out", - http_server: true, -}); - -itest!(_052_no_remote_flag { - args: - "run --reload --check --allow-import --no-remote http://127.0.0.1:4545/run/019_media_types.ts", - output: "run/052_no_remote_flag.out", - exit_code: 1, - http_server: true, -}); - -itest!(_058_tasks_microtasks_close { - args: "run --quiet run/058_tasks_microtasks_close.ts", - output: "run/058_tasks_microtasks_close.ts.out", -}); - -itest!(_059_fs_relative_path_perm { - args: "run run/059_fs_relative_path_perm.ts", - output: "run/059_fs_relative_path_perm.ts.out", - exit_code: 1, -}); - -itest!(_070_location { - args: "run --location https://foo/bar?baz#bat run/070_location.ts", - output: "run/070_location.ts.out", -}); - -itest!(_071_location_unset { - args: "run run/071_location_unset.ts", - output: "run/071_location_unset.ts.out", -}); - -itest!(_072_location_relative_fetch { - args: "run --location http://127.0.0.1:4545/ --allow-net run/072_location_relative_fetch.ts", - output: "run/072_location_relative_fetch.ts.out", - http_server: true, -}); - -// tests the beforeunload event -itest!(beforeunload_event { - args: "run run/before_unload.js", - output: "run/before_unload.js.out", -}); - -// tests the serialization of webstorage (both localStorage and sessionStorage) -itest!(webstorage_serialization { - args: "run run/webstorage/serialization.ts", - output: "run/webstorage/serialization.ts.out", -}); - // tests to ensure that when `--location` is set, all code shares the same // localStorage cache based on the origin of the location URL. 
#[test] @@ -394,41 +133,6 @@ fn webstorage_main_module() { .assert_matches_text("Storage { hello: \"deno\", length: 1 }\n"); } -itest!(_075_import_local_query_hash { - args: "run run/075_import_local_query_hash.ts", - output: "run/075_import_local_query_hash.ts.out", -}); - -itest!(_077_fetch_empty { - args: "run -A run/077_fetch_empty.ts", - output: "run/077_fetch_empty.ts.out", - exit_code: 1, -}); - -itest!(_078_unload_on_exit { - args: "run run/078_unload_on_exit.ts", - output: "run/078_unload_on_exit.ts.out", - exit_code: 1, -}); - -itest!(_079_location_authentication { - args: - "run --location https://foo:bar@baz/qux run/079_location_authentication.ts", - output: "run/079_location_authentication.ts.out", -}); - -itest!(_081_location_relative_fetch_redirect { - args: "run --location http://127.0.0.1:4546/ --allow-net run/081_location_relative_fetch_redirect.ts", - output: "run/081_location_relative_fetch_redirect.ts.out", - http_server: true, - }); - -itest!(_082_prepare_stack_trace_throw { - args: "run run/082_prepare_stack_trace_throw.js", - output: "run/082_prepare_stack_trace_throw.js.out", - exit_code: 1, -}); - #[test] fn _083_legacy_external_source_map() { let _g = util::http_server(); @@ -457,27 +161,6 @@ fn _083_legacy_external_source_map() { assert_eq!(out, ""); } -itest!(dynamic_import_async_error { - args: "run --allow-read run/dynamic_import_async_error/main.ts", - output: "run/dynamic_import_async_error/main.out", -}); - -itest!(dynamic_import_already_rejected { - args: "run --allow-read run/dynamic_import_already_rejected/main.ts", - output: "run/dynamic_import_already_rejected/main.out", -}); - -itest!(dynamic_import_concurrent_non_statically_analyzable { - args: "run --allow-import --allow-read --allow-net --quiet run/dynamic_import_concurrent_non_statically_analyzable/main.ts", - output: "run/dynamic_import_concurrent_non_statically_analyzable/main.out", - http_server: true, -}); - -itest!(_088_dynamic_import_already_evaluating { - args: "run --allow-read run/088_dynamic_import_already_evaluating.ts", - output: "run/088_dynamic_import_already_evaluating.ts.out", -}); - itest!(_089_run_allow_list { args: "run --allow-run=curl run/089_run_allow_list.ts", envs: vec![ @@ -713,16 +396,6 @@ fn permission_request_long() { }); } -itest!(deny_all_permission_args { - args: "run --deny-env --deny-read --deny-write --deny-ffi --deny-run --deny-sys --deny-net run/deny_all_permission_args.js", - output: "run/deny_all_permission_args.out", -}); - -itest!(deny_some_permission_args { - args: "run --allow-env --deny-env=FOO --allow-read --deny-read=/foo --allow-write --deny-write=/foo --allow-ffi --deny-ffi=/foo --allow-run --deny-run=foo --allow-sys --deny-sys=hostname --allow-net --deny-net=127.0.0.1 run/deny_some_permission_args.js", - output: "run/deny_some_permission_args.out", -}); - #[test] fn permissions_cache() { TestContext::default() @@ -755,52 +428,6 @@ itest!(env_file_missing { output: "run/env_file_missing.out", }); -itest!(_091_use_define_for_class_fields { - args: "run --check run/091_use_define_for_class_fields.ts", - output: "run/091_use_define_for_class_fields.ts.out", - exit_code: 1, -}); - -itest!(js_import_detect { - args: "run --quiet --reload run/js_import_detect.ts", - output: "run/js_import_detect.ts.out", - exit_code: 0, -}); - -itest!(blob_gc_finalization { - args: "run run/blob_gc_finalization.js", - output: "run/blob_gc_finalization.js.out", - exit_code: 0, -}); - -itest!(fetch_response_finalization { - args: - "run --v8-flags=--expose-gc --allow-net 
run/fetch_response_finalization.js", - output: "run/fetch_response_finalization.js.out", - http_server: true, - exit_code: 0, -}); - -itest!(import_type { - args: "run --reload run/import_type.ts", - output: "run/import_type.ts.out", -}); - -itest!(import_type_no_check { - args: "run --reload --no-check run/import_type.ts", - output: "run/import_type.ts.out", -}); - -itest!(private_field_presence { - args: "run --reload run/private_field_presence.ts", - output: "run/private_field_presence.ts.out", -}); - -itest!(private_field_presence_no_check { - args: "run --reload --no-check run/private_field_presence.ts", - output: "run/private_field_presence.ts.out", -}); - itest!(lock_write_fetch { args: "run --quiet --allow-import --allow-read --allow-write --allow-env --allow-run run/lock_write_fetch/main.ts", @@ -809,33 +436,6 @@ itest!(lock_write_fetch { exit_code: 0, }); -itest!(lock_check_ok { - args: - "run --quiet --allow-import --lock=run/lock_check_ok.json http://127.0.0.1:4545/run/003_relative_import.ts", - output: "run/003_relative_import.ts.out", - http_server: true, -}); - -itest!(lock_check_ok2 { - args: - "run --allow-import --lock=run/lock_check_ok2.json run/019_media_types.ts", - output: "run/019_media_types.ts.out", - http_server: true, -}); - -itest!(lock_v2_check_ok { - args: - "run --allow-import --quiet --lock=run/lock_v2_check_ok.json http://127.0.0.1:4545/run/003_relative_import.ts", - output: "run/003_relative_import.ts.out", - http_server: true, -}); - -itest!(lock_v2_check_ok2 { - args: "run --allow-import --lock=run/lock_v2_check_ok2.json run/019_media_types.ts", - output: "run/019_media_types.ts.out", - http_server: true, -}); - #[test] fn lock_redirects() { let context = TestContextBuilder::new() @@ -1234,141 +834,12 @@ fn get_lockfile_npm_package_integrity( .to_string() } -itest!(mts_dmts_mjs { - args: "run subdir/import.mts", - output: "run/mts_dmts_mjs.out", -}); - -itest!(mts_dmts_mjs_no_check { - args: "run --no-check subdir/import.mts", - output: "run/mts_dmts_mjs.out", -}); - -itest!(async_error { - exit_code: 1, - args: "run --reload run/async_error.ts", - output: "run/async_error.ts.out", -}); - -itest!(config { - args: - "run --reload --config run/config/tsconfig.json --check run/config/main.ts", - output: "run/config/main.out", -}); - -itest!(config_types { - args: - "run --reload --quiet --check=all --config run/config_types/tsconfig.json run/config_types/main.ts", - output: "run/config_types/main.out", -}); - -itest!(config_types_remote { - http_server: true, - args: "run --allow-import --reload --quiet --check=all --config run/config_types/remote.tsconfig.json run/config_types/main.ts", - output: "run/config_types/main.out", -}); - -itest!(empty_typescript { - args: "run --reload --check run/empty.ts", - output_str: Some("Check file:[WILDCARD]/run/empty.ts\n"), -}); - -itest!(error_001 { - args: "run --reload run/error_001.ts", - exit_code: 1, - output: "run/error_001.ts.out", -}); - -itest!(error_002 { - args: "run --reload run/error_002.ts", - exit_code: 1, - output: "run/error_002.ts.out", -}); - -itest!(error_003_typescript { - args: "run --reload --check run/error_003_typescript.ts", - exit_code: 1, - output: "run/error_003_typescript.ts.out", -}); - -// Supposing that we've already attempted to run error_003_typescript.ts -// we want to make sure that JS wasn't emitted. Running again without reload flag -// should result in the same output. 
-// https://github.com/denoland/deno/issues/2436
-itest!(error_003_typescript2 {
-  args: "run --check run/error_003_typescript.ts",
-  exit_code: 1,
-  output: "run/error_003_typescript.ts.out",
-});
-
-itest!(error_004_missing_module {
-  args: "run --reload run/error_004_missing_module.ts",
-  exit_code: 1,
-  output: "run/error_004_missing_module.ts.out",
-});
-
-itest!(error_005_missing_dynamic_import {
-  args:
-    "run --reload --allow-read --quiet run/error_005_missing_dynamic_import.ts",
-  exit_code: 1,
-  output: "run/error_005_missing_dynamic_import.ts.out",
-});
-
-itest!(error_006_import_ext_failure {
-  args: "run --reload run/error_006_import_ext_failure.ts",
-  exit_code: 1,
-  output: "run/error_006_import_ext_failure.ts.out",
-});
-
-itest!(error_007_any {
-  args: "run --reload run/error_007_any.ts",
-  exit_code: 1,
-  output: "run/error_007_any.ts.out",
-});
-
-itest!(error_008_checkjs {
-  args: "run --reload run/error_008_checkjs.js",
-  exit_code: 1,
-  output: "run/error_008_checkjs.js.out",
-});
-
-itest!(error_009_extensions_error {
-  args: "run run/error_009_extensions_error.js",
-  output: "run/error_009_extensions_error.js.out",
-  exit_code: 1,
-});
-
-itest!(error_011_bad_module_specifier {
-  args: "run --reload run/error_011_bad_module_specifier.ts",
-  exit_code: 1,
-  output: "run/error_011_bad_module_specifier.ts.out",
-});
-
-itest!(error_012_bad_dynamic_import_specifier {
-  args: "run --reload --check run/error_012_bad_dynamic_import_specifier.ts",
-  exit_code: 1,
-  output: "run/error_012_bad_dynamic_import_specifier.ts.out",
-});
-
itest!(error_013_missing_script {
  args: "run --reload missing_file_name",
  exit_code: 1,
  output: "run/error_013_missing_script.out",
});

-itest!(error_014_catch_dynamic_import_error {
-  args:
-    "run --reload --allow-read run/error_014_catch_dynamic_import_error.js",
-  output: "run/error_014_catch_dynamic_import_error.js.out",
-});
-
-itest!(error_015_dynamic_import_permissions {
-  args: "run --reload --quiet run/error_015_dynamic_import_permissions.js",
-  output: "run/error_015_dynamic_import_permissions.out",
-  exit_code: 1,
-  http_server: true,
-});
-
// We have an allow-import flag but not allow-read; it should still result in an error.
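// A hypothetical sketch of the pattern exercised below: even when imports are
// allowed, reading the imported file's bytes is a separate capability.
//
//   // main.js
//   await import("./local.ts"); // rejects unless --allow-read is granted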
itest!(error_016_dynamic_import_permissions2 { args: @@ -1378,60 +849,6 @@ itest!(error_016_dynamic_import_permissions2 { http_server: true, }); -itest!(error_017_hide_long_source_ts { - args: "run --reload --check run/error_017_hide_long_source_ts.ts", - output: "run/error_017_hide_long_source_ts.ts.out", - exit_code: 1, -}); - -itest!(error_018_hide_long_source_js { - args: "run run/error_018_hide_long_source_js.js", - output: "run/error_018_hide_long_source_js.js.out", - exit_code: 1, -}); - -itest!(error_019_stack_function { - args: "run run/error_019_stack_function.ts", - output: "run/error_019_stack_function.ts.out", - exit_code: 1, -}); - -itest!(error_020_stack_constructor { - args: "run run/error_020_stack_constructor.ts", - output: "run/error_020_stack_constructor.ts.out", - exit_code: 1, -}); - -itest!(error_021_stack_method { - args: "run run/error_021_stack_method.ts", - output: "run/error_021_stack_method.ts.out", - exit_code: 1, -}); - -itest!(error_022_stack_custom_error { - args: "run run/error_022_stack_custom_error.ts", - output: "run/error_022_stack_custom_error.ts.out", - exit_code: 1, -}); - -itest!(error_023_stack_async { - args: "run run/error_023_stack_async.ts", - output: "run/error_023_stack_async.ts.out", - exit_code: 1, -}); - -itest!(error_024_stack_promise_all { - args: "run run/error_024_stack_promise_all.ts", - output: "run/error_024_stack_promise_all.ts.out", - exit_code: 1, -}); - -itest!(error_025_tab_indent { - args: "run run/error_025_tab_indent", - output: "run/error_025_tab_indent.out", - exit_code: 1, -}); - itest!(error_026_remote_import_error { args: "run --allow-import run/error_026_remote_import_error.ts", output: "run/error_026_remote_import_error.ts.out", @@ -1439,42 +856,6 @@ itest!(error_026_remote_import_error { http_server: true, }); -itest!(error_for_await { - args: "run --reload --check run/error_for_await.ts", - output: "run/error_for_await.ts.out", - exit_code: 1, -}); - -itest!(error_missing_module_named_import { - args: "run --reload run/error_missing_module_named_import.ts", - output: "run/error_missing_module_named_import.ts.out", - exit_code: 1, -}); - -itest!(error_no_check { - args: "run --reload --no-check run/error_no_check.ts", - output: "run/error_no_check.ts.out", - exit_code: 1, -}); - -itest!(error_syntax { - args: "run --reload run/error_syntax.js", - exit_code: 1, - output: "run/error_syntax.js.out", -}); - -itest!(error_syntax_empty_trailing_line { - args: "run --reload run/error_syntax_empty_trailing_line.mjs", - exit_code: 1, - output: "run/error_syntax_empty_trailing_line.mjs.out", -}); - -itest!(error_type_definitions { - args: "run --reload --check run/error_type_definitions.ts", - exit_code: 1, - output: "run/error_type_definitions.ts.out", -}); - itest!(error_local_static_import_from_remote_ts { args: "run --allow-import --reload http://localhost:4545/run/error_local_static_import_from_remote.ts", exit_code: 1, @@ -1489,138 +870,18 @@ itest!(error_local_static_import_from_remote_js { output: "run/error_local_static_import_from_remote.js.out", }); -itest!(exit_error42 { - exit_code: 42, - args: "run --quiet --reload run/exit_error42.ts", - output: "run/exit_error42.ts.out", -}); - -itest!(set_exit_code_0 { - args: "run --no-check run/set_exit_code_0.ts", - output_str: Some(""), - exit_code: 0, -}); - -itest!(set_exit_code_1 { - args: "run --no-check run/set_exit_code_1.ts", - output_str: Some(""), - exit_code: 42, -}); - -itest!(set_exit_code_2 { - args: "run --no-check run/set_exit_code_2.ts", - output_str: 
Some(""), - exit_code: 42, -}); - -itest!(op_exit_op_set_exit_code_in_worker { - args: "run --no-check --allow-read run/op_exit_op_set_exit_code_in_worker.ts", - exit_code: 21, - output_str: Some(""), -}); - -itest!(deno_exit_tampering { - args: "run --no-check run/deno_exit_tampering.ts", - output_str: Some(""), - exit_code: 42, -}); - -itest!(heapstats { - args: "run --quiet --v8-flags=--expose-gc run/heapstats.js", - output: "run/heapstats.js.out", -}); - -itest!(finalization_registry { - args: "run --quiet --v8-flags=--expose-gc run/finalization_registry.js", - output: "run/finalization_registry.js.out", -}); - -itest!(https_import { - args: "run --allow-import --quiet --reload --cert tls/RootCA.pem run/https_import.ts", - output: "run/https_import.ts.out", - http_server: true, -}); - -itest!(if_main { - args: "run --quiet --reload run/if_main.ts", - output: "run/if_main.ts.out", -}); - itest!(import_meta { args: "run --allow-import --quiet --reload --import-map=run/import_meta/importmap.json run/import_meta/main.ts", output: "run/import_meta/main.out", http_server: true, }); -itest!(main_module { - args: "run --quiet --reload run/main_module/main.ts", - output: "run/main_module/main.out", -}); - -itest!(no_check { - args: "run --allow-import --quiet --reload --no-check run/006_url_imports.ts", - output: "run/006_url_imports.ts.out", - http_server: true, -}); - -itest!(no_check_decorators { - args: "run --quiet --reload --no-check run/decorators/experimental/no_check/main.ts", - output: "run/decorators/experimental/no_check/main.out", -}); - -itest!(decorators_tc39_proposal { - args: "run --quiet --reload --check run/decorators/tc39_proposal/main.ts", - output: "run/decorators/tc39_proposal/main.out", -}); - -itest!(check_remote { - args: - "run --quiet --allow-import --reload --check=all run/no_check_remote.ts", - output: "run/no_check_remote.ts.disabled.out", - exit_code: 1, - http_server: true, -}); - itest!(no_check_remote { args: "run --allow-import --quiet --reload --no-check=remote run/no_check_remote.ts", output: "run/no_check_remote.ts.enabled.out", http_server: true, }); -itest!(runtime_decorators { - args: "run --quiet --reload --no-check run/decorators/experimental/runtime/main.ts", - output: "run/decorators/experimental/runtime/main.out", -}); - -itest!(seed_random { - args: "run --seed=100 run/seed_random.js", - output: "run/seed_random.js.out", -}); - -itest!(type_definitions { - args: "run --reload run/type_definitions.ts", - output: "run/type_definitions.ts.out", -}); - -itest!(type_definitions_for_export { - args: "run --reload --check run/type_definitions_for_export.ts", - output: "run/type_definitions_for_export.ts.out", - exit_code: 1, -}); - -itest!(type_directives_01 { - args: - "run --allow-import --reload --check=all -L debug run/type_directives_01.ts", - output: "run/type_directives_01.ts.out", - http_server: true, -}); - -itest!(type_directives_02 { - args: - "run --allow-import --reload --check=all -L debug run/type_directives_02.ts", - output: "run/type_directives_02.ts.out", -}); - #[test] fn type_directives_js_main() { let context = TestContext::default(); @@ -1642,216 +903,6 @@ itest!(type_directives_redirect { http_server: true, }); -itest!(type_headers_deno_types { - args: "run --allow-import --reload --check run/type_headers_deno_types.ts", - output: "run/type_headers_deno_types.ts.out", - http_server: true, -}); - -itest!(ts_type_imports { - args: "run --reload --check run/ts_type_imports.ts", - output: "run/ts_type_imports.ts.out", - exit_code: 1, 
-}); - -itest!(ts_decorators { - args: "run --reload --check run/decorators/experimental/ts/main.ts", - output: "run/decorators/experimental/ts/main.out", -}); - -itest!(ts_type_only_import { - args: "run --reload --check run/ts_type_only_import.ts", - output: "run/ts_type_only_import.ts.out", -}); - -itest!(swc_syntax_error { - args: "run --reload --check run/swc_syntax_error.ts", - output: "run/swc_syntax_error.ts.out", - exit_code: 1, -}); - -itest!(unbuffered_stderr { - args: "run --reload run/unbuffered_stderr.ts", - output: "run/unbuffered_stderr.ts.out", -}); - -itest!(unbuffered_stdout { - args: "run --quiet --reload run/unbuffered_stdout.ts", - output: "run/unbuffered_stdout.ts.out", -}); - -itest!(v8_flags_run { - args: "run --v8-flags=--expose-gc run/v8_flags.js", - output: "run/v8_flags.js.out", -}); - -itest!(v8_flags_env_run { - envs: vec![("DENO_V8_FLAGS".to_string(), "--expose-gc".to_string())], - args: "run run/v8_flags.js", - output: "run/v8_flags.js.out", -}); - -itest!(v8_flags_unrecognized { - args: "repl --v8-flags=--foo,bar,--trace-gc,-baz", - output: "run/v8_flags_unrecognized.out", - exit_code: 1, -}); - -itest!(v8_help { - args: "repl --v8-flags=--help", - output: "run/v8_help.out", -}); - -itest!(unsupported_dynamic_import_scheme { - args: "eval import('xxx:')", - output: "run/unsupported_dynamic_import_scheme.out", - exit_code: 1, -}); - -itest!(wasm { - args: "run --quiet run/wasm.ts", - output: "run/wasm.ts.out", -}); - -itest!(wasm_shared { - args: "run --quiet run/wasm_shared.ts", - output: "run/wasm_shared.out", -}); - -itest!(wasm_async { - args: "run run/wasm_async.js", - output: "run/wasm_async.out", -}); - -itest!(wasm_unreachable { - args: "run --allow-read run/wasm_unreachable.js", - output: "run/wasm_unreachable.out", - exit_code: 1, -}); - -itest!(wasm_url { - args: "run --quiet --allow-net=localhost:4545 run/wasm_url.js", - output: "run/wasm_url.out", - exit_code: 1, - http_server: true, -}); - -itest!(weakref { - args: "run --quiet --reload run/weakref.ts", - output: "run/weakref.ts.out", -}); - -itest!(top_level_await_order { - args: "run --allow-read run/top_level_await/order.js", - output: "run/top_level_await/order.out", -}); - -itest!(top_level_await_loop { - args: "run --allow-read run/top_level_await/loop.js", - output: "run/top_level_await/loop.out", -}); - -itest!(top_level_await_circular { - args: "run --allow-read run/top_level_await/circular.js", - output: "run/top_level_await/circular.out", - exit_code: 1, -}); - -// Regression test for https://github.com/denoland/deno/issues/11238. 
-itest!(top_level_await_nested { - args: "run --allow-read run/top_level_await/nested/main.js", - output: "run/top_level_await/nested.out", -}); - -itest!(top_level_await_unresolved { - args: "run run/top_level_await/unresolved.js", - output: "run/top_level_await/unresolved.out", - exit_code: 1, -}); - -itest!(top_level_await { - args: "run --allow-read run/top_level_await/top_level_await.js", - output: "run/top_level_await/top_level_await.out", -}); - -itest!(top_level_await_ts { - args: "run --quiet --allow-read run/top_level_await/top_level_await.ts", - output: "run/top_level_await/top_level_await.out", -}); - -itest!(top_level_for_await { - args: "run --quiet run/top_level_await/top_level_for_await.js", - output: "run/top_level_await/top_level_for_await.out", -}); - -itest!(top_level_for_await_ts { - args: "run --quiet run/top_level_await/top_level_for_await.ts", - output: "run/top_level_await/top_level_for_await.out", -}); - -itest!(unstable_worker { - args: "run --reload --quiet --allow-read run/unstable_worker.ts", - output: "run/unstable_worker.ts.out", -}); - -itest!(unstable_worker_options_disabled { - args: "run --quiet --reload --allow-read run/unstable_worker_options.js", - output: "run/unstable_worker_options.disabled.out", - exit_code: 70, -}); - -itest!(unstable_worker_options_enabled { - args: "run --quiet --reload --allow-read --unstable-worker-options run/unstable_worker_options.js", - output: "run/unstable_worker_options.enabled.out", -}); - -itest!(unstable_broadcast_channel_disabled { - args: "run --quiet --reload --allow-read run/unstable_broadcast_channel.js", - output: "run/unstable_broadcast_channel.disabled.out", -}); - -itest!(unstable_broadcast_channel_enabled { - args: "run --quiet --reload --allow-read --unstable-broadcast-channel run/unstable_broadcast_channel.js", - output: "run/unstable_broadcast_channel.enabled.out", -}); - -itest!(unstable_cron_disabled { - args: "run --quiet --reload --allow-read run/unstable_cron.js", - output: "run/unstable_cron.disabled.out", -}); - -itest!(unstable_cron_enabled { - args: - "run --quiet --reload --allow-read --unstable-cron run/unstable_cron.js", - output: "run/unstable_cron.enabled.out", -}); - -itest!(unstable_net_disabled { - args: "run --quiet --reload --allow-read run/unstable_net.js", - output: "run/unstable_net.disabled.out", -}); - -itest!(unstable_net_enabled { - args: "run --quiet --reload --allow-read --unstable-net run/unstable_net.js", - output: "run/unstable_net.enabled.out", -}); - -itest!(unstable_kv_disabled { - args: "run --quiet --reload --allow-read run/unstable_kv.js", - output: "run/unstable_kv.disabled.out", -}); - -itest!(unstable_kv_enabled { - args: "run --quiet --reload --allow-read --unstable-kv run/unstable_kv.js", - output: "run/unstable_kv.enabled.out", -}); - -itest!(import_compression { - args: "run --allow-import --quiet --reload --allow-net run/import_compression/main.ts", - output: "run/import_compression/main.out", - http_server: true, -}); - itest!(disallow_http_from_https_js { args: "run --allow-import --quiet --reload --cert tls/RootCA.pem https://localhost:5545/run/disallow_http_from_https.js", output: "run/disallow_http_from_https_js.out", @@ -1866,131 +917,6 @@ itest!(disallow_http_from_https_ts { exit_code: 1, }); -itest!(dynamic_import_conditional { - args: "run --quiet --reload run/dynamic_import_conditional.js", - output: "run/dynamic_import_conditional.js.out", -}); - -itest!(tsx_imports { - args: "run --reload --check run/tsx_imports/tsx_imports.ts", - output: 
"run/tsx_imports/tsx_imports.ts.out", -}); - -itest!(fix_dynamic_import_errors { - args: "run --reload run/fix_dynamic_import_errors.js", - output: "run/fix_dynamic_import_errors.js.out", -}); - -itest!(fix_emittable_skipped { - args: "run --reload run/fix_emittable_skipped.js", - output: "run/fix_emittable_skipped.ts.out", -}); - -itest!(fix_js_import_js { - args: "run --quiet --reload run/fix_js_import_js.ts", - output: "run/fix_js_import_js.ts.out", -}); - -itest!(fix_js_imports { - args: "run --quiet --reload run/fix_js_imports.ts", - output: "run/fix_js_imports.ts.out", -}); - -itest!(fix_tsc_file_exists { - args: "run --quiet --reload tsc/test.js", - output: "run/fix_tsc_file_exists.out", -}); - -itest!(fix_worker_dispatchevent { - args: "run --quiet --reload run/fix_worker_dispatchevent.ts", - output: "run/fix_worker_dispatchevent.ts.out", -}); - -itest!(es_private_fields { - args: "run --quiet --reload run/es_private_fields.js", - output: "run/es_private_fields.js.out", -}); - -itest!(ts_import_from_js { - args: "run --allow-import --quiet --reload run/ts_import_from_js/main.js", - output: "run/ts_import_from_js/main.out", - http_server: true, -}); - -itest!(jsx_import_from_ts { - args: "run --quiet --reload run/jsx_import_from_ts.ts", - output: "run/jsx_import_from_ts.ts.out", -}); - -itest!(jsx_import_source_pragma { - args: "run --reload --allow-import run/jsx_import_source_pragma.tsx", - output: "run/jsx_import_source.out", - http_server: true, -}); - -itest!(jsx_import_source_pragma_with_config { - args: - "run --reload --allow-import --config jsx/deno-jsx.jsonc --no-lock run/jsx_import_source_pragma.tsx", - output: "run/jsx_import_source.out", - http_server: true, -}); - -itest!(jsx_import_source_pragma_with_dev_config { - args: - "run --reload --allow-import --config jsx/deno-jsxdev.jsonc --no-lock run/jsx_import_source_pragma.tsx", - output: "run/jsx_import_source_dev.out", - http_server: true, -}); - -itest!(jsx_import_source_no_pragma { - args: - "run --allow-import --reload --config jsx/deno-jsx.jsonc --no-lock run/jsx_import_source_no_pragma.tsx", - output: "run/jsx_import_source.out", - http_server: true, -}); - -itest!(jsx_import_source_no_pragma_dev { - args: "run --allow-import --reload --config jsx/deno-jsxdev.jsonc --no-lock run/jsx_import_source_no_pragma.tsx", - output: "run/jsx_import_source_dev.out", - http_server: true, -}); - -itest!(jsx_import_source_pragma_import_map { - args: "run --allow-import --reload --import-map jsx/import-map.json run/jsx_import_source_pragma_import_map.tsx", - output: "run/jsx_import_source_import_map.out", - http_server: true, -}); - -itest!(jsx_import_source_pragma_import_map_dev { - args: "run --allow-import --reload --import-map jsx/import-map.json --config jsx/deno-jsxdev-import-map.jsonc run/jsx_import_source_pragma_import_map.tsx", - output: "run/jsx_import_source_import_map_dev.out", - http_server: true, -}); - -itest!(jsx_import_source_precompile_import_map { - args: "run --allow-import --reload --check --import-map jsx/import-map.json --no-lock --config jsx/deno-jsx-precompile.jsonc run/jsx_precompile/no_pragma.tsx", - output: "run/jsx_precompile/no_pragma.out", - http_server: true, -}); - -itest!(jsx_import_source_precompile_import_map_skip_element { - args: "run --allow-import --reload --check --import-map jsx/import-map.json --no-lock --config jsx/deno-jsx-precompile-skip.jsonc run/jsx_precompile/skip.tsx", - output: "run/jsx_precompile/skip.out", - http_server: true, -}); - -itest!(jsx_import_source_import_map { - 
args: "run --allow-import --reload --import-map jsx/import-map.json --no-lock --config jsx/deno-jsx-import-map.jsonc run/jsx_import_source_no_pragma.tsx", - output: "run/jsx_import_source_import_map.out", - http_server: true, -}); - -itest!(jsx_import_source_import_map_dev { - args: "run --allow-import --reload --import-map jsx/import-map.json --no-lock --config jsx/deno-jsxdev-import-map.jsonc run/jsx_import_source_no_pragma.tsx", - output: "run/jsx_import_source_import_map_dev.out", - http_server: true, -}); - itest!(jsx_import_source_import_map_scoped { args: "run --allow-import --reload --import-map jsx/import-map-scoped.json --no-lock --config jsx/deno-jsx-import-map.jsonc subdir/jsx_import_source_no_pragma.tsx", output: "run/jsx_import_source_import_map.out", @@ -2003,289 +929,12 @@ itest!(jsx_import_source_import_map_scoped_dev { http_server: true, }); -itest!(jsx_import_source_pragma_no_check { - args: - "run --allow-import --reload --no-check run/jsx_import_source_pragma.tsx", - output: "run/jsx_import_source.out", - http_server: true, -}); - -itest!(jsx_import_source_pragma_with_config_no_check { - args: "run --allow-import --reload --config jsx/deno-jsx.jsonc --no-lock --no-check run/jsx_import_source_pragma.tsx", - output: "run/jsx_import_source.out", - http_server: true, -}); - -itest!(jsx_import_source_pragma_with_config_vendor_dir { - args: "run --allow-import --reload --config jsx/deno-jsx.jsonc --no-lock --vendor $TESTDATA/run/jsx_import_source_pragma.tsx", - output: "run/jsx_import_source.out", - http_server: true, - temp_cwd: true, - copy_temp_dir: Some("jsx/"), -}); - -itest!(jsx_import_source_no_pragma_no_check { - args: - "run --allow-import --reload --config jsx/deno-jsx.jsonc --no-lock --no-check run/jsx_import_source_no_pragma.tsx", - output: "run/jsx_import_source.out", - http_server: true, -}); - -itest!(jsx_import_source_pragma_import_map_no_check { - args: "run --allow-import --reload --import-map jsx/import-map.json --no-check run/jsx_import_source_pragma_import_map.tsx", - output: "run/jsx_import_source_import_map.out", - http_server: true, -}); - -itest!(jsx_import_source_import_map_no_check { - args: "run --allow-import --reload --import-map jsx/import-map.json --no-lock --config jsx/deno-jsx-import-map.jsonc --no-check run/jsx_import_source_no_pragma.tsx", - output: "run/jsx_import_source_import_map.out", - http_server: true, -}); - -itest!(jsx_import_source_error { - args: "run --config jsx/deno-jsx-error.jsonc --check run/jsx_import_source_no_pragma.tsx", - output: "run/jsx_import_source_error.out", - exit_code: 1, -}); - -itest!(single_compile_with_reload { - args: "run --reload --allow-read run/single_compile_with_reload.ts", - output: "run/single_compile_with_reload.ts.out", -}); - -itest!(proto_exploit { - args: "run run/proto_exploit.js", - output: "run/proto_exploit.js.out", -}); - -itest!(reference_types { - args: "run --reload --quiet run/reference_types.ts", - output: "run/reference_types.ts.out", -}); - -itest!(references_types_remote { - http_server: true, - args: "run --reload --quiet run/reference_types_remote.ts", - output: "run/reference_types_remote.ts.out", -}); - -itest!(reference_types_error { - args: - "run --config run/checkjs.tsconfig.json --check run/reference_types_error.js", - output: "run/reference_types_error.js.out", - exit_code: 1, -}); - -itest!(reference_types_error_vendor_dir { - args: - "run --config run/checkjs.tsconfig.json --check --vendor $TESTDATA/run/reference_types_error.js", - output: 
"run/reference_types_error.js.out", - exit_code: 1, -}); - -itest!(reference_types_error_no_check { - args: "run --no-check run/reference_types_error.js", - output_str: Some(""), -}); - -itest!(import_data_url_error_stack { - args: "run --quiet --reload run/import_data_url_error_stack.ts", - output: "run/import_data_url_error_stack.ts.out", - exit_code: 1, -}); - -itest!(import_data_url_import_relative { - args: "run --quiet --reload run/import_data_url_import_relative.ts", - output: "run/import_data_url_import_relative.ts.out", - exit_code: 1, -}); - -itest!(import_data_url_imports { - args: "run --allow-import --quiet --reload run/import_data_url_imports.ts", - output: "run/import_data_url_imports.ts.out", - http_server: true, -}); - -itest!(import_data_url_jsx { - args: "run --quiet --reload run/import_data_url_jsx.ts", - output: "run/import_data_url_jsx.ts.out", -}); - -itest!(import_data_url { - args: "run --quiet --reload run/import_data_url.ts", - output: "run/import_data_url.ts.out", -}); - -itest!(import_dynamic_data_url { - args: "run --quiet --reload run/import_dynamic_data_url.ts", - output: "run/import_dynamic_data_url.ts.out", -}); - -itest!(import_blob_url_error_stack { - args: "run --quiet --reload run/import_blob_url_error_stack.ts", - output: "run/import_blob_url_error_stack.ts.out", - exit_code: 1, -}); - -itest!(import_blob_url_import_relative { - args: "run --quiet --reload run/import_blob_url_import_relative.ts", - output: "run/import_blob_url_import_relative.ts.out", - exit_code: 1, -}); - -itest!(import_blob_url_imports { - args: - "run --allow-import --quiet --reload --allow-net=localhost:4545 run/import_blob_url_imports.ts", - output: "run/import_blob_url_imports.ts.out", - http_server: true, -}); - -itest!(import_blob_url_jsx { - args: "run --quiet --reload run/import_blob_url_jsx.ts", - output: "run/import_blob_url_jsx.ts.out", -}); - -itest!(import_blob_url { - args: "run --quiet --reload run/import_blob_url.ts", - output: "run/import_blob_url.ts.out", -}); - -itest!(import_file_with_colon { - args: "run --allow-import --quiet --reload run/import_file_with_colon.ts", - output: "run/import_file_with_colon.ts.out", - http_server: true, -}); - -itest!(import_extensionless { - args: "run --allow-import --quiet --reload run/import_extensionless.ts", - output: "run/import_extensionless.ts.out", - http_server: true, -}); - -itest!(classic_workers_event_loop { - args: - "run --enable-testing-features-do-not-use run/classic_workers_event_loop.js", - output: "run/classic_workers_event_loop.js.out", -}); - // FIXME(bartlomieju): disabled, because this test is very flaky on CI // itest!(local_sources_not_cached_in_memory { // args: "run --allow-read --allow-write run/no_mem_cache.js", // output: "run/no_mem_cache.js.out", // }); -// This test checks that inline source map data is used. It uses a hand crafted -// source map that maps to a file that exists, but is not loaded into the module -// graph (inline_js_source_map_2.ts) (because there are no direct dependencies). -// Source line is not remapped because no inline source contents are included in -// the sourcemap and the file is not present in the dependency graph. -itest!(inline_js_source_map_2 { - args: "run --quiet run/inline_js_source_map_2.js", - output: "run/inline_js_source_map_2.js.out", - exit_code: 1, -}); - -// This test checks that inline source map data is used. 
It uses a hand crafted
-// source map that maps to a file that exists, but is not loaded into the module
-// graph (inline_js_source_map_2.ts) (because there are no direct dependencies).
-// Source line is remapped using the inline source contents that are included
-// in the inline source map.
-itest!(inline_js_source_map_2_with_inline_contents {
-  args: "run --quiet run/inline_js_source_map_2_with_inline_contents.js",
-  output: "run/inline_js_source_map_2_with_inline_contents.js.out",
-  exit_code: 1,
-});
-
-// This test checks that inline source map data is used. It uses a hand crafted
-// source map that maps to a file that exists, and is loaded into the module
-// graph because of a direct import statement (inline_js_source_map.ts). The
-// source map was generated from an earlier version of this file, where the throw
-// was not commented out. The source line is remapped using source contents that
-// come from the module graph.
-itest!(inline_js_source_map_with_contents_from_graph {
-  args: "run --allow-import --quiet run/inline_js_source_map_with_contents_from_graph.js",
-  output: "run/inline_js_source_map_with_contents_from_graph.js.out",
-  exit_code: 1,
-  http_server: true,
-});
-
-// This test ensures that a descriptive error is shown when we're unable to load
-// the import map. Even though this tests only the `run` subcommand, we can be sure
-// that the error message is similar for other subcommands as they all use
-// `program_state.maybe_import_map` to access the import map underneath.
-itest!(error_import_map_unable_to_load {
-  args: "run --import-map=import_maps/does_not_exist.json import_maps/test.ts",
-  output: "run/error_import_map_unable_to_load.out",
-  exit_code: 1,
-});
-
-// Test that setting `self` in the main thread to some other value doesn't break
-// the world.
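// A minimal sketch of what such a script can do (hypothetical, not the actual
// fixture):
//
//   self = 42;                  // clobber the global `self` binding
//   console.log("still alive"); // runtime machinery should be unaffected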
-itest!(replace_self { - args: "run run/replace_self.js", - output: "run/replace_self.js.out", -}); - -itest!(worker_event_handler_test { - args: "run --quiet --reload --allow-read run/worker_event_handler_test.js", - output: "run/worker_event_handler_test.js.out", -}); - -itest!(worker_close_race { - args: "run --quiet --reload --allow-read run/worker_close_race.js", - output: "run/worker_close_race.js.out", -}); - -itest!(worker_drop_handle_race { - args: "run --quiet --reload --allow-read run/worker_drop_handle_race.js", - output: "run/worker_drop_handle_race.js.out", - exit_code: 1, -}); - -itest!(worker_drop_handle_race_terminate { - args: "run run/worker_drop_handle_race_terminate.js", - output: "run/worker_drop_handle_race_terminate.js.out", -}); - -itest!(worker_close_nested { - args: "run --quiet --reload --allow-read run/worker_close_nested.js", - output: "run/worker_close_nested.js.out", -}); - -itest!(worker_message_before_close { - args: "run --quiet --reload --allow-read run/worker_message_before_close.js", - output: "run/worker_message_before_close.js.out", -}); - -itest!(worker_close_in_wasm_reactions { - args: - "run --quiet --reload --allow-read run/worker_close_in_wasm_reactions.js", - output: "run/worker_close_in_wasm_reactions.js.out", -}); - -itest!(shebang_tsc { - args: "run --quiet --check run/shebang.ts", - output: "run/shebang.ts.out", -}); - -itest!(shebang_swc { - args: "run --quiet run/shebang.ts", - output: "run/shebang.ts.out", -}); - -itest!(shebang_with_json_imports_tsc { - args: "run --quiet import_attributes/json_with_shebang.ts", - output: "import_attributes/json_with_shebang.ts.out", - exit_code: 1, -}); - -itest!(shebang_with_json_imports_swc { - args: "run --quiet --no-check import_attributes/json_with_shebang.ts", - output: "import_attributes/json_with_shebang.ts.out", - exit_code: 1, -}); - #[test] fn no_validate_asm() { let output = util::deno_cmd() @@ -2523,7 +1172,6 @@ fn dont_cache_on_check_fail() { mod permissions { use test_util as util; - use test_util::itest; use util::TestContext; #[test] @@ -2973,31 +1621,6 @@ mod permissions { }); } - itest!(_063_permissions_revoke { - args: "run --allow-read=foo,bar run/063_permissions_revoke.ts", - output: "run/063_permissions_revoke.ts.out", - }); - - itest!(_063_permissions_revoke_sync { - args: "run --allow-read=foo,bar run/063_permissions_revoke_sync.ts", - output: "run/063_permissions_revoke.ts.out", - }); - - itest!(_064_permissions_revoke_global { - args: "run --allow-read=foo,bar run/064_permissions_revoke_global.ts", - output: "run/064_permissions_revoke_global.ts.out", - }); - - itest!(_064_permissions_revoke_global_sync { - args: "run --allow-read=foo,bar run/064_permissions_revoke_global_sync.ts", - output: "run/064_permissions_revoke_global.ts.out", - }); - - itest!(_065_permissions_revoke_net { - args: "run --allow-net run/065_permissions_revoke_net.ts", - output: "run/065_permissions_revoke_net.ts.out", - }); - #[test] fn _066_prompt() { TestContext::default() @@ -3033,63 +1656,8 @@ mod permissions { console.expect("The end of test"); }); } - - itest!(dynamic_import_static_analysis_no_permissions { - args: "run --quiet --reload --no-prompt dynamic_import/static_analysis_no_permissions.ts", - output: "dynamic_import/static_analysis_no_permissions.ts.out", - }); - - itest!(dynamic_import_permissions_remote_remote { - args: "run --quiet --reload --allow-import=localhost:4545 dynamic_import/permissions_remote_remote.ts", - output: "dynamic_import/permissions_remote_remote.ts.out", - 
http_server: true, - exit_code: 1, - }); - - itest!(dynamic_import_permissions_data_remote { - args: "run --quiet --reload --allow-import=localhost:4545 dynamic_import/permissions_data_remote.ts", - output: "dynamic_import/permissions_data_remote.ts.out", - http_server: true, - exit_code: 1, - }); - - itest!(dynamic_import_permissions_blob_remote { - args: "run --quiet --reload --allow-net=localhost:4545 dynamic_import/permissions_blob_remote.ts", - output: "dynamic_import/permissions_blob_remote.ts.out", - http_server: true, - exit_code: 1, - }); - - itest!(dynamic_import_permissions_data_local { - args: "run --quiet --reload --allow-net=localhost:4545 dynamic_import/permissions_data_local.ts", - output: "dynamic_import/permissions_data_local.ts.out", - http_server: true, - exit_code: 1, - }); - - itest!(dynamic_import_permissions_blob_local { - args: "run --quiet --reload --allow-net=localhost:4545 dynamic_import/permissions_blob_local.ts", - output: "dynamic_import/permissions_blob_local.ts.out", - http_server: true, - exit_code: 1, - }); } -itest!(tls_starttls { - args: "run --quiet --reload --allow-net --allow-read --cert tls/RootCA.pem --config ../config/deno.json run/tls_starttls.js", - output: "run/tls.out", -}); - -itest!(tls_connecttls { - args: "run --quiet --reload --allow-net --allow-read --cert tls/RootCA.pem --config ../config/deno.json run/tls_connecttls.js", - output: "run/tls.out", -}); - -itest!(byte_order_mark { - args: "run --no-check run/byte_order_mark.ts", - output: "run/byte_order_mark.out", -}); - #[test] #[cfg(windows)] fn process_stdin_read_unblock() { @@ -3137,24 +1705,6 @@ fn issue9750() { }); } -// Regression test for https://github.com/denoland/deno/issues/11451. -itest!(dom_exception_formatting { - args: "run run/dom_exception_formatting.ts", - output: "run/dom_exception_formatting.ts.out", - exit_code: 1, -}); - -itest!(long_data_url_formatting { - args: "run run/long_data_url_formatting.ts", - output: "run/long_data_url_formatting.ts.out", - exit_code: 1, -}); - -itest!(eval_context_throw_dom_exception { - args: "run run/eval_context_throw_dom_exception.js", - output: "run/eval_context_throw_dom_exception.js.out", -}); - #[test] #[cfg(unix)] fn navigator_language_unix() { @@ -3278,85 +1828,6 @@ fn issue12807() { assert!(status.success()); } -itest!(issue_13562 { - args: "run run/issue13562.ts", - output: "run/issue13562.ts.out", -}); - -itest!(import_attributes_static_import { - args: "run --allow-read import_attributes/static_import.ts", - output: "import_attributes/static_import.out", -}); - -itest!(import_attributes_static_export { - args: "run --allow-read import_attributes/static_export.ts", - output: "import_attributes/static_export.out", -}); - -itest!(import_attributes_static_error { - args: "run --allow-read import_attributes/static_error.ts", - output: "import_attributes/static_error.out", - exit_code: 1, -}); - -itest!(import_attributes_dynamic_import { - args: "run --allow-read --check import_attributes/dynamic_import.ts", - output: "import_attributes/dynamic_import.out", -}); - -itest!(import_attributes_dynamic_error { - args: "run --allow-read import_attributes/dynamic_error.ts", - output: "import_attributes/dynamic_error.out", - exit_code: 1, -}); - -itest!(import_attributes_type_check { - args: "run --allow-read --check import_attributes/type_check.ts", - output: "import_attributes/type_check.out", - exit_code: 1, -}); - -itest!(colors_without_global_this { - args: "run run/colors_without_globalThis.js", - output_str: Some("true\n"), -}); 
- -itest!(config_auto_discovered_for_local_script { - args: "run --quiet run/with_config/frontend_work.ts", - output_str: Some("ok\n"), -}); - -itest!(config_auto_discovered_for_local_script_log { - args: "run -L debug run/with_config/frontend_work.ts", - output: "run/with_config/auto_discovery_log.out", -}); - -itest!(no_config_auto_discovery_for_local_script { - args: "run --quiet --no-config --check run/with_config/frontend_work.ts", - output: "run/with_config/no_auto_discovery.out", - exit_code: 1, -}); - -itest!(config_not_auto_discovered_for_remote_script { - args: "run --allow-import --quiet http://127.0.0.1:4545/run/with_config/server_side_work.ts", - output_str: Some("ok\n"), - http_server: true, -}); - -// In this case we shouldn't discover `package.json` file, because it's in a -// directory that is above the directory containing `deno.json` file. -itest!( - package_json_auto_discovered_for_local_script_arg_with_stop { - args: "run -L debug with_stop/some/nested/dir/main.ts", - output: "run/with_package_json/with_stop/main.out", - cwd: Some("run/with_package_json/"), - copy_temp_dir: Some("run/with_package_json/"), - envs: env_vars_for_npm_tests(), - http_server: true, - exit_code: 1, - } -); - #[test] fn package_json_no_node_modules_dir_created() { // it should not create a node_modules directory @@ -3395,81 +1866,6 @@ fn node_modules_dir_no_npm_specifiers_no_dir_created() { assert!(!temp_dir.path().join("node_modules").exists()); } -itest!(wasm_streaming_panic_test { - args: "run run/wasm_streaming_panic_test.js", - output: "run/wasm_streaming_panic_test.js.out", - exit_code: 1, -}); - -// Regression test for https://github.com/denoland/deno/issues/13897. -itest!(fetch_async_error_stack { - args: "run --quiet -A run/fetch_async_error_stack.ts", - output: "run/fetch_async_error_stack.ts.out", - exit_code: 1, -}); - -itest!(event_listener_error { - args: "run --quiet run/event_listener_error.ts", - output: "run/event_listener_error.ts.out", - exit_code: 1, -}); - -itest!(event_listener_error_handled { - args: "run --quiet run/event_listener_error_handled.ts", - output: "run/event_listener_error_handled.ts.out", -}); - -// https://github.com/denoland/deno/pull/14159#issuecomment-1092285446 -itest!(event_listener_error_immediate_exit { - args: "run --quiet run/event_listener_error_immediate_exit.ts", - output: "run/event_listener_error_immediate_exit.ts.out", - exit_code: 1, -}); - -// https://github.com/denoland/deno/pull/14159#issuecomment-1092285446 -itest!(event_listener_error_immediate_exit_worker { - args: "run --quiet -A run/event_listener_error_immediate_exit_worker.ts", - output: "run/event_listener_error_immediate_exit_worker.ts.out", - exit_code: 1, -}); - -itest!(set_timeout_error { - args: "run --quiet run/set_timeout_error.ts", - output: "run/set_timeout_error.ts.out", - exit_code: 1, -}); - -itest!(set_timeout_error_handled { - args: "run --quiet run/set_timeout_error_handled.ts", - output: "run/set_timeout_error_handled.ts.out", -}); - -itest!(aggregate_error { - args: "run --quiet run/aggregate_error.ts", - output: "run/aggregate_error.out", - exit_code: 1, -}); - -itest!(complex_error { - args: "run --quiet run/complex_error.ts", - output: "run/complex_error.ts.out", - exit_code: 1, -}); - -// Regression test for https://github.com/denoland/deno/issues/16340. 
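// A hypothetical value that exercises this formatting path: a plain Error
// carrying an AggregateError-style `errors` property, e.g.
//
//   throw Object.assign(new Error("outer"), { errors: [new Error("inner")] });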
-itest!(error_with_errors_prop { - args: "run --quiet run/error_with_errors_prop.js", - output: "run/error_with_errors_prop.js.out", - exit_code: 1, -}); - -// Regression test for https://github.com/denoland/deno/issues/12143. -itest!(js_root_with_ts_check { - args: "run --quiet --check run/js_root_with_ts_check.js", - output: "run/js_root_with_ts_check.js.out", - exit_code: 1, -}); - #[test] fn check_local_then_remote() { let _http_guard = util::http_server(); @@ -3502,18 +1898,6 @@ fn check_local_then_remote() { assert_contains!(stderr, "Type 'string' is not assignable to type 'number'."); } -// Regression test for https://github.com/denoland/deno/issues/15163 -itest!(check_js_points_to_ts { - args: "run --quiet --check --config run/checkjs.tsconfig.json run/check_js_points_to_ts/test.js", - output: "run/check_js_points_to_ts/test.js.out", - exit_code: 1, -}); - -itest!(no_prompt_flag { - args: "run --quiet --no-prompt run/no_prompt.ts", - output_str: Some(""), -}); - #[test] fn permission_request_with_no_prompt() { TestContext::default() @@ -3544,57 +1928,6 @@ fn deno_no_prompt_environment_variable() { assert!(output.status.success()); } -itest!(report_error { - args: "run --quiet run/report_error.ts", - output: "run/report_error.ts.out", - exit_code: 1, -}); - -itest!(report_error_handled { - args: "run --quiet run/report_error_handled.ts", - output: "run/report_error_handled.ts.out", -}); - -// Regression test for https://github.com/denoland/deno/issues/15513. -itest!(report_error_end_of_program { - args: "run --quiet run/report_error_end_of_program.ts", - output: "run/report_error_end_of_program.ts.out", - exit_code: 1, -}); - -itest!(queue_microtask_error { - args: "run --quiet run/queue_microtask_error.ts", - output: "run/queue_microtask_error.ts.out", - exit_code: 1, -}); - -itest!(queue_microtask_error_handled { - args: "run --quiet run/queue_microtask_error_handled.ts", - output: "run/queue_microtask_error_handled.ts.out", -}); - -itest!(spawn_stdout_inherit { - args: "run --quiet -A run/spawn_stdout_inherit.ts", - output: "run/spawn_stdout_inherit.ts.out", -}); - -itest!(error_name_non_string { - args: "run --quiet run/error_name_non_string.js", - output: "run/error_name_non_string.js.out", - exit_code: 1, -}); - -itest!(custom_inspect_url { - args: "run run/custom_inspect_url.js", - output: "run/custom_inspect_url.js.out", -}); - -itest!(config_json_import { - args: "run --quiet -c jsx/deno-jsx.json run/config_json_import.ts", - output: "run/config_json_import.ts.out", - http_server: true, -}); - #[test] fn running_declaration_files() { let context = TestContextBuilder::new().use_temp_cwd().build(); @@ -3605,18 +1938,14 @@ fn running_declaration_files() { temp_dir.write(file, ""); context .new_command() - .args_vec(["run", file]) + // todo(dsherret): investigate why --allow-read is required here + .args_vec(["run", "--allow-read", file]) .run() .skip_output_check() .assert_exit_code(0); } } -itest!(test_and_bench_are_noops_in_run { - args: "run run/test_and_bench_in_run.js", - output_str: Some(""), -}); - #[cfg(not(target_os = "windows"))] itest!(spawn_kill_permissions { args: "run --quiet --allow-run=cat spawn_kill_permissions.ts", @@ -3627,51 +1956,6 @@ itest!(spawn_kill_permissions { output_str: Some(""), }); -itest!(followup_dyn_import_resolved { - args: "run --allow-read run/followup_dyn_import_resolves/main.ts", - output: "run/followup_dyn_import_resolves/main.ts.out", -}); - -itest!(unhandled_rejection { - args: "run --check run/unhandled_rejection.ts", - output: 
"run/unhandled_rejection.ts.out", -}); - -itest!(unhandled_rejection_sync_error { - args: "run --check run/unhandled_rejection_sync_error.ts", - output: "run/unhandled_rejection_sync_error.ts.out", -}); - -// Regression test for https://github.com/denoland/deno/issues/15661 -itest!(unhandled_rejection_dynamic_import { - args: "run --allow-read run/unhandled_rejection_dynamic_import/main.ts", - output: "run/unhandled_rejection_dynamic_import/main.ts.out", - exit_code: 1, -}); - -// Regression test for https://github.com/denoland/deno/issues/16909 -itest!(unhandled_rejection_dynamic_import2 { - args: "run --allow-read run/unhandled_rejection_dynamic_import2/main.ts", - output: "run/unhandled_rejection_dynamic_import2/main.ts.out", -}); - -itest!(rejection_handled { - args: "run --check run/rejection_handled.ts", - output: "run/rejection_handled.out", -}); - -itest!(nested_error { - args: "run run/nested_error/main.ts", - output: "run/nested_error/main.ts.out", - exit_code: 1, -}); - -itest!(node_env_var_allowlist { - args: "run --no-prompt run/node_env_var_allowlist.ts", - output: "run/node_env_var_allowlist.ts.out", - exit_code: 1, -}); - #[test] fn cache_test() { let _g = util::http_server(); @@ -3890,6 +2174,11 @@ fn basic_auth_tokens() { #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn test_resolve_dns() { + use hickory_server::authority::Catalog; + use hickory_server::authority::ZoneType; + use hickory_server::proto::rr::Name; + use hickory_server::store::in_memory::InMemoryAuthority; + use hickory_server::ServerFuture; use std::net::SocketAddr; use std::str::FromStr; use std::sync::Arc; @@ -3897,11 +2186,6 @@ async fn test_resolve_dns() { use tokio::net::TcpListener; use tokio::net::UdpSocket; use tokio::sync::oneshot; - use trust_dns_server::authority::Catalog; - use trust_dns_server::authority::ZoneType; - use trust_dns_server::proto::rr::Name; - use trust_dns_server::store::in_memory::InMemoryAuthority; - use trust_dns_server::ServerFuture; const DNS_PORT: u16 = 4553; @@ -3911,9 +2195,12 @@ async fn test_resolve_dns() { util::testdata_path().join("run/resolve_dns.zone.in"), ) .unwrap(); - let lexer = Lexer::new(&zone_file); - let records = - Parser::new().parse(lexer, Some(Name::from_str("example.com").unwrap())); + let records = Parser::new( + &zone_file, + None, + Some(Name::from_str("example.com").unwrap()), + ) + .parse(); if records.is_err() { panic!("failed to parse: {:?}", records.err()) } @@ -4211,48 +2498,6 @@ fn broken_stdout_repl() { assert_not_contains!(stderr, "panic"); } -itest!(error_cause { - args: "run run/error_cause.ts", - output: "run/error_cause.ts.out", - exit_code: 1, -}); - -itest!(error_cause_recursive_aggregate { - args: "run error_cause_recursive_aggregate.ts", - output: "error_cause_recursive_aggregate.ts.out", - exit_code: 1, -}); - -itest!(error_cause_recursive_tail { - args: "run error_cause_recursive_tail.ts", - output: "error_cause_recursive_tail.ts.out", - exit_code: 1, -}); - -itest!(error_cause_recursive { - args: "run run/error_cause_recursive.ts", - output: "run/error_cause_recursive.ts.out", - exit_code: 1, -}); - -itest!(js_without_extension { - args: "run --ext js --check file_extensions/js_without_extension", - output: "file_extensions/js_without_extension.out", - exit_code: 0, -}); - -itest!(ts_without_extension { - args: "run --ext ts --check file_extensions/ts_without_extension", - output: "file_extensions/ts_without_extension.out", - exit_code: 0, -}); - -itest!(ext_flag_takes_precedence_over_extension { - args: "run 
--ext ts --check file_extensions/ts_with_js_extension.js", - output: "file_extensions/ts_with_js_extension.out", - exit_code: 0, -}); - #[tokio::test(flavor = "multi_thread")] async fn websocketstream_ping() { let _g = util::http_server(); @@ -4434,24 +2679,6 @@ async fn websocket_server_idletimeout() { assert_eq!(child.wait().unwrap().code(), Some(123)); } -itest!(no_lock_flag { - args: "run --allow-import --no-lock run/no_lock_flag/main.ts", - output: "run/no_lock_flag/main.out", - http_server: true, - exit_code: 0, -}); - -itest!(permission_args { - args: "run run/001_hello.js --allow-net", - output: "run/permission_args.out", - envs: vec![("NO_COLOR".to_string(), "1".to_string())], -}); - -itest!(permission_args_quiet { - args: "run --quiet run/001_hello.js --allow-net", - output: "run/001_hello.js.out", -}); - // Regression test for https://github.com/denoland/deno/issues/16772 #[test] fn file_fetcher_preserves_permissions() { @@ -4559,12 +2786,6 @@ fn permission_prompt_escapes_ansi_codes_and_control_chars() { } } -itest!(dynamic_import_syntax_error { - args: "run -A run/dynamic_import_syntax_error.js", - output: "run/dynamic_import_syntax_error.js.out", - exit_code: 1, -}); - itest!(extension_import { args: "run run/extension_import.ts", output: "run/extension_import.ts.out", @@ -4678,25 +2899,6 @@ console.log(returnsHi());"#, .assert_exit_code(1); } -itest!(explicit_resource_management { - args: "run --quiet --check run/explicit_resource_management/main.ts", - output: "run/explicit_resource_management/main.out", -}); - -itest!(unsafe_proto { - args: "run -A run/unsafe_proto/main.js", - output: "run/unsafe_proto/main.out", - http_server: false, - exit_code: 0, -}); - -itest!(unsafe_proto_flag { - args: "run -A --unstable-unsafe-proto run/unsafe_proto/main.js", - output: "run/unsafe_proto/main_with_unsafe_proto_flag.out", - http_server: false, - exit_code: 0, -}); - // TODO(bartlomieju): temporary disabled // itest!(warn_on_deprecated_api { // args: "run -A run/warn_on_deprecated_api/main.js", diff --git a/tests/integration/watcher_tests.rs b/tests/integration/watcher_tests.rs index 122353bba9..e8f264632d 100644 --- a/tests/integration/watcher_tests.rs +++ b/tests/integration/watcher_tests.rs @@ -566,6 +566,82 @@ async fn run_watch_no_dynamic() { check_alive_then_kill(child); } +#[flaky_test(tokio)] +async fn serve_watch_all() { + let t = TempDir::new(); + let main_file_to_watch = t.path().join("main_file_to_watch.js"); + main_file_to_watch.write( + "export default { + fetch(_request) { + return new Response(\"aaaaaaqqq!\"); + }, + };", + ); + + let another_file = t.path().join("another_file.js"); + another_file.write(""); + + let mut child = util::deno_cmd() + .current_dir(t.path()) + .arg("serve") + .arg(format!("--watch={another_file}")) + .arg("-L") + .arg("debug") + .arg(&main_file_to_watch) + .env("NO_COLOR", "1") + .piped_output() + .spawn() + .unwrap(); + let (mut stdout_lines, mut stderr_lines) = child_lines(&mut child); + + wait_for_watcher("main_file_to_watch.js", &mut stderr_lines).await; + + // Change content of the file + main_file_to_watch.write( + "export default { + fetch(_request) { + return new Response(\"aaaaaaqqq123!\"); + }, + };", + ); + wait_contains("Restarting", &mut stderr_lines).await; + wait_for_watcher("main_file_to_watch.js", &mut stderr_lines).await; + + another_file.write("export const foo = 0;"); + // Confirm that the added file is watched as well + wait_contains("Restarting", &mut stderr_lines).await; + wait_for_watcher("main_file_to_watch.js", &mut 
stderr_lines).await;
+
+  main_file_to_watch
+    .write("import { foo } from './another_file.js'; console.log(foo);");
+  wait_contains("Restarting", &mut stderr_lines).await;
+  wait_for_watcher("main_file_to_watch.js", &mut stderr_lines).await;
+  wait_contains("0", &mut stdout_lines).await;
+
+  another_file.write("export const foo = 42;");
+  wait_contains("Restarting", &mut stderr_lines).await;
+  wait_for_watcher("main_file_to_watch.js", &mut stderr_lines).await;
+  wait_contains("42", &mut stdout_lines).await;
+
+  // Confirm that watch continues even with wrong syntax error
+  another_file.write("syntax error ^^");
+
+  wait_contains("Restarting", &mut stderr_lines).await;
+  wait_contains("error:", &mut stderr_lines).await;
+  wait_for_watcher("main_file_to_watch.js", &mut stderr_lines).await;
+
+  main_file_to_watch.write(
+    "export default {
+      fetch(_request) {
+        return new Response(\"aaaaaaqqq!\");
+      },
+    };",
+  );
+  wait_contains("Restarting", &mut stderr_lines).await;
+  wait_for_watcher("main_file_to_watch.js", &mut stderr_lines).await;
+  check_alive_then_kill(child);
+}
+
 #[flaky_test(tokio)]
 async fn run_watch_npm_specifier() {
   let _g = util::http_server();
diff --git a/tests/napi/src/async.rs b/tests/napi/src/async.rs
index 3d3827b51c..367d2e9ef0 100644
--- a/tests/napi/src/async.rs
+++ b/tests/napi/src/async.rs
@@ -95,7 +95,7 @@ extern "C" fn test_async_work(
   ));
   let mut baton = unsafe { Box::from_raw(baton_ptr as *mut Baton) };
   baton.task = async_work;
-  Box::into_raw(baton);
+  let _ = Box::into_raw(baton);
 
   assert_napi_ok!(napi_queue_async_work(env, async_work));
   ptr::null_mut()
diff --git a/tests/node_compat/config.jsonc b/tests/node_compat/config.jsonc
index 75f463342e..16951d9ede 100644
--- a/tests/node_compat/config.jsonc
+++ b/tests/node_compat/config.jsonc
@@ -43,9 +43,6 @@
     // TODO(littledivy): windows ipc streams not yet implemented
     "test-child-process-fork-ref.js",
     "test-child-process-fork-ref2.js",
-    // TODO(bartlomieju): this test is very flaky on CI
-    // https://github.com/denoland/deno/issues/25855
-    // "test-child-process-ipc-next-tick.js",
     "test-child-process-ipc.js",
     "test-child-process-spawnsync-env.js",
     "test-child-process-stdio-inherit.js",
@@ -90,8 +87,6 @@
     "test-net-server-try-ports.js",
     "test-net-socket-timeout.js",
     "test-net-write-arguments.js",
-    // TODO(nathanwhit): Disable os.userInfo is slightly incorrect
-    // "test-os.js",
     "test-path-resolve.js",
     "test-querystring.js",
     "test-readline-interface.js",
@@ -240,6 +235,7 @@
     "test-child-process-execfilesync-maxbuf.js",
     "test-child-process-execsync-maxbuf.js",
     "test-child-process-flush-stdio.js",
+    "test-child-process-ipc-next-tick.js",
     "test-child-process-kill.js",
     "test-child-process-set-blocking.js",
     "test-child-process-spawn-args.js",
@@ -450,6 +446,7 @@
     "test-next-tick-when-exiting.js",
     "test-next-tick.js",
     "test-nodeeventtarget.js",
+    "test-os.js",
     "test-outgoing-message-destroy.js",
     "test-outgoing-message-pipe.js",
     "test-parse-args.mjs",
diff --git a/tests/node_compat/runner/TODO.md b/tests/node_compat/runner/TODO.md
index 11b5d28053..231a4f62c9 100644
--- a/tests/node_compat/runner/TODO.md
+++ b/tests/node_compat/runner/TODO.md
@@ -280,7 +280,6 @@ NOTE: This file should not be manually edited. Please edit `tests/node_compat/co
 - [parallel/test-child-process-fork3.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-fork3.js)
 - [parallel/test-child-process-http-socket-leak.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-http-socket-leak.js)
 - [parallel/test-child-process-internal.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-internal.js)
-- [parallel/test-child-process-ipc-next-tick.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-ipc-next-tick.js)
 - [parallel/test-child-process-ipc.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-ipc.js)
 - [parallel/test-child-process-no-deprecation.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-no-deprecation.js)
 - [parallel/test-child-process-pipe-dataflow.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-child-process-pipe-dataflow.js)
@@ -1879,7 +1878,6 @@ NOTE: This file should not be manually edited. Please edit `tests/node_compat/co
 - [parallel/test-os-homedir-no-envvar.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-os-homedir-no-envvar.js)
 - [parallel/test-os-process-priority.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-os-process-priority.js)
 - [parallel/test-os-userinfo-handles-getter-errors.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-os-userinfo-handles-getter-errors.js)
-- [parallel/test-os.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-os.js)
 - [parallel/test-path-posix-relative-on-windows.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-path-posix-relative-on-windows.js)
 - [parallel/test-pending-deprecation.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-pending-deprecation.js)
 - [parallel/test-perf-gc-crash.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-perf-gc-crash.js)
diff --git a/tests/node_compat/test.ts b/tests/node_compat/test.ts
index f6db4ee1ae..6cb41d2e45 100644
--- a/tests/node_compat/test.ts
+++ b/tests/node_compat/test.ts
@@ -19,6 +19,7 @@ import { magenta } from "@std/fmt/colors";
 import { pooledMap } from "@std/async/pool";
 import { dirname, fromFileUrl, join } from "@std/path";
 import { assertEquals, fail } from "@std/assert";
+import { distinct } from "@std/collections";
 import {
   config,
   getPathsFromTestSuites,
@@ -36,6 +37,9 @@ const testPaths = partitionParallelTestPaths(
     getPathsFromTestSuites(config.ignore),
   ),
 );
+testPaths.sequential = distinct(testPaths.sequential);
+testPaths.parallel = distinct(testPaths.parallel);
+
 const cwd = new URL(".", import.meta.url);
 const windowsIgnorePaths = new Set(
   getPathsFromTestSuites(config.windowsIgnore),
diff --git a/tests/node_compat/test/fixtures/child-process-spawn-node.js b/tests/node_compat/test/fixtures/child-process-spawn-node.js
index d403aabf91..7112567e01 100644
--- a/tests/node_compat/test/fixtures/child-process-spawn-node.js
+++ b/tests/node_compat/test/fixtures/child-process-spawn-node.js
@@ -1,5 +1,6 @@
 const assert = require("assert");
 const debug = require('util').debuglog('test');
+const process = require("process");
 
 function onmessage(m) {
   debug("CHILD got message:", m);
diff --git a/tests/node_compat/test/parallel/test-child-process-ipc-next-tick.js b/tests/node_compat/test/parallel/test-child-process-ipc-next-tick.js
new file mode 100644
index 0000000000..f511d25004
--- /dev/null
+++ b/tests/node_compat/test/parallel/test-child-process-ipc-next-tick.js
@@ -0,0 +1,46 @@
+// deno-fmt-ignore-file
+// deno-lint-ignore-file
+
+// Copyright Joyent and Node contributors. All rights reserved. MIT license.
+// Taken from Node 18.12.1
+// This file is automatically generated by `tests/node_compat/runner/setup.ts`. Do not modify this file manually.
+
+'use strict';
+const common = require('../common');
+const assert = require('assert');
+const cp = require('child_process');
+const NUM_MESSAGES = 10;
+const values = [];
+
+for (let i = 0; i < NUM_MESSAGES; ++i) {
+  values[i] = i;
+}
+
+if (process.argv[2] === 'child') {
+  const received = values.map(() => { return false; });
+
+  process.on('uncaughtException', common.mustCall((err) => {
+    received[err] = true;
+    const done = received.every((element) => { return element === true; });
+
+    if (done)
+      process.disconnect();
+  }, NUM_MESSAGES));
+
+  process.on('message', (msg) => {
+    // If messages are handled synchronously, throwing should break the IPC
+    // message processing.
+    throw msg;
+  });
+
+  process.send('ready');
+} else {
+  const child = cp.fork(__filename, ['child']);
+
+  child.on('message', common.mustCall((msg) => {
+    assert.strictEqual(msg, 'ready');
+    values.forEach((value) => {
+      child.send(value);
+    });
+  }));
+}
diff --git a/tests/node_compat/test/parallel/test-os.js b/tests/node_compat/test/parallel/test-os.js
new file mode 100644
index 0000000000..f7c24342ad
--- /dev/null
+++ b/tests/node_compat/test/parallel/test-os.js
@@ -0,0 +1,286 @@
+// deno-fmt-ignore-file
+// deno-lint-ignore-file
+
+// Copyright Joyent and Node contributors. All rights reserved. MIT license.
+// Taken from Node 18.12.1
+// This file is automatically generated by `tests/node_compat/runner/setup.ts`. Do not modify this file manually.
+
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+'use strict';
+const common = require('../common');
+const assert = require('assert');
+const os = require('os');
+const path = require('path');
+const { inspect } = require('util');
+
+const is = {
+  number: (value, key) => {
+    assert(!Number.isNaN(value), `${key} should not be NaN`);
+    assert.strictEqual(typeof value, 'number');
+  },
+  string: (value) => { assert.strictEqual(typeof value, 'string'); },
+  array: (value) => { assert.ok(Array.isArray(value)); },
+  object: (value) => {
+    assert.strictEqual(typeof value, 'object');
+    assert.notStrictEqual(value, null);
+  }
+};
+
+process.env.TMPDIR = '/tmpdir';
+process.env.TMP = '/tmp';
+process.env.TEMP = '/temp';
+if (common.isWindows) {
+  assert.strictEqual(os.tmpdir(), '/temp');
+  process.env.TEMP = '';
+  assert.strictEqual(os.tmpdir(), '/tmp');
+  process.env.TMP = '';
+  const expected = `${process.env.SystemRoot || process.env.windir}\\temp`;
+  assert.strictEqual(os.tmpdir(), expected);
+  process.env.TEMP = '\\temp\\';
+  assert.strictEqual(os.tmpdir(), '\\temp');
+  process.env.TEMP = '\\tmpdir/';
+  assert.strictEqual(os.tmpdir(), '\\tmpdir/');
+  process.env.TEMP = '\\';
+  assert.strictEqual(os.tmpdir(), '\\');
+  process.env.TEMP = 'C:\\';
+  assert.strictEqual(os.tmpdir(), 'C:\\');
+} else {
+  assert.strictEqual(os.tmpdir(), '/tmpdir');
+  process.env.TMPDIR = '';
+  assert.strictEqual(os.tmpdir(), '/tmp');
+  process.env.TMP = '';
+  assert.strictEqual(os.tmpdir(), '/temp');
+  process.env.TEMP = '';
+  assert.strictEqual(os.tmpdir(), '/tmp');
+  process.env.TMPDIR = '/tmpdir/';
+  assert.strictEqual(os.tmpdir(), '/tmpdir');
+  process.env.TMPDIR = '/tmpdir\\';
+  assert.strictEqual(os.tmpdir(), '/tmpdir\\');
+  process.env.TMPDIR = '/';
+  assert.strictEqual(os.tmpdir(), '/');
+}
+
+const endianness = os.endianness();
+is.string(endianness);
+assert.match(endianness, /[BL]E/);
+
+const hostname = os.hostname();
+is.string(hostname);
+assert.ok(hostname.length > 0);
+
+// IBMi process priority is different.
+if (!common.isIBMi) {
+  const DUMMY_PRIORITY = 10;
+  os.setPriority(DUMMY_PRIORITY);
+  const priority = os.getPriority();
+  is.number(priority);
+  assert.strictEqual(priority, DUMMY_PRIORITY);
+}
+
+// On IBMi, os.uptime() returns 'undefined'
+if (!common.isIBMi) {
+  const uptime = os.uptime();
+  is.number(uptime);
+  assert.ok(uptime > 0);
+}
+
+const cpus = os.cpus();
+is.array(cpus);
+assert.ok(cpus.length > 0);
+for (const cpu of cpus) {
+  assert.strictEqual(typeof cpu.model, 'string');
+  assert.strictEqual(typeof cpu.speed, 'number');
+  assert.strictEqual(typeof cpu.times.user, 'number');
+  assert.strictEqual(typeof cpu.times.nice, 'number');
+  assert.strictEqual(typeof cpu.times.sys, 'number');
+  assert.strictEqual(typeof cpu.times.idle, 'number');
+  assert.strictEqual(typeof cpu.times.irq, 'number');
+}
+
+const type = os.type();
+is.string(type);
+assert.ok(type.length > 0);
+
+const release = os.release();
+is.string(release);
+assert.ok(release.length > 0);
+// TODO: Check format on more than just AIX
+if (common.isAIX)
+  assert.match(release, /^\d+\.\d+$/);
+
+const platform = os.platform();
+is.string(platform);
+assert.ok(platform.length > 0);
+
+const arch = os.arch();
+is.string(arch);
+assert.ok(arch.length > 0);
+
+if (!common.isSunOS) {
+  // not implemented yet
+  assert.ok(os.loadavg().length > 0);
+  assert.ok(os.freemem() > 0);
+  assert.ok(os.totalmem() > 0);
+}
+
+const interfaces = os.networkInterfaces();
+switch (platform) {
+  case 'linux': {
+    const filter = (e) =>
+      e.address === '127.0.0.1' &&
+      e.netmask === '255.0.0.0';
+
+    const actual = interfaces.lo.filter(filter);
+    const expected = [{
+      address: '127.0.0.1',
+      netmask: '255.0.0.0',
+      family: 'IPv4',
+      mac: '00:00:00:00:00:00',
+      internal: true,
+      cidr: '127.0.0.1/8'
+    }];
+    assert.deepStrictEqual(actual, expected);
+    break;
+  }
+  case 'win32': {
+    const filter = (e) =>
+      e.address === '127.0.0.1';
+
+    const actual = interfaces['Loopback Pseudo-Interface 1'].filter(filter);
+    const expected = [{
+      address: '127.0.0.1',
+      netmask: '255.0.0.0',
+      family: 'IPv4',
+      mac: '00:00:00:00:00:00',
+      internal: true,
+      cidr: '127.0.0.1/8'
+    }];
+    assert.deepStrictEqual(actual, expected);
+    break;
+  }
+}
+const netmaskToCIDRSuffixMap = new Map(Object.entries({
+  '255.0.0.0': 8,
+  '255.255.255.0': 24,
+  'ffff:ffff:ffff:ffff::': 64,
+  'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff': 128
+}));
+
+Object.values(interfaces)
+  .flat(Infinity)
+  .map((v) => ({ v, mask: netmaskToCIDRSuffixMap.get(v.netmask) }))
+  .forEach(({ v, mask }) => {
+    assert.ok('cidr' in v, `"cidr" prop not found in ${inspect(v)}`);
+    if (mask) {
+      assert.strictEqual(v.cidr, `${v.address}/${mask}`);
+    }
+  });
+
+const EOL = os.EOL;
+if (common.isWindows) {
+  assert.strictEqual(EOL, '\r\n');
+} else {
+  assert.strictEqual(EOL, '\n');
+}
+
+const home = os.homedir();
+is.string(home);
+assert.ok(home.includes(path.sep));
+
+const version = os.version();
+assert.strictEqual(typeof version, 'string');
+assert(version);
+
+if (common.isWindows && process.env.USERPROFILE) {
+  assert.strictEqual(home, process.env.USERPROFILE);
+  delete process.env.USERPROFILE;
+  assert.ok(os.homedir().includes(path.sep));
+  process.env.USERPROFILE = home;
+} else if (!common.isWindows && process.env.HOME) {
+  assert.strictEqual(home, process.env.HOME);
+  delete process.env.HOME;
+  assert.ok(os.homedir().includes(path.sep));
+  process.env.HOME = home;
+}
+
+const pwd = os.userInfo();
+is.object(pwd);
+const pwdBuf = os.userInfo({ encoding: 'buffer' });
+
+if (common.isWindows) {
+  assert.strictEqual(pwd.uid, -1);
+  assert.strictEqual(pwd.gid, -1);
+  assert.strictEqual(pwd.shell, null);
+  assert.strictEqual(pwdBuf.uid, -1);
+  assert.strictEqual(pwdBuf.gid, -1);
+  assert.strictEqual(pwdBuf.shell, null);
+} else {
+  is.number(pwd.uid);
+  is.number(pwd.gid);
+  assert.strictEqual(typeof pwd.shell, 'string');
+  // It's possible for /etc/passwd to leave the user's shell blank.
+  if (pwd.shell.length > 0) {
+    assert(pwd.shell.includes(path.sep));
+  }
+  assert.strictEqual(pwd.uid, pwdBuf.uid);
+  assert.strictEqual(pwd.gid, pwdBuf.gid);
+  assert.strictEqual(pwd.shell, pwdBuf.shell.toString('utf8'));
+}
+
+is.string(pwd.username);
+assert.ok(pwd.homedir.includes(path.sep));
+assert.strictEqual(pwd.username, pwdBuf.username.toString('utf8'));
+assert.strictEqual(pwd.homedir, pwdBuf.homedir.toString('utf8'));
+
+assert.strictEqual(`${os.hostname}`, os.hostname());
+assert.strictEqual(`${os.homedir}`, os.homedir());
+assert.strictEqual(`${os.release}`, os.release());
+assert.strictEqual(`${os.type}`, os.type());
+assert.strictEqual(`${os.endianness}`, os.endianness());
+assert.strictEqual(`${os.tmpdir}`, os.tmpdir());
+assert.strictEqual(`${os.arch}`, os.arch());
+assert.strictEqual(`${os.platform}`, os.platform());
+assert.strictEqual(`${os.version}`, os.version());
+assert.strictEqual(`${os.machine}`, os.machine());
+assert.strictEqual(+os.totalmem, os.totalmem());
+
+// Assert that the following values are coercible to numbers.
+// On IBMi, os.uptime() returns 'undefined'
+if (!common.isIBMi) {
+  is.number(+os.uptime, 'uptime');
+  is.number(os.uptime(), 'uptime');
+}
+
+is.number(+os.availableParallelism, 'availableParallelism');
+is.number(os.availableParallelism(), 'availableParallelism');
+is.number(+os.freemem, 'freemem');
+is.number(os.freemem(), 'freemem');
+
+const devNull = os.devNull;
+if (common.isWindows) {
+  assert.strictEqual(devNull, '\\\\.\\nul');
+} else {
+  assert.strictEqual(devNull, '/dev/null');
+}
+
+assert.ok(os.availableParallelism() > 0);
diff --git a/tests/registry/jsr/@denotest/multiple-exports/1.0.0/add.ts b/tests/registry/jsr/@denotest/multiple-exports/1.0.0/add.ts
new file mode 100644
index 0000000000..de02f69024
--- /dev/null
+++ b/tests/registry/jsr/@denotest/multiple-exports/1.0.0/add.ts
@@ -0,0 +1 @@
+export * from "jsr:@denotest/add@1";
diff --git a/tests/registry/jsr/@denotest/multiple-exports/1.0.0/data.json b/tests/registry/jsr/@denotest/multiple-exports/1.0.0/data.json
new file mode 100644
index 0000000000..885e71c6cc
--- /dev/null
+++ b/tests/registry/jsr/@denotest/multiple-exports/1.0.0/data.json
@@ -0,0 +1,3 @@
+{
+  "a": 1
+}
\ No newline at end of file
diff --git a/tests/registry/jsr/@denotest/multiple-exports/1.0.0/subtract.ts b/tests/registry/jsr/@denotest/multiple-exports/1.0.0/subtract.ts
new file mode 100644
index 0000000000..215c42310d
--- /dev/null
+++ b/tests/registry/jsr/@denotest/multiple-exports/1.0.0/subtract.ts
@@ -0,0 +1 @@
+export * from "jsr:@denotest/subtract@1";
diff --git a/tests/registry/jsr/@denotest/multiple-exports/1.0.0_meta.json b/tests/registry/jsr/@denotest/multiple-exports/1.0.0_meta.json
new file mode 100644
index 0000000000..d9f58b9a61
--- /dev/null
+++ b/tests/registry/jsr/@denotest/multiple-exports/1.0.0_meta.json
@@ -0,0 +1,7 @@
+{
+  "exports": {
+    "./add": "./add.ts",
+    "./subtract": "./subtract.ts",
+    "./data-json": "./data.json"
+  }
+}
diff --git a/tests/registry/jsr/@denotest/multiple-exports/meta.json b/tests/registry/jsr/@denotest/multiple-exports/meta.json
new file mode 100644
index 0000000000..02601e4d0d
--- /dev/null
+++ b/tests/registry/jsr/@denotest/multiple-exports/meta.json
@@ -0,0 +1,5 @@
+{
+  "versions": {
+    "1.0.0": {}
+  }
+}
diff --git a/tests/registry/jsr/@denotest/type-only-import/1.0.0/foo.ts b/tests/registry/jsr/@denotest/type-only-import/1.0.0/foo.ts
new file mode 100644
index 0000000000..4e09512c03
--- /dev/null
+++ b/tests/registry/jsr/@denotest/type-only-import/1.0.0/foo.ts
@@ -0,0 +1,3 @@
+export interface Foo {
+  bar: string;
+}
\ No newline at end of file
diff --git a/tests/registry/jsr/@denotest/type-only-import/1.0.0/mod.ts b/tests/registry/jsr/@denotest/type-only-import/1.0.0/mod.ts
new file mode 100644
index 0000000000..093a57d8a1
--- /dev/null
+++ b/tests/registry/jsr/@denotest/type-only-import/1.0.0/mod.ts
@@ -0,0 +1,3 @@
+import type { Foo } from "./foo.ts";
+
+export const foo: Foo = { bar: "foo" };
\ No newline at end of file
diff --git a/tests/registry/jsr/@denotest/type-only-import/1.0.0_meta.json b/tests/registry/jsr/@denotest/type-only-import/1.0.0_meta.json
new file mode 100644
index 0000000000..631a18d0e5
--- /dev/null
+++ b/tests/registry/jsr/@denotest/type-only-import/1.0.0_meta.json
@@ -0,0 +1,5 @@
+{
+  "exports": {
+    ".": "./mod.ts"
+  }
+}
diff --git a/tests/registry/jsr/@denotest/type-only-import/meta.json b/tests/registry/jsr/@denotest/type-only-import/meta.json
new file mode 100644
index 0000000000..02601e4d0d
--- /dev/null
+++ b/tests/registry/jsr/@denotest/type-only-import/meta.json
@@ -0,0 +1,5 @@
+{
+  "versions": {
+    "1.0.0": {}
+  }
+}
diff --git a/tests/registry/jsr/@denotest/unstable/1.0.0-beta.1/mod.ts b/tests/registry/jsr/@denotest/unstable/1.0.0-beta.1/mod.ts
new file mode 100644
index 0000000000..de63686189
--- /dev/null
+++ b/tests/registry/jsr/@denotest/unstable/1.0.0-beta.1/mod.ts
@@ -0,0 +1,3 @@
+export function doThing() {
+  return "thing";
+}
\ No newline at end of file
diff --git a/tests/registry/jsr/@denotest/unstable/1.0.0-beta.1_meta.json b/tests/registry/jsr/@denotest/unstable/1.0.0-beta.1_meta.json
new file mode 100644
index 0000000000..631a18d0e5
--- /dev/null
+++ b/tests/registry/jsr/@denotest/unstable/1.0.0-beta.1_meta.json
@@ -0,0 +1,5 @@
+{
+  "exports": {
+    ".": "./mod.ts"
+  }
+}
diff --git a/tests/registry/jsr/@denotest/unstable/1.0.0-beta.2/mod.ts b/tests/registry/jsr/@denotest/unstable/1.0.0-beta.2/mod.ts
new file mode 100644
index 0000000000..4e599641ef
--- /dev/null
+++ b/tests/registry/jsr/@denotest/unstable/1.0.0-beta.2/mod.ts
@@ -0,0 +1,3 @@
+export function doThing() {
+  return "thing2";
+}
\ No newline at end of file
diff --git a/tests/registry/jsr/@denotest/unstable/1.0.0-beta.2_meta.json b/tests/registry/jsr/@denotest/unstable/1.0.0-beta.2_meta.json
new file mode 100644
index 0000000000..631a18d0e5
--- /dev/null
+++ b/tests/registry/jsr/@denotest/unstable/1.0.0-beta.2_meta.json
@@ -0,0 +1,5 @@
+{
+  "exports": {
+    ".": "./mod.ts"
+  }
+}
diff --git a/tests/registry/jsr/@denotest/unstable/meta.json b/tests/registry/jsr/@denotest/unstable/meta.json
new file mode 100644
index 0000000000..7c5c9971ef
--- /dev/null
+++ b/tests/registry/jsr/@denotest/unstable/meta.json
@@ -0,0 +1,6 @@
+{
+  "versions": {
+    "1.0.0-beta.1": {},
+    "1.0.0-beta.2": {}
+  }
+}
diff --git a/tests/registry/npm-private3/@denotest3/basic/1.0.0/main.d.mts b/tests/registry/npm-private3/@denotest3/basic/1.0.0/main.d.mts
new file mode 100644
index 0000000000..29da1e6d7b
--- /dev/null
+++ b/tests/registry/npm-private3/@denotest3/basic/1.0.0/main.d.mts
@@ -0,0 +1,3 @@
+export declare function setValue(val: number): void;
+export declare function getValue(): number;
+export declare const url: string;
diff --git a/tests/registry/npm-private3/@denotest3/basic/1.0.0/main.mjs b/tests/registry/npm-private3/@denotest3/basic/1.0.0/main.mjs
new file mode 100644
index 0000000000..0a44f75859
--- /dev/null
+++ b/tests/registry/npm-private3/@denotest3/basic/1.0.0/main.mjs
@@ -0,0 +1,11 @@
+let value = 0;
+
+export function setValue(newValue) {
+  value = newValue;
+}
+
+export function getValue() {
+  return value;
+}
+
+export const url = import.meta.url;
diff --git a/tests/registry/npm-private3/@denotest3/basic/1.0.0/other.mjs b/tests/registry/npm-private3/@denotest3/basic/1.0.0/other.mjs
new file mode 100644
index 0000000000..00ed99da45
--- /dev/null
+++ b/tests/registry/npm-private3/@denotest3/basic/1.0.0/other.mjs
@@ -0,0 +1,3 @@
+export function hello() {
+  return "hello, world!";
+}
\ No newline at end of file
diff --git a/tests/registry/npm-private3/@denotest3/basic/1.0.0/package.json b/tests/registry/npm-private3/@denotest3/basic/1.0.0/package.json
new file mode 100644
index 0000000000..ce6ea33830
--- /dev/null
+++ b/tests/registry/npm-private3/@denotest3/basic/1.0.0/package.json
@@ -0,0 +1,7 @@
+{
+  "name": "@denotest3/basic",
+  "version": "1.0.0",
+  "type": "module",
+  "main": "main.mjs",
+  "types": "main.d.mts"
+}
diff --git a/tests/registry/npm/@denotest/cjs-multiple-exports/1.0.0/package.json b/tests/registry/npm/@denotest/cjs-multiple-exports/1.0.0/package.json
new file mode 100644
index 0000000000..43f07a2351
--- /dev/null
+++ b/tests/registry/npm/@denotest/cjs-multiple-exports/1.0.0/package.json
@@ -0,0 +1,8 @@
+{
+  "name": "@denotest/cjs-multiple-exports",
+  "version": "1.0.0",
+  "exports": {
+    ".": "./src/index.js",
+    "./add": "./src/add.js"
+  }
+}
\ No newline at end of file
diff --git a/tests/registry/npm/@denotest/cjs-multiple-exports/1.0.0/src/add.js b/tests/registry/npm/@denotest/cjs-multiple-exports/1.0.0/src/add.js
new file mode 100644
index 0000000000..42c8a7c604
--- /dev/null
+++ b/tests/registry/npm/@denotest/cjs-multiple-exports/1.0.0/src/add.js
@@ -0,0 +1,3 @@
+module.exports = function add(a, b) {
+  return a + b;
+};
\ No newline at end of file
diff --git a/tests/registry/npm/@denotest/cjs-multiple-exports/1.0.0/src/index.js b/tests/registry/npm/@denotest/cjs-multiple-exports/1.0.0/src/index.js
new file mode 100644
index 0000000000..432ed652ed
--- /dev/null
+++ b/tests/registry/npm/@denotest/cjs-multiple-exports/1.0.0/src/index.js
@@ -0,0 +1,3 @@
+module.exports = {
+  hello: "world"
+};
\ No newline at end of file
diff --git a/tests/registry/npm/@denotest/node-addon/1.0.0/package.json b/tests/registry/npm/@denotest/node-addon/1.0.0/package.json
index 5d50aa119a..d730e3dd03 100644
--- a/tests/registry/npm/@denotest/node-addon/1.0.0/package.json
+++ b/tests/registry/npm/@denotest/node-addon/1.0.0/package.json
@@ -2,7 +2,7 @@
   "name": "@denotest/node-addon",
   "version": "1.0.0",
   "scripts": {
-    "install": "node-gyp configure build"
+    "install": "node-gyp configure --verbose build"
   },
   "dependencies": {
     "node-gyp": "10.1.0"
diff --git a/tests/registry/npm/@denotest/print-npm-user-agent/1.0.0/index.js b/tests/registry/npm/@denotest/print-npm-user-agent/1.0.0/index.js
new file mode 100644
index 0000000000..b835b8e25e
--- /dev/null
+++ b/tests/registry/npm/@denotest/print-npm-user-agent/1.0.0/index.js
@@ -0,0 +1,2 @@
+#!/usr/bin/env node
+console.log(`npm_config_user_agent: ${process.env["npm_config_user_agent"]}`);
\ No newline at end of file
diff --git a/tests/registry/npm/@denotest/print-npm-user-agent/1.0.0/package.json b/tests/registry/npm/@denotest/print-npm-user-agent/1.0.0/package.json
new file mode 100644
index 0000000000..9071173467
--- /dev/null
+++ b/tests/registry/npm/@denotest/print-npm-user-agent/1.0.0/package.json
@@ -0,0 +1,10 @@
+{
+  "name": "@denotest/print-npm-user-agent",
+  "version": "1.0.0",
+  "bin": {
+    "print-npm-user-agent": "index.js"
+  },
+  "scripts": {
+    "postinstall": "echo postinstall && node index.js && exit 1"
+  }
+}
\ No newline at end of file
diff --git a/tests/registry/npm/@denotest/type-commonjs/1.0.0/index.js b/tests/registry/npm/@denotest/type-commonjs/1.0.0/index.js
index cb0ff5c3b5..d3f80a0495 100644
--- a/tests/registry/npm/@denotest/type-commonjs/1.0.0/index.js
+++ b/tests/registry/npm/@denotest/type-commonjs/1.0.0/index.js
@@ -1 +1,5 @@
-export {};
+// this module is declared as CommonJS, but during loading we'll
+// discover it's ESM and load it fine
+export function add(a, b) {
+  return a + b;
+}
diff --git a/tests/specs/add/comments_deno_json/__test__.jsonc b/tests/specs/add/comments_deno_json/__test__.jsonc
new file mode 100644
index 0000000000..7197eb473f
--- /dev/null
+++ b/tests/specs/add/comments_deno_json/__test__.jsonc
@@ -0,0 +1,16 @@
+{
+  "tempDir": true,
+  "steps": [
+    {
+      "args": "add npm:@denotest/esm-basic jsr:@denotest/add",
+      "output": "[WILDCARD]"
+    },
+    {
+      "args": [
+        "eval",
+        "console.log(Deno.readTextFileSync('deno.jsonc').trim())"
+      ],
+      "output": "deno.jsonc.out"
+    }
+  ]
+}
diff --git a/tests/specs/add/comments_deno_json/deno.jsonc b/tests/specs/add/comments_deno_json/deno.jsonc
new file mode 100644
index 0000000000..6a37b343d9
--- /dev/null
+++ b/tests/specs/add/comments_deno_json/deno.jsonc
@@ -0,0 +1,7 @@
+// Testing four space indents too
+// deno-fmt-ignore
+{
+    "imports": {
+        "@denotest/add2": "npm:@denotest/add" // some comment
+    }
+}
diff --git a/tests/specs/add/comments_deno_json/deno.jsonc.out b/tests/specs/add/comments_deno_json/deno.jsonc.out
new file mode 100644
index 0000000000..0cca35c64c
--- /dev/null
+++ b/tests/specs/add/comments_deno_json/deno.jsonc.out
@@ -0,0 +1,9 @@
+// Testing four space indents too
+// deno-fmt-ignore
+{
+    "imports": {
+        "@denotest/add": "jsr:@denotest/add@^1.0.0",
+        "@denotest/add2": "npm:@denotest/add", // some comment
+        "@denotest/esm-basic": "npm:@denotest/esm-basic@^1.0.0"
+    }
+}
diff --git a/tests/specs/add/dev/package.json.out b/tests/specs/add/dev/package.json.out
index 866724397f..d5ca56e004 100644
--- a/tests/specs/add/dev/package.json.out
+++ b/tests/specs/add/dev/package.json.out
@@ -1,5 +1,3 @@
 {
-  "devDependencies": {
-    "@denotest/esm-basic": "^1.0.0"
-  }
+  "devDependencies": { "@denotest/esm-basic": "^1.0.0" }
 }
diff --git a/tests/specs/add/existing_deps_singleline/__test__.jsonc b/tests/specs/add/existing_deps_singleline/__test__.jsonc
new file mode 100644
index 0000000000..8a6ef1a5fa
--- /dev/null
+++ b/tests/specs/add/existing_deps_singleline/__test__.jsonc
@@ -0,0 +1,16 @@
+{
+  "tempDir": true,
+  "steps": [
+    {
+      "args": "add npm:@denotest/esm-basic",
+      "output": "[WILDCARD]"
+    },
+    {
+      "args": [
+        "eval",
+        "console.log(Deno.readTextFileSync('package.json').trim())"
+      ],
+      "output": "package.json.out"
+    }
+  ]
+}
diff --git a/tests/specs/add/existing_deps_singleline/package.json b/tests/specs/add/existing_deps_singleline/package.json
new file mode 100644
index 0000000000..3677a6e2e8
--- /dev/null
+++ b/tests/specs/add/existing_deps_singleline/package.json
@@ -0,0 +1,3 @@
+{
+  "dependencies": { "cowsay": "*" }
+}
diff --git a/tests/specs/add/existing_deps_singleline/package.json.out b/tests/specs/add/existing_deps_singleline/package.json.out
new file mode 100644
index 0000000000..5a7fa36805
--- /dev/null
+++ b/tests/specs/add/existing_deps_singleline/package.json.out
@@ -0,0 +1,6 @@
+{
+  "dependencies": {
+    "@denotest/esm-basic": "^1.0.0",
+    "cowsay": "*"
+  }
+}
diff --git a/tests/specs/add/exiting_dev_deps/__test__.jsonc b/tests/specs/add/exiting_dev_deps/__test__.jsonc
new file mode 100644
index 0000000000..8a6ef1a5fa
--- /dev/null
+++ b/tests/specs/add/exiting_dev_deps/__test__.jsonc
@@ -0,0 +1,16 @@
+{
+  "tempDir": true,
+  "steps": [
+    {
+      "args": "add npm:@denotest/esm-basic",
+      "output": "[WILDCARD]"
+    },
+    {
+      "args": [
+        "eval",
+        "console.log(Deno.readTextFileSync('package.json').trim())"
+      ],
+      "output": "package.json.out"
+    }
+  ]
+}
diff --git a/tests/specs/add/exiting_dev_deps/package.json b/tests/specs/add/exiting_dev_deps/package.json
new file mode 100644
index 0000000000..e7b8c50618
--- /dev/null
+++ b/tests/specs/add/exiting_dev_deps/package.json
@@ -0,0 +1,5 @@
+{
+  "devDependencies": {
+    "cowsay": "*"
+  }
+}
diff --git a/tests/specs/add/exiting_dev_deps/package.json.out b/tests/specs/add/exiting_dev_deps/package.json.out
new file mode 100644
index 0000000000..9bff99bc28
--- /dev/null
+++ b/tests/specs/add/exiting_dev_deps/package.json.out
@@ -0,0 +1,8 @@
+{
+  "dependencies": {
+    "@denotest/esm-basic": "^1.0.0"
+  },
+  "devDependencies": {
+    "cowsay": "*"
+  }
+}
diff --git a/tests/specs/add/only_unstable_versions/__test__.jsonc b/tests/specs/add/only_unstable_versions/__test__.jsonc
index d05628b6f5..ea5a270b9c 100644
--- a/tests/specs/add/only_unstable_versions/__test__.jsonc
+++ b/tests/specs/add/only_unstable_versions/__test__.jsonc
@@ -1,9 +1,22 @@
 {
   "tempDir": true,
-  "steps": [
-    {
-      "args": "add npm:@denotest/unstable",
-      "output": "add.out"
+  "tests": {
+    "npm_package": {
+      "steps": [
+        {
+          "args": "add npm:@denotest/unstable",
+          "output": "add.out"
+        }
+      ]
+    },
+    "jsr_package": {
+      "steps": [
+        {
+          "args": "add jsr:@denotest/unstable",
+          "output": "add_jsr.out",
+          "exitCode": 1
+        }
+      ]
     }
-  ]
+  }
 }
diff --git a/tests/specs/add/only_unstable_versions/add_jsr.out b/tests/specs/add/only_unstable_versions/add_jsr.out
new file mode 100644
index 0000000000..95f0630bf3
--- /dev/null
+++ b/tests/specs/add/only_unstable_versions/add_jsr.out
@@ -0,0 +1 @@
+error: jsr:@denotest/unstable has only pre-release versions available. Try specifying a version: `deno add jsr:@denotest/unstable@^1.0.0-beta.2`
diff --git a/tests/specs/add/package_json_and_deno_json/__test__.jsonc b/tests/specs/add/package_json_and_deno_json/__test__.jsonc
index 0beee02d15..8d67a07c82 100644
--- a/tests/specs/add/package_json_and_deno_json/__test__.jsonc
+++ b/tests/specs/add/package_json_and_deno_json/__test__.jsonc
@@ -41,6 +41,29 @@
           "output": "good\n"
         }
       ]
+    },
+    "only_prefers_package_json_if_closer": {
+      "steps": [
+        {
+          "cwd": "./subdir",
+          "args": "add npm:@denotest/esm-basic jsr:@denotest/add npm:@denotest/say-hello",
+          "output": "[WILDCARD]"
+        },
+        {
+          "args": [
+            "eval",
+            "console.log(Deno.readTextFileSync('package.json').trim())"
+          ],
+          "output": "{}\n"
+        },
+        {
+          "args": [
+            "eval",
+            "console.log(Deno.readTextFileSync('./subdir/deno.json').trim())"
+          ],
+          "output": "subdir/prefer_if_closer_deno.json.out"
+        }
+      ]
     }
   }
 }
diff --git a/tests/specs/add/package_json_and_deno_json/subdir/deno.json b/tests/specs/add/package_json_and_deno_json/subdir/deno.json
new file mode 100644
index 0000000000..cc54bbd8a2
--- /dev/null
+++ b/tests/specs/add/package_json_and_deno_json/subdir/deno.json
@@ -0,0 +1,4 @@
+{
+  "name": "@test/subdir",
+  "exports": "./mod.ts"
+}
diff --git a/tests/testdata/run/check_js_points_to_ts/foo.d.ts b/tests/specs/add/package_json_and_deno_json/subdir/mod.ts
similarity index 100%
rename from tests/testdata/run/check_js_points_to_ts/foo.d.ts
rename to tests/specs/add/package_json_and_deno_json/subdir/mod.ts
diff --git a/tests/specs/add/package_json_and_deno_json/subdir/prefer_if_closer_deno.json.out b/tests/specs/add/package_json_and_deno_json/subdir/prefer_if_closer_deno.json.out
new file mode 100644
index 0000000000..8a5819d503
--- /dev/null
+++ b/tests/specs/add/package_json_and_deno_json/subdir/prefer_if_closer_deno.json.out
@@ -0,0 +1,9 @@
+{
+  "name": "@test/subdir",
+  "exports": "./mod.ts",
+  "imports": {
+    "@denotest/add": "jsr:@denotest/add@^1.0.0",
+    "@denotest/esm-basic": "npm:@denotest/esm-basic@^1.0.0",
+    "@denotest/say-hello": "npm:@denotest/say-hello@^1.0.0"
+  }
+}
diff --git a/tests/specs/add/update_lockfile_if_package_json/__test__.jsonc b/tests/specs/add/update_lockfile_if_package_json/__test__.jsonc
new file mode 100644
index 0000000000..80b35dc3c2
--- /dev/null
+++ b/tests/specs/add/update_lockfile_if_package_json/__test__.jsonc
@@ -0,0 +1,31 @@
+{
+  "tempDir": true,
+  "tests": {
+    "updates_lockfile": {
+      "steps": [
+        {
+          "args": "add npm:@denotest/esm-basic npm:@denotest/say-hello",
+          "output": "add.out"
+        },
+        {
+          "args": [
+            "eval",
+            "console.log(Deno.readTextFileSync('deno.lock'))"
+          ],
+          "output": "lockfile_add.out"
+        },
+        {
+          "args": "remove @denotest/esm-basic @denotest/say-hello",
+          "output": "remove.out"
+        },
+        {
+          "args": [
+            "eval",
+            "console.log(Deno.readTextFileSync('deno.lock'))"
+          ],
+          "output": "lockfile_remove.out"
+        }
+      ]
+    }
+  }
+}
diff --git a/tests/specs/add/update_lockfile_if_package_json/add.out b/tests/specs/add/update_lockfile_if_package_json/add.out
new file mode 100644
index 0000000000..97989b0c38
--- /dev/null
+++ b/tests/specs/add/update_lockfile_if_package_json/add.out
@@ -0,0 +1,5 @@
+[UNORDERED_START]
+Add npm:@denotest/esm-basic@1.0.0
+Add npm:@denotest/say-hello@1.0.0
+[UNORDERED_END]
+[WILDCARD]
\ No newline at end of file
diff --git a/tests/specs/add/update_lockfile_if_package_json/lockfile_add.out b/tests/specs/add/update_lockfile_if_package_json/lockfile_add.out
new file mode 100644
index 0000000000..d452c2de82
--- /dev/null
+++ b/tests/specs/add/update_lockfile_if_package_json/lockfile_add.out
@@ -0,0 +1,7 @@
+{
+  "version": "4",
+  "specifiers": {
+    "npm:@denotest/esm-basic@1": "1.0.0",
+    "npm:@denotest/say-hello@1": "1.0.0"
+  },
+[WILDCARD]
diff --git a/tests/specs/add/update_lockfile_if_package_json/lockfile_remove.out b/tests/specs/add/update_lockfile_if_package_json/lockfile_remove.out
new file mode 100644
index 0000000000..fdd4f1cc60
--- /dev/null
+++ b/tests/specs/add/update_lockfile_if_package_json/lockfile_remove.out
@@ -0,0 +1,4 @@
+{
+  "version": "4"
+}
+
diff --git a/tests/testdata/run/empty.ts b/tests/specs/add/update_lockfile_if_package_json/package.json
similarity index 100%
rename from tests/testdata/run/empty.ts
rename to tests/specs/add/update_lockfile_if_package_json/package.json
diff --git a/tests/specs/add/update_lockfile_if_package_json/remove.out b/tests/specs/add/update_lockfile_if_package_json/remove.out
new file mode 100644
index 0000000000..8efb0810cf
--- /dev/null
+++ b/tests/specs/add/update_lockfile_if_package_json/remove.out
@@ -0,0 +1,4 @@
+[UNORDERED_START]
+Removed @denotest/esm-basic
+Removed @denotest/say-hello
+[UNORDERED_END]
diff --git a/tests/specs/check/css_import/exists_run_with_check.out b/tests/specs/check/css_import/exists_run_with_check.out
index 1a1dafeb74..315769e40c 100644
--- a/tests/specs/check/css_import/exists_run_with_check.out
+++ b/tests/specs/check/css_import/exists_run_with_check.out
@@ -1,3 +1,3 @@
-error: Expected a JavaScript or TypeScript module, but identified a Unknown module. Importing these types of modules is currently not supported.
+error: Expected a JavaScript or TypeScript module, but identified a Css module. Importing these types of modules is currently not supported.
   Specifier: file:///[WILDLINE]/app.css
     at file:///[WILDLINE]/exists.ts:2:8
diff --git a/tests/specs/check/jsx_import_source_not_in_graph/__test__.jsonc b/tests/specs/check/jsx_import_source_not_in_graph/__test__.jsonc
new file mode 100644
index 0000000000..de0339cfb4
--- /dev/null
+++ b/tests/specs/check/jsx_import_source_not_in_graph/__test__.jsonc
@@ -0,0 +1,4 @@
+{
+  "args": "check main.ts",
+  "output": "main.out"
+}
diff --git a/tests/specs/check/jsx_import_source_not_in_graph/deno.json b/tests/specs/check/jsx_import_source_not_in_graph/deno.json
new file mode 100644
index 0000000000..219ef515f6
--- /dev/null
+++ b/tests/specs/check/jsx_import_source_not_in_graph/deno.json
@@ -0,0 +1,6 @@
+{
+  "compilerOptions": {
+    "jsx": "react-jsx",
+    "jsxImportSource": "npm:preact"
+  }
+}
diff --git a/tests/specs/check/jsx_import_source_not_in_graph/main.out b/tests/specs/check/jsx_import_source_not_in_graph/main.out
new file mode 100644
index 0000000000..1830c3186d
--- /dev/null
+++ b/tests/specs/check/jsx_import_source_not_in_graph/main.out
@@ -0,0 +1 @@
+Check file:///[WILDLINE]/main.ts
diff --git a/tests/specs/check/jsx_import_source_not_in_graph/main.ts b/tests/specs/check/jsx_import_source_not_in_graph/main.ts
new file mode 100644
index 0000000000..bf6b817963
--- /dev/null
+++ b/tests/specs/check/jsx_import_source_not_in_graph/main.ts
@@ -0,0 +1 @@
+console.log("Hello");
diff --git a/tests/specs/compile/byonm_main_sub_dir/deno.json b/tests/specs/compile/byonm_main_sub_dir/deno.json
index 6134d86d1c..fde86a1efb 100644
--- a/tests/specs/compile/byonm_main_sub_dir/deno.json
+++ b/tests/specs/compile/byonm_main_sub_dir/deno.json
@@ -1,3 +1,3 @@
 {
-  "unstable": ["byonm"]
+  "nodeModulesDir": "manual"
 }
diff --git a/tests/specs/compile/cjs/__test__.jsonc b/tests/specs/compile/cjs/__test__.jsonc
new file mode 100644
index 0000000000..9bdcf4724f
--- /dev/null
+++ b/tests/specs/compile/cjs/__test__.jsonc
@@ -0,0 +1,24 @@
+{
+  "tempDir": true,
+  "steps": [{
+    "if": "unix",
+    "args": "compile --output main main.js",
+    "output": "[WILDCARD]"
+  }, {
+    "if": "unix",
+    "commandName": "./main",
+    "args": [],
+    "output": "output.out",
+    "exitCode": 0
+  }, {
+    "if": "windows",
+    "args": "compile --output main.exe main.js",
+    "output": "[WILDCARD]"
+  }, {
+    "if": "windows",
+    "commandName": "./main.exe",
+    "args": [],
+    "output": "output.out",
+    "exitCode": 0
+  }]
+}
diff --git a/tests/specs/compile/cjs/add.cjs b/tests/specs/compile/cjs/add.cjs
new file mode 100644
index 0000000000..bf90601267
--- /dev/null
+++ b/tests/specs/compile/cjs/add.cjs
@@ -0,0 +1 @@
+module.exports = (a, b) => a + b;
diff --git a/tests/specs/compile/cjs/divide.cts b/tests/specs/compile/cjs/divide.cts
new file mode 100644
index 0000000000..d89a600a4e
--- /dev/null
+++ b/tests/specs/compile/cjs/divide.cts
@@ -0,0 +1 @@
+module.exports.divide = (a: number, b: number) => a / b;
diff --git a/tests/specs/compile/cjs/main.js b/tests/specs/compile/cjs/main.js
new file mode 100644
index 0000000000..c2a8c7be02
--- /dev/null
+++ b/tests/specs/compile/cjs/main.js
@@ -0,0 +1,5 @@
+import { add } from "./reexport.cjs";
+import { multiply } from "./multiply.cts";
+
+console.log(add(1, 2));
+console.log(multiply(2, 3));
diff --git a/tests/specs/compile/cjs/multiply.cts b/tests/specs/compile/cjs/multiply.cts
new file mode 100644
index 0000000000..3c0618cfc5
--- /dev/null
+++ b/tests/specs/compile/cjs/multiply.cts
@@ -0,0 +1,4 @@
+/// 
+exports.multiply = function (a: number, b: number): number {
+  return require("./divide.cts").divide(a, 1 / b);
+};
diff --git a/tests/specs/compile/cjs/output.out b/tests/specs/compile/cjs/output.out
new file mode 100644
index 0000000000..2559e5c49e
--- /dev/null
+++ b/tests/specs/compile/cjs/output.out
@@ -0,0 +1,2 @@
+3
+6
diff --git a/tests/specs/compile/cjs/reexport.cjs b/tests/specs/compile/cjs/reexport.cjs
new file mode 100644
index 0000000000..af7cecfdf4
--- /dev/null
+++ b/tests/specs/compile/cjs/reexport.cjs
@@ -0,0 +1 @@
+module.exports.add = require("./add.cjs");
diff --git a/tests/specs/compile/detect_cjs/__test__.jsonc b/tests/specs/compile/detect_cjs/__test__.jsonc
index 32bebb7a57..0abf121f05 100644
--- a/tests/specs/compile/detect_cjs/__test__.jsonc
+++ b/tests/specs/compile/detect_cjs/__test__.jsonc
@@ -1,24 +1,27 @@
 {
   "tempDir": true,
   "steps": [{
+    "args": "install",
+    "output": "[WILDCARD]"
+  }, {
     "if": "unix",
-    "args": "compile --allow-read --output main main.js",
+    "args": "compile --output main main.js",
     "output": "compile.out"
   }, {
     "if": "unix",
     "commandName": "./main",
     "args": [],
     "output": "output.out",
-    "exitCode": 1
+    "exitCode": 0
   }, {
     "if": "windows",
-    "args": "compile --allow-read --output main.exe main.js",
+    "args": "compile --output main.exe main.js",
     "output": "compile.out"
   }, {
     "if": "windows",
     "commandName": "./main.exe",
     "args": [],
     "output": "output.out",
-    "exitCode": 1
+    "exitCode": 0
  }]
 }
diff --git a/tests/specs/compile/detect_cjs/add.js b/tests/specs/compile/detect_cjs/add.js
index 2a886fbc18..94b0263f0e 100644
--- a/tests/specs/compile/detect_cjs/add.js
+++ b/tests/specs/compile/detect_cjs/add.js
@@ -1,3 +1,3 @@
 module.exports.add = function (a, b) {
-  return a + b;
+  return require("./subtract.ts").subtract(a, -b);
 };
diff --git a/tests/specs/compile/detect_cjs/compile.out b/tests/specs/compile/detect_cjs/compile.out
index 6509b7f29c..913e363c3e 100644
--- a/tests/specs/compile/detect_cjs/compile.out
+++ b/tests/specs/compile/detect_cjs/compile.out
@@ -1,3 +1,2 @@
-Warning --unstable-detect-cjs is not properly supported in deno compile. The compiled executable may encounter runtime errors.
 Check file:///[WILDLINE]/main.js
 Compile file:///[WILDLINE]
diff --git a/tests/specs/compile/detect_cjs/output.out b/tests/specs/compile/detect_cjs/output.out
index b53c443698..00750edc07 100644
--- a/tests/specs/compile/detect_cjs/output.out
+++ b/tests/specs/compile/detect_cjs/output.out
@@ -1 +1 @@
-error: Module not found: file:///[WILDLINE]/add.js
+3
diff --git a/tests/specs/compile/detect_cjs/package.json b/tests/specs/compile/detect_cjs/package.json
index 5bbefffbab..6e65b32ed5 100644
--- a/tests/specs/compile/detect_cjs/package.json
+++ b/tests/specs/compile/detect_cjs/package.json
@@ -1,3 +1,6 @@
 {
-  "type": "commonjs"
+  "type": "commonjs",
+  "dependencies": {
+    "@types/node": "*"
+  }
 }
diff --git a/tests/specs/compile/detect_cjs/subtract.ts b/tests/specs/compile/detect_cjs/subtract.ts
new file mode 100644
index 0000000000..e4f6760b77
--- /dev/null
+++ b/tests/specs/compile/detect_cjs/subtract.ts
@@ -0,0 +1,2 @@
+/// 
+module.exports.subtract = (a: number, b: number) => a - b;
diff --git a/tests/specs/compile/redirects/__test__.jsonc b/tests/specs/compile/redirects/__test__.jsonc
new file mode 100644
index 0000000000..07ce693999
--- /dev/null
+++ b/tests/specs/compile/redirects/__test__.jsonc
@@ -0,0 +1,22 @@
+{
+  "tempDir": true,
+  "steps": [{
+    "if": "unix",
+    "args": "compile -A --output main main.ts",
+    "output": "[WILDCARD]"
+  }, {
+    "if": "unix",
+    "commandName": "./main",
+    "args": [],
+    "output": "main.out"
+  }, {
+    "if": "windows",
+    "args": "compile -A --output main.exe main.ts",
+    "output": "[WILDCARD]"
+  }, {
+    "if": "windows",
+    "commandName": "./main.exe",
+    "args": [],
+    "output": "main.out"
+  }]
+}
diff --git a/tests/testdata/run/003_relative_import.ts.out b/tests/specs/compile/redirects/main.out
similarity index 100%
rename from tests/testdata/run/003_relative_import.ts.out
rename to tests/specs/compile/redirects/main.out
diff --git a/tests/specs/compile/redirects/main.ts b/tests/specs/compile/redirects/main.ts
new file mode 100644
index 0000000000..4396319014
--- /dev/null
+++ b/tests/specs/compile/redirects/main.ts
@@ -0,0 +1 @@
+import "http://localhost:4546/run/003_relative_import.ts";
diff --git a/tests/specs/fmt/html/well_formatted.html b/tests/specs/fmt/html/well_formatted.html
index c0c06cd9bd..7af705c049 100644
--- a/tests/specs/fmt/html/well_formatted.html
+++ b/tests/specs/fmt/html/well_formatted.html
@@ -1,4 +1,4 @@
-

content
+
content