1
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2024-11-21 15:04:11 -05:00

Merge branch 'main' into Add-Dynamic-Device-Path-Handling-for-Windows-File-Access

This commit is contained in:
Yazan AbdAl-Rahman 2024-11-12 10:07:30 +02:00 committed by GitHub
commit f436751d16
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2200 changed files with 20448 additions and 8248 deletions

View file

@ -65,11 +65,15 @@
"tests/wpt/runner/expectation.json", "tests/wpt/runner/expectation.json",
"tests/wpt/runner/manifest.json", "tests/wpt/runner/manifest.json",
"tests/wpt/suite", "tests/wpt/suite",
"third_party" "third_party",
"tests/specs/run/shebang_with_json_imports_tsc",
"tests/specs/run/shebang_with_json_imports_swc",
"tests/specs/run/ext_flag_takes_precedence_over_extension",
"tests/specs/run/error_syntax_empty_trailing_line/error_syntax_empty_trailing_line.mjs"
], ],
"plugins": [ "plugins": [
"https://plugins.dprint.dev/typescript-0.93.0.wasm", "https://plugins.dprint.dev/typescript-0.93.2.wasm",
"https://plugins.dprint.dev/json-0.19.3.wasm", "https://plugins.dprint.dev/json-0.19.4.wasm",
"https://plugins.dprint.dev/markdown-0.17.8.wasm", "https://plugins.dprint.dev/markdown-0.17.8.wasm",
"https://plugins.dprint.dev/toml-0.6.3.wasm", "https://plugins.dprint.dev/toml-0.6.3.wasm",
"https://plugins.dprint.dev/exec-0.5.0.json@8d9972eee71fa1590e04873540421f3eda7674d0f1aae3d7c788615e7b7413d0", "https://plugins.dprint.dev/exec-0.5.0.json@8d9972eee71fa1590e04873540421f3eda7674d0f1aae3d7c788615e7b7413d0",

View file

@ -10,7 +10,7 @@ concurrency:
jobs: jobs:
build: build:
name: cargo publish name: cargo publish
runs-on: ubuntu-20.04-xl runs-on: ubuntu-24.04-xl
timeout-minutes: 90 timeout-minutes: 90
env: env:
@ -33,7 +33,7 @@ jobs:
- uses: dsherret/rust-toolchain-file@v1 - uses: dsherret/rust-toolchain-file@v1
- name: Install deno - name: Install deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: v1.x deno-version: v1.x

View file

@ -5,15 +5,16 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
// Bump this number when you want to purge the cache. // Bump this number when you want to purge the cache.
// Note: the tools/release/01_bump_crate_versions.ts script will update this version // Note: the tools/release/01_bump_crate_versions.ts script will update this version
// automatically via regex, so ensure that this line maintains this format. // automatically via regex, so ensure that this line maintains this format.
const cacheVersion = 21; const cacheVersion = 25;
const ubuntuX86Runner = "ubuntu-22.04"; const ubuntuX86Runner = "ubuntu-24.04";
const ubuntuX86XlRunner = "ubuntu-22.04-xl"; const ubuntuX86XlRunner = "ubuntu-24.04-xl";
const ubuntuARMRunner = "ubicloud-standard-16-arm"; const ubuntuARMRunner = "ubicloud-standard-16-arm";
const windowsX86Runner = "windows-2022"; const windowsX86Runner = "windows-2022";
const windowsX86XlRunner = "windows-2022-xl"; const windowsX86XlRunner = "windows-2022-xl";
const macosX86Runner = "macos-13"; const macosX86Runner = "macos-13";
const macosArmRunner = "macos-14"; const macosArmRunner = "macos-14";
const selfHostedMacosArmRunner = "self-hosted";
const Runners = { const Runners = {
linuxX86: { linuxX86: {
@ -40,7 +41,8 @@ const Runners = {
macosArm: { macosArm: {
os: "macos", os: "macos",
arch: "aarch64", arch: "aarch64",
runner: macosArmRunner, runner:
`\${{ github.repository == 'denoland/deno' && startsWith(github.ref, 'refs/tags/') && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`,
}, },
windowsX86: { windowsX86: {
os: "windows", os: "windows",
@ -59,7 +61,7 @@ const prCacheKeyPrefix =
`${cacheVersion}-cargo-target-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ matrix.profile }}-\${{ matrix.job }}-`; `${cacheVersion}-cargo-target-\${{ matrix.os }}-\${{ matrix.arch }}-\${{ matrix.profile }}-\${{ matrix.job }}-`;
// Note that you may need to add more version to the `apt-get remove` line below if you change this // Note that you may need to add more version to the `apt-get remove` line below if you change this
const llvmVersion = 18; const llvmVersion = 19;
const installPkgsCommand = const installPkgsCommand =
`sudo apt-get install --no-install-recommends clang-${llvmVersion} lld-${llvmVersion} clang-tools-${llvmVersion} clang-format-${llvmVersion} clang-tidy-${llvmVersion}`; `sudo apt-get install --no-install-recommends clang-${llvmVersion} lld-${llvmVersion} clang-tools-${llvmVersion} clang-format-${llvmVersion} clang-tidy-${llvmVersion}`;
const sysRootStep = { const sysRootStep = {
@ -71,7 +73,7 @@ export DEBIAN_FRONTEND=noninteractive
sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null
# Remove older clang before we install # Remove older clang before we install
sudo apt-get -qq remove \ sudo apt-get -qq remove \
'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' > /dev/null 2> /dev/null 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null
# Install clang-XXX, lld-XXX, and debootstrap. # Install clang-XXX, lld-XXX, and debootstrap.
echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-${llvmVersion} main" | echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-${llvmVersion} main" |
@ -86,7 +88,7 @@ ${installPkgsCommand} || echo 'Failed. Trying again.' && sudo apt-get clean && s
(yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true (yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true
echo "Decompressing sysroot..." echo "Decompressing sysroot..."
wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20240528/sysroot-\`uname -m\`.tar.xz -O /tmp/sysroot.tar.xz wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20241030/sysroot-\`uname -m\`.tar.xz -O /tmp/sysroot.tar.xz
cd / cd /
xzcat /tmp/sysroot.tar.xz | sudo tar -x xzcat /tmp/sysroot.tar.xz | sudo tar -x
sudo mount --rbind /dev /sysroot/dev sudo mount --rbind /dev /sysroot/dev
@ -193,7 +195,7 @@ const installNodeStep = {
}; };
const installDenoStep = { const installDenoStep = {
name: "Install Deno", name: "Install Deno",
uses: "denoland/setup-deno@v1", uses: "denoland/setup-deno@v2",
with: { "deno-version": "v1.x" }, with: { "deno-version": "v1.x" },
}; };

View file

@ -62,18 +62,18 @@ jobs:
profile: debug profile: debug
- os: macos - os: macos
arch: x86_64 arch: x86_64
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || ''macos-13'' }}' runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || ''macos-13'' }}'
job: test job: test
profile: release profile: release
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}' skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
- os: macos - os: macos
arch: aarch64 arch: aarch64
runner: macos-14 runner: '${{ github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
job: test job: test
profile: debug profile: debug
- os: macos - os: macos
arch: aarch64 arch: aarch64
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || ''macos-14'' }}' runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && startsWith(github.ref, ''refs/tags/'') && ''self-hosted'' || ''macos-14'' }}'
job: test job: test
profile: release profile: release
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}' skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
@ -84,33 +84,33 @@ jobs:
profile: debug profile: debug
- os: windows - os: windows
arch: x86_64 arch: x86_64
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-22.04'' || github.repository == ''denoland/deno'' && ''windows-2022-xl'' || ''windows-2022'' }}' runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && ''windows-2022-xl'' || ''windows-2022'' }}'
job: test job: test
profile: release profile: release
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}' skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}'
- os: linux - os: linux
arch: x86_64 arch: x86_64
runner: '${{ github.repository == ''denoland/deno'' && ''ubuntu-22.04-xl'' || ''ubuntu-22.04'' }}' runner: '${{ github.repository == ''denoland/deno'' && ''ubuntu-24.04-xl'' || ''ubuntu-24.04'' }}'
job: test job: test
profile: release profile: release
use_sysroot: true use_sysroot: true
wpt: '${{ !startsWith(github.ref, ''refs/tags/'') }}' wpt: '${{ !startsWith(github.ref, ''refs/tags/'') }}'
- os: linux - os: linux
arch: x86_64 arch: x86_64
runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench''))) && ''ubuntu-22.04'' || github.repository == ''denoland/deno'' && ''ubuntu-22.04-xl'' || ''ubuntu-22.04'' }}' runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench''))) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && ''ubuntu-24.04-xl'' || ''ubuntu-24.04'' }}'
job: bench job: bench
profile: release profile: release
use_sysroot: true use_sysroot: true
skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench'')) }}' skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'' && !contains(github.event.pull_request.labels.*.name, ''ci-bench'')) }}'
- os: linux - os: linux
arch: x86_64 arch: x86_64
runner: ubuntu-22.04 runner: ubuntu-24.04
job: test job: test
profile: debug profile: debug
use_sysroot: true use_sysroot: true
- os: linux - os: linux
arch: x86_64 arch: x86_64
runner: ubuntu-22.04 runner: ubuntu-24.04
job: lint job: lint
profile: debug profile: debug
- os: linux - os: linux
@ -178,7 +178,7 @@ jobs:
if: '!(matrix.skip)' if: '!(matrix.skip)'
- if: '!(matrix.skip) && (matrix.job == ''lint'' || matrix.job == ''test'' || matrix.job == ''bench'')' - if: '!(matrix.skip) && (matrix.job == ''lint'' || matrix.job == ''test'' || matrix.job == ''bench'')'
name: Install Deno name: Install Deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: v1.x deno-version: v1.x
- name: Install Python - name: Install Python
@ -252,22 +252,22 @@ jobs:
# to complete. # to complete.
sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null
# Remove older clang before we install # Remove older clang before we install
sudo apt-get -qq remove 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' > /dev/null 2> /dev/null sudo apt-get -qq remove 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null
# Install clang-XXX, lld-XXX, and debootstrap. # Install clang-XXX, lld-XXX, and debootstrap.
echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-18 main" | echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-19 main" |
sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-18.list sudo dd of=/etc/apt/sources.list.d/llvm-toolchain-jammy-19.list
curl https://apt.llvm.org/llvm-snapshot.gpg.key | curl https://apt.llvm.org/llvm-snapshot.gpg.key |
gpg --dearmor | gpg --dearmor |
sudo dd of=/etc/apt/trusted.gpg.d/llvm-snapshot.gpg sudo dd of=/etc/apt/trusted.gpg.d/llvm-snapshot.gpg
sudo apt-get update sudo apt-get update
# this was unreliable sometimes, so try again if it fails # this was unreliable sometimes, so try again if it fails
sudo apt-get install --no-install-recommends clang-18 lld-18 clang-tools-18 clang-format-18 clang-tidy-18 || echo 'Failed. Trying again.' && sudo apt-get clean && sudo apt-get update && sudo apt-get install --no-install-recommends clang-18 lld-18 clang-tools-18 clang-format-18 clang-tidy-18 sudo apt-get install --no-install-recommends clang-19 lld-19 clang-tools-19 clang-format-19 clang-tidy-19 || echo 'Failed. Trying again.' && sudo apt-get clean && sudo apt-get update && sudo apt-get install --no-install-recommends clang-19 lld-19 clang-tools-19 clang-format-19 clang-tidy-19
# Fix alternatives # Fix alternatives
(yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true (yes '' | sudo update-alternatives --force --all) > /dev/null 2> /dev/null || true
echo "Decompressing sysroot..." echo "Decompressing sysroot..."
wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20240528/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20241030/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz
cd / cd /
xzcat /tmp/sysroot.tar.xz | sudo tar -x xzcat /tmp/sysroot.tar.xz | sudo tar -x
sudo mount --rbind /dev /sysroot/dev sudo mount --rbind /dev /sysroot/dev
@ -299,8 +299,8 @@ jobs:
CARGO_PROFILE_RELEASE_LTO=false CARGO_PROFILE_RELEASE_LTO=false
RUSTFLAGS<<__1 RUSTFLAGS<<__1
-C linker-plugin-lto=true -C linker-plugin-lto=true
-C linker=clang-18 -C linker=clang-19
-C link-arg=-fuse-ld=lld-18 -C link-arg=-fuse-ld=lld-19
-C link-arg=-ldl -C link-arg=-ldl
-C link-arg=-Wl,--allow-shlib-undefined -C link-arg=-Wl,--allow-shlib-undefined
-C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache -C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache
@ -310,8 +310,8 @@ jobs:
__1 __1
RUSTDOCFLAGS<<__1 RUSTDOCFLAGS<<__1
-C linker-plugin-lto=true -C linker-plugin-lto=true
-C linker=clang-18 -C linker=clang-19
-C link-arg=-fuse-ld=lld-18 -C link-arg=-fuse-ld=lld-19
-C link-arg=-ldl -C link-arg=-ldl
-C link-arg=-Wl,--allow-shlib-undefined -C link-arg=-Wl,--allow-shlib-undefined
-C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache -C link-arg=-Wl,--thinlto-cache-dir=$(pwd)/target/release/lto-cache
@ -319,7 +319,7 @@ jobs:
--cfg tokio_unstable --cfg tokio_unstable
$RUSTFLAGS $RUSTFLAGS
__1 __1
CC=/usr/bin/clang-18 CC=/usr/bin/clang-19
CFLAGS=-flto=thin $CFLAGS CFLAGS=-flto=thin $CFLAGS
" > $GITHUB_ENV " > $GITHUB_ENV
- name: Remove macOS cURL --ipv4 flag - name: Remove macOS cURL --ipv4 flag
@ -361,8 +361,8 @@ jobs:
path: |- path: |-
~/.cargo/registry/index ~/.cargo/registry/index
~/.cargo/registry/cache ~/.cargo/registry/cache
key: '21-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}' key: '25-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
restore-keys: '21-cargo-home-${{ matrix.os }}-${{ matrix.arch }}' restore-keys: '25-cargo-home-${{ matrix.os }}-${{ matrix.arch }}'
if: '!(matrix.skip)' if: '!(matrix.skip)'
- name: Restore cache build output (PR) - name: Restore cache build output (PR)
uses: actions/cache/restore@v4 uses: actions/cache/restore@v4
@ -375,7 +375,7 @@ jobs:
!./target/*/*.zip !./target/*/*.zip
!./target/*/*.tar.gz !./target/*/*.tar.gz
key: never_saved key: never_saved
restore-keys: '21-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-' restore-keys: '25-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
- name: Apply and update mtime cache - name: Apply and update mtime cache
if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))' if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
uses: ./.github/mtime_cache uses: ./.github/mtime_cache
@ -685,10 +685,10 @@ jobs:
!./target/*/*.zip !./target/*/*.zip
!./target/*/*.sha256sum !./target/*/*.sha256sum
!./target/*/*.tar.gz !./target/*/*.tar.gz
key: '21-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' key: '25-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
publish-canary: publish-canary:
name: publish canary name: publish canary
runs-on: ubuntu-22.04 runs-on: ubuntu-24.04
needs: needs:
- build - build
if: github.repository == 'denoland/deno' && github.ref == 'refs/heads/main' if: github.repository == 'denoland/deno' && github.ref == 'refs/heads/main'

View file

@ -7,7 +7,7 @@ on:
jobs: jobs:
update-dl-version: update-dl-version:
name: update dl.deno.land version name: update dl.deno.land version
runs-on: ubuntu-22.04 runs-on: ubuntu-24.04
if: github.repository == 'denoland/deno' if: github.repository == 'denoland/deno'
steps: steps:
- name: Authenticate with Google Cloud - name: Authenticate with Google Cloud

View file

@ -40,7 +40,7 @@ jobs:
project_id: denoland project_id: denoland
- name: Install deno - name: Install deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: v1.x deno-version: v1.x

View file

@ -16,7 +16,7 @@ on:
jobs: jobs:
build: build:
name: start release name: start release
runs-on: ubuntu-22.04 runs-on: ubuntu-24.04
timeout-minutes: 30 timeout-minutes: 30
env: env:
@ -34,7 +34,7 @@ jobs:
uses: actions/checkout@v4 uses: actions/checkout@v4
- name: Install deno - name: Install deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: v1.x deno-version: v1.x

View file

@ -16,7 +16,7 @@ on:
jobs: jobs:
build: build:
name: version bump name: version bump
runs-on: ubuntu-22.04 runs-on: ubuntu-24.04
timeout-minutes: 90 timeout-minutes: 90
env: env:
@ -39,7 +39,7 @@ jobs:
- uses: dsherret/rust-toolchain-file@v1 - uses: dsherret/rust-toolchain-file@v1
- name: Install deno - name: Install deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: v1.x deno-version: v1.x

View file

@ -20,7 +20,7 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
deno-version: [v1.x, canary] deno-version: [v1.x, canary]
os: [ubuntu-22.04-xl] os: [ubuntu-24.04-xl]
steps: steps:
- name: Clone repository - name: Clone repository
@ -30,7 +30,7 @@ jobs:
persist-credentials: false persist-credentials: false
- name: Setup Deno - name: Setup Deno
uses: denoland/setup-deno@v1 uses: denoland/setup-deno@v2
with: with:
deno-version: ${{ matrix.deno-version }} deno-version: ${{ matrix.deno-version }}

427
Cargo.lock generated
View file

@ -765,6 +765,8 @@ dependencies = [
"fastwebsockets", "fastwebsockets",
"file_test_runner", "file_test_runner",
"flaky_test", "flaky_test",
"hickory-client",
"hickory-server",
"http 1.1.0", "http 1.1.0",
"http-body-util", "http-body-util",
"hyper 1.4.1", "hyper 1.4.1",
@ -778,8 +780,6 @@ dependencies = [
"serde", "serde",
"test_server", "test_server",
"tokio", "tokio",
"trust-dns-client",
"trust-dns-server",
"url", "url",
"uuid", "uuid",
"zeromq", "zeromq",
@ -1154,7 +1154,7 @@ dependencies = [
[[package]] [[package]]
name = "deno" name = "deno"
version = "2.0.2" version = "2.0.6"
dependencies = [ dependencies = [
"anstream", "anstream",
"async-trait", "async-trait",
@ -1196,7 +1196,6 @@ dependencies = [
"dprint-plugin-markdown", "dprint-plugin-markdown",
"dprint-plugin-typescript", "dprint-plugin-typescript",
"env_logger", "env_logger",
"eszip",
"fancy-regex", "fancy-regex",
"faster-hex", "faster-hex",
"flate2", "flate2",
@ -1214,7 +1213,6 @@ dependencies = [
"lazy-regex", "lazy-regex",
"libc", "libc",
"libsui", "libsui",
"libuv-sys-lite",
"libz-sys", "libz-sys",
"log", "log",
"lsp-types", "lsp-types",
@ -1222,7 +1220,6 @@ dependencies = [
"markup_fmt", "markup_fmt",
"memmem", "memmem",
"monch", "monch",
"napi_sym",
"nix", "nix",
"node_resolver", "node_resolver",
"notify", "notify",
@ -1263,9 +1260,7 @@ dependencies = [
"walkdir", "walkdir",
"which 4.4.2", "which 4.4.2",
"winapi", "winapi",
"windows-sys 0.52.0",
"winres", "winres",
"yoke",
"zeromq", "zeromq",
"zip", "zip",
"zstd", "zstd",
@ -1284,9 +1279,9 @@ dependencies = [
[[package]] [[package]]
name = "deno_ast" name = "deno_ast"
version = "0.42.2" version = "0.43.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2b9d03b1bbeeecdac54367f075d572131736d06c5be3bc49037855bc5ab1bbb" checksum = "48d00b724e06d2081a141ec1155756a0b465d413d8e2a7515221f61d482eb2ee"
dependencies = [ dependencies = [
"base64 0.21.7", "base64 0.21.7",
"deno_media_type", "deno_media_type",
@ -1328,7 +1323,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_bench_util" name = "deno_bench_util"
version = "0.167.0" version = "0.171.0"
dependencies = [ dependencies = [
"bencher", "bencher",
"deno_core", "deno_core",
@ -1337,7 +1332,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_broadcast_channel" name = "deno_broadcast_channel"
version = "0.167.0" version = "0.171.0"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"deno_core", "deno_core",
@ -1348,7 +1343,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_cache" name = "deno_cache"
version = "0.105.0" version = "0.109.0"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"deno_core", "deno_core",
@ -1361,9 +1356,9 @@ dependencies = [
[[package]] [[package]]
name = "deno_cache_dir" name = "deno_cache_dir"
version = "0.13.0" version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "186a102b13b4512841f5f40784cd25822042d22954afe3b5b070d406d15eb4f2" checksum = "08c1f52170cd7715f8006da54cde1444863a0d6fbd9c11d037a737db2dec8e22"
dependencies = [ dependencies = [
"base32", "base32",
"deno_media_type", "deno_media_type",
@ -1381,7 +1376,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_canvas" name = "deno_canvas"
version = "0.42.0" version = "0.46.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"deno_webgpu", "deno_webgpu",
@ -1392,9 +1387,9 @@ dependencies = [
[[package]] [[package]]
name = "deno_config" name = "deno_config"
version = "0.37.1" version = "0.38.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3cb7a1723676fba5964f8d7441d8b53748f9e74d6d4241be7de9730da021859a" checksum = "966825073480a6ac7e01977a3879d13edc8d6ea2d65ea164b37156a5fb206e9a"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"deno_package_json", "deno_package_json",
@ -1416,16 +1411,16 @@ dependencies = [
[[package]] [[package]]
name = "deno_console" name = "deno_console"
version = "0.173.0" version = "0.177.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
] ]
[[package]] [[package]]
name = "deno_core" name = "deno_core"
version = "0.314.2" version = "0.318.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83138917579676069b423c3eb9be3c1e579f60dc022d85f6ded4c792456255ff" checksum = "10cae2393219ff9278123f7b24799cdfab37c7d6561b69ca06ced115cac92111"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bincode", "bincode",
@ -1461,7 +1456,7 @@ checksum = "a13951ea98c0a4c372f162d669193b4c9d991512de9f2381dd161027f34b26b1"
[[package]] [[package]]
name = "deno_cron" name = "deno_cron"
version = "0.53.0" version = "0.57.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -1474,7 +1469,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_crypto" name = "deno_crypto"
version = "0.187.0" version = "0.191.0"
dependencies = [ dependencies = [
"aes", "aes",
"aes-gcm", "aes-gcm",
@ -1511,9 +1506,9 @@ dependencies = [
[[package]] [[package]]
name = "deno_doc" name = "deno_doc"
version = "0.154.0" version = "0.156.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17e204e45b0d79750880114e37b34abe19ad0710d8435a8da8f23a528fe98de4" checksum = "2585b98d6ad76dae30bf2d7b6d71b8363cae041158b8780d14a2f4fe17590a61"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"cfg-if", "cfg-if",
@ -1536,7 +1531,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_fetch" name = "deno_fetch"
version = "0.197.0" version = "0.201.0"
dependencies = [ dependencies = [
"base64 0.21.7", "base64 0.21.7",
"bytes", "bytes",
@ -1569,7 +1564,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_ffi" name = "deno_ffi"
version = "0.160.0" version = "0.164.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"deno_permissions", "deno_permissions",
@ -1578,6 +1573,7 @@ dependencies = [
"libffi", "libffi",
"libffi-sys", "libffi-sys",
"log", "log",
"num-bigint",
"serde", "serde",
"serde-value", "serde-value",
"serde_json", "serde_json",
@ -1588,7 +1584,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_fs" name = "deno_fs"
version = "0.83.0" version = "0.87.0"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"base32", "base32",
@ -1610,9 +1606,9 @@ dependencies = [
[[package]] [[package]]
name = "deno_graph" name = "deno_graph"
version = "0.83.3" version = "0.84.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77163c46755676d8f793fc19e365537ba660a8db173cd1e02d21eb010c0b3cef" checksum = "cd4f4a14aa069087be41c2998077b0453f0191747898f96e6343f700abfc2c18"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -1639,7 +1635,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_http" name = "deno_http"
version = "0.171.0" version = "0.175.0"
dependencies = [ dependencies = [
"async-compression", "async-compression",
"async-trait", "async-trait",
@ -1678,7 +1674,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_io" name = "deno_io"
version = "0.83.0" version = "0.87.0"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"deno_core", "deno_core",
@ -1699,7 +1695,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_kv" name = "deno_kv"
version = "0.81.0" version = "0.85.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -1730,9 +1726,9 @@ dependencies = [
[[package]] [[package]]
name = "deno_lint" name = "deno_lint"
version = "0.67.0" version = "0.68.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "871b60e32bfb6c110cbb9b0688dbf048f81e5d347fe4ce5a42239263de9dd938" checksum = "bb994e6d1b18223df0a756c7948143b35682941d615edffef60d5b38822f38ac"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"deno_ast", "deno_ast",
@ -1760,9 +1756,9 @@ dependencies = [
[[package]] [[package]]
name = "deno_media_type" name = "deno_media_type"
version = "0.1.4" version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8978229b82552bf8457a0125aa20863f023619cfc21ebb007b1e571d68fd85b" checksum = "7fcf552fbdedbe81c89705349d7d2485c7051382b000dfddbdbf7fc25931cf83"
dependencies = [ dependencies = [
"data-url", "data-url",
"serde", "serde",
@ -1771,12 +1767,17 @@ dependencies = [
[[package]] [[package]]
name = "deno_napi" name = "deno_napi"
version = "0.104.0" version = "0.108.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"deno_permissions", "deno_permissions",
"libc",
"libloading 0.7.4", "libloading 0.7.4",
"libuv-sys-lite",
"log",
"napi_sym",
"thiserror", "thiserror",
"windows-sys 0.52.0",
] ]
[[package]] [[package]]
@ -1794,24 +1795,24 @@ dependencies = [
[[package]] [[package]]
name = "deno_net" name = "deno_net"
version = "0.165.0" version = "0.169.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"deno_permissions", "deno_permissions",
"deno_tls", "deno_tls",
"hickory-proto",
"hickory-resolver",
"pin-project", "pin-project",
"rustls-tokio-stream", "rustls-tokio-stream",
"serde", "serde",
"socket2", "socket2",
"thiserror", "thiserror",
"tokio", "tokio",
"trust-dns-proto",
"trust-dns-resolver",
] ]
[[package]] [[package]]
name = "deno_node" name = "deno_node"
version = "0.110.0" version = "0.114.0"
dependencies = [ dependencies = [
"aead-gcm-stream", "aead-gcm-stream",
"aes", "aes",
@ -1920,9 +1921,9 @@ dependencies = [
[[package]] [[package]]
name = "deno_ops" name = "deno_ops"
version = "0.190.1" version = "0.194.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26f46d4e4f52f26c882b74a9b58810ea75252b807cf0966166ec333077cdfd85" checksum = "f760b492bd638c1dc3e992d11672c259fbe9a233162099a8347591c9e22d0391"
dependencies = [ dependencies = [
"proc-macro-rules", "proc-macro-rules",
"proc-macro2", "proc-macro2",
@ -1960,7 +1961,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_permissions" name = "deno_permissions"
version = "0.33.0" version = "0.37.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"deno_path_util", "deno_path_util",
@ -1971,13 +1972,14 @@ dependencies = [
"once_cell", "once_cell",
"percent-encoding", "percent-encoding",
"serde", "serde",
"thiserror",
"which 4.4.2", "which 4.4.2",
"winapi", "winapi",
] ]
[[package]] [[package]]
name = "deno_resolver" name = "deno_resolver"
version = "0.5.0" version = "0.9.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"base32", "base32",
@ -1993,7 +1995,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_runtime" name = "deno_runtime"
version = "0.182.0" version = "0.186.0"
dependencies = [ dependencies = [
"color-print", "color-print",
"deno_ast", "deno_ast",
@ -2043,11 +2045,13 @@ dependencies = [
"percent-encoding", "percent-encoding",
"regex", "regex",
"rustyline", "rustyline",
"same-file",
"serde", "serde",
"signal-hook", "signal-hook",
"signal-hook-registry", "signal-hook-registry",
"tempfile", "tempfile",
"test_server", "test_server",
"thiserror",
"tokio", "tokio",
"tokio-metrics", "tokio-metrics",
"twox-hash", "twox-hash",
@ -2059,9 +2063,9 @@ dependencies = [
[[package]] [[package]]
name = "deno_semver" name = "deno_semver"
version = "0.5.14" version = "0.5.16"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "670fec7ef309384e23c2a90ac5d2d9d91a776d225306c75f5cdd28cf6cc8a59f" checksum = "c957c6a57c38b7dde2315df0da0ec228911e56a74f185b108a488d0401841a67"
dependencies = [ dependencies = [
"monch", "monch",
"once_cell", "once_cell",
@ -2109,7 +2113,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_tls" name = "deno_tls"
version = "0.160.0" version = "0.164.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"deno_native_certs", "deno_native_certs",
@ -2158,7 +2162,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_url" name = "deno_url"
version = "0.173.0" version = "0.177.0"
dependencies = [ dependencies = [
"deno_bench_util", "deno_bench_util",
"deno_console", "deno_console",
@ -2170,7 +2174,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_web" name = "deno_web"
version = "0.204.0" version = "0.208.0"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"base64-simd 0.8.0", "base64-simd 0.8.0",
@ -2192,7 +2196,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_webgpu" name = "deno_webgpu"
version = "0.140.0" version = "0.144.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"raw-window-handle", "raw-window-handle",
@ -2205,7 +2209,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_webidl" name = "deno_webidl"
version = "0.173.0" version = "0.177.0"
dependencies = [ dependencies = [
"deno_bench_util", "deno_bench_util",
"deno_core", "deno_core",
@ -2213,7 +2217,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_websocket" name = "deno_websocket"
version = "0.178.0" version = "0.182.0"
dependencies = [ dependencies = [
"bytes", "bytes",
"deno_core", "deno_core",
@ -2235,7 +2239,7 @@ dependencies = [
[[package]] [[package]]
name = "deno_webstorage" name = "deno_webstorage"
version = "0.168.0" version = "0.172.0"
dependencies = [ dependencies = [
"deno_core", "deno_core",
"deno_web", "deno_web",
@ -2563,9 +2567,9 @@ dependencies = [
[[package]] [[package]]
name = "dprint-plugin-json" name = "dprint-plugin-json"
version = "0.19.3" version = "0.19.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a19f4a9f2f548b2098b8ec597d7bb40af133b6e9a3187c1d3c4caa101b8c93c3" checksum = "57f91e594559b450b7c5d6a0ba9f3f9fe951c1ea371168f7c95973da3fdbd85a"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"dprint-core", "dprint-core",
@ -2577,9 +2581,9 @@ dependencies = [
[[package]] [[package]]
name = "dprint-plugin-jupyter" name = "dprint-plugin-jupyter"
version = "0.1.3" version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c67b0e54b552a4775c221b44ed33be918c400bd8041d1f044f947fbb01025cc0" checksum = "d0d20684e37b3824e2bc917cfcb14e2cdf88398eef507335d839cbd78172bfee"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"dprint-core", "dprint-core",
@ -2605,9 +2609,9 @@ dependencies = [
[[package]] [[package]]
name = "dprint-plugin-typescript" name = "dprint-plugin-typescript"
version = "0.93.0" version = "0.93.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9308d98b923b7c0335c2ee1560199e3f2321b1be82803107b4ba4ed5dac46cc" checksum = "3ff29fd136541e59d51946f0d2d353fefc886776f61a799ebfb5838b06cef13b"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"deno_ast", "deno_ast",
@ -2635,15 +2639,6 @@ dependencies = [
"text_lines", "text_lines",
] ]
[[package]]
name = "drain"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d105028bd2b5dfcb33318fd79a445001ead36004dd8dffef1bdd7e493d8bc1e"
dependencies = [
"tokio",
]
[[package]] [[package]]
name = "dsa" name = "dsa"
version = "0.6.3" version = "0.6.3"
@ -2891,29 +2886,6 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31ae425815400e5ed474178a7a22e275a9687086a12ca63ec793ff292d8fdae8" checksum = "31ae425815400e5ed474178a7a22e275a9687086a12ca63ec793ff292d8fdae8"
[[package]]
name = "eszip"
version = "0.79.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8eb55c89bdde75a3826a79d49c9d847623ae7fbdb2695b542982982da990d33e"
dependencies = [
"anyhow",
"async-trait",
"base64 0.21.7",
"deno_ast",
"deno_graph",
"deno_npm",
"deno_semver",
"futures",
"hashlink 0.8.4",
"indexmap",
"serde",
"serde_json",
"sha2",
"thiserror",
"url",
]
[[package]] [[package]]
name = "fallible-iterator" name = "fallible-iterator"
version = "0.3.0" version = "0.3.0"
@ -3525,15 +3497,6 @@ dependencies = [
"allocator-api2", "allocator-api2",
] ]
[[package]]
name = "hashlink"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
dependencies = [
"hashbrown",
]
[[package]] [[package]]
name = "hashlink" name = "hashlink"
version = "0.9.1" version = "0.9.1"
@ -3573,6 +3536,92 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfa686283ad6dd069f105e5ab091b04c62850d3e4cf5d67debad1933f55023df" checksum = "dfa686283ad6dd069f105e5ab091b04c62850d3e4cf5d67debad1933f55023df"
[[package]]
name = "hickory-client"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bab9683b08d8f8957a857b0236455d80e1886eaa8c6178af556aa7871fb61b55"
dependencies = [
"cfg-if",
"data-encoding",
"futures-channel",
"futures-util",
"hickory-proto",
"once_cell",
"radix_trie",
"rand",
"thiserror",
"tokio",
"tracing",
]
[[package]]
name = "hickory-proto"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07698b8420e2f0d6447a436ba999ec85d8fbf2a398bbd737b82cac4a2e96e512"
dependencies = [
"async-trait",
"cfg-if",
"data-encoding",
"enum-as-inner",
"futures-channel",
"futures-io",
"futures-util",
"idna 0.4.0",
"ipnet",
"once_cell",
"rand",
"serde",
"thiserror",
"tinyvec",
"tokio",
"tracing",
"url",
]
[[package]]
name = "hickory-resolver"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28757f23aa75c98f254cf0405e6d8c25b831b32921b050a66692427679b1f243"
dependencies = [
"cfg-if",
"futures-util",
"hickory-proto",
"ipconfig",
"lru-cache",
"once_cell",
"parking_lot",
"rand",
"resolv-conf",
"serde",
"smallvec",
"thiserror",
"tokio",
"tracing",
]
[[package]]
name = "hickory-server"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9be0e43c556b9b3fdb6c7c71a9a32153a2275d02419e3de809e520bfcfe40c37"
dependencies = [
"async-trait",
"bytes",
"cfg-if",
"enum-as-inner",
"futures-util",
"hickory-proto",
"serde",
"thiserror",
"time",
"tokio",
"tokio-util",
"tracing",
]
[[package]] [[package]]
name = "hkdf" name = "hkdf"
version = "0.12.4" version = "0.12.4"
@ -4034,9 +4083,9 @@ dependencies = [
[[package]] [[package]]
name = "jsonc-parser" name = "jsonc-parser"
version = "0.23.0" version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7725c320caac8c21d8228c1d055af27a995d371f78cc763073d3e068323641b5" checksum = "b558af6b49fd918e970471374e7a798b2c9bbcda624a210ffa3901ee5614bc8e"
dependencies = [ dependencies = [
"serde_json", "serde_json",
] ]
@ -4223,9 +4272,9 @@ dependencies = [
[[package]] [[package]]
name = "libsui" name = "libsui"
version = "0.4.0" version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "205eca4e7beaad637dcd38fe41292065894ee7f498077cf3c135d5f7252b9f27" checksum = "89795977654ad6250d6c0915411b622bac22f9efb4f852af94b2e00964cab832"
dependencies = [ dependencies = [
"editpe", "editpe",
"libc", "libc",
@ -4327,9 +4376,9 @@ dependencies = [
[[package]] [[package]]
name = "malva" name = "malva"
version = "0.10.1" version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "484beda6e5d775ed06a8ec0fce79e51d39f49d834ed2a29da3f437079321804f" checksum = "1c67b97ed99f56b86fa3c010843441f1fcdb71884bab96b8551bb3d1e7c6d529"
dependencies = [ dependencies = [
"aho-corasick", "aho-corasick",
"itertools 0.13.0", "itertools 0.13.0",
@ -4340,9 +4389,9 @@ dependencies = [
[[package]] [[package]]
name = "markup_fmt" name = "markup_fmt"
version = "0.13.1" version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9dab5ae899659fbe5c8835b2c8ca8d3e357974a3e454138925b404004973361f" checksum = "ebae65c91eab3d42231232bf48107f351e5a8d511454927218c53aeb68bbdb6f"
dependencies = [ dependencies = [
"aho-corasick", "aho-corasick",
"css_dataset", "css_dataset",
@ -4512,7 +4561,7 @@ dependencies = [
[[package]] [[package]]
name = "napi_sym" name = "napi_sym"
version = "0.103.0" version = "0.107.0"
dependencies = [ dependencies = [
"quote", "quote",
"serde", "serde",
@ -4567,7 +4616,7 @@ dependencies = [
[[package]] [[package]]
name = "node_resolver" name = "node_resolver"
version = "0.12.0" version = "0.16.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -5811,7 +5860,7 @@ dependencies = [
"bitflags 2.6.0", "bitflags 2.6.0",
"fallible-iterator", "fallible-iterator",
"fallible-streaming-iterator", "fallible-streaming-iterator",
"hashlink 0.9.1", "hashlink",
"libsqlite3-sys", "libsqlite3-sys",
"smallvec", "smallvec",
] ]
@ -6175,15 +6224,6 @@ dependencies = [
"syn 2.0.72", "syn 2.0.72",
] ]
[[package]]
name = "serde_spanned"
version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0"
dependencies = [
"serde",
]
[[package]] [[package]]
name = "serde_urlencoded" name = "serde_urlencoded"
version = "0.7.1" version = "0.7.1"
@ -6198,9 +6238,9 @@ dependencies = [
[[package]] [[package]]
name = "serde_v8" name = "serde_v8"
version = "0.223.1" version = "0.227.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9cf3d859dda87ee96423c01244f10af864fa6d6a9fcdc2b77e0595078ea0ea11" checksum = "0a8294c2223c53bed343be8b80564ece4dc0d03b643b06fa86c4ccc0e064eda0"
dependencies = [ dependencies = [
"num-bigint", "num-bigint",
"serde", "serde",
@ -7150,6 +7190,7 @@ dependencies = [
"console_static_text", "console_static_text",
"deno_unsync", "deno_unsync",
"denokv_proto", "denokv_proto",
"faster-hex",
"fastwebsockets", "fastwebsockets",
"flate2", "flate2",
"futures", "futures",
@ -7397,40 +7438,6 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "toml"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit",
]
[[package]]
name = "toml_datetime"
version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
version = "0.19.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
dependencies = [
"indexmap",
"serde",
"serde_spanned",
"toml_datetime",
"winnow 0.5.40",
]
[[package]] [[package]]
name = "tower" name = "tower"
version = "0.4.13" version = "0.4.13"
@ -7520,95 +7527,6 @@ dependencies = [
"stable_deref_trait", "stable_deref_trait",
] ]
[[package]]
name = "trust-dns-client"
version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14135e72c7e6d4c9b6902d4437881a8598f0145dbb2e3f86f92dbad845b61e63"
dependencies = [
"cfg-if",
"data-encoding",
"futures-channel",
"futures-util",
"once_cell",
"radix_trie",
"rand",
"thiserror",
"tokio",
"tracing",
"trust-dns-proto",
]
[[package]]
name = "trust-dns-proto"
version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3119112651c157f4488931a01e586aa459736e9d6046d3bd9105ffb69352d374"
dependencies = [
"async-trait",
"cfg-if",
"data-encoding",
"enum-as-inner",
"futures-channel",
"futures-io",
"futures-util",
"idna 0.4.0",
"ipnet",
"once_cell",
"rand",
"serde",
"smallvec",
"thiserror",
"tinyvec",
"tokio",
"tracing",
"url",
]
[[package]]
name = "trust-dns-resolver"
version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10a3e6c3aff1718b3c73e395d1f35202ba2ffa847c6a62eea0db8fb4cfe30be6"
dependencies = [
"cfg-if",
"futures-util",
"ipconfig",
"lru-cache",
"once_cell",
"parking_lot",
"rand",
"resolv-conf",
"serde",
"smallvec",
"thiserror",
"tokio",
"tracing",
"trust-dns-proto",
]
[[package]]
name = "trust-dns-server"
version = "0.23.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c540f73c2b2ec2f6c54eabd0900e7aafb747a820224b742f556e8faabb461bc7"
dependencies = [
"async-trait",
"bytes",
"cfg-if",
"drain",
"enum-as-inner",
"futures-executor",
"futures-util",
"serde",
"thiserror",
"time",
"tokio",
"toml 0.7.8",
"tracing",
"trust-dns-proto",
]
[[package]] [[package]]
name = "try-lock" name = "try-lock"
version = "0.2.5" version = "0.2.5"
@ -8358,15 +8276,6 @@ version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"
[[package]]
name = "winnow"
version = "0.5.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876"
dependencies = [
"memchr",
]
[[package]] [[package]]
name = "winnow" name = "winnow"
version = "0.6.15" version = "0.6.15"
@ -8402,7 +8311,7 @@ version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b68db261ef59e9e52806f688020631e987592bd83619edccda9c47d42cde4f6c" checksum = "b68db261ef59e9e52806f688020631e987592bd83619edccda9c47d42cde4f6c"
dependencies = [ dependencies = [
"toml 0.5.11", "toml",
] ]
[[package]] [[package]]
@ -8479,7 +8388,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a6a39b6b5ba0d02c910d05d7fbc366a4befb8901ea107dcde9c1c97acb8a366" checksum = "2a6a39b6b5ba0d02c910d05d7fbc366a4befb8901ea107dcde9c1c97acb8a366"
dependencies = [ dependencies = [
"rowan", "rowan",
"winnow 0.6.15", "winnow",
] ]
[[package]] [[package]]

View file

@ -5,7 +5,6 @@ resolver = "2"
members = [ members = [
"bench_util", "bench_util",
"cli", "cli",
"cli/napi/sym",
"ext/broadcast_channel", "ext/broadcast_channel",
"ext/cache", "ext/cache",
"ext/canvas", "ext/canvas",
@ -19,6 +18,7 @@ members = [
"ext/io", "ext/io",
"ext/kv", "ext/kv",
"ext/napi", "ext/napi",
"ext/napi/sym",
"ext/net", "ext/net",
"ext/node", "ext/node",
"ext/url", "ext/url",
@ -45,19 +45,19 @@ license = "MIT"
repository = "https://github.com/denoland/deno" repository = "https://github.com/denoland/deno"
[workspace.dependencies] [workspace.dependencies]
deno_ast = { version = "=0.42.2", features = ["transpiling"] } deno_ast = { version = "=0.43.3", features = ["transpiling"] }
deno_core = { version = "0.314.2" } deno_core = { version = "0.318.0" }
deno_bench_util = { version = "0.167.0", path = "./bench_util" } deno_bench_util = { version = "0.171.0", path = "./bench_util" }
deno_lockfile = "=0.23.1" deno_lockfile = "=0.23.1"
deno_media_type = { version = "0.1.4", features = ["module_specifier"] } deno_media_type = { version = "0.2.0", features = ["module_specifier"] }
deno_npm = "=0.25.4" deno_npm = "=0.25.4"
deno_path_util = "=0.2.1" deno_path_util = "=0.2.1"
deno_permissions = { version = "0.33.0", path = "./runtime/permissions" } deno_permissions = { version = "0.37.0", path = "./runtime/permissions" }
deno_runtime = { version = "0.182.0", path = "./runtime" } deno_runtime = { version = "0.186.0", path = "./runtime" }
deno_semver = "=0.5.14" deno_semver = "=0.5.16"
deno_terminal = "0.2.0" deno_terminal = "0.2.0"
napi_sym = { version = "0.103.0", path = "./cli/napi/sym" } napi_sym = { version = "0.107.0", path = "./ext/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" } test_util = { package = "test_server", path = "./tests/util/server" }
denokv_proto = "0.8.1" denokv_proto = "0.8.1"
@ -66,32 +66,32 @@ denokv_remote = "0.8.1"
denokv_sqlite = { default-features = false, version = "0.8.2" } denokv_sqlite = { default-features = false, version = "0.8.2" }
# exts # exts
deno_broadcast_channel = { version = "0.167.0", path = "./ext/broadcast_channel" } deno_broadcast_channel = { version = "0.171.0", path = "./ext/broadcast_channel" }
deno_cache = { version = "0.105.0", path = "./ext/cache" } deno_cache = { version = "0.109.0", path = "./ext/cache" }
deno_canvas = { version = "0.42.0", path = "./ext/canvas" } deno_canvas = { version = "0.46.0", path = "./ext/canvas" }
deno_console = { version = "0.173.0", path = "./ext/console" } deno_console = { version = "0.177.0", path = "./ext/console" }
deno_cron = { version = "0.53.0", path = "./ext/cron" } deno_cron = { version = "0.57.0", path = "./ext/cron" }
deno_crypto = { version = "0.187.0", path = "./ext/crypto" } deno_crypto = { version = "0.191.0", path = "./ext/crypto" }
deno_fetch = { version = "0.197.0", path = "./ext/fetch" } deno_fetch = { version = "0.201.0", path = "./ext/fetch" }
deno_ffi = { version = "0.160.0", path = "./ext/ffi" } deno_ffi = { version = "0.164.0", path = "./ext/ffi" }
deno_fs = { version = "0.83.0", path = "./ext/fs" } deno_fs = { version = "0.87.0", path = "./ext/fs" }
deno_http = { version = "0.171.0", path = "./ext/http" } deno_http = { version = "0.175.0", path = "./ext/http" }
deno_io = { version = "0.83.0", path = "./ext/io" } deno_io = { version = "0.87.0", path = "./ext/io" }
deno_kv = { version = "0.81.0", path = "./ext/kv" } deno_kv = { version = "0.85.0", path = "./ext/kv" }
deno_napi = { version = "0.104.0", path = "./ext/napi" } deno_napi = { version = "0.108.0", path = "./ext/napi" }
deno_net = { version = "0.165.0", path = "./ext/net" } deno_net = { version = "0.169.0", path = "./ext/net" }
deno_node = { version = "0.110.0", path = "./ext/node" } deno_node = { version = "0.114.0", path = "./ext/node" }
deno_tls = { version = "0.160.0", path = "./ext/tls" } deno_tls = { version = "0.164.0", path = "./ext/tls" }
deno_url = { version = "0.173.0", path = "./ext/url" } deno_url = { version = "0.177.0", path = "./ext/url" }
deno_web = { version = "0.204.0", path = "./ext/web" } deno_web = { version = "0.208.0", path = "./ext/web" }
deno_webgpu = { version = "0.140.0", path = "./ext/webgpu" } deno_webgpu = { version = "0.144.0", path = "./ext/webgpu" }
deno_webidl = { version = "0.173.0", path = "./ext/webidl" } deno_webidl = { version = "0.177.0", path = "./ext/webidl" }
deno_websocket = { version = "0.178.0", path = "./ext/websocket" } deno_websocket = { version = "0.182.0", path = "./ext/websocket" }
deno_webstorage = { version = "0.168.0", path = "./ext/webstorage" } deno_webstorage = { version = "0.172.0", path = "./ext/webstorage" }
# resolvers # resolvers
deno_resolver = { version = "0.5.0", path = "./resolvers/deno" } deno_resolver = { version = "0.9.0", path = "./resolvers/deno" }
node_resolver = { version = "0.12.0", path = "./resolvers/node" } node_resolver = { version = "0.16.0", path = "./resolvers/node" }
aes = "=0.8.3" aes = "=0.8.3"
anyhow = "1.0.57" anyhow = "1.0.57"
@ -111,7 +111,7 @@ console_static_text = "=0.8.1"
dashmap = "5.5.3" dashmap = "5.5.3"
data-encoding = "2.3.3" data-encoding = "2.3.3"
data-url = "=0.3.0" data-url = "=0.3.0"
deno_cache_dir = "=0.13.0" deno_cache_dir = "=0.13.2"
deno_package_json = { version = "0.1.2", default-features = false } deno_package_json = { version = "0.1.2", default-features = false }
dlopen2 = "0.6.1" dlopen2 = "0.6.1"
ecb = "=0.1.2" ecb = "=0.1.2"
@ -137,7 +137,7 @@ hyper-util = { version = "=0.1.7", features = ["tokio", "client", "client-legacy
hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] } hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
indexmap = { version = "2", features = ["serde"] } indexmap = { version = "2", features = ["serde"] }
ipnet = "2.3" ipnet = "2.3"
jsonc-parser = { version = "=0.23.0", features = ["serde"] } jsonc-parser = { version = "=0.26.2", features = ["serde"] }
lazy-regex = "3" lazy-regex = "3"
libc = "0.2.126" libc = "0.2.126"
libz-sys = { version = "1.1.20", default-features = false } libz-sys = { version = "1.1.20", default-features = false }
@ -187,7 +187,7 @@ tar = "=0.4.40"
tempfile = "3.4.0" tempfile = "3.4.0"
termcolor = "1.1.3" termcolor = "1.1.3"
thiserror = "1.0.61" thiserror = "1.0.61"
tokio = { version = "=1.36.0", features = ["full"] } tokio = { version = "1.36.0", features = ["full"] }
tokio-metrics = { version = "0.3.0", features = ["rt"] } tokio-metrics = { version = "0.3.0", features = ["rt"] }
tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring", "tls12"] } tokio-rustls = { version = "0.26.0", default-features = false, features = ["ring", "tls12"] }
tokio-socks = "0.5.1" tokio-socks = "0.5.1"

View file

@ -46,6 +46,12 @@ brew install deno
choco install deno choco install deno
``` ```
[WinGet](https://winstall.app/apps/DenoLand.Deno) (Windows):
```powershell
winget install --id=DenoLand.Deno
```
### Build and install from source ### Build and install from source
Complete instructions for building Deno from source can be found in the manual Complete instructions for building Deno from source can be found in the manual

View file

@ -6,6 +6,124 @@ https://github.com/denoland/deno/releases
We also have one-line install commands at: We also have one-line install commands at:
https://github.com/denoland/deno_install https://github.com/denoland/deno_install
### 2.0.6 / 2024.11.10
- feat(ext/http): abort event when request is cancelled (#26781)
- feat(ext/http): abort signal when request is cancelled (#26761)
- feat(lsp): auto-import completions from byonm dependencies (#26680)
- fix(ext/cache): don't panic when creating cache (#26780)
- fix(ext/node): better inspector support (#26471)
- fix(fmt): don't use self-closing tags in HTML (#26754)
- fix(install): cache jsr deps from all workspace config files (#26779)
- fix(node:zlib): gzip & gzipSync should accept ArrayBuffer (#26762)
- fix: performance.timeOrigin (#26787)
### 2.0.5 / 2024.11.05
- fix(add): better error message when adding package that only has pre-release
versions (#26724)
- fix(add): only add npm deps to package.json if it's at least as close as
deno.json (#26683)
- fix(cli): set `npm_config_user_agent` when running npm packages or tasks
(#26639)
- fix(coverage): exclude comment lines from coverage reports (#25939)
- fix(ext/node): add `findSourceMap` to the default export of `node:module`
(#26720)
- fix(ext/node): convert errors from `fs.readFile/fs.readFileSync` to node
format (#26632)
- fix(ext/node): resolve exports even if parent module filename isn't present
(#26553)
- fix(ext/node): return `this` from `http.Server.ref/unref()` (#26647)
- fix(fmt): do not panic for jsx ignore container followed by jsx text (#26723)
- fix(fmt): fix several HTML and components issues (#26654)
- fix(fmt): ignore file directive for YAML files (#26717)
- fix(install): handle invalid function error, and fallback to junctions
regardless of the error (#26730)
- fix(lsp): include unstable features from editor settings (#26655)
- fix(lsp): scope attribution for lazily loaded assets (#26699)
- fix(node): Implement `os.userInfo` properly, add missing `toPrimitive`
(#24702)
- fix(serve): support serve hmr (#26078)
- fix(types): missing `import` permission on `PermissionOptionsObject` (#26627)
- fix(workspace): support wildcard packages (#26568)
- fix: clamp smi in fast calls by default (#26506)
- fix: improved support for cjs and cts modules (#26558)
- fix: op_run_microtasks crash (#26718)
- fix: panic_hook hangs without procfs (#26732)
- fix: remove permission check in op_require_node_module_paths (#26645)
- fix: surface package.json location on dep parse failure (#26665)
- perf(lsp): don't walk coverage directory (#26715)
### 2.0.4 / 2024.10.29
- Revert "fix(ext/node): fix dns.lookup result ordering (#26264)" (#26621)
- Revert "fix(ext/node): use primordials in `ext/node/polyfills/https.ts`
(#26323)" (#26613)
- feat(lsp): "typescript.preferences.preferTypeOnlyAutoImports" setting (#26546)
- fix(check): expose more globals from @types/node (#26603)
- fix(check): ignore resolving `jsxImportSource` when jsx is not used in graph
(#26548)
- fix(cli): Make --watcher CLEAR_SCREEN clear scrollback buffer as well as
visible screen (#25997)
- fix(compile): regression handling redirects (#26586)
- fix(ext/napi): export dynamic symbols list for {Free,Open}BSD (#26605)
- fix(ext/node): add path to `fs.stat` and `fs.statSync` error (#26037)
- fix(ext/node): compatibility with {Free,Open}BSD (#26604)
- fix(ext/node): use primordials in
ext\node\polyfills\internal\crypto\_randomInt.ts (#26534)
- fix(install): cache json exports of JSR packages (#26552)
- fix(install): regression - do not panic when config file contains \r\n
newlines (#26547)
- fix(lsp): make missing import action fix infallible (#26539)
- fix(npm): match npm bearer token generation (#26544)
- fix(upgrade): stop running `deno lsp` processes on windows before attempting
to replace executable (#26542)
- fix(watch): don't panic on invalid file specifiers (#26577)
- fix: do not panic when failing to write to http cache (#26591)
- fix: provide hints in terminal errors for Node.js globals (#26610)
- fix: report exceptions from nextTick (#26579)
- fix: support watch flag to enable watching other files than the main module on
serve subcommand (#26622)
- perf: pass transpiled module to deno_core as known string (#26555)
### 2.0.3 / 2024.10.25
- feat(lsp): interactive inlay hints (#26382)
- fix: support node-api in denort (#26389)
- fix(check): support `--frozen` on deno check (#26479)
- fix(cli): increase size of blocking task threadpool on windows (#26465)
- fix(config): schemas for lint rule and tag autocompletion (#26515)
- fix(ext/console): ignore casing for named colors in css parsing (#26466)
- fix(ext/ffi): return u64/i64 as bigints from nonblocking ffi calls (#26486)
- fix(ext/node): cancel pending ipc writes on channel close (#26504)
- fix(ext/node): map `ERROR_INVALID_NAME` to `ENOENT` on windows (#26475)
- fix(ext/node): only set our end of child process pipe to nonblocking mode
(#26495)
- fix(ext/node): properly map reparse point error in readlink (#26375)
- fix(ext/node): refactor http.ServerResponse into function class (#26210)
- fix(ext/node): stub HTTPParser internal binding (#26401)
- fix(ext/node): use primordials in `ext/node/polyfills/https.ts` (#26323)
- fix(fmt): --ext flag requires to pass files (#26525)
- fix(fmt): upgrade formatters (#26469)
- fix(help): missing package specifier (#26380)
- fix(info): resolve workspace member mappings (#26350)
- fix(install): better json editing (#26450)
- fix(install): cache all exports of JSR packages listed in `deno.json` (#26501)
- fix(install): cache type only module deps in `deno install` (#26497)
- fix(install): don't cache json exports of JSR packages (for now) (#26530)
- fix(install): update lockfile when using package.json (#26458)
- fix(lsp): import-map-remap quickfix for type imports (#26454)
- fix(node/util): support array formats in `styleText` (#26507)
- fix(node:tls): set TLSSocket.alpnProtocol for client connections (#26476)
- fix(npm): ensure scoped package name is encoded in URLs (#26390)
- fix(npm): support version ranges with && or comma (#26453)
- fix: `.npmrc` settings not being passed to install/add command (#26473)
- fix: add 'fmt-component' to unstable features in schema file (#26526)
- fix: share inotify fd across watchers (#26200)
- fix: unpin tokio version (#26457)
- perf(compile): pass module source data from binary directly to v8 (#26494)
- perf: avoid multiple calls to runMicrotask (#26378)
### 2.0.2 / 2024.10.17 ### 2.0.2 / 2024.10.17
- fix(cli): set napi object property properly (#26344) - fix(cli): set napi object property properly (#26344)

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno_bench_util" name = "deno_bench_util"
version = "0.167.0" version = "0.171.0"
authors.workspace = true authors.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true

View file

@ -2,7 +2,7 @@
[package] [package]
name = "deno" name = "deno"
version = "2.0.2" version = "2.0.6"
authors.workspace = true authors.workspace = true
default-run = "deno" default-run = "deno"
edition.workspace = true edition.workspace = true
@ -70,11 +70,11 @@ winres.workspace = true
[dependencies] [dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] } deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_cache_dir = { workspace = true } deno_cache_dir = { workspace = true }
deno_config = { version = "=0.37.1", features = ["workspace", "sync"] } deno_config = { version = "=0.38.2", features = ["workspace", "sync"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = { version = "0.154.0", default-features = false, features = ["rust", "html", "syntect"] } deno_doc = { version = "0.156.0", default-features = false, features = ["rust", "html", "syntect"] }
deno_graph = { version = "=0.83.3" } deno_graph = { version = "=0.84.1" }
deno_lint = { version = "=0.67.0", features = ["docs"] } deno_lint = { version = "=0.68.0", features = ["docs"] }
deno_lockfile.workspace = true deno_lockfile.workspace = true
deno_npm.workspace = true deno_npm.workspace = true
deno_package_json.workspace = true deno_package_json.workspace = true
@ -84,9 +84,7 @@ deno_runtime = { workspace = true, features = ["include_js_files_for_snapshottin
deno_semver.workspace = true deno_semver.workspace = true
deno_task_shell = "=0.18.1" deno_task_shell = "=0.18.1"
deno_terminal.workspace = true deno_terminal.workspace = true
eszip = "=0.79.1" libsui = "0.5.0"
libsui = "0.4.0"
napi_sym.workspace = true
node_resolver.workspace = true node_resolver.workspace = true
anstream = "0.6.14" anstream = "0.6.14"
@ -106,10 +104,10 @@ data-encoding.workspace = true
dhat = { version = "0.3.3", optional = true } dhat = { version = "0.3.3", optional = true }
dissimilar = "=1.0.4" dissimilar = "=1.0.4"
dotenvy = "0.15.7" dotenvy = "0.15.7"
dprint-plugin-json = "=0.19.3" dprint-plugin-json = "=0.19.4"
dprint-plugin-jupyter = "=0.1.3" dprint-plugin-jupyter = "=0.1.5"
dprint-plugin-markdown = "=0.17.8" dprint-plugin-markdown = "=0.17.8"
dprint-plugin-typescript = "=0.93.0" dprint-plugin-typescript = "=0.93.2"
env_logger = "=0.10.0" env_logger = "=0.10.0"
fancy-regex = "=0.10.0" fancy-regex = "=0.10.0"
faster-hex.workspace = true faster-hex.workspace = true
@ -123,15 +121,15 @@ http-body-util.workspace = true
hyper-util.workspace = true hyper-util.workspace = true
import_map = { version = "=0.20.1", features = ["ext"] } import_map = { version = "=0.20.1", features = ["ext"] }
indexmap.workspace = true indexmap.workspace = true
jsonc-parser.workspace = true jsonc-parser = { workspace = true, features = ["cst", "serde"] }
jupyter_runtime = { package = "runtimelib", version = "=0.14.0" } jupyter_runtime = { package = "runtimelib", version = "=0.14.0" }
lazy-regex.workspace = true lazy-regex.workspace = true
libc.workspace = true libc.workspace = true
libz-sys.workspace = true libz-sys.workspace = true
log = { workspace = true, features = ["serde"] } log = { workspace = true, features = ["serde"] }
lsp-types.workspace = true lsp-types.workspace = true
malva = "=0.10.1" malva = "=0.11.0"
markup_fmt = "=0.13.1" markup_fmt = "=0.15.0"
memmem.workspace = true memmem.workspace = true
monch.workspace = true monch.workspace = true
notify.workspace = true notify.workspace = true
@ -168,7 +166,6 @@ typed-arena = "=2.0.2"
uuid = { workspace = true, features = ["serde"] } uuid = { workspace = true, features = ["serde"] }
walkdir = "=2.3.2" walkdir = "=2.3.2"
which.workspace = true which.workspace = true
yoke.workspace = true
zeromq.workspace = true zeromq.workspace = true
zip = { version = "2.1.6", default-features = false, features = ["deflate-flate2"] } zip = { version = "2.1.6", default-features = false, features = ["deflate-flate2"] }
zstd.workspace = true zstd.workspace = true
@ -176,14 +173,12 @@ zstd.workspace = true
[target.'cfg(windows)'.dependencies] [target.'cfg(windows)'.dependencies]
junction.workspace = true junction.workspace = true
winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] } winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] }
windows-sys.workspace = true
[target.'cfg(unix)'.dependencies] [target.'cfg(unix)'.dependencies]
nix.workspace = true nix.workspace = true
[dev-dependencies] [dev-dependencies]
deno_bench_util.workspace = true deno_bench_util.workspace = true
libuv-sys-lite = "=1.48.2"
pretty_assertions.workspace = true pretty_assertions.workspace = true
test_util.workspace = true test_util.workspace = true

View file

@ -1179,8 +1179,8 @@ static DENO_HELP: &str = cstr!(
<y>Dependency management:</> <y>Dependency management:</>
<g>add</> Add dependencies <g>add</> Add dependencies
<p(245)>deno add jsr:@std/assert | deno add npm:express</> <p(245)>deno add jsr:@std/assert | deno add npm:express</>
<g>install</> Install script as an executable <g>install</> Installs dependencies either in the local project or globally to a bin directory
<g>uninstall</> Uninstall a script previously installed with deno install <g>uninstall</> Uninstalls a dependency or an executable script in the installation root's bin directory
<g>remove</> Remove dependencies from the configuration file <g>remove</> Remove dependencies from the configuration file
<y>Tooling:</> <y>Tooling:</>
@ -1856,6 +1856,7 @@ Unless --reload is specified, this command will not re-download already cached d
.required_unless_present("help") .required_unless_present("help")
.value_hint(ValueHint::FilePath), .value_hint(ValueHint::FilePath),
) )
.arg(frozen_lockfile_arg())
.arg(allow_import_arg()) .arg(allow_import_arg())
} }
) )
@ -2273,7 +2274,7 @@ Ignore formatting a file by adding an ignore comment at the top of the file:
"sass", "less", "html", "svelte", "vue", "astro", "yml", "yaml", "sass", "less", "html", "svelte", "vue", "astro", "yml", "yaml",
"ipynb", "ipynb",
]) ])
.help_heading(FMT_HEADING), .help_heading(FMT_HEADING).requires("files"),
) )
.arg( .arg(
Arg::new("ignore") Arg::new("ignore")
@ -3387,8 +3388,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
.value_name("IP_OR_HOSTNAME") .value_name("IP_OR_HOSTNAME")
.help("Allow network access. Optionally specify allowed IP addresses and host names, with ports as necessary") .help("Allow network access. Optionally specify allowed IP addresses and host names, with ports as necessary")
.value_parser(flags_net::validator) .value_parser(flags_net::validator)
.hide(true) .hide(true);
;
if let Some(requires) = requires { if let Some(requires) = requires {
arg = arg.requires(requires) arg = arg.requires(requires)
} }
@ -4373,6 +4373,7 @@ fn check_parse(
flags.type_check_mode = TypeCheckMode::Local; flags.type_check_mode = TypeCheckMode::Local;
compile_args_without_check_parse(flags, matches)?; compile_args_without_check_parse(flags, matches)?;
unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime); unstable_args_parse(flags, matches, UnstableArgsConfig::ResolutionAndRuntime);
frozen_lockfile_arg_parse(flags, matches);
let files = matches.remove_many::<String>("file").unwrap().collect(); let files = matches.remove_many::<String>("file").unwrap().collect();
if matches.get_flag("all") || matches.get_flag("remote") { if matches.get_flag("all") || matches.get_flag("remote") {
flags.type_check_mode = TypeCheckMode::All; flags.type_check_mode = TypeCheckMode::All;
@ -6800,6 +6801,32 @@ mod tests {
..Flags::default() ..Flags::default()
} }
); );
let r = flags_from_vec(svec!["deno", "fmt", "--ext", "html"]);
assert!(r.is_err());
let r = flags_from_vec(svec!["deno", "fmt", "--ext", "html", "./**"]);
assert_eq!(
r.unwrap(),
Flags {
subcommand: DenoSubcommand::Fmt(FmtFlags {
check: false,
files: FileFlags {
include: vec!["./**".to_string()],
ignore: vec![],
},
use_tabs: None,
line_width: None,
indent_width: None,
single_quote: None,
prose_wrap: None,
no_semicolons: None,
unstable_component: false,
watch: Default::default(),
}),
ext: Some("html".to_string()),
..Flags::default()
}
);
} }
#[test] #[test]

View file

@ -51,7 +51,7 @@ pub fn parse(paths: Vec<String>) -> clap::error::Result<Vec<String>> {
} }
} else { } else {
NetDescriptor::parse(&host_and_port).map_err(|e| { NetDescriptor::parse(&host_and_port).map_err(|e| {
clap::Error::raw(clap::error::ErrorKind::InvalidValue, format!("{e:?}")) clap::Error::raw(clap::error::ErrorKind::InvalidValue, e.to_string())
})?; })?;
out.push(host_and_port) out.push(host_and_port)
} }

View file

@ -46,6 +46,7 @@ pub use flags::*;
pub use lockfile::CliLockfile; pub use lockfile::CliLockfile;
pub use lockfile::CliLockfileReadFromPathOptions; pub use lockfile::CliLockfileReadFromPathOptions;
pub use package_json::NpmInstallDepsProvider; pub use package_json::NpmInstallDepsProvider;
pub use package_json::PackageJsonDepValueParseWithLocationError;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
@ -200,6 +201,8 @@ pub fn ts_config_to_transpile_and_emit_options(
precompile_jsx_dynamic_props: None, precompile_jsx_dynamic_props: None,
transform_jsx, transform_jsx,
var_decl_imports: false, var_decl_imports: false,
// todo(dsherret): support verbatim_module_syntax here properly
verbatim_module_syntax: false,
}, },
deno_ast::EmitOptions { deno_ast::EmitOptions {
inline_sources: options.inline_sources, inline_sources: options.inline_sources,
@ -578,6 +581,7 @@ fn discover_npmrc(
let resolved = npmrc let resolved = npmrc
.as_resolved(npm_registry_url()) .as_resolved(npm_registry_url())
.context("Failed to resolve .npmrc options")?; .context("Failed to resolve .npmrc options")?;
log::debug!(".npmrc found at: '{}'", path.display());
Ok(Arc::new(resolved)) Ok(Arc::new(resolved))
} }
@ -963,6 +967,9 @@ impl CliOptions {
match self.sub_command() { match self.sub_command() {
DenoSubcommand::Cache(_) => GraphKind::All, DenoSubcommand::Cache(_) => GraphKind::All,
DenoSubcommand::Check(_) => GraphKind::TypesOnly, DenoSubcommand::Check(_) => GraphKind::TypesOnly,
DenoSubcommand::Install(InstallFlags {
kind: InstallKind::Local(_),
}) => GraphKind::All,
_ => self.type_check_mode().as_graph_kind(), _ => self.type_check_mode().as_graph_kind(),
} }
} }
@ -1448,6 +1455,12 @@ impl CliOptions {
watch: Some(WatchFlagsWithPaths { hmr, .. }), watch: Some(WatchFlagsWithPaths { hmr, .. }),
.. ..
}) = &self.flags.subcommand }) = &self.flags.subcommand
{
*hmr
} else if let DenoSubcommand::Serve(ServeFlags {
watch: Some(WatchFlagsWithPaths { hmr, .. }),
..
}) = &self.flags.subcommand
{ {
*hmr *hmr
} else { } else {
@ -1591,6 +1604,15 @@ impl CliOptions {
} }
pub fn use_byonm(&self) -> bool { pub fn use_byonm(&self) -> bool {
if matches!(
self.sub_command(),
DenoSubcommand::Install(_)
| DenoSubcommand::Add(_)
| DenoSubcommand::Remove(_)
) {
// For `deno install/add/remove` we want to force the managed resolver so it can set up `node_modules/` directory.
return false;
}
if self.node_modules_dir().ok().flatten().is_none() if self.node_modules_dir().ok().flatten().is_none()
&& self.maybe_node_modules_folder.is_some() && self.maybe_node_modules_folder.is_some()
&& self && self
@ -1668,6 +1690,10 @@ impl CliOptions {
if let DenoSubcommand::Run(RunFlags { if let DenoSubcommand::Run(RunFlags {
watch: Some(WatchFlagsWithPaths { paths, .. }), watch: Some(WatchFlagsWithPaths { paths, .. }),
.. ..
})
| DenoSubcommand::Serve(ServeFlags {
watch: Some(WatchFlagsWithPaths { paths, .. }),
..
}) = &self.flags.subcommand }) = &self.flags.subcommand
{ {
full_paths.extend(paths.iter().map(|path| self.initial_cwd.join(path))); full_paths.extend(paths.iter().map(|path| self.initial_cwd.join(path)));

View file

@ -5,10 +5,12 @@ use std::sync::Arc;
use deno_config::workspace::Workspace; use deno_config::workspace::Workspace;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::url::Url;
use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonDepValueParseError; use deno_package_json::PackageJsonDepValueParseError;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use thiserror::Error;
#[derive(Debug)] #[derive(Debug)]
pub struct InstallNpmRemotePkg { pub struct InstallNpmRemotePkg {
@ -23,11 +25,20 @@ pub struct InstallNpmWorkspacePkg {
pub target_dir: PathBuf, pub target_dir: PathBuf,
} }
#[derive(Debug, Error, Clone)]
#[error("Failed to install '{}'\n at {}", alias, location)]
pub struct PackageJsonDepValueParseWithLocationError {
pub location: Url,
pub alias: String,
#[source]
pub source: PackageJsonDepValueParseError,
}
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub struct NpmInstallDepsProvider { pub struct NpmInstallDepsProvider {
remote_pkgs: Vec<InstallNpmRemotePkg>, remote_pkgs: Vec<InstallNpmRemotePkg>,
workspace_pkgs: Vec<InstallNpmWorkspacePkg>, workspace_pkgs: Vec<InstallNpmWorkspacePkg>,
pkg_json_dep_errors: Vec<PackageJsonDepValueParseError>, pkg_json_dep_errors: Vec<PackageJsonDepValueParseWithLocationError>,
} }
impl NpmInstallDepsProvider { impl NpmInstallDepsProvider {
@ -89,7 +100,13 @@ impl NpmInstallDepsProvider {
let dep = match dep { let dep = match dep {
Ok(dep) => dep, Ok(dep) => dep,
Err(err) => { Err(err) => {
pkg_json_dep_errors.push(err); pkg_json_dep_errors.push(
PackageJsonDepValueParseWithLocationError {
location: pkg_json.specifier(),
alias,
source: err,
},
);
continue; continue;
} }
}; };
@ -150,7 +167,9 @@ impl NpmInstallDepsProvider {
&self.workspace_pkgs &self.workspace_pkgs
} }
pub fn pkg_json_dep_errors(&self) -> &[PackageJsonDepValueParseError] { pub fn pkg_json_dep_errors(
&self,
) -> &[PackageJsonDepValueParseWithLocationError] {
&self.pkg_json_dep_errors &self.pkg_json_dep_errors
} }
} }

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
// deno-lint-ignore-file no-console
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
// deno-lint-ignore-file no-console
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args

View file

@ -1,167 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::collections::HashMap;
use std::net::TcpStream;
use std::path::Path;
use std::process::Command;
use std::sync::atomic::AtomicU16;
use std::sync::atomic::Ordering;
use std::time::Duration;
use std::time::Instant;
use super::Result;
pub use test_util::parse_wrk_output;
pub use test_util::WrkOutput as HttpBenchmarkResult;
// Some of the benchmarks in this file have been renamed. In case the history
// somehow gets messed up:
// "node_http" was once called "node"
// "deno_tcp" was once called "deno"
// "deno_http" was once called "deno_net_http"
const DURATION: &str = "10s";
pub fn benchmark(
target_path: &Path,
) -> Result<HashMap<String, HttpBenchmarkResult>> {
let deno_exe = test_util::deno_exe_path();
let deno_exe = deno_exe.to_string();
let hyper_hello_exe = target_path.join("test_server");
let hyper_hello_exe = hyper_hello_exe.to_str().unwrap();
let mut res = HashMap::new();
let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
let http_dir = manifest_dir.join("bench").join("http");
for entry in std::fs::read_dir(&http_dir)? {
let entry = entry?;
let pathbuf = entry.path();
let path = pathbuf.to_str().unwrap();
if path.ends_with(".lua") {
continue;
}
let file_stem = pathbuf.file_stem().unwrap().to_str().unwrap();
let lua_script = http_dir.join(format!("{file_stem}.lua"));
let mut maybe_lua = None;
if lua_script.exists() {
maybe_lua = Some(lua_script.to_str().unwrap());
}
let port = get_port();
// deno run -A --unstable-net <path> <addr>
res.insert(
file_stem.to_string(),
run(
&[
deno_exe.as_str(),
"run",
"--allow-all",
"--unstable-net",
"--enable-testing-features-do-not-use",
path,
&server_addr(port),
],
port,
None,
None,
maybe_lua,
)?,
);
}
res.insert("hyper".to_string(), hyper_http(hyper_hello_exe)?);
Ok(res)
}
fn run(
server_cmd: &[&str],
port: u16,
env: Option<Vec<(String, String)>>,
origin_cmd: Option<&[&str]>,
lua_script: Option<&str>,
) -> Result<HttpBenchmarkResult> {
// Wait for port 4544 to become available.
// TODO Need to use SO_REUSEPORT with tokio::net::TcpListener.
std::thread::sleep(Duration::from_secs(5));
let mut origin = None;
if let Some(cmd) = origin_cmd {
let mut com = Command::new(cmd[0]);
com.args(&cmd[1..]);
if let Some(env) = env.clone() {
com.envs(env);
}
origin = Some(com.spawn()?);
};
println!("{}", server_cmd.join(" "));
let mut server = {
let mut com = Command::new(server_cmd[0]);
com.args(&server_cmd[1..]);
if let Some(env) = env {
com.envs(env);
}
com.spawn()?
};
// Wait for server to wake up.
let now = Instant::now();
let addr = format!("127.0.0.1:{port}");
while now.elapsed().as_secs() < 30 {
if TcpStream::connect(&addr).is_ok() {
break;
}
std::thread::sleep(Duration::from_millis(10));
}
TcpStream::connect(&addr).expect("Failed to connect to server in time");
println!("Server took {} ms to start", now.elapsed().as_millis());
let wrk = test_util::prebuilt_tool_path("wrk");
assert!(wrk.is_file());
let addr = format!("http://{addr}/");
let wrk = wrk.to_string();
let mut wrk_cmd = vec![wrk.as_str(), "-d", DURATION, "--latency", &addr];
if let Some(lua_script) = lua_script {
wrk_cmd.push("-s");
wrk_cmd.push(lua_script);
}
println!("{}", wrk_cmd.join(" "));
let output = test_util::run_collect(&wrk_cmd, None, None, None, true).0;
std::thread::sleep(Duration::from_secs(1)); // wait to capture failure. TODO racy.
println!("{output}");
assert!(
server.try_wait()?.map(|s| s.success()).unwrap_or(true),
"server ended with error"
);
server.kill()?;
if let Some(mut origin) = origin {
origin.kill()?;
}
Ok(parse_wrk_output(&output))
}
static NEXT_PORT: AtomicU16 = AtomicU16::new(4544);
pub(crate) fn get_port() -> u16 {
let p = NEXT_PORT.load(Ordering::SeqCst);
NEXT_PORT.store(p.wrapping_add(1), Ordering::SeqCst);
p
}
fn server_addr(port: u16) -> String {
format!("0.0.0.0:{port}")
}
fn hyper_http(exe: &str) -> Result<HttpBenchmarkResult> {
let port = get_port();
println!("http_benchmark testing RUST hyper");
run(&[exe, &port.to_string()], port, None, None, None)
}

View file

@ -1,10 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
import { Hono } from "https://deno.land/x/hono@v2.0.9/mod.ts";
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const app = new Hono();
app.get("/", (c) => c.text("Hello, World!"));
Deno.serve({ port: Number(port), hostname }, app.fetch);

View file

@ -1,14 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const { serve } = Deno;
const path = new URL("../testdata/128k.bin", import.meta.url).pathname;
function handler() {
const file = Deno.openSync(path);
return new Response(file.readable);
}
serve({ hostname, port: Number(port) }, handler);

View file

@ -1,5 +0,0 @@
wrk.headers["foo"] = "bar"
wrk.headers["User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36"
wrk.headers["Viewport-Width"] = "1920"
wrk.headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"
wrk.headers["Accept-Language"] = "en,la;q=0.9"

View file

@ -1,11 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
const addr = Deno.args[0] ?? "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const { serve } = Deno;
function handler() {
return new Response("Hello World");
}
serve({ hostname, port: Number(port), reusePort: true }, handler);

View file

@ -1,5 +0,0 @@
wrk.method = "POST"
wrk.headers["Content-Type"] = "application/octet-stream"
file = io.open("./cli/bench/testdata/128k.bin", "rb")
wrk.body = file:read("*a")

View file

@ -1,3 +0,0 @@
wrk.method = "POST"
wrk.headers["Content-Type"] = "application/json"
wrk.body = '{"hello":"deno"}'

View file

@ -1,25 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
import { renderToReadableStream } from "https://esm.run/react-dom/server";
import * as React from "https://esm.run/react";
const { serve } = Deno;
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const App = () => (
<html>
<body>
<h1>Hello World</h1>
</body>
</html>
);
const headers = {
headers: {
"Content-Type": "text/html",
},
};
serve({ hostname, port: Number(port) }, async () => {
return new Response(await renderToReadableStream(<App />), headers);
});

View file

@ -1,34 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Used for benchmarking Deno's networking.
// TODO(bartlomieju): Replace this with a real HTTP server once
// https://github.com/denoland/deno/issues/726 is completed.
// Note: this is a keep-alive server.
// deno-lint-ignore-file no-console
const addr = Deno.args[0] || "127.0.0.1:4500";
const [hostname, port] = addr.split(":");
const listener = Deno.listen({ hostname, port: Number(port) });
const response = new TextEncoder().encode(
"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World\n",
);
async function handle(conn: Deno.Conn): Promise<void> {
const buffer = new Uint8Array(1024);
try {
while (true) {
await conn.read(buffer);
await conn.write(response);
}
} catch (e) {
if (
!(e instanceof Deno.errors.BrokenPipe) &&
!(e instanceof Deno.errors.ConnectionReset)
) {
throw e;
}
}
conn.close();
}
console.log("Listening on", addr);
for await (const conn of listener) {
handle(conn);
}

View file

@ -17,7 +17,6 @@ use std::process::Stdio;
use std::time::SystemTime; use std::time::SystemTime;
use test_util::PathRef; use test_util::PathRef;
mod http;
mod lsp; mod lsp;
fn read_json(filename: &Path) -> Result<Value> { fn read_json(filename: &Path) -> Result<Value> {
@ -345,9 +344,11 @@ struct BenchResult {
binary_size: HashMap<String, i64>, binary_size: HashMap<String, i64>,
bundle_size: HashMap<String, i64>, bundle_size: HashMap<String, i64>,
cargo_deps: usize, cargo_deps: usize,
// TODO(bartlomieju): remove
max_latency: HashMap<String, f64>, max_latency: HashMap<String, f64>,
max_memory: HashMap<String, i64>, max_memory: HashMap<String, i64>,
lsp_exec_time: HashMap<String, i64>, lsp_exec_time: HashMap<String, i64>,
// TODO(bartlomieju): remove
req_per_sec: HashMap<String, i64>, req_per_sec: HashMap<String, i64>,
syscall_count: HashMap<String, i64>, syscall_count: HashMap<String, i64>,
thread_count: HashMap<String, i64>, thread_count: HashMap<String, i64>,
@ -362,7 +363,6 @@ async fn main() -> Result<()> {
"binary_size", "binary_size",
"cargo_deps", "cargo_deps",
"lsp", "lsp",
"http",
"strace", "strace",
"mem_usage", "mem_usage",
]; ];
@ -427,21 +427,6 @@ async fn main() -> Result<()> {
new_data.lsp_exec_time = lsp_exec_times; new_data.lsp_exec_time = lsp_exec_times;
} }
if benchmarks.contains(&"http") && cfg!(not(target_os = "windows")) {
let stats = http::benchmark(target_dir.as_path())?;
let req_per_sec = stats
.iter()
.map(|(name, result)| (name.clone(), result.requests as i64))
.collect();
new_data.req_per_sec = req_per_sec;
let max_latency = stats
.iter()
.map(|(name, result)| (name.clone(), result.latency))
.collect();
new_data.max_latency = max_latency;
}
if cfg!(target_os = "linux") && benchmarks.contains(&"strace") { if cfg!(target_os = "linux") && benchmarks.contains(&"strace") {
use std::io::Read; use std::io::Read;

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
// deno-lint-ignore-file no-console
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
// deno-lint-ignore-file no-console
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"
? Deno.args ? Deno.args

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
// deno-lint-ignore-file no-console
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
// deno-lint-ignore-file no-console
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"

View file

@ -1,6 +1,5 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// deno-lint-ignore-file no-console no-process-globals
// deno-lint-ignore-file no-console
const queueMicrotask = globalThis.queueMicrotask || process.nextTick; const queueMicrotask = globalThis.queueMicrotask || process.nextTick;
let [total, count] = typeof Deno !== "undefined" let [total, count] = typeof Deno !== "undefined"

View file

@ -365,6 +365,9 @@ fn main() {
return; return;
} }
deno_napi::print_linker_flags("deno");
deno_napi::print_linker_flags("denort");
// Host snapshots won't work when cross compiling. // Host snapshots won't work when cross compiling.
let target = env::var("TARGET").unwrap(); let target = env::var("TARGET").unwrap();
let host = env::var("HOST").unwrap(); let host = env::var("HOST").unwrap();
@ -374,58 +377,6 @@ fn main() {
panic!("Cross compiling with snapshot is not supported."); panic!("Cross compiling with snapshot is not supported.");
} }
let symbols_file_name = match env::consts::OS {
"android" | "freebsd" | "openbsd" => {
"generated_symbol_exports_list_linux.def".to_string()
}
os => format!("generated_symbol_exports_list_{}.def", os),
};
let symbols_path = std::path::Path::new("napi")
.join(symbols_file_name)
.canonicalize()
.expect(
"Missing symbols list! Generate using tools/napi/generate_symbols_lists.js",
);
println!("cargo:rustc-rerun-if-changed={}", symbols_path.display());
#[cfg(target_os = "windows")]
println!(
"cargo:rustc-link-arg-bin=deno=/DEF:{}",
symbols_path.display()
);
#[cfg(target_os = "macos")]
println!(
"cargo:rustc-link-arg-bin=deno=-Wl,-exported_symbols_list,{}",
symbols_path.display()
);
#[cfg(target_os = "linux")]
{
// If a custom compiler is set, the glibc version is not reliable.
// Here, we assume that if a custom compiler is used, that it will be modern enough to support a dynamic symbol list.
if env::var("CC").is_err()
&& glibc_version::get_version()
.map(|ver| ver.major <= 2 && ver.minor < 35)
.unwrap_or(false)
{
println!("cargo:warning=Compiling with all symbols exported, this will result in a larger binary. Please use glibc 2.35 or later for an optimised build.");
println!("cargo:rustc-link-arg-bin=deno=-rdynamic");
} else {
println!(
"cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}",
symbols_path.display()
);
}
}
#[cfg(target_os = "android")]
println!(
"cargo:rustc-link-arg-bin=deno=-Wl,--export-dynamic-symbol-list={}",
symbols_path.display()
);
// To debug snapshot issues uncomment: // To debug snapshot issues uncomment:
// op_fetch_asset::trace_serializer(); // op_fetch_asset::trace_serializer();

View file

@ -57,7 +57,7 @@ impl rusqlite::types::FromSql for CacheDBHash {
} }
/// What should the cache should do on failure? /// What should the cache should do on failure?
#[derive(Default)] #[derive(Debug, Default)]
pub enum CacheFailure { pub enum CacheFailure {
/// Return errors if failure mode otherwise unspecified. /// Return errors if failure mode otherwise unspecified.
#[default] #[default]
@ -69,6 +69,7 @@ pub enum CacheFailure {
} }
/// Configuration SQL and other parameters for a [`CacheDB`]. /// Configuration SQL and other parameters for a [`CacheDB`].
#[derive(Debug)]
pub struct CacheDBConfiguration { pub struct CacheDBConfiguration {
/// SQL to run for a new database. /// SQL to run for a new database.
pub table_initializer: &'static str, pub table_initializer: &'static str,
@ -98,6 +99,7 @@ impl CacheDBConfiguration {
} }
} }
#[derive(Debug)]
enum ConnectionState { enum ConnectionState {
Connected(Connection), Connected(Connection),
Blackhole, Blackhole,
@ -106,7 +108,7 @@ enum ConnectionState {
/// A cache database that eagerly initializes itself off-thread, preventing initialization operations /// A cache database that eagerly initializes itself off-thread, preventing initialization operations
/// from blocking the main thread. /// from blocking the main thread.
#[derive(Clone)] #[derive(Debug, Clone)]
pub struct CacheDB { pub struct CacheDB {
// TODO(mmastrac): We can probably simplify our thread-safe implementation here // TODO(mmastrac): We can probably simplify our thread-safe implementation here
conn: Arc<Mutex<OnceCell<ConnectionState>>>, conn: Arc<Mutex<OnceCell<ConnectionState>>>,

27
cli/cache/emit.rs vendored
View file

@ -10,6 +10,7 @@ use deno_core::unsync::sync::AtomicFlag;
use super::DiskCache; use super::DiskCache;
/// The cache that stores previously emitted files. /// The cache that stores previously emitted files.
#[derive(Debug)]
pub struct EmitCache { pub struct EmitCache {
disk_cache: DiskCache, disk_cache: DiskCache,
emit_failed_flag: AtomicFlag, emit_failed_flag: AtomicFlag,
@ -39,7 +40,7 @@ impl EmitCache {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
expected_source_hash: u64, expected_source_hash: u64,
) -> Option<Vec<u8>> { ) -> Option<String> {
let emit_filename = self.get_emit_filename(specifier)?; let emit_filename = self.get_emit_filename(specifier)?;
let bytes = self.disk_cache.get(&emit_filename).ok()?; let bytes = self.disk_cache.get(&emit_filename).ok()?;
self self
@ -91,6 +92,7 @@ impl EmitCache {
const LAST_LINE_PREFIX: &str = "\n// denoCacheMetadata="; const LAST_LINE_PREFIX: &str = "\n// denoCacheMetadata=";
#[derive(Debug)]
struct EmitFileSerializer { struct EmitFileSerializer {
cli_version: &'static str, cli_version: &'static str,
} }
@ -100,7 +102,7 @@ impl EmitFileSerializer {
&self, &self,
mut bytes: Vec<u8>, mut bytes: Vec<u8>,
expected_source_hash: u64, expected_source_hash: u64,
) -> Option<Vec<u8>> { ) -> Option<String> {
let last_newline_index = bytes.iter().rposition(|&b| b == b'\n')?; let last_newline_index = bytes.iter().rposition(|&b| b == b'\n')?;
let (content, last_line) = bytes.split_at(last_newline_index); let (content, last_line) = bytes.split_at(last_newline_index);
let hashes = last_line.strip_prefix(LAST_LINE_PREFIX.as_bytes())?; let hashes = last_line.strip_prefix(LAST_LINE_PREFIX.as_bytes())?;
@ -120,7 +122,7 @@ impl EmitFileSerializer {
// everything looks good, truncate and return it // everything looks good, truncate and return it
bytes.truncate(content.len()); bytes.truncate(content.len());
Some(bytes) String::from_utf8(bytes).ok()
} }
pub fn serialize(&self, code: &[u8], source_hash: u64) -> Vec<u8> { pub fn serialize(&self, code: &[u8], source_hash: u64) -> Vec<u8> {
@ -170,8 +172,6 @@ mod test {
}, },
emit_failed_flag: Default::default(), emit_failed_flag: Default::default(),
}; };
let to_string =
|bytes: Vec<u8>| -> String { String::from_utf8(bytes).unwrap() };
let specifier1 = let specifier1 =
ModuleSpecifier::from_file_path(temp_dir.path().join("file1.ts")) ModuleSpecifier::from_file_path(temp_dir.path().join("file1.ts"))
@ -188,13 +188,10 @@ mod test {
assert_eq!(cache.get_emit_code(&specifier1, 5), None); assert_eq!(cache.get_emit_code(&specifier1, 5), None);
// providing the correct source hash // providing the correct source hash
assert_eq!( assert_eq!(
cache.get_emit_code(&specifier1, 10).map(to_string), cache.get_emit_code(&specifier1, 10),
Some(emit_code1.clone()), Some(emit_code1.clone()),
); );
assert_eq!( assert_eq!(cache.get_emit_code(&specifier2, 2), Some(emit_code2));
cache.get_emit_code(&specifier2, 2).map(to_string),
Some(emit_code2)
);
// try changing the cli version (should not load previous ones) // try changing the cli version (should not load previous ones)
let cache = EmitCache { let cache = EmitCache {
@ -215,18 +212,12 @@ mod test {
}, },
emit_failed_flag: Default::default(), emit_failed_flag: Default::default(),
}; };
assert_eq!( assert_eq!(cache.get_emit_code(&specifier1, 5), Some(emit_code1));
cache.get_emit_code(&specifier1, 5).map(to_string),
Some(emit_code1)
);
// adding when already exists should not cause issue // adding when already exists should not cause issue
let emit_code3 = "asdf".to_string(); let emit_code3 = "asdf".to_string();
cache.set_emit_code(&specifier1, 20, emit_code3.as_bytes()); cache.set_emit_code(&specifier1, 20, emit_code3.as_bytes());
assert_eq!(cache.get_emit_code(&specifier1, 5), None); assert_eq!(cache.get_emit_code(&specifier1, 5), None);
assert_eq!( assert_eq!(cache.get_emit_code(&specifier1, 20), Some(emit_code3));
cache.get_emit_code(&specifier1, 20).map(to_string),
Some(emit_code3)
);
} }
} }

90
cli/cache/mod.rs vendored
View file

@ -8,14 +8,9 @@ use crate::file_fetcher::FetchOptions;
use crate::file_fetcher::FetchPermissionsOptionRef; use crate::file_fetcher::FetchPermissionsOptionRef;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::file_fetcher::FileOrRedirect; use crate::file_fetcher::FileOrRedirect;
use crate::npm::CliNpmResolver;
use crate::resolver::CliNodeResolver;
use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::atomic_write_file_with_retries_and_fs; use crate::util::fs::atomic_write_file_with_retries_and_fs;
use crate::util::fs::AtomicWriteFileFsAdapter; use crate::util::fs::AtomicWriteFileFsAdapter;
use crate::util::path::specifier_has_extension;
use crate::util::text_encoding::arc_str_to_bytes;
use crate::util::text_encoding::from_utf8_lossy_owned;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_core::futures; use deno_core::futures;
@ -25,7 +20,9 @@ use deno_graph::source::CacheInfo;
use deno_graph::source::LoadFuture; use deno_graph::source::LoadFuture;
use deno_graph::source::LoadResponse; use deno_graph::source::LoadResponse;
use deno_graph::source::Loader; use deno_graph::source::Loader;
use deno_runtime::deno_fs;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use node_resolver::InNpmPackageChecker;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
@ -60,7 +57,6 @@ pub use fast_check::FastCheckCache;
pub use incremental::IncrementalCache; pub use incremental::IncrementalCache;
pub use module_info::ModuleInfoCache; pub use module_info::ModuleInfoCache;
pub use node::NodeAnalysisCache; pub use node::NodeAnalysisCache;
pub use parsed_source::EsmOrCjsChecker;
pub use parsed_source::LazyGraphSourceParser; pub use parsed_source::LazyGraphSourceParser;
pub use parsed_source::ParsedSourceCache; pub use parsed_source::ParsedSourceCache;
@ -181,46 +177,40 @@ pub struct FetchCacherOptions {
pub permissions: PermissionsContainer, pub permissions: PermissionsContainer,
/// If we're publishing for `deno publish`. /// If we're publishing for `deno publish`.
pub is_deno_publish: bool, pub is_deno_publish: bool,
pub unstable_detect_cjs: bool,
} }
/// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides /// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides
/// a concise interface to the DENO_DIR when building module graphs. /// a concise interface to the DENO_DIR when building module graphs.
pub struct FetchCacher { pub struct FetchCacher {
pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>, pub file_header_overrides: HashMap<ModuleSpecifier, HashMap<String, String>>,
esm_or_cjs_checker: Arc<EsmOrCjsChecker>,
file_fetcher: Arc<FileFetcher>, file_fetcher: Arc<FileFetcher>,
fs: Arc<dyn deno_fs::FileSystem>,
global_http_cache: Arc<GlobalHttpCache>, global_http_cache: Arc<GlobalHttpCache>,
node_resolver: Arc<CliNodeResolver>, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>, module_info_cache: Arc<ModuleInfoCache>,
permissions: PermissionsContainer, permissions: PermissionsContainer,
is_deno_publish: bool, is_deno_publish: bool,
unstable_detect_cjs: bool,
cache_info_enabled: bool, cache_info_enabled: bool,
} }
impl FetchCacher { impl FetchCacher {
pub fn new( pub fn new(
esm_or_cjs_checker: Arc<EsmOrCjsChecker>,
file_fetcher: Arc<FileFetcher>, file_fetcher: Arc<FileFetcher>,
fs: Arc<dyn deno_fs::FileSystem>,
global_http_cache: Arc<GlobalHttpCache>, global_http_cache: Arc<GlobalHttpCache>,
node_resolver: Arc<CliNodeResolver>, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>, module_info_cache: Arc<ModuleInfoCache>,
options: FetchCacherOptions, options: FetchCacherOptions,
) -> Self { ) -> Self {
Self { Self {
file_fetcher, file_fetcher,
esm_or_cjs_checker, fs,
global_http_cache, global_http_cache,
node_resolver, in_npm_pkg_checker,
npm_resolver,
module_info_cache, module_info_cache,
file_header_overrides: options.file_header_overrides, file_header_overrides: options.file_header_overrides,
permissions: options.permissions, permissions: options.permissions,
is_deno_publish: options.is_deno_publish, is_deno_publish: options.is_deno_publish,
unstable_detect_cjs: options.unstable_detect_cjs,
cache_info_enabled: false, cache_info_enabled: false,
} }
} }
@ -271,72 +261,25 @@ impl Loader for FetchCacher {
) -> LoadFuture { ) -> LoadFuture {
use deno_graph::source::CacheSetting as LoaderCacheSetting; use deno_graph::source::CacheSetting as LoaderCacheSetting;
if specifier.scheme() == "file" { if specifier.scheme() == "file"
if specifier.path().contains("/node_modules/") { && specifier.path().contains("/node_modules/")
{
// The specifier might be in a completely different symlinked tree than // The specifier might be in a completely different symlinked tree than
// what the node_modules url is in (ex. `/my-project-1/node_modules` // what the node_modules url is in (ex. `/my-project-1/node_modules`
// symlinked to `/my-project-2/node_modules`), so first we checked if the path // symlinked to `/my-project-2/node_modules`), so first we checked if the path
// is in a node_modules dir to avoid needlessly canonicalizing, then now compare // is in a node_modules dir to avoid needlessly canonicalizing, then now compare
// against the canonicalized specifier. // against the canonicalized specifier.
let specifier = let specifier = crate::node::resolve_specifier_into_node_modules(
crate::node::resolve_specifier_into_node_modules(specifier); specifier,
if self.npm_resolver.in_npm_package(&specifier) { self.fs.as_ref(),
);
if self.in_npm_pkg_checker.in_npm_package(&specifier) {
return Box::pin(futures::future::ready(Ok(Some( return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::External { specifier }, LoadResponse::External { specifier },
)))); ))));
} }
} }
// make local CJS modules external to the graph
if specifier_has_extension(specifier, "cjs") {
return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::External {
specifier: specifier.clone(),
},
))));
}
if self.unstable_detect_cjs && specifier_has_extension(specifier, "js") {
if let Ok(Some(pkg_json)) =
self.node_resolver.get_closest_package_json(specifier)
{
if pkg_json.typ == "commonjs" {
if let Ok(path) = specifier.to_file_path() {
if let Ok(bytes) = std::fs::read(&path) {
let text: Arc<str> = from_utf8_lossy_owned(bytes).into();
let is_es_module = match self.esm_or_cjs_checker.is_esm(
specifier,
text.clone(),
MediaType::JavaScript,
) {
Ok(value) => value,
Err(err) => {
return Box::pin(futures::future::ready(Err(err.into())));
}
};
if !is_es_module {
self.node_resolver.mark_cjs_resolution(specifier.clone());
return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::External {
specifier: specifier.clone(),
},
))));
} else {
return Box::pin(futures::future::ready(Ok(Some(
LoadResponse::Module {
specifier: specifier.clone(),
content: arc_str_to_bytes(text),
maybe_headers: None,
},
))));
}
}
}
}
}
}
}
if self.is_deno_publish if self.is_deno_publish
&& matches!(specifier.scheme(), "http" | "https") && matches!(specifier.scheme(), "http" | "https")
&& !specifier.as_str().starts_with(jsr_url().as_str()) && !specifier.as_str().starts_with(jsr_url().as_str())
@ -378,6 +321,7 @@ impl Loader for FetchCacher {
} else { } else {
FetchPermissionsOptionRef::DynamicContainer(&permissions) FetchPermissionsOptionRef::DynamicContainer(&permissions)
}, },
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: maybe_cache_setting.as_ref(), maybe_cache_setting: maybe_cache_setting.as_ref(),
}, },

View file

@ -44,18 +44,32 @@ pub static MODULE_INFO_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration {
/// A cache of `deno_graph::ModuleInfo` objects. Using this leads to a considerable /// A cache of `deno_graph::ModuleInfo` objects. Using this leads to a considerable
/// performance improvement because when it exists we can skip parsing a module for /// performance improvement because when it exists we can skip parsing a module for
/// deno_graph. /// deno_graph.
#[derive(Debug)]
pub struct ModuleInfoCache { pub struct ModuleInfoCache {
conn: CacheDB, conn: CacheDB,
parsed_source_cache: Arc<ParsedSourceCache>,
} }
impl ModuleInfoCache { impl ModuleInfoCache {
#[cfg(test)] #[cfg(test)]
pub fn new_in_memory(version: &'static str) -> Self { pub fn new_in_memory(
Self::new(CacheDB::in_memory(&MODULE_INFO_CACHE_DB, version)) version: &'static str,
parsed_source_cache: Arc<ParsedSourceCache>,
) -> Self {
Self::new(
CacheDB::in_memory(&MODULE_INFO_CACHE_DB, version),
parsed_source_cache,
)
} }
pub fn new(conn: CacheDB) -> Self { pub fn new(
Self { conn } conn: CacheDB,
parsed_source_cache: Arc<ParsedSourceCache>,
) -> Self {
Self {
conn,
parsed_source_cache,
}
} }
/// Useful for testing: re-create this cache DB with a different current version. /// Useful for testing: re-create this cache DB with a different current version.
@ -63,6 +77,7 @@ impl ModuleInfoCache {
pub(crate) fn recreate_with_version(self, version: &'static str) -> Self { pub(crate) fn recreate_with_version(self, version: &'static str) -> Self {
Self { Self {
conn: self.conn.recreate_with_version(version), conn: self.conn.recreate_with_version(version),
parsed_source_cache: self.parsed_source_cache,
} }
} }
@ -113,13 +128,10 @@ impl ModuleInfoCache {
Ok(()) Ok(())
} }
pub fn as_module_analyzer<'a>( pub fn as_module_analyzer(&self) -> ModuleInfoCacheModuleAnalyzer {
&'a self,
parsed_source_cache: &'a Arc<ParsedSourceCache>,
) -> ModuleInfoCacheModuleAnalyzer<'a> {
ModuleInfoCacheModuleAnalyzer { ModuleInfoCacheModuleAnalyzer {
module_info_cache: self, module_info_cache: self,
parsed_source_cache, parsed_source_cache: &self.parsed_source_cache,
} }
} }
} }
@ -129,6 +141,84 @@ pub struct ModuleInfoCacheModuleAnalyzer<'a> {
parsed_source_cache: &'a Arc<ParsedSourceCache>, parsed_source_cache: &'a Arc<ParsedSourceCache>,
} }
impl<'a> ModuleInfoCacheModuleAnalyzer<'a> {
fn load_cached_module_info(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source_hash: CacheDBHash,
) -> Option<ModuleInfo> {
match self.module_info_cache.get_module_info(
specifier,
media_type,
source_hash,
) {
Ok(Some(info)) => Some(info),
Ok(None) => None,
Err(err) => {
log::debug!(
"Error loading module cache info for {}. {:#}",
specifier,
err
);
None
}
}
}
fn save_module_info_to_cache(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source_hash: CacheDBHash,
module_info: &ModuleInfo,
) {
if let Err(err) = self.module_info_cache.set_module_info(
specifier,
media_type,
source_hash,
module_info,
) {
log::debug!(
"Error saving module cache info for {}. {:#}",
specifier,
err
);
}
}
pub fn analyze_sync(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
source: &Arc<str>,
) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> {
// attempt to load from the cache
let source_hash = CacheDBHash::from_source(source);
if let Some(info) =
self.load_cached_module_info(specifier, media_type, source_hash)
{
return Ok(info);
}
// otherwise, get the module info from the parsed source cache
let parser = self.parsed_source_cache.as_capturing_parser();
let analyzer = ParserModuleAnalyzer::new(&parser);
let module_info =
analyzer.analyze_sync(specifier, source.clone(), media_type)?;
// then attempt to cache it
self.save_module_info_to_cache(
specifier,
media_type,
source_hash,
&module_info,
);
Ok(module_info)
}
}
#[async_trait::async_trait(?Send)] #[async_trait::async_trait(?Send)]
impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> { impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
async fn analyze( async fn analyze(
@ -139,20 +229,10 @@ impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> { ) -> Result<ModuleInfo, deno_ast::ParseDiagnostic> {
// attempt to load from the cache // attempt to load from the cache
let source_hash = CacheDBHash::from_source(&source); let source_hash = CacheDBHash::from_source(&source);
match self.module_info_cache.get_module_info( if let Some(info) =
specifier, self.load_cached_module_info(specifier, media_type, source_hash)
media_type, {
source_hash, return Ok(info);
) {
Ok(Some(info)) => return Ok(info),
Ok(None) => {}
Err(err) => {
log::debug!(
"Error loading module cache info for {}. {:#}",
specifier,
err
);
}
} }
// otherwise, get the module info from the parsed source cache // otherwise, get the module info from the parsed source cache
@ -169,18 +249,12 @@ impl<'a> deno_graph::ModuleAnalyzer for ModuleInfoCacheModuleAnalyzer<'a> {
.unwrap()?; .unwrap()?;
// then attempt to cache it // then attempt to cache it
if let Err(err) = self.module_info_cache.set_module_info( self.save_module_info_to_cache(
specifier, specifier,
media_type, media_type,
source_hash, source_hash,
&module_info, &module_info,
) {
log::debug!(
"Error saving module cache info for {}. {:#}",
specifier,
err
); );
}
Ok(module_info) Ok(module_info)
} }
@ -202,7 +276,7 @@ fn serialize_media_type(media_type: MediaType) -> i64 {
Tsx => 11, Tsx => 11,
Json => 12, Json => 12,
Wasm => 13, Wasm => 13,
TsBuildInfo => 14, Css => 14,
SourceMap => 15, SourceMap => 15,
Unknown => 16, Unknown => 16,
} }
@ -217,7 +291,7 @@ mod test {
#[test] #[test]
pub fn module_info_cache_general_use() { pub fn module_info_cache_general_use() {
let cache = ModuleInfoCache::new_in_memory("1.0.0"); let cache = ModuleInfoCache::new_in_memory("1.0.0", Default::default());
let specifier1 = let specifier1 =
ModuleSpecifier::parse("https://localhost/mod.ts").unwrap(); ModuleSpecifier::parse("https://localhost/mod.ts").unwrap();
let specifier2 = let specifier2 =

View file

@ -5,12 +5,11 @@ use std::sync::Arc;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_ast::ParseDiagnostic;
use deno_ast::ParsedSource; use deno_ast::ParsedSource;
use deno_core::parking_lot::Mutex; use deno_core::parking_lot::Mutex;
use deno_graph::CapturingModuleParser; use deno_graph::CapturingEsParser;
use deno_graph::DefaultModuleParser; use deno_graph::DefaultEsParser;
use deno_graph::ModuleParser; use deno_graph::EsParser;
use deno_graph::ParseOptions; use deno_graph::ParseOptions;
use deno_graph::ParsedSourceStore; use deno_graph::ParsedSourceStore;
@ -47,7 +46,7 @@ impl<'a> LazyGraphSourceParser<'a> {
} }
} }
#[derive(Default)] #[derive(Debug, Default)]
pub struct ParsedSourceCache { pub struct ParsedSourceCache {
sources: Mutex<HashMap<ModuleSpecifier, ParsedSource>>, sources: Mutex<HashMap<ModuleSpecifier, ParsedSource>>,
} }
@ -58,12 +57,11 @@ impl ParsedSourceCache {
module: &deno_graph::JsModule, module: &deno_graph::JsModule,
) -> Result<ParsedSource, deno_ast::ParseDiagnostic> { ) -> Result<ParsedSource, deno_ast::ParseDiagnostic> {
let parser = self.as_capturing_parser(); let parser = self.as_capturing_parser();
// this will conditionally parse because it's using a CapturingModuleParser // this will conditionally parse because it's using a CapturingEsParser
parser.parse_module(ParseOptions { parser.parse_program(ParseOptions {
specifier: &module.specifier, specifier: &module.specifier,
source: module.source.clone(), source: module.source.clone(),
media_type: module.media_type, media_type: module.media_type,
// don't bother enabling because this method is currently only used for vendoring
scope_analysis: false, scope_analysis: false,
}) })
} }
@ -87,10 +85,9 @@ impl ParsedSourceCache {
specifier, specifier,
source, source,
media_type, media_type,
// don't bother enabling because this method is currently only used for emitting
scope_analysis: false, scope_analysis: false,
}; };
DefaultModuleParser.parse_module(options) DefaultEsParser.parse_program(options)
} }
/// Frees the parsed source from memory. /// Frees the parsed source from memory.
@ -100,8 +97,8 @@ impl ParsedSourceCache {
/// Creates a parser that will reuse a ParsedSource from the store /// Creates a parser that will reuse a ParsedSource from the store
/// if it exists, or else parse. /// if it exists, or else parse.
pub fn as_capturing_parser(&self) -> CapturingModuleParser { pub fn as_capturing_parser(&self) -> CapturingEsParser {
CapturingModuleParser::new(None, self) CapturingEsParser::new(None, self)
} }
} }
@ -150,42 +147,3 @@ impl deno_graph::ParsedSourceStore for ParsedSourceCache {
} }
} }
} }
pub struct EsmOrCjsChecker {
parsed_source_cache: Arc<ParsedSourceCache>,
}
impl EsmOrCjsChecker {
pub fn new(parsed_source_cache: Arc<ParsedSourceCache>) -> Self {
Self {
parsed_source_cache,
}
}
pub fn is_esm(
&self,
specifier: &ModuleSpecifier,
source: Arc<str>,
media_type: MediaType,
) -> Result<bool, ParseDiagnostic> {
// todo(dsherret): add a file cache here to avoid parsing with swc on each run
let source = match self.parsed_source_cache.get_parsed_source(specifier) {
Some(source) => source.clone(),
None => {
let source = deno_ast::parse_program(deno_ast::ParseParams {
specifier: specifier.clone(),
text: source,
media_type,
capture_tokens: true, // capture because it's used for cjs export analysis
scope_analysis: false,
maybe_syntax: None,
})?;
self
.parsed_source_cache
.set_parsed_source(specifier.clone(), source.clone());
source
}
};
Ok(source.is_module())
}
}

View file

@ -3,24 +3,28 @@
use crate::cache::EmitCache; use crate::cache::EmitCache;
use crate::cache::FastInsecureHasher; use crate::cache::FastInsecureHasher;
use crate::cache::ParsedSourceCache; use crate::cache::ParsedSourceCache;
use crate::resolver::CjsTracker;
use deno_ast::ModuleKind;
use deno_ast::SourceMapOption; use deno_ast::SourceMapOption;
use deno_ast::SourceRange; use deno_ast::SourceRange;
use deno_ast::SourceRanged; use deno_ast::SourceRanged;
use deno_ast::SourceRangedForSpanned; use deno_ast::SourceRangedForSpanned;
use deno_ast::TranspileModuleOptions;
use deno_ast::TranspileResult; use deno_ast::TranspileResult;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::FutureExt; use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
use deno_core::ModuleCodeBytes;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::MediaType; use deno_graph::MediaType;
use deno_graph::Module; use deno_graph::Module;
use deno_graph::ModuleGraph; use deno_graph::ModuleGraph;
use std::sync::Arc; use std::sync::Arc;
#[derive(Debug)]
pub struct Emitter { pub struct Emitter {
cjs_tracker: Arc<CjsTracker>,
emit_cache: Arc<EmitCache>, emit_cache: Arc<EmitCache>,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
transpile_and_emit_options: transpile_and_emit_options:
@ -31,6 +35,7 @@ pub struct Emitter {
impl Emitter { impl Emitter {
pub fn new( pub fn new(
cjs_tracker: Arc<CjsTracker>,
emit_cache: Arc<EmitCache>, emit_cache: Arc<EmitCache>,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
transpile_options: deno_ast::TranspileOptions, transpile_options: deno_ast::TranspileOptions,
@ -43,6 +48,7 @@ impl Emitter {
hasher.finish() hasher.finish()
}; };
Self { Self {
cjs_tracker,
emit_cache, emit_cache,
parsed_source_cache, parsed_source_cache,
transpile_and_emit_options: Arc::new((transpile_options, emit_options)), transpile_and_emit_options: Arc::new((transpile_options, emit_options)),
@ -60,20 +66,19 @@ impl Emitter {
continue; continue;
}; };
let is_emittable = matches!( if module.media_type.is_emittable() {
module.media_type,
MediaType::TypeScript
| MediaType::Mts
| MediaType::Cts
| MediaType::Jsx
| MediaType::Tsx
);
if is_emittable {
futures.push( futures.push(
self self
.emit_parsed_source( .emit_parsed_source(
&module.specifier, &module.specifier,
module.media_type, module.media_type,
ModuleKind::from_is_cjs(
self.cjs_tracker.is_cjs_with_known_is_script(
&module.specifier,
module.media_type,
module.is_script,
)?,
),
&module.source, &module.source,
) )
.boxed_local(), .boxed_local(),
@ -92,9 +97,10 @@ impl Emitter {
pub fn maybe_cached_emit( pub fn maybe_cached_emit(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
module_kind: deno_ast::ModuleKind,
source: &str, source: &str,
) -> Option<Vec<u8>> { ) -> Option<String> {
let source_hash = self.get_source_hash(source); let source_hash = self.get_source_hash(module_kind, source);
self.emit_cache.get_emit_code(specifier, source_hash) self.emit_cache.get_emit_code(specifier, source_hash)
} }
@ -102,25 +108,27 @@ impl Emitter {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
media_type: MediaType, media_type: MediaType,
module_kind: deno_ast::ModuleKind,
source: &Arc<str>, source: &Arc<str>,
) -> Result<ModuleCodeBytes, AnyError> { ) -> Result<String, AnyError> {
// Note: keep this in sync with the sync version below // Note: keep this in sync with the sync version below
let helper = EmitParsedSourceHelper(self); let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, source) { match helper.pre_emit_parsed_source(specifier, module_kind, source) {
PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::Cached(emitted_text) => Ok(emitted_text),
PreEmitResult::NotCached { source_hash } => { PreEmitResult::NotCached { source_hash } => {
let parsed_source_cache = self.parsed_source_cache.clone(); let parsed_source_cache = self.parsed_source_cache.clone();
let transpile_and_emit_options = let transpile_and_emit_options =
self.transpile_and_emit_options.clone(); self.transpile_and_emit_options.clone();
let transpile_result = deno_core::unsync::spawn_blocking({ let transpiled_source = deno_core::unsync::spawn_blocking({
let specifier = specifier.clone(); let specifier = specifier.clone();
let source = source.clone(); let source = source.clone();
move || -> Result<_, AnyError> { move || -> Result<_, AnyError> {
EmitParsedSourceHelper::transpile( EmitParsedSourceHelper::transpile(
&parsed_source_cache, &parsed_source_cache,
&specifier, &specifier,
source.clone(),
media_type, media_type,
module_kind,
source.clone(),
&transpile_and_emit_options.0, &transpile_and_emit_options.0,
&transpile_and_emit_options.1, &transpile_and_emit_options.1,
) )
@ -128,11 +136,12 @@ impl Emitter {
}) })
.await .await
.unwrap()?; .unwrap()?;
Ok(helper.post_emit_parsed_source( helper.post_emit_parsed_source(
specifier, specifier,
transpile_result, &transpiled_source,
source_hash, source_hash,
)) );
Ok(transpiled_source)
} }
} }
} }
@ -141,26 +150,29 @@ impl Emitter {
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
media_type: MediaType, media_type: MediaType,
module_kind: deno_ast::ModuleKind,
source: &Arc<str>, source: &Arc<str>,
) -> Result<ModuleCodeBytes, AnyError> { ) -> Result<String, AnyError> {
// Note: keep this in sync with the async version above // Note: keep this in sync with the async version above
let helper = EmitParsedSourceHelper(self); let helper = EmitParsedSourceHelper(self);
match helper.pre_emit_parsed_source(specifier, source) { match helper.pre_emit_parsed_source(specifier, module_kind, source) {
PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::Cached(emitted_text) => Ok(emitted_text),
PreEmitResult::NotCached { source_hash } => { PreEmitResult::NotCached { source_hash } => {
let transpile_result = EmitParsedSourceHelper::transpile( let transpiled_source = EmitParsedSourceHelper::transpile(
&self.parsed_source_cache, &self.parsed_source_cache,
specifier, specifier,
source.clone(),
media_type, media_type,
module_kind,
source.clone(),
&self.transpile_and_emit_options.0, &self.transpile_and_emit_options.0,
&self.transpile_and_emit_options.1, &self.transpile_and_emit_options.1,
)?; )?;
Ok(helper.post_emit_parsed_source( helper.post_emit_parsed_source(
specifier, specifier,
transpile_result, &transpiled_source,
source_hash, source_hash,
)) );
Ok(transpiled_source)
} }
} }
} }
@ -169,6 +181,7 @@ impl Emitter {
pub async fn load_and_emit_for_hmr( pub async fn load_and_emit_for_hmr(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
module_kind: deno_ast::ModuleKind,
) -> Result<String, AnyError> { ) -> Result<String, AnyError> {
let media_type = MediaType::from_specifier(specifier); let media_type = MediaType::from_specifier(specifier);
let source_code = tokio::fs::read_to_string( let source_code = tokio::fs::read_to_string(
@ -191,9 +204,14 @@ impl Emitter {
let mut options = self.transpile_and_emit_options.1.clone(); let mut options = self.transpile_and_emit_options.1.clone();
options.source_map = SourceMapOption::None; options.source_map = SourceMapOption::None;
let transpiled_source = parsed_source let transpiled_source = parsed_source
.transpile(&self.transpile_and_emit_options.0, &options)? .transpile(
.into_source() &self.transpile_and_emit_options.0,
.into_string()?; &deno_ast::TranspileModuleOptions {
module_kind: Some(module_kind),
},
&options,
)?
.into_source();
Ok(transpiled_source.text) Ok(transpiled_source.text)
} }
MediaType::JavaScript MediaType::JavaScript
@ -204,7 +222,7 @@ impl Emitter {
| MediaType::Dcts | MediaType::Dcts
| MediaType::Json | MediaType::Json
| MediaType::Wasm | MediaType::Wasm
| MediaType::TsBuildInfo | MediaType::Css
| MediaType::SourceMap | MediaType::SourceMap
| MediaType::Unknown => { | MediaType::Unknown => {
// clear this specifier from the parsed source cache as it's now out of date // clear this specifier from the parsed source cache as it's now out of date
@ -217,16 +235,17 @@ impl Emitter {
/// A hashing function that takes the source code and uses the global emit /// A hashing function that takes the source code and uses the global emit
/// options then generates a string hash which can be stored to /// options then generates a string hash which can be stored to
/// determine if the cached emit is valid or not. /// determine if the cached emit is valid or not.
fn get_source_hash(&self, source_text: &str) -> u64 { fn get_source_hash(&self, module_kind: ModuleKind, source_text: &str) -> u64 {
FastInsecureHasher::new_without_deno_version() // stored in the transpile_and_emit_options_hash FastInsecureHasher::new_without_deno_version() // stored in the transpile_and_emit_options_hash
.write_str(source_text) .write_str(source_text)
.write_u64(self.transpile_and_emit_options_hash) .write_u64(self.transpile_and_emit_options_hash)
.write_hashable(module_kind)
.finish() .finish()
} }
} }
enum PreEmitResult { enum PreEmitResult {
Cached(ModuleCodeBytes), Cached(String),
NotCached { source_hash: u64 }, NotCached { source_hash: u64 },
} }
@ -237,14 +256,15 @@ impl<'a> EmitParsedSourceHelper<'a> {
pub fn pre_emit_parsed_source( pub fn pre_emit_parsed_source(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
module_kind: deno_ast::ModuleKind,
source: &Arc<str>, source: &Arc<str>,
) -> PreEmitResult { ) -> PreEmitResult {
let source_hash = self.0.get_source_hash(source); let source_hash = self.0.get_source_hash(module_kind, source);
if let Some(emit_code) = if let Some(emit_code) =
self.0.emit_cache.get_emit_code(specifier, source_hash) self.0.emit_cache.get_emit_code(specifier, source_hash)
{ {
PreEmitResult::Cached(emit_code.into_boxed_slice().into()) PreEmitResult::Cached(emit_code)
} else { } else {
PreEmitResult::NotCached { source_hash } PreEmitResult::NotCached { source_hash }
} }
@ -253,25 +273,24 @@ impl<'a> EmitParsedSourceHelper<'a> {
pub fn transpile( pub fn transpile(
parsed_source_cache: &ParsedSourceCache, parsed_source_cache: &ParsedSourceCache,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
source: Arc<str>,
media_type: MediaType, media_type: MediaType,
module_kind: deno_ast::ModuleKind,
source: Arc<str>,
transpile_options: &deno_ast::TranspileOptions, transpile_options: &deno_ast::TranspileOptions,
emit_options: &deno_ast::EmitOptions, emit_options: &deno_ast::EmitOptions,
) -> Result<TranspileResult, AnyError> { ) -> Result<String, AnyError> {
// nothing else needs the parsed source at this point, so remove from // nothing else needs the parsed source at this point, so remove from
// the cache in order to not transpile owned // the cache in order to not transpile owned
let parsed_source = parsed_source_cache let parsed_source = parsed_source_cache
.remove_or_parse_module(specifier, source, media_type)?; .remove_or_parse_module(specifier, source, media_type)?;
ensure_no_import_assertion(&parsed_source)?; ensure_no_import_assertion(&parsed_source)?;
Ok(parsed_source.transpile(transpile_options, emit_options)?) let transpile_result = parsed_source.transpile(
} transpile_options,
&TranspileModuleOptions {
pub fn post_emit_parsed_source( module_kind: Some(module_kind),
&self, },
specifier: &ModuleSpecifier, emit_options,
transpile_result: TranspileResult, )?;
source_hash: u64,
) -> ModuleCodeBytes {
let transpiled_source = match transpile_result { let transpiled_source = match transpile_result {
TranspileResult::Owned(source) => source, TranspileResult::Owned(source) => source,
TranspileResult::Cloned(source) => { TranspileResult::Cloned(source) => {
@ -280,12 +299,20 @@ impl<'a> EmitParsedSourceHelper<'a> {
} }
}; };
debug_assert!(transpiled_source.source_map.is_none()); debug_assert!(transpiled_source.source_map.is_none());
Ok(transpiled_source.text)
}
pub fn post_emit_parsed_source(
&self,
specifier: &ModuleSpecifier,
transpiled_source: &str,
source_hash: u64,
) {
self.0.emit_cache.set_emit_code( self.0.emit_cache.set_emit_code(
specifier, specifier,
source_hash, source_hash,
&transpiled_source.source, transpiled_source.as_bytes(),
); );
transpiled_source.source.into_boxed_slice().into()
} }
} }
@ -317,7 +344,7 @@ fn ensure_no_import_assertion(
deno_core::anyhow::anyhow!("{}", msg) deno_core::anyhow::anyhow!("{}", msg)
} }
let Some(module) = parsed_source.program_ref().as_module() else { let deno_ast::ProgramRef::Module(module) = parsed_source.program_ref() else {
return Ok(()); return Ok(());
}; };

View file

@ -88,6 +88,10 @@ fn get_resolution_error_class(err: &ResolutionError) -> &'static str {
} }
} }
fn get_try_from_int_error_class(_: &std::num::TryFromIntError) -> &'static str {
"TypeError"
}
pub fn get_error_class_name(e: &AnyError) -> &'static str { pub fn get_error_class_name(e: &AnyError) -> &'static str {
deno_runtime::errors::get_error_class_name(e) deno_runtime::errors::get_error_class_name(e)
.or_else(|| { .or_else(|| {
@ -106,5 +110,9 @@ pub fn get_error_class_name(e: &AnyError) -> &'static str {
e.downcast_ref::<ResolutionError>() e.downcast_ref::<ResolutionError>()
.map(get_resolution_error_class) .map(get_resolution_error_class)
}) })
.or_else(|| {
e.downcast_ref::<std::num::TryFromIntError>()
.map(get_try_from_int_error_class)
})
.unwrap_or("Error") .unwrap_or("Error")
} }

View file

@ -11,10 +11,10 @@ use crate::args::StorageKeyResolver;
use crate::args::TsConfigType; use crate::args::TsConfigType;
use crate::cache::Caches; use crate::cache::Caches;
use crate::cache::CodeCache; use crate::cache::CodeCache;
use crate::cache::DenoCacheEnvFsAdapter;
use crate::cache::DenoDir; use crate::cache::DenoDir;
use crate::cache::DenoDirProvider; use crate::cache::DenoDirProvider;
use crate::cache::EmitCache; use crate::cache::EmitCache;
use crate::cache::EsmOrCjsChecker;
use crate::cache::GlobalHttpCache; use crate::cache::GlobalHttpCache;
use crate::cache::HttpCache; use crate::cache::HttpCache;
use crate::cache::LocalHttpCache; use crate::cache::LocalHttpCache;
@ -33,12 +33,16 @@ use crate::module_loader::ModuleLoadPreparer;
use crate::node::CliCjsCodeAnalyzer; use crate::node::CliCjsCodeAnalyzer;
use crate::node::CliNodeCodeTranslator; use crate::node::CliNodeCodeTranslator;
use crate::npm::create_cli_npm_resolver; use crate::npm::create_cli_npm_resolver;
use crate::npm::create_in_npm_pkg_checker;
use crate::npm::CliByonmNpmResolverCreateOptions; use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliManagedInNpmPkgCheckerCreateOptions;
use crate::npm::CliManagedNpmResolverCreateOptions;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions; use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::resolver::CjsResolutionStore; use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::resolver::CjsTracker;
use crate::resolver::CjsTrackerOptions;
use crate::resolver::CliDenoResolverFs; use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliGraphResolver; use crate::resolver::CliGraphResolver;
use crate::resolver::CliGraphResolverOptions; use crate::resolver::CliGraphResolverOptions;
@ -51,6 +55,7 @@ use crate::tools::check::TypeChecker;
use crate::tools::coverage::CoverageCollector; use crate::tools::coverage::CoverageCollector;
use crate::tools::lint::LintRuleProvider; use crate::tools::lint::LintRuleProvider;
use crate::tools::run::hmr::HmrRunner; use crate::tools::run::hmr::HmrRunner;
use crate::tsc::TypeCheckingCjsTracker;
use crate::util::file_watcher::WatcherCommunicator; use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBar;
@ -59,6 +64,7 @@ use crate::worker::CliMainWorkerFactory;
use crate::worker::CliMainWorkerOptions; use crate::worker::CliMainWorkerOptions;
use std::path::PathBuf; use std::path::PathBuf;
use deno_cache_dir::npm::NpmCacheDir;
use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver; use deno_config::workspace::WorkspaceResolver;
use deno_core::error::AnyError; use deno_core::error::AnyError;
@ -68,6 +74,7 @@ use deno_core::FeatureChecker;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJsonResolver;
use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_tls::rustls::RootCertStore;
@ -77,6 +84,7 @@ use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::permissions::RuntimePermissionDescriptorParser; use deno_runtime::permissions::RuntimePermissionDescriptorParser;
use log::warn; use log::warn;
use node_resolver::analyze::NodeCodeTranslator; use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::InNpmPackageChecker;
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
use std::future::Future; use std::future::Future;
use std::sync::Arc; use std::sync::Arc;
@ -164,39 +172,41 @@ impl<T> Deferred<T> {
#[derive(Default)] #[derive(Default)]
struct CliFactoryServices { struct CliFactoryServices {
cli_options: Deferred<Arc<CliOptions>>, blob_store: Deferred<Arc<BlobStore>>,
caches: Deferred<Arc<Caches>>, caches: Deferred<Arc<Caches>>,
cjs_tracker: Deferred<Arc<CjsTracker>>,
cli_node_resolver: Deferred<Arc<CliNodeResolver>>,
cli_options: Deferred<Arc<CliOptions>>,
code_cache: Deferred<Arc<CodeCache>>,
emit_cache: Deferred<Arc<EmitCache>>,
emitter: Deferred<Arc<Emitter>>,
feature_checker: Deferred<Arc<FeatureChecker>>,
file_fetcher: Deferred<Arc<FileFetcher>>, file_fetcher: Deferred<Arc<FileFetcher>>,
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
global_http_cache: Deferred<Arc<GlobalHttpCache>>, global_http_cache: Deferred<Arc<GlobalHttpCache>>,
http_cache: Deferred<Arc<dyn HttpCache>>, http_cache: Deferred<Arc<dyn HttpCache>>,
http_client_provider: Deferred<Arc<HttpClientProvider>>, http_client_provider: Deferred<Arc<HttpClientProvider>>,
emit_cache: Deferred<Arc<EmitCache>>, in_npm_pkg_checker: Deferred<Arc<dyn InNpmPackageChecker>>,
emitter: Deferred<Arc<Emitter>>,
esm_or_cjs_checker: Deferred<Arc<EsmOrCjsChecker>>,
fs: Deferred<Arc<dyn deno_fs::FileSystem>>,
main_graph_container: Deferred<Arc<MainModuleGraphContainer>>, main_graph_container: Deferred<Arc<MainModuleGraphContainer>>,
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
blob_store: Deferred<Arc<BlobStore>>,
module_info_cache: Deferred<Arc<ModuleInfoCache>>,
parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
resolver: Deferred<Arc<CliGraphResolver>>,
maybe_file_watcher_reporter: Deferred<Option<FileWatcherReporter>>, maybe_file_watcher_reporter: Deferred<Option<FileWatcherReporter>>,
maybe_inspector_server: Deferred<Option<Arc<InspectorServer>>>,
module_graph_builder: Deferred<Arc<ModuleGraphBuilder>>, module_graph_builder: Deferred<Arc<ModuleGraphBuilder>>,
module_graph_creator: Deferred<Arc<ModuleGraphCreator>>, module_graph_creator: Deferred<Arc<ModuleGraphCreator>>,
module_info_cache: Deferred<Arc<ModuleInfoCache>>,
module_load_preparer: Deferred<Arc<ModuleLoadPreparer>>, module_load_preparer: Deferred<Arc<ModuleLoadPreparer>>,
node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>, node_code_translator: Deferred<Arc<CliNodeCodeTranslator>>,
node_resolver: Deferred<Arc<NodeResolver>>, node_resolver: Deferred<Arc<NodeResolver>>,
npm_cache_dir: Deferred<Arc<NpmCacheDir>>,
npm_resolver: Deferred<Arc<dyn CliNpmResolver>>, npm_resolver: Deferred<Arc<dyn CliNpmResolver>>,
parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
permission_desc_parser: Deferred<Arc<RuntimePermissionDescriptorParser>>, permission_desc_parser: Deferred<Arc<RuntimePermissionDescriptorParser>>,
pkg_json_resolver: Deferred<Arc<PackageJsonResolver>>,
resolver: Deferred<Arc<CliGraphResolver>>,
root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
root_permissions_container: Deferred<PermissionsContainer>, root_permissions_container: Deferred<PermissionsContainer>,
sloppy_imports_resolver: Deferred<Option<Arc<CliSloppyImportsResolver>>>, sloppy_imports_resolver: Deferred<Option<Arc<CliSloppyImportsResolver>>>,
text_only_progress_bar: Deferred<ProgressBar>, text_only_progress_bar: Deferred<ProgressBar>,
type_checker: Deferred<Arc<TypeChecker>>, type_checker: Deferred<Arc<TypeChecker>>,
cjs_resolutions: Deferred<Arc<CjsResolutionStore>>,
cli_node_resolver: Deferred<Arc<CliNodeResolver>>,
feature_checker: Deferred<Arc<FeatureChecker>>,
code_cache: Deferred<Arc<CodeCache>>,
workspace_resolver: Deferred<Arc<WorkspaceResolver>>, workspace_resolver: Deferred<Arc<WorkspaceResolver>>,
} }
@ -300,12 +310,6 @@ impl CliFactory {
.get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly)) .get_or_init(|| ProgressBar::new(ProgressBarStyle::TextOnly))
} }
pub fn esm_or_cjs_checker(&self) -> &Arc<EsmOrCjsChecker> {
self.services.esm_or_cjs_checker.get_or_init(|| {
Arc::new(EsmOrCjsChecker::new(self.parsed_source_cache().clone()))
})
}
pub fn global_http_cache(&self) -> Result<&Arc<GlobalHttpCache>, AnyError> { pub fn global_http_cache(&self) -> Result<&Arc<GlobalHttpCache>, AnyError> {
self.services.global_http_cache.get_or_try_init(|| { self.services.global_http_cache.get_or_try_init(|| {
Ok(Arc::new(GlobalHttpCache::new( Ok(Arc::new(GlobalHttpCache::new(
@ -359,32 +363,76 @@ impl CliFactory {
self.services.fs.get_or_init(|| Arc::new(deno_fs::RealFs)) self.services.fs.get_or_init(|| Arc::new(deno_fs::RealFs))
} }
pub fn in_npm_pkg_checker(
&self,
) -> Result<&Arc<dyn InNpmPackageChecker>, AnyError> {
self.services.in_npm_pkg_checker.get_or_try_init(|| {
let cli_options = self.cli_options()?;
let options = if cli_options.use_byonm() {
CreateInNpmPkgCheckerOptions::Byonm
} else {
CreateInNpmPkgCheckerOptions::Managed(
CliManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: self.npm_cache_dir()?.root_dir_url(),
maybe_node_modules_path: cli_options
.node_modules_dir_path()
.map(|p| p.as_path()),
},
)
};
Ok(create_in_npm_pkg_checker(options))
})
}
pub fn npm_cache_dir(&self) -> Result<&Arc<NpmCacheDir>, AnyError> {
self.services.npm_cache_dir.get_or_try_init(|| {
let fs = self.fs();
let global_path = self.deno_dir()?.npm_folder_path();
let cli_options = self.cli_options()?;
Ok(Arc::new(NpmCacheDir::new(
&DenoCacheEnvFsAdapter(fs.as_ref()),
global_path,
cli_options.npmrc().get_all_known_registries_urls(),
)))
})
}
pub async fn npm_resolver( pub async fn npm_resolver(
&self, &self,
) -> Result<&Arc<dyn CliNpmResolver>, AnyError> { ) -> Result<&Arc<dyn CliNpmResolver>, AnyError> {
self self
.services .services
.npm_resolver .npm_resolver
.get_or_try_init_async(async { .get_or_try_init_async(
async {
let fs = self.fs(); let fs = self.fs();
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
// For `deno install` we want to force the managed resolver so it can set up `node_modules/` directory. create_cli_npm_resolver(if cli_options.use_byonm() {
create_cli_npm_resolver(if cli_options.use_byonm() && !matches!(cli_options.sub_command(), DenoSubcommand::Install(_) | DenoSubcommand::Add(_) | DenoSubcommand::Remove(_)) { CliNpmResolverCreateOptions::Byonm(
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { CliByonmNpmResolverCreateOptions {
fs: CliDenoResolverFs(fs.clone()), fs: CliDenoResolverFs(fs.clone()),
root_node_modules_dir: Some(match cli_options.node_modules_dir_path() { pkg_json_resolver: self.pkg_json_resolver().clone(),
root_node_modules_dir: Some(
match cli_options.node_modules_dir_path() {
Some(node_modules_path) => node_modules_path.to_path_buf(), Some(node_modules_path) => node_modules_path.to_path_buf(),
// path needs to be canonicalized for node resolution // path needs to be canonicalized for node resolution
// (node_modules_dir_path above is already canonicalized) // (node_modules_dir_path above is already canonicalized)
None => canonicalize_path_maybe_not_exists(cli_options.initial_cwd())? None => canonicalize_path_maybe_not_exists(
cli_options.initial_cwd(),
)?
.join("node_modules"), .join("node_modules"),
}), },
}) ),
},
)
} else { } else {
CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions { CliNpmResolverCreateOptions::Managed(
CliManagedNpmResolverCreateOptions {
snapshot: match cli_options.resolve_npm_resolution_snapshot()? { snapshot: match cli_options.resolve_npm_resolution_snapshot()? {
Some(snapshot) => { Some(snapshot) => {
CliNpmResolverManagedSnapshotOption::Specified(Some(snapshot)) CliNpmResolverManagedSnapshotOption::Specified(Some(
snapshot,
))
} }
None => match cli_options.maybe_lockfile() { None => match cli_options.maybe_lockfile() {
Some(lockfile) => { Some(lockfile) => {
@ -392,23 +440,35 @@ impl CliFactory {
lockfile.clone(), lockfile.clone(),
) )
} }
None => CliNpmResolverManagedSnapshotOption::Specified(None), None => {
CliNpmResolverManagedSnapshotOption::Specified(None)
}
}, },
}, },
maybe_lockfile: cli_options.maybe_lockfile().cloned(), maybe_lockfile: cli_options.maybe_lockfile().cloned(),
fs: fs.clone(), fs: fs.clone(),
http_client_provider: self.http_client_provider().clone(), http_client_provider: self.http_client_provider().clone(),
npm_global_cache_dir: self.deno_dir()?.npm_folder_path(), npm_cache_dir: self.npm_cache_dir()?.clone(),
cache_setting: cli_options.cache_setting(), cache_setting: cli_options.cache_setting(),
text_only_progress_bar: self.text_only_progress_bar().clone(), text_only_progress_bar: self.text_only_progress_bar().clone(),
maybe_node_modules_path: cli_options.node_modules_dir_path().cloned(), maybe_node_modules_path: cli_options
npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::from_workspace(cli_options.workspace())), .node_modules_dir_path()
.cloned(),
npm_install_deps_provider: Arc::new(
NpmInstallDepsProvider::from_workspace(
cli_options.workspace(),
),
),
npm_system_info: cli_options.npm_system_info(), npm_system_info: cli_options.npm_system_info(),
npmrc: cli_options.npmrc().clone(), npmrc: cli_options.npmrc().clone(),
lifecycle_scripts: cli_options.lifecycle_scripts_config(), lifecycle_scripts: cli_options.lifecycle_scripts_config(),
},
)
}) })
}).await .await
}.boxed_local()) }
.boxed_local(),
)
.await .await
} }
@ -513,6 +573,7 @@ impl CliFactory {
self.services.module_info_cache.get_or_try_init(|| { self.services.module_info_cache.get_or_try_init(|| {
Ok(Arc::new(ModuleInfoCache::new( Ok(Arc::new(ModuleInfoCache::new(
self.caches()?.dep_analysis_db(), self.caches()?.dep_analysis_db(),
self.parsed_source_cache().clone(),
))) )))
}) })
} }
@ -541,6 +602,7 @@ impl CliFactory {
ts_config_result.ts_config, ts_config_result.ts_config,
)?; )?;
Ok(Arc::new(Emitter::new( Ok(Arc::new(Emitter::new(
self.cjs_tracker()?.clone(),
self.emit_cache()?.clone(), self.emit_cache()?.clone(),
self.parsed_source_cache().clone(), self.parsed_source_cache().clone(),
transpile_options, transpile_options,
@ -564,7 +626,9 @@ impl CliFactory {
async { async {
Ok(Arc::new(NodeResolver::new( Ok(Arc::new(NodeResolver::new(
DenoFsNodeResolverEnv::new(self.fs().clone()), DenoFsNodeResolverEnv::new(self.fs().clone()),
self.in_npm_pkg_checker()?.clone(),
self.npm_resolver().await?.clone().into_npm_resolver(), self.npm_resolver().await?.clone().into_npm_resolver(),
self.pkg_json_resolver().clone(),
))) )))
} }
.boxed_local(), .boxed_local(),
@ -582,24 +646,35 @@ impl CliFactory {
let caches = self.caches()?; let caches = self.caches()?;
let node_analysis_cache = let node_analysis_cache =
NodeAnalysisCache::new(caches.node_analysis_db()); NodeAnalysisCache::new(caches.node_analysis_db());
let node_resolver = self.cli_node_resolver().await?.clone(); let node_resolver = self.node_resolver().await?.clone();
let cjs_esm_analyzer = CliCjsCodeAnalyzer::new( let cjs_esm_analyzer = CliCjsCodeAnalyzer::new(
node_analysis_cache, node_analysis_cache,
self.cjs_tracker()?.clone(),
self.fs().clone(), self.fs().clone(),
node_resolver,
Some(self.parsed_source_cache().clone()), Some(self.parsed_source_cache().clone()),
self.cli_options()?.is_npm_main(),
); );
Ok(Arc::new(NodeCodeTranslator::new( Ok(Arc::new(NodeCodeTranslator::new(
cjs_esm_analyzer, cjs_esm_analyzer,
DenoFsNodeResolverEnv::new(self.fs().clone()), DenoFsNodeResolverEnv::new(self.fs().clone()),
self.node_resolver().await?.clone(), self.in_npm_pkg_checker()?.clone(),
node_resolver,
self.npm_resolver().await?.clone().into_npm_resolver(), self.npm_resolver().await?.clone().into_npm_resolver(),
self.pkg_json_resolver().clone(),
))) )))
}) })
.await .await
} }
pub fn pkg_json_resolver(&self) -> &Arc<PackageJsonResolver> {
self.services.pkg_json_resolver.get_or_init(|| {
Arc::new(PackageJsonResolver::new(DenoFsNodeResolverEnv::new(
self.fs().clone(),
)))
})
}
pub async fn type_checker(&self) -> Result<&Arc<TypeChecker>, AnyError> { pub async fn type_checker(&self) -> Result<&Arc<TypeChecker>, AnyError> {
self self
.services .services
@ -608,6 +683,10 @@ impl CliFactory {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
Ok(Arc::new(TypeChecker::new( Ok(Arc::new(TypeChecker::new(
self.caches()?.clone(), self.caches()?.clone(),
Arc::new(TypeCheckingCjsTracker::new(
self.cjs_tracker()?.clone(),
self.module_info_cache()?.clone(),
)),
cli_options.clone(), cli_options.clone(),
self.module_graph_builder().await?.clone(), self.module_graph_builder().await?.clone(),
self.node_resolver().await?.clone(), self.node_resolver().await?.clone(),
@ -626,19 +705,18 @@ impl CliFactory {
.get_or_try_init_async(async { .get_or_try_init_async(async {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
Ok(Arc::new(ModuleGraphBuilder::new( Ok(Arc::new(ModuleGraphBuilder::new(
cli_options.clone(),
self.caches()?.clone(), self.caches()?.clone(),
self.esm_or_cjs_checker().clone(), cli_options.clone(),
self.file_fetcher()?.clone(),
self.fs().clone(), self.fs().clone(),
self.resolver().await?.clone(), self.global_http_cache()?.clone(),
self.cli_node_resolver().await?.clone(), self.in_npm_pkg_checker()?.clone(),
self.npm_resolver().await?.clone(),
self.module_info_cache()?.clone(),
self.parsed_source_cache().clone(),
cli_options.maybe_lockfile().cloned(), cli_options.maybe_lockfile().cloned(),
self.maybe_file_watcher_reporter().clone(), self.maybe_file_watcher_reporter().clone(),
self.file_fetcher()?.clone(), self.module_info_cache()?.clone(),
self.global_http_cache()?.clone(), self.npm_resolver().await?.clone(),
self.parsed_source_cache().clone(),
self.resolver().await?.clone(),
self.root_permissions_container()?.clone(), self.root_permissions_container()?.clone(),
))) )))
}) })
@ -710,8 +788,17 @@ impl CliFactory {
.await .await
} }
pub fn cjs_resolutions(&self) -> &Arc<CjsResolutionStore> { pub fn cjs_tracker(&self) -> Result<&Arc<CjsTracker>, AnyError> {
self.services.cjs_resolutions.get_or_init(Default::default) self.services.cjs_tracker.get_or_try_init(|| {
let options = self.cli_options()?;
Ok(Arc::new(CjsTracker::new(
self.in_npm_pkg_checker()?.clone(),
self.pkg_json_resolver().clone(),
CjsTrackerOptions {
unstable_detect_cjs: options.unstable_detect_cjs(),
},
)))
})
} }
pub async fn cli_node_resolver( pub async fn cli_node_resolver(
@ -722,8 +809,9 @@ impl CliFactory {
.cli_node_resolver .cli_node_resolver
.get_or_try_init_async(async { .get_or_try_init_async(async {
Ok(Arc::new(CliNodeResolver::new( Ok(Arc::new(CliNodeResolver::new(
self.cjs_resolutions().clone(), self.cjs_tracker()?.clone(),
self.fs().clone(), self.fs().clone(),
self.in_npm_pkg_checker()?.clone(),
self.node_resolver().await?.clone(), self.node_resolver().await?.clone(),
self.npm_resolver().await?.clone(), self.npm_resolver().await?.clone(),
))) )))
@ -761,7 +849,9 @@ impl CliFactory {
) -> Result<DenoCompileBinaryWriter, AnyError> { ) -> Result<DenoCompileBinaryWriter, AnyError> {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
Ok(DenoCompileBinaryWriter::new( Ok(DenoCompileBinaryWriter::new(
self.cjs_tracker()?,
self.deno_dir()?, self.deno_dir()?,
self.emitter()?,
self.file_fetcher()?, self.file_fetcher()?,
self.http_client_provider(), self.http_client_provider(),
self.npm_resolver().await?.as_ref(), self.npm_resolver().await?.as_ref(),
@ -790,53 +880,60 @@ impl CliFactory {
&self, &self,
) -> Result<CliMainWorkerFactory, AnyError> { ) -> Result<CliMainWorkerFactory, AnyError> {
let cli_options = self.cli_options()?; let cli_options = self.cli_options()?;
let fs = self.fs();
let node_resolver = self.node_resolver().await?; let node_resolver = self.node_resolver().await?;
let npm_resolver = self.npm_resolver().await?; let npm_resolver = self.npm_resolver().await?;
let fs = self.fs();
let cli_node_resolver = self.cli_node_resolver().await?; let cli_node_resolver = self.cli_node_resolver().await?;
let cli_npm_resolver = self.npm_resolver().await?.clone(); let cli_npm_resolver = self.npm_resolver().await?.clone();
let in_npm_pkg_checker = self.in_npm_pkg_checker()?;
let maybe_file_watcher_communicator = if cli_options.has_hmr() { let maybe_file_watcher_communicator = if cli_options.has_hmr() {
Some(self.watcher_communicator.clone().unwrap()) Some(self.watcher_communicator.clone().unwrap())
} else { } else {
None None
}; };
let node_code_translator = self.node_code_translator().await?;
let cjs_tracker = self.cjs_tracker()?.clone();
let pkg_json_resolver = self.pkg_json_resolver().clone();
Ok(CliMainWorkerFactory::new( Ok(CliMainWorkerFactory::new(
self.blob_store().clone(), self.blob_store().clone(),
self.cjs_resolutions().clone(),
if cli_options.code_cache_enabled() { if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone()) Some(self.code_cache()?.clone())
} else { } else {
None None
}, },
self.feature_checker()?.clone(), self.feature_checker()?.clone(),
self.fs().clone(), fs.clone(),
maybe_file_watcher_communicator, maybe_file_watcher_communicator,
self.maybe_inspector_server()?.clone(), self.maybe_inspector_server()?.clone(),
cli_options.maybe_lockfile().cloned(), cli_options.maybe_lockfile().cloned(),
Box::new(CliModuleLoaderFactory::new( Box::new(CliModuleLoaderFactory::new(
cli_options, cli_options,
cjs_tracker,
if cli_options.code_cache_enabled() { if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone()) Some(self.code_cache()?.clone())
} else { } else {
None None
}, },
self.emitter()?.clone(), self.emitter()?.clone(),
fs.clone(),
in_npm_pkg_checker.clone(),
self.main_module_graph_container().await?.clone(), self.main_module_graph_container().await?.clone(),
self.module_load_preparer().await?.clone(), self.module_load_preparer().await?.clone(),
node_code_translator.clone(),
cli_node_resolver.clone(), cli_node_resolver.clone(),
cli_npm_resolver.clone(), cli_npm_resolver.clone(),
NpmModuleLoader::new( NpmModuleLoader::new(
self.cjs_resolutions().clone(), self.cjs_tracker()?.clone(),
self.node_code_translator().await?.clone(),
fs.clone(), fs.clone(),
cli_node_resolver.clone(), node_code_translator.clone(),
), ),
self.parsed_source_cache().clone(), self.parsed_source_cache().clone(),
self.resolver().await?.clone(), self.resolver().await?.clone(),
)), )),
node_resolver.clone(), node_resolver.clone(),
npm_resolver.clone(), npm_resolver.clone(),
pkg_json_resolver,
self.root_cert_store_provider().clone(), self.root_cert_store_provider().clone(),
self.root_permissions_container()?.clone(), self.root_permissions_container()?.clone(),
StorageKeyResolver::from_options(cli_options), StorageKeyResolver::from_options(cli_options),
@ -852,8 +949,10 @@ impl CliFactory {
let create_hmr_runner = if cli_options.has_hmr() { let create_hmr_runner = if cli_options.has_hmr() {
let watcher_communicator = self.watcher_communicator.clone().unwrap(); let watcher_communicator = self.watcher_communicator.clone().unwrap();
let emitter = self.emitter()?.clone(); let emitter = self.emitter()?.clone();
let cjs_tracker = self.cjs_tracker()?.clone();
let fn_: crate::worker::CreateHmrRunnerCb = Box::new(move |session| { let fn_: crate::worker::CreateHmrRunnerCb = Box::new(move |session| {
Box::new(HmrRunner::new( Box::new(HmrRunner::new(
cjs_tracker.clone(),
emitter.clone(), emitter.clone(),
session, session,
watcher_communicator.clone(), watcher_communicator.clone(),
@ -890,7 +989,6 @@ impl CliFactory {
inspect_wait: cli_options.inspect_wait().is_some(), inspect_wait: cli_options.inspect_wait().is_some(),
strace_ops: cli_options.strace_ops().clone(), strace_ops: cli_options.strace_ops().clone(),
is_inspecting: cli_options.is_inspecting(), is_inspecting: cli_options.is_inspecting(),
is_npm_main: cli_options.is_npm_main(),
location: cli_options.location_flag().clone(), location: cli_options.location_flag().clone(),
// if the user ran a binary command, we'll need to set process.argv[0] // if the user ran a binary command, we'll need to set process.argv[0]
// to be the name of the binary command instead of deno // to be the name of the binary command instead of deno
@ -908,7 +1006,6 @@ impl CliFactory {
node_ipc: cli_options.node_ipc_fd(), node_ipc: cli_options.node_ipc_fd(),
serve_port: cli_options.serve_port(), serve_port: cli_options.serve_port(),
serve_host: cli_options.serve_host(), serve_host: cli_options.serve_host(),
unstable_detect_cjs: cli_options.unstable_detect_cjs(),
}) })
} }
} }

View file

@ -24,6 +24,7 @@ use deno_graph::source::LoaderChecksum;
use deno_path_util::url_to_file_path; use deno_path_util::url_to_file_path;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_web::BlobStore; use deno_runtime::deno_web::BlobStore;
use http::header;
use log::debug; use log::debug;
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::HashMap; use std::collections::HashMap;
@ -181,6 +182,7 @@ pub enum FetchPermissionsOptionRef<'a> {
pub struct FetchOptions<'a> { pub struct FetchOptions<'a> {
pub specifier: &'a ModuleSpecifier, pub specifier: &'a ModuleSpecifier,
pub permissions: FetchPermissionsOptionRef<'a>, pub permissions: FetchPermissionsOptionRef<'a>,
pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
pub maybe_accept: Option<&'a str>, pub maybe_accept: Option<&'a str>,
pub maybe_cache_setting: Option<&'a CacheSetting>, pub maybe_cache_setting: Option<&'a CacheSetting>,
} }
@ -350,6 +352,7 @@ impl FileFetcher {
maybe_accept: Option<&str>, maybe_accept: Option<&str>,
cache_setting: &CacheSetting, cache_setting: &CacheSetting,
maybe_checksum: Option<&LoaderChecksum>, maybe_checksum: Option<&LoaderChecksum>,
maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
) -> Result<FileOrRedirect, AnyError> { ) -> Result<FileOrRedirect, AnyError> {
debug!( debug!(
"FileFetcher::fetch_remote_no_follow - specifier: {}", "FileFetcher::fetch_remote_no_follow - specifier: {}",
@ -442,6 +445,7 @@ impl FileFetcher {
.as_ref() .as_ref()
.map(|(_, etag)| etag.clone()), .map(|(_, etag)| etag.clone()),
maybe_auth_token: maybe_auth_token.clone(), maybe_auth_token: maybe_auth_token.clone(),
maybe_auth: maybe_auth.clone(),
maybe_progress_guard: maybe_progress_guard.as_ref(), maybe_progress_guard: maybe_progress_guard.as_ref(),
}) })
.await? .await?
@ -538,7 +542,18 @@ impl FileFetcher {
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> Result<File, AnyError> { ) -> Result<File, AnyError> {
self self
.fetch_inner(specifier, FetchPermissionsOptionRef::AllowAll) .fetch_inner(specifier, None, FetchPermissionsOptionRef::AllowAll)
.await
}
#[inline(always)]
pub async fn fetch_bypass_permissions_with_maybe_auth(
&self,
specifier: &ModuleSpecifier,
maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
) -> Result<File, AnyError> {
self
.fetch_inner(specifier, maybe_auth, FetchPermissionsOptionRef::AllowAll)
.await .await
} }
@ -552,6 +567,7 @@ impl FileFetcher {
self self
.fetch_inner( .fetch_inner(
specifier, specifier,
None,
FetchPermissionsOptionRef::StaticContainer(permissions), FetchPermissionsOptionRef::StaticContainer(permissions),
) )
.await .await
@ -560,12 +576,14 @@ impl FileFetcher {
async fn fetch_inner( async fn fetch_inner(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
permissions: FetchPermissionsOptionRef<'_>, permissions: FetchPermissionsOptionRef<'_>,
) -> Result<File, AnyError> { ) -> Result<File, AnyError> {
self self
.fetch_with_options(FetchOptions { .fetch_with_options(FetchOptions {
specifier, specifier,
permissions, permissions,
maybe_auth,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: None, maybe_cache_setting: None,
}) })
@ -585,12 +603,14 @@ impl FileFetcher {
max_redirect: usize, max_redirect: usize,
) -> Result<File, AnyError> { ) -> Result<File, AnyError> {
let mut specifier = Cow::Borrowed(options.specifier); let mut specifier = Cow::Borrowed(options.specifier);
let mut maybe_auth = options.maybe_auth.clone();
for _ in 0..=max_redirect { for _ in 0..=max_redirect {
match self match self
.fetch_no_follow_with_options(FetchNoFollowOptions { .fetch_no_follow_with_options(FetchNoFollowOptions {
fetch_options: FetchOptions { fetch_options: FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: options.permissions, permissions: options.permissions,
maybe_auth: maybe_auth.clone(),
maybe_accept: options.maybe_accept, maybe_accept: options.maybe_accept,
maybe_cache_setting: options.maybe_cache_setting, maybe_cache_setting: options.maybe_cache_setting,
}, },
@ -602,6 +622,10 @@ impl FileFetcher {
return Ok(file); return Ok(file);
} }
FileOrRedirect::Redirect(redirect_specifier) => { FileOrRedirect::Redirect(redirect_specifier) => {
// If we were redirected to another origin, don't send the auth header anymore.
if redirect_specifier.origin() != specifier.origin() {
maybe_auth = None;
}
specifier = Cow::Owned(redirect_specifier); specifier = Cow::Owned(redirect_specifier);
} }
} }
@ -666,6 +690,7 @@ impl FileFetcher {
options.maybe_accept, options.maybe_accept,
options.maybe_cache_setting.unwrap_or(&self.cache_setting), options.maybe_cache_setting.unwrap_or(&self.cache_setting),
maybe_checksum, maybe_checksum,
options.maybe_auth,
) )
.await .await
} }
@ -756,6 +781,7 @@ mod tests {
FetchOptions { FetchOptions {
specifier, specifier,
permissions: FetchPermissionsOptionRef::AllowAll, permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: Some(&file_fetcher.cache_setting), maybe_cache_setting: Some(&file_fetcher.cache_setting),
}, },
@ -1255,6 +1281,7 @@ mod tests {
FetchOptions { FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: FetchPermissionsOptionRef::AllowAll, permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: Some(&file_fetcher.cache_setting), maybe_cache_setting: Some(&file_fetcher.cache_setting),
}, },
@ -1268,6 +1295,7 @@ mod tests {
FetchOptions { FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: FetchPermissionsOptionRef::AllowAll, permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: None, maybe_accept: None,
maybe_cache_setting: Some(&file_fetcher.cache_setting), maybe_cache_setting: Some(&file_fetcher.cache_setting),
}, },

View file

@ -6,7 +6,6 @@ use crate::args::CliLockfile;
use crate::args::CliOptions; use crate::args::CliOptions;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::cache; use crate::cache;
use crate::cache::EsmOrCjsChecker;
use crate::cache::GlobalHttpCache; use crate::cache::GlobalHttpCache;
use crate::cache::ModuleInfoCache; use crate::cache::ModuleInfoCache;
use crate::cache::ParsedSourceCache; use crate::cache::ParsedSourceCache;
@ -15,7 +14,6 @@ use crate::errors::get_error_class_name;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::resolver::CliGraphResolver; use crate::resolver::CliGraphResolver;
use crate::resolver::CliNodeResolver;
use crate::resolver::CliSloppyImportsResolver; use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs; use crate::resolver::SloppyImportsCachedFs;
use crate::tools::check; use crate::tools::check;
@ -50,6 +48,7 @@ use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::jsr::JsrDepPackageReq; use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use import_map::ImportMapError; use import_map::ImportMapError;
use node_resolver::InNpmPackageChecker;
use std::collections::HashSet; use std::collections::HashSet;
use std::error::Error; use std::error::Error;
use std::ops::Deref; use std::ops::Deref;
@ -379,54 +378,51 @@ pub struct BuildFastCheckGraphOptions<'a> {
} }
pub struct ModuleGraphBuilder { pub struct ModuleGraphBuilder {
options: Arc<CliOptions>,
caches: Arc<cache::Caches>, caches: Arc<cache::Caches>,
esm_or_cjs_checker: Arc<EsmOrCjsChecker>, cli_options: Arc<CliOptions>,
file_fetcher: Arc<FileFetcher>,
fs: Arc<dyn FileSystem>, fs: Arc<dyn FileSystem>,
resolver: Arc<CliGraphResolver>, global_http_cache: Arc<GlobalHttpCache>,
node_resolver: Arc<CliNodeResolver>, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<CliLockfile>>, lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>, maybe_file_watcher_reporter: Option<FileWatcherReporter>,
file_fetcher: Arc<FileFetcher>, module_info_cache: Arc<ModuleInfoCache>,
global_http_cache: Arc<GlobalHttpCache>, npm_resolver: Arc<dyn CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliGraphResolver>,
root_permissions_container: PermissionsContainer, root_permissions_container: PermissionsContainer,
} }
impl ModuleGraphBuilder { impl ModuleGraphBuilder {
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn new( pub fn new(
options: Arc<CliOptions>,
caches: Arc<cache::Caches>, caches: Arc<cache::Caches>,
esm_or_cjs_checker: Arc<EsmOrCjsChecker>, cli_options: Arc<CliOptions>,
file_fetcher: Arc<FileFetcher>,
fs: Arc<dyn FileSystem>, fs: Arc<dyn FileSystem>,
resolver: Arc<CliGraphResolver>, global_http_cache: Arc<GlobalHttpCache>,
node_resolver: Arc<CliNodeResolver>, in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: Arc<dyn CliNpmResolver>,
module_info_cache: Arc<ModuleInfoCache>,
parsed_source_cache: Arc<ParsedSourceCache>,
lockfile: Option<Arc<CliLockfile>>, lockfile: Option<Arc<CliLockfile>>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>, maybe_file_watcher_reporter: Option<FileWatcherReporter>,
file_fetcher: Arc<FileFetcher>, module_info_cache: Arc<ModuleInfoCache>,
global_http_cache: Arc<GlobalHttpCache>, npm_resolver: Arc<dyn CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliGraphResolver>,
root_permissions_container: PermissionsContainer, root_permissions_container: PermissionsContainer,
) -> Self { ) -> Self {
Self { Self {
options,
caches, caches,
esm_or_cjs_checker, cli_options,
file_fetcher,
fs, fs,
resolver, global_http_cache,
node_resolver, in_npm_pkg_checker,
npm_resolver,
module_info_cache,
parsed_source_cache,
lockfile, lockfile,
maybe_file_watcher_reporter, maybe_file_watcher_reporter,
file_fetcher, module_info_cache,
global_http_cache, npm_resolver,
parsed_source_cache,
resolver,
root_permissions_container, root_permissions_container,
} }
} }
@ -512,13 +508,11 @@ impl ModuleGraphBuilder {
} }
let maybe_imports = if options.graph_kind.include_types() { let maybe_imports = if options.graph_kind.include_types() {
self.options.to_compiler_option_types()? self.cli_options.to_compiler_option_types()?
} else { } else {
Vec::new() Vec::new()
}; };
let analyzer = self let analyzer = self.module_info_cache.as_module_analyzer();
.module_info_cache
.as_module_analyzer(&self.parsed_source_cache);
let mut loader = match options.loader { let mut loader = match options.loader {
Some(loader) => MutLoaderRef::Borrowed(loader), Some(loader) => MutLoaderRef::Borrowed(loader),
None => MutLoaderRef::Owned(self.create_graph_loader()), None => MutLoaderRef::Owned(self.create_graph_loader()),
@ -566,7 +560,7 @@ impl ModuleGraphBuilder {
// ensure an "npm install" is done if the user has explicitly // ensure an "npm install" is done if the user has explicitly
// opted into using a node_modules directory // opted into using a node_modules directory
if self if self
.options .cli_options
.node_modules_dir()? .node_modules_dir()?
.map(|m| m.uses_node_modules_dir()) .map(|m| m.uses_node_modules_dir())
.unwrap_or(false) .unwrap_or(false)
@ -677,10 +671,10 @@ impl ModuleGraphBuilder {
graph.build_fast_check_type_graph( graph.build_fast_check_type_graph(
deno_graph::BuildFastCheckTypeGraphOptions { deno_graph::BuildFastCheckTypeGraphOptions {
jsr_url_provider: &CliJsrUrlProvider, es_parser: Some(&parser),
fast_check_cache: fast_check_cache.as_ref().map(|c| c as _), fast_check_cache: fast_check_cache.as_ref().map(|c| c as _),
fast_check_dts: false, fast_check_dts: false,
module_parser: Some(&parser), jsr_url_provider: &CliJsrUrlProvider,
resolver: Some(graph_resolver), resolver: Some(graph_resolver),
npm_resolver: Some(&graph_npm_resolver), npm_resolver: Some(&graph_npm_resolver),
workspace_fast_check: options.workspace_fast_check, workspace_fast_check: options.workspace_fast_check,
@ -699,20 +693,18 @@ impl ModuleGraphBuilder {
permissions: PermissionsContainer, permissions: PermissionsContainer,
) -> cache::FetchCacher { ) -> cache::FetchCacher {
cache::FetchCacher::new( cache::FetchCacher::new(
self.esm_or_cjs_checker.clone(),
self.file_fetcher.clone(), self.file_fetcher.clone(),
self.fs.clone(),
self.global_http_cache.clone(), self.global_http_cache.clone(),
self.node_resolver.clone(), self.in_npm_pkg_checker.clone(),
self.npm_resolver.clone(),
self.module_info_cache.clone(), self.module_info_cache.clone(),
cache::FetchCacherOptions { cache::FetchCacherOptions {
file_header_overrides: self.options.resolve_file_header_overrides(), file_header_overrides: self.cli_options.resolve_file_header_overrides(),
permissions, permissions,
is_deno_publish: matches!( is_deno_publish: matches!(
self.options.sub_command(), self.cli_options.sub_command(),
crate::args::DenoSubcommand::Publish { .. } crate::args::DenoSubcommand::Publish { .. }
), ),
unstable_detect_cjs: self.options.unstable_detect_cjs(),
}, },
) )
} }
@ -737,12 +729,12 @@ impl ModuleGraphBuilder {
&self.fs, &self.fs,
roots, roots,
GraphValidOptions { GraphValidOptions {
kind: if self.options.type_check_mode().is_true() { kind: if self.cli_options.type_check_mode().is_true() {
GraphKind::All GraphKind::All
} else { } else {
GraphKind::CodeOnly GraphKind::CodeOnly
}, },
check_js: self.options.check_js(), check_js: self.cli_options.check_js(),
exit_integrity_errors: true, exit_integrity_errors: true,
}, },
) )
@ -1009,7 +1001,11 @@ impl deno_graph::source::Reporter for FileWatcherReporter {
) { ) {
let mut file_paths = self.file_paths.lock(); let mut file_paths = self.file_paths.lock();
if specifier.scheme() == "file" { if specifier.scheme() == "file" {
file_paths.push(specifier.to_file_path().unwrap()); // Don't trust that the path is a valid path at this point:
// https://github.com/denoland/deno/issues/26209.
if let Ok(file_path) = specifier.to_file_path() {
file_paths.push(file_path);
}
} }
if modules_done == modules_total { if modules_done == modules_total {

View file

@ -19,6 +19,7 @@ use deno_runtime::deno_fetch;
use deno_runtime::deno_fetch::create_http_client; use deno_runtime::deno_fetch::create_http_client;
use deno_runtime::deno_fetch::CreateHttpClientOptions; use deno_runtime::deno_fetch::CreateHttpClientOptions;
use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_tls::RootCertStoreProvider;
use http::header;
use http::header::HeaderName; use http::header::HeaderName;
use http::header::HeaderValue; use http::header::HeaderValue;
use http::header::ACCEPT; use http::header::ACCEPT;
@ -204,6 +205,7 @@ pub struct FetchOnceArgs<'a> {
pub maybe_accept: Option<String>, pub maybe_accept: Option<String>,
pub maybe_etag: Option<String>, pub maybe_etag: Option<String>,
pub maybe_auth_token: Option<AuthToken>, pub maybe_auth_token: Option<AuthToken>,
pub maybe_auth: Option<(header::HeaderName, header::HeaderValue)>,
pub maybe_progress_guard: Option<&'a UpdateGuard>, pub maybe_progress_guard: Option<&'a UpdateGuard>,
} }
@ -382,6 +384,8 @@ impl HttpClient {
request request
.headers_mut() .headers_mut()
.insert(AUTHORIZATION, authorization_val); .insert(AUTHORIZATION, authorization_val);
} else if let Some((header, value)) = args.maybe_auth {
request.headers_mut().insert(header, value);
} }
if let Some(accept) = args.maybe_accept { if let Some(accept) = args.maybe_accept {
let accepts_val = HeaderValue::from_str(&accept)?; let accepts_val = HeaderValue::from_str(&accept)?;
@ -792,6 +796,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -818,6 +823,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -845,6 +851,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -866,6 +873,7 @@ mod test {
maybe_etag: Some("33a64df551425fcc55e".to_string()), maybe_etag: Some("33a64df551425fcc55e".to_string()),
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
assert_eq!(res.unwrap(), FetchOnceResult::NotModified); assert_eq!(res.unwrap(), FetchOnceResult::NotModified);
@ -885,6 +893,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -914,6 +923,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, _)) = result { if let Ok(FetchOnceResult::Code(body, _)) = result {
@ -939,6 +949,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Redirect(url, _)) = result { if let Ok(FetchOnceResult::Redirect(url, _)) = result {
@ -974,6 +985,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1021,6 +1033,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
@ -1083,6 +1096,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
@ -1136,6 +1150,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1177,6 +1192,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1199,6 +1215,7 @@ mod test {
maybe_etag: Some("33a64df551425fcc55e".to_string()), maybe_etag: Some("33a64df551425fcc55e".to_string()),
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
assert_eq!(res.unwrap(), FetchOnceResult::NotModified); assert_eq!(res.unwrap(), FetchOnceResult::NotModified);
@ -1233,6 +1250,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
if let Ok(FetchOnceResult::Code(body, headers)) = result { if let Ok(FetchOnceResult::Code(body, headers)) = result {
@ -1262,6 +1280,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
assert!(result.is_err()); assert!(result.is_err());
@ -1283,6 +1302,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;
@ -1306,6 +1326,7 @@ mod test {
maybe_etag: None, maybe_etag: None,
maybe_auth_token: None, maybe_auth_token: None,
maybe_progress_guard: None, maybe_progress_guard: None,
maybe_auth: None,
}) })
.await; .await;

View file

@ -12,13 +12,14 @@ use super::urls::url_to_uri;
use crate::args::jsr_url; use crate::args::jsr_url;
use crate::lsp::search::PackageSearchApi; use crate::lsp::search::PackageSearchApi;
use crate::tools::lint::CliLinter; use crate::tools::lint::CliLinter;
use crate::util::path::relative_specifier;
use deno_config::workspace::MappedResolution; use deno_config::workspace::MappedResolution;
use deno_graph::source::ResolutionMode;
use deno_lint::diagnostic::LintDiagnosticRange; use deno_lint::diagnostic::LintDiagnosticRange;
use deno_ast::SourceRange; use deno_ast::SourceRange;
use deno_ast::SourceRangedForSpanned; use deno_ast::SourceRangedForSpanned;
use deno_ast::SourceTextInfo; use deno_ast::SourceTextInfo;
use deno_core::anyhow::anyhow;
use deno_core::error::custom_error; use deno_core::error::custom_error;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde::Deserialize; use deno_core::serde::Deserialize;
@ -37,9 +38,9 @@ use deno_semver::package::PackageReq;
use deno_semver::package::PackageReqReference; use deno_semver::package::PackageReqReference;
use deno_semver::Version; use deno_semver::Version;
use import_map::ImportMap; use import_map::ImportMap;
use node_resolver::NpmResolver;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use std::borrow::Cow;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
@ -229,6 +230,7 @@ pub struct TsResponseImportMapper<'a> {
documents: &'a Documents, documents: &'a Documents,
maybe_import_map: Option<&'a ImportMap>, maybe_import_map: Option<&'a ImportMap>,
resolver: &'a LspResolver, resolver: &'a LspResolver,
tsc_specifier_map: &'a tsc::TscSpecifierMap,
file_referrer: ModuleSpecifier, file_referrer: ModuleSpecifier,
} }
@ -237,12 +239,14 @@ impl<'a> TsResponseImportMapper<'a> {
documents: &'a Documents, documents: &'a Documents,
maybe_import_map: Option<&'a ImportMap>, maybe_import_map: Option<&'a ImportMap>,
resolver: &'a LspResolver, resolver: &'a LspResolver,
tsc_specifier_map: &'a tsc::TscSpecifierMap,
file_referrer: &ModuleSpecifier, file_referrer: &ModuleSpecifier,
) -> Self { ) -> Self {
Self { Self {
documents, documents,
maybe_import_map, maybe_import_map,
resolver, resolver,
tsc_specifier_map,
file_referrer: file_referrer.clone(), file_referrer: file_referrer.clone(),
} }
} }
@ -336,7 +340,12 @@ impl<'a> TsResponseImportMapper<'a> {
.resolver .resolver
.maybe_managed_npm_resolver(Some(&self.file_referrer)) .maybe_managed_npm_resolver(Some(&self.file_referrer))
{ {
if npm_resolver.in_npm_package(specifier) { let in_npm_pkg = self
.resolver
.maybe_node_resolver(Some(&self.file_referrer))
.map(|n| n.in_npm_package(specifier))
.unwrap_or(false);
if in_npm_pkg {
if let Ok(Some(pkg_id)) = if let Ok(Some(pkg_id)) =
npm_resolver.resolve_pkg_id_from_specifier(specifier) npm_resolver.resolve_pkg_id_from_specifier(specifier)
{ {
@ -383,6 +392,11 @@ impl<'a> TsResponseImportMapper<'a> {
} }
} }
} }
} else if let Some(dep_name) = self
.resolver
.file_url_to_package_json_dep(specifier, Some(&self.file_referrer))
{
return Some(dep_name);
} }
// check if the import map has this specifier // check if the import map has this specifier
@ -453,20 +467,37 @@ impl<'a> TsResponseImportMapper<'a> {
specifier: &str, specifier: &str,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
) -> Option<String> { ) -> Option<String> {
if let Ok(specifier) = referrer.join(specifier) { let specifier_stem = specifier.strip_suffix(".js").unwrap_or(specifier);
if let Some(specifier) = self.check_specifier(&specifier, referrer) { let specifiers = std::iter::once(Cow::Borrowed(specifier)).chain(
SUPPORTED_EXTENSIONS
.iter()
.map(|ext| Cow::Owned(format!("{specifier_stem}{ext}"))),
);
for specifier in specifiers {
if let Some(specifier) = self
.resolver
.as_graph_resolver(Some(&self.file_referrer))
.resolve(
&specifier,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
ResolutionMode::Types,
)
.ok()
.and_then(|s| self.tsc_specifier_map.normalize(s.as_str()).ok())
.filter(|s| self.documents.exists(s, Some(&self.file_referrer)))
{
if let Some(specifier) = self
.check_specifier(&specifier, referrer)
.or_else(|| relative_specifier(referrer, &specifier))
.filter(|s| !s.contains("/node_modules/"))
{
return Some(specifier); return Some(specifier);
} }
} }
let specifier = specifier.strip_suffix(".js").unwrap_or(specifier);
for ext in SUPPORTED_EXTENSIONS {
let specifier_with_ext = format!("{specifier}{ext}");
if self
.documents
.contains_import(&specifier_with_ext, referrer)
{
return Some(specifier_with_ext);
}
} }
None None
} }
@ -555,8 +586,9 @@ fn try_reverse_map_package_json_exports(
pub fn fix_ts_import_changes( pub fn fix_ts_import_changes(
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
changes: &[tsc::FileTextChanges], changes: &[tsc::FileTextChanges],
import_mapper: &TsResponseImportMapper, language_server: &language_server::Inner,
) -> Result<Vec<tsc::FileTextChanges>, AnyError> { ) -> Result<Vec<tsc::FileTextChanges>, AnyError> {
let import_mapper = language_server.get_ts_response_import_mapper(referrer);
let mut r = Vec::new(); let mut r = Vec::new();
for change in changes { for change in changes {
let mut text_changes = Vec::new(); let mut text_changes = Vec::new();
@ -598,29 +630,26 @@ pub fn fix_ts_import_changes(
/// Fix tsc import code actions so that the module specifier is correct for /// Fix tsc import code actions so that the module specifier is correct for
/// resolution by Deno (includes the extension). /// resolution by Deno (includes the extension).
fn fix_ts_import_action( fn fix_ts_import_action<'a>(
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
action: &tsc::CodeFixAction, action: &'a tsc::CodeFixAction,
import_mapper: &TsResponseImportMapper, language_server: &language_server::Inner,
) -> Result<Option<tsc::CodeFixAction>, AnyError> { ) -> Option<Cow<'a, tsc::CodeFixAction>> {
if matches!( if !matches!(
action.fix_name.as_str(), action.fix_name.as_str(),
"import" | "fixMissingFunctionDeclaration" "import" | "fixMissingFunctionDeclaration"
) { ) {
let change = action return Some(Cow::Borrowed(action));
.changes }
.first() let specifier = (|| {
.ok_or_else(|| anyhow!("Unexpected action changes."))?; let text_change = action.changes.first()?.text_changes.first()?;
let text_change = change let captures = IMPORT_SPECIFIER_RE.captures(&text_change.new_text)?;
.text_changes Some(captures.get(1)?.as_str())
.first() })();
.ok_or_else(|| anyhow!("Missing text change."))?; let Some(specifier) = specifier else {
if let Some(captures) = IMPORT_SPECIFIER_RE.captures(&text_change.new_text) return Some(Cow::Borrowed(action));
{ };
let specifier = captures let import_mapper = language_server.get_ts_response_import_mapper(referrer);
.get(1)
.ok_or_else(|| anyhow!("Missing capture."))?
.as_str();
if let Some(new_specifier) = if let Some(new_specifier) =
import_mapper.check_unresolved_specifier(specifier, referrer) import_mapper.check_unresolved_specifier(specifier, referrer)
{ {
@ -645,21 +674,19 @@ fn fix_ts_import_action(
}) })
.collect(); .collect();
return Ok(Some(tsc::CodeFixAction { Some(Cow::Owned(tsc::CodeFixAction {
description, description,
changes, changes,
commands: None, commands: None,
fix_name: action.fix_name.clone(), fix_name: action.fix_name.clone(),
fix_id: None, fix_id: None,
fix_all_description: None, fix_all_description: None,
})); }))
} else if !import_mapper.is_valid_import(specifier, referrer) { } else if !import_mapper.is_valid_import(specifier, referrer) {
return Ok(None); None
} else {
Some(Cow::Borrowed(action))
} }
}
}
Ok(Some(action.clone()))
} }
/// Determines if two TypeScript diagnostic codes are effectively equivalent. /// Determines if two TypeScript diagnostic codes are effectively equivalent.
@ -730,7 +757,7 @@ pub fn ts_changes_to_edit(
})) }))
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct CodeActionData { pub struct CodeActionData {
pub specifier: ModuleSpecifier, pub specifier: ModuleSpecifier,
@ -1000,11 +1027,7 @@ impl CodeActionCollection {
"The action returned from TypeScript is unsupported.", "The action returned from TypeScript is unsupported.",
)); ));
} }
let Some(action) = fix_ts_import_action( let Some(action) = fix_ts_import_action(specifier, action, language_server)
specifier,
action,
&language_server.get_ts_response_import_mapper(specifier),
)?
else { else {
return Ok(()); return Ok(());
}; };
@ -1027,7 +1050,7 @@ impl CodeActionCollection {
}); });
self self
.actions .actions
.push(CodeActionKind::Tsc(code_action, action.clone())); .push(CodeActionKind::Tsc(code_action, action.as_ref().clone()));
if let Some(fix_id) = &action.fix_id { if let Some(fix_id) = &action.fix_id {
if let Some(CodeActionKind::Tsc(existing_fix_all, existing_action)) = if let Some(CodeActionKind::Tsc(existing_fix_all, existing_action)) =
@ -1054,10 +1077,12 @@ impl CodeActionCollection {
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
diagnostic: &lsp::Diagnostic, diagnostic: &lsp::Diagnostic,
) { ) {
let data = Some(json!({ let data = action.fix_id.as_ref().map(|fix_id| {
"specifier": specifier, json!(CodeActionData {
"fixId": action.fix_id, specifier: specifier.clone(),
})); fix_id: fix_id.clone(),
})
});
let title = if let Some(description) = &action.fix_all_description { let title = if let Some(description) = &action.fix_all_description {
description.clone() description.clone()
} else { } else {
@ -1206,14 +1231,11 @@ impl CodeActionCollection {
}), }),
); );
match parsed_source.program_ref() { parsed_source
deno_ast::swc::ast::Program::Module(module) => module .program_ref()
.body .body()
.iter()
.find(|i| i.range().contains(&specifier_range)) .find(|i| i.range().contains(&specifier_range))
.map(|i| text_info.line_and_column_index(i.range().start)), .map(|i| text_info.line_and_column_index(i.range().start))
deno_ast::swc::ast::Program::Script(_) => None,
}
} }
async fn deno_types_for_npm_action( async fn deno_types_for_npm_action(

View file

@ -421,7 +421,7 @@ pub fn collect_test(
) -> Result<Vec<lsp::CodeLens>, AnyError> { ) -> Result<Vec<lsp::CodeLens>, AnyError> {
let mut collector = let mut collector =
DenoTestCollector::new(specifier.clone(), parsed_source.clone()); DenoTestCollector::new(specifier.clone(), parsed_source.clone());
parsed_source.module().visit_with(&mut collector); parsed_source.program().visit_with(&mut collector);
Ok(collector.take()) Ok(collector.take())
} }
@ -581,7 +581,7 @@ mod tests {
.unwrap(); .unwrap();
let mut collector = let mut collector =
DenoTestCollector::new(specifier, parsed_module.clone()); DenoTestCollector::new(specifier, parsed_module.clone());
parsed_module.module().visit_with(&mut collector); parsed_module.program().visit_with(&mut collector);
assert_eq!( assert_eq!(
collector.take(), collector.take(),
vec![ vec![

View file

@ -41,6 +41,7 @@ use deno_runtime::deno_node::PackageJson;
use indexmap::IndexSet; use indexmap::IndexSet;
use lsp_types::ClientCapabilities; use lsp_types::ClientCapabilities;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap; use std::collections::HashMap;
use std::ops::Deref; use std::ops::Deref;
use std::ops::DerefMut; use std::ops::DerefMut;
@ -439,6 +440,8 @@ pub struct LanguagePreferences {
pub use_aliases_for_renames: bool, pub use_aliases_for_renames: bool,
#[serde(default)] #[serde(default)]
pub quote_style: QuoteStyle, pub quote_style: QuoteStyle,
#[serde(default)]
pub prefer_type_only_auto_imports: bool,
} }
impl Default for LanguagePreferences { impl Default for LanguagePreferences {
@ -449,6 +452,7 @@ impl Default for LanguagePreferences {
auto_import_file_exclude_patterns: vec![], auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true, use_aliases_for_renames: true,
quote_style: Default::default(), quote_style: Default::default(),
prefer_type_only_auto_imports: false,
} }
} }
} }
@ -981,7 +985,7 @@ impl Config {
| MediaType::Tsx => Some(&workspace_settings.typescript), | MediaType::Tsx => Some(&workspace_settings.typescript),
MediaType::Json MediaType::Json
| MediaType::Wasm | MediaType::Wasm
| MediaType::TsBuildInfo | MediaType::Css
| MediaType::SourceMap | MediaType::SourceMap
| MediaType::Unknown => None, | MediaType::Unknown => None,
} }
@ -1187,6 +1191,7 @@ pub struct ConfigData {
pub resolver: Arc<WorkspaceResolver>, pub resolver: Arc<WorkspaceResolver>,
pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>, pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
pub import_map_from_settings: Option<ModuleSpecifier>, pub import_map_from_settings: Option<ModuleSpecifier>,
pub unstable: BTreeSet<String>,
watched_files: HashMap<ModuleSpecifier, ConfigWatchedFileType>, watched_files: HashMap<ModuleSpecifier, ConfigWatchedFileType>,
} }
@ -1584,9 +1589,16 @@ impl ConfigData {
.join("\n") .join("\n")
); );
} }
let unstable = member_dir
.workspace
.unstable_features()
.iter()
.chain(settings.unstable.as_deref())
.cloned()
.collect::<BTreeSet<_>>();
let unstable_sloppy_imports = std::env::var("DENO_UNSTABLE_SLOPPY_IMPORTS") let unstable_sloppy_imports = std::env::var("DENO_UNSTABLE_SLOPPY_IMPORTS")
.is_ok() .is_ok()
|| member_dir.workspace.has_unstable("sloppy-imports"); || unstable.contains("sloppy-imports");
let sloppy_imports_resolver = unstable_sloppy_imports.then(|| { let sloppy_imports_resolver = unstable_sloppy_imports.then(|| {
Arc::new(CliSloppyImportsResolver::new( Arc::new(CliSloppyImportsResolver::new(
SloppyImportsCachedFs::new_without_stat_cache(Arc::new( SloppyImportsCachedFs::new_without_stat_cache(Arc::new(
@ -1627,6 +1639,7 @@ impl ConfigData {
lockfile, lockfile,
npmrc, npmrc,
import_map_from_settings, import_map_from_settings,
unstable,
watched_files, watched_files,
} }
} }
@ -2251,6 +2264,7 @@ mod tests {
auto_import_file_exclude_patterns: vec![], auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true, use_aliases_for_renames: true,
quote_style: QuoteStyle::Auto, quote_style: QuoteStyle::Auto,
prefer_type_only_auto_imports: false,
}, },
suggest: CompletionSettings { suggest: CompletionSettings {
complete_function_calls: false, complete_function_calls: false,
@ -2296,6 +2310,7 @@ mod tests {
auto_import_file_exclude_patterns: vec![], auto_import_file_exclude_patterns: vec![],
use_aliases_for_renames: true, use_aliases_for_renames: true,
quote_style: QuoteStyle::Auto, quote_style: QuoteStyle::Auto,
prefer_type_only_auto_imports: false,
}, },
suggest: CompletionSettings { suggest: CompletionSettings {
complete_function_calls: false, complete_function_calls: false,

View file

@ -1499,7 +1499,11 @@ fn diagnose_dependency(
.data_for_specifier(referrer_doc.file_referrer().unwrap_or(referrer)) .data_for_specifier(referrer_doc.file_referrer().unwrap_or(referrer))
.and_then(|d| d.resolver.maybe_import_map()); .and_then(|d| d.resolver.maybe_import_map());
if let Some(import_map) = import_map { if let Some(import_map) = import_map {
if let Resolution::Ok(resolved) = &dependency.maybe_code { let resolved = dependency
.maybe_code
.ok()
.or_else(|| dependency.maybe_type.ok());
if let Some(resolved) = resolved {
if let Some(to) = import_map.lookup(&resolved.specifier, referrer) { if let Some(to) = import_map.lookup(&resolved.specifier, referrer) {
if dependency_key != to { if dependency_key != to {
diagnostics.push( diagnostics.push(

View file

@ -272,7 +272,7 @@ fn get_maybe_test_module_fut(
parsed_source.specifier().clone(), parsed_source.specifier().clone(),
parsed_source.text_info_lazy().clone(), parsed_source.text_info_lazy().clone(),
); );
parsed_source.module().visit_with(&mut collector); parsed_source.program().visit_with(&mut collector);
Arc::new(collector.take()) Arc::new(collector.take())
}) })
.map(Result::ok) .map(Result::ok)
@ -332,12 +332,8 @@ impl Document {
.filter(|s| cache.is_valid_file_referrer(s)) .filter(|s| cache.is_valid_file_referrer(s))
.cloned() .cloned()
.or(file_referrer); .or(file_referrer);
let media_type = resolve_media_type( let media_type =
&specifier, resolve_media_type(&specifier, maybe_headers.as_ref(), maybe_language_id);
maybe_headers.as_ref(),
maybe_language_id,
&resolver,
);
let (maybe_parsed_source, maybe_module) = let (maybe_parsed_source, maybe_module) =
if media_type_is_diagnosable(media_type) { if media_type_is_diagnosable(media_type) {
parse_and_analyze_module( parse_and_analyze_module(
@ -399,7 +395,6 @@ impl Document {
&self.specifier, &self.specifier,
self.maybe_headers.as_ref(), self.maybe_headers.as_ref(),
self.maybe_language_id, self.maybe_language_id,
&resolver,
); );
let dependencies; let dependencies;
let maybe_types_dependency; let maybe_types_dependency;
@ -764,14 +759,7 @@ fn resolve_media_type(
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
maybe_headers: Option<&HashMap<String, String>>, maybe_headers: Option<&HashMap<String, String>>,
maybe_language_id: Option<LanguageId>, maybe_language_id: Option<LanguageId>,
resolver: &LspResolver,
) -> MediaType { ) -> MediaType {
if resolver.in_node_modules(specifier) {
if let Some(media_type) = resolver.node_media_type(specifier) {
return media_type;
}
}
if let Some(language_id) = maybe_language_id { if let Some(language_id) = maybe_language_id {
return MediaType::from_specifier_and_content_type( return MediaType::from_specifier_and_content_type(
specifier, specifier,
@ -1071,34 +1059,6 @@ impl Documents {
self.cache.is_valid_file_referrer(specifier) self.cache.is_valid_file_referrer(specifier)
} }
/// Return `true` if the provided specifier can be resolved to a document,
/// otherwise `false`.
pub fn contains_import(
&self,
specifier: &str,
referrer: &ModuleSpecifier,
) -> bool {
let file_referrer = self.get_file_referrer(referrer);
let maybe_specifier = self
.resolver
.as_graph_resolver(file_referrer.as_deref())
.resolve(
specifier,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
ResolutionMode::Types,
)
.ok();
if let Some(import_specifier) = maybe_specifier {
self.exists(&import_specifier, file_referrer.as_deref())
} else {
false
}
}
pub fn resolve_document_specifier( pub fn resolve_document_specifier(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
@ -1561,7 +1521,7 @@ fn parse_source(
text: Arc<str>, text: Arc<str>,
media_type: MediaType, media_type: MediaType,
) -> ParsedSourceResult { ) -> ParsedSourceResult {
deno_ast::parse_module(deno_ast::ParseParams { deno_ast::parse_program(deno_ast::ParseParams {
specifier, specifier,
text, text,
media_type, media_type,

View file

@ -863,7 +863,10 @@ impl Inner {
// We ignore these directories by default because there is a // We ignore these directories by default because there is a
// high likelihood they aren't relevant. Someone can opt-into // high likelihood they aren't relevant. Someone can opt-into
// them by specifying one of them as an enabled path. // them by specifying one of them as an enabled path.
if matches!(dir_name.as_str(), "vendor" | "node_modules" | ".git") { if matches!(
dir_name.as_str(),
"vendor" | "coverage" | "node_modules" | ".git"
) {
continue; continue;
} }
// ignore cargo target directories for anyone using Deno with Rust // ignore cargo target directories for anyone using Deno with Rust
@ -904,7 +907,7 @@ impl Inner {
| MediaType::Tsx => {} | MediaType::Tsx => {}
MediaType::Wasm MediaType::Wasm
| MediaType::SourceMap | MediaType::SourceMap
| MediaType::TsBuildInfo | MediaType::Css
| MediaType::Unknown => { | MediaType::Unknown => {
if path.extension().and_then(|s| s.to_str()) != Some("jsonc") { if path.extension().and_then(|s| s.to_str()) != Some("jsonc") {
continue; continue;
@ -1384,14 +1387,10 @@ impl Inner {
.clone(); .clone();
fmt_options.use_tabs = Some(!params.options.insert_spaces); fmt_options.use_tabs = Some(!params.options.insert_spaces);
fmt_options.indent_width = Some(params.options.tab_size as u8); fmt_options.indent_width = Some(params.options.tab_size as u8);
let maybe_workspace = self let config_data = self.config.tree.data_for_specifier(&specifier);
.config
.tree
.data_for_specifier(&specifier)
.map(|d| &d.member_dir.workspace);
let unstable_options = UnstableFmtOptions { let unstable_options = UnstableFmtOptions {
component: maybe_workspace component: config_data
.map(|w| w.has_unstable("fmt-component")) .map(|d| d.unstable.contains("fmt-component"))
.unwrap_or(false), .unwrap_or(false),
}; };
let document = document.clone(); let document = document.clone();
@ -1838,7 +1837,7 @@ impl Inner {
fix_ts_import_changes( fix_ts_import_changes(
&code_action_data.specifier, &code_action_data.specifier,
&combined_code_actions.changes, &combined_code_actions.changes,
&self.get_ts_response_import_mapper(&code_action_data.specifier), self,
) )
.map_err(|err| { .map_err(|err| {
error!("Unable to remap changes: {:#}", err); error!("Unable to remap changes: {:#}", err);
@ -1891,7 +1890,7 @@ impl Inner {
refactor_edit_info.edits = fix_ts_import_changes( refactor_edit_info.edits = fix_ts_import_changes(
&action_data.specifier, &action_data.specifier,
&refactor_edit_info.edits, &refactor_edit_info.edits,
&self.get_ts_response_import_mapper(&action_data.specifier), self,
) )
.map_err(|err| { .map_err(|err| {
error!("Unable to remap changes: {:#}", err); error!("Unable to remap changes: {:#}", err);
@ -1922,7 +1921,8 @@ impl Inner {
// todo(dsherret): this should probably just take the resolver itself // todo(dsherret): this should probably just take the resolver itself
// as the import map is an implementation detail // as the import map is an implementation detail
.and_then(|d| d.resolver.maybe_import_map()), .and_then(|d| d.resolver.maybe_import_map()),
self.resolver.as_ref(), &self.resolver,
&self.ts_server.specifier_map,
file_referrer, file_referrer,
) )
} }
@ -2285,7 +2285,11 @@ impl Inner {
.into(), .into(),
scope.cloned(), scope.cloned(),
) )
.await; .await
.unwrap_or_else(|err| {
error!("Unable to get completion info from TypeScript: {:#}", err);
None
});
if let Some(completions) = maybe_completion_info { if let Some(completions) = maybe_completion_info {
response = Some( response = Some(
@ -3812,7 +3816,7 @@ impl Inner {
let maybe_inlay_hints = maybe_inlay_hints.map(|hints| { let maybe_inlay_hints = maybe_inlay_hints.map(|hints| {
hints hints
.iter() .iter()
.map(|hint| hint.to_lsp(line_index.clone())) .map(|hint| hint.to_lsp(line_index.clone(), self))
.collect() .collect()
}); });
self.performance.measure(mark); self.performance.measure(mark);
@ -3948,7 +3952,9 @@ mod tests {
fn test_walk_workspace() { fn test_walk_workspace() {
let temp_dir = TempDir::new(); let temp_dir = TempDir::new();
temp_dir.create_dir_all("root1/vendor/"); temp_dir.create_dir_all("root1/vendor/");
temp_dir.create_dir_all("root1/coverage/");
temp_dir.write("root1/vendor/mod.ts", ""); // no, vendor temp_dir.write("root1/vendor/mod.ts", ""); // no, vendor
temp_dir.write("root1/coverage/mod.ts", ""); // no, coverage
temp_dir.create_dir_all("root1/node_modules/"); temp_dir.create_dir_all("root1/node_modules/");
temp_dir.write("root1/node_modules/mod.ts", ""); // no, node_modules temp_dir.write("root1/node_modules/mod.ts", ""); // no, node_modules

View file

@ -4,6 +4,7 @@ use dashmap::DashMap;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_npm::npm_rc::NpmRc;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::Version; use deno_semver::Version;
use serde::Deserialize; use serde::Deserialize;
@ -25,7 +26,10 @@ pub struct CliNpmSearchApi {
impl CliNpmSearchApi { impl CliNpmSearchApi {
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self { pub fn new(file_fetcher: Arc<FileFetcher>) -> Self {
let resolver = NpmFetchResolver::new(file_fetcher.clone()); let resolver = NpmFetchResolver::new(
file_fetcher.clone(),
Arc::new(NpmRc::default().as_resolved(npm_registry_url()).unwrap()),
);
Self { Self {
file_fetcher, file_fetcher,
resolver, resolver,

View file

@ -482,6 +482,7 @@ impl ModuleRegistry {
.fetch_with_options(FetchOptions { .fetch_with_options(FetchOptions {
specifier: &specifier, specifier: &specifier,
permissions: FetchPermissionsOptionRef::AllowAll, permissions: FetchPermissionsOptionRef::AllowAll,
maybe_auth: None,
maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"), maybe_accept: Some("application/vnd.deno.reg.v2+json, application/vnd.deno.reg.v1+json;q=0.9, application/json;q=0.8"),
maybe_cache_setting: None, maybe_cache_setting: None,
}) })

View file

@ -2,6 +2,8 @@
use dashmap::DashMap; use dashmap::DashMap;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ParsedSource;
use deno_cache_dir::npm::NpmCacheDir;
use deno_cache_dir::HttpCache; use deno_cache_dir::HttpCache;
use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver; use deno_config::workspace::WorkspaceResolver;
@ -14,15 +16,15 @@ use deno_path_util::url_to_file_path;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_node::PackageJson;
use deno_runtime::deno_node::PackageJsonResolver;
use deno_semver::jsr::JsrPackageReqReference; use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use indexmap::IndexMap; use indexmap::IndexMap;
use node_resolver::errors::ClosestPkgJsonError; use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::NodeResolution; use node_resolver::InNpmPackageChecker;
use node_resolver::NodeResolutionMode; use node_resolver::NodeResolutionMode;
use node_resolver::NpmResolver;
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::collections::BTreeSet; use std::collections::BTreeSet;
@ -36,6 +38,7 @@ use crate::args::create_default_npmrc;
use crate::args::CacheSetting; use crate::args::CacheSetting;
use crate::args::CliLockfile; use crate::args::CliLockfile;
use crate::args::NpmInstallDepsProvider; use crate::args::NpmInstallDepsProvider;
use crate::cache::DenoCacheEnvFsAdapter;
use crate::graph_util::CliJsrUrlProvider; use crate::graph_util::CliJsrUrlProvider;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::lsp::config::Config; use crate::lsp::config::Config;
@ -43,40 +46,50 @@ use crate::lsp::config::ConfigData;
use crate::lsp::logging::lsp_warn; use crate::lsp::logging::lsp_warn;
use crate::npm::create_cli_npm_resolver_for_lsp; use crate::npm::create_cli_npm_resolver_for_lsp;
use crate::npm::CliByonmNpmResolverCreateOptions; use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliManagedInNpmPkgCheckerCreateOptions;
use crate::npm::CliManagedNpmResolverCreateOptions;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions; use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::npm::ManagedCliNpmResolver; use crate::npm::ManagedCliNpmResolver;
use crate::resolver::CjsResolutionStore; use crate::resolver::CjsTracker;
use crate::resolver::CjsTrackerOptions;
use crate::resolver::CliDenoResolverFs; use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliGraphResolver; use crate::resolver::CliGraphResolver;
use crate::resolver::CliGraphResolverOptions; use crate::resolver::CliGraphResolverOptions;
use crate::resolver::CliNodeResolver; use crate::resolver::CliNodeResolver;
use crate::resolver::WorkerCliNpmGraphResolver; use crate::resolver::WorkerCliNpmGraphResolver;
use crate::tsc::into_specifier_and_media_type;
use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle; use crate::util::progress_bar::ProgressBarStyle;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
struct LspScopeResolver { struct LspScopeResolver {
cjs_tracker: Option<Arc<LspCjsTracker>>,
graph_resolver: Arc<CliGraphResolver>, graph_resolver: Arc<CliGraphResolver>,
jsr_resolver: Option<Arc<JsrCacheResolver>>, jsr_resolver: Option<Arc<JsrCacheResolver>>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>, npm_resolver: Option<Arc<dyn CliNpmResolver>>,
node_resolver: Option<Arc<CliNodeResolver>>, node_resolver: Option<Arc<CliNodeResolver>>,
pkg_json_resolver: Option<Arc<PackageJsonResolver>>,
redirect_resolver: Option<Arc<RedirectResolver>>, redirect_resolver: Option<Arc<RedirectResolver>>,
graph_imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>, graph_imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>,
package_json_deps_by_resolution: Arc<IndexMap<ModuleSpecifier, String>>,
config_data: Option<Arc<ConfigData>>, config_data: Option<Arc<ConfigData>>,
} }
impl Default for LspScopeResolver { impl Default for LspScopeResolver {
fn default() -> Self { fn default() -> Self {
Self { Self {
cjs_tracker: None,
graph_resolver: create_graph_resolver(None, None, None), graph_resolver: create_graph_resolver(None, None, None),
jsr_resolver: None, jsr_resolver: None,
npm_resolver: None, npm_resolver: None,
node_resolver: None, node_resolver: None,
pkg_json_resolver: None,
redirect_resolver: None, redirect_resolver: None,
graph_imports: Default::default(), graph_imports: Default::default(),
package_json_deps_by_resolution: Default::default(),
config_data: None, config_data: None,
} }
} }
@ -90,14 +103,35 @@ impl LspScopeResolver {
) -> Self { ) -> Self {
let mut npm_resolver = None; let mut npm_resolver = None;
let mut node_resolver = None; let mut node_resolver = None;
let mut lsp_cjs_tracker = None;
let fs = Arc::new(deno_fs::RealFs);
let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
));
if let Some(http_client) = http_client_provider { if let Some(http_client) = http_client_provider {
npm_resolver = create_npm_resolver( npm_resolver = create_npm_resolver(
config_data.map(|d| d.as_ref()), config_data.map(|d| d.as_ref()),
cache, cache,
http_client, http_client,
&pkg_json_resolver,
) )
.await; .await;
node_resolver = create_node_resolver(npm_resolver.as_ref()); if let Some(npm_resolver) = &npm_resolver {
let in_npm_pkg_checker = create_in_npm_pkg_checker(npm_resolver);
let cjs_tracker = create_cjs_tracker(
in_npm_pkg_checker.clone(),
pkg_json_resolver.clone(),
);
lsp_cjs_tracker =
Some(Arc::new(LspCjsTracker::new(cjs_tracker.clone())));
node_resolver = Some(create_node_resolver(
cjs_tracker,
fs.clone(),
in_npm_pkg_checker,
npm_resolver,
pkg_json_resolver.clone(),
));
}
} }
let graph_resolver = create_graph_resolver( let graph_resolver = create_graph_resolver(
config_data.map(|d| d.as_ref()), config_data.map(|d| d.as_ref()),
@ -133,13 +167,43 @@ impl LspScopeResolver {
) )
}) })
.unwrap_or_default(); .unwrap_or_default();
let package_json_deps_by_resolution = (|| {
let node_resolver = node_resolver.as_ref()?;
let package_json = config_data?.maybe_pkg_json()?;
let referrer = package_json.specifier();
let dependencies = package_json.dependencies.as_ref()?;
let result = dependencies
.iter()
.flat_map(|(name, _)| {
let req_ref =
NpmPackageReqReference::from_str(&format!("npm:{name}")).ok()?;
let specifier = into_specifier_and_media_type(Some(
node_resolver
.resolve_req_reference(
&req_ref,
&referrer,
NodeResolutionMode::Types,
)
.ok()?,
))
.0;
Some((specifier, name.clone()))
})
.collect();
Some(result)
})();
let package_json_deps_by_resolution =
Arc::new(package_json_deps_by_resolution.unwrap_or_default());
Self { Self {
cjs_tracker: lsp_cjs_tracker,
graph_resolver, graph_resolver,
jsr_resolver, jsr_resolver,
npm_resolver, npm_resolver,
node_resolver, node_resolver,
pkg_json_resolver: Some(pkg_json_resolver),
redirect_resolver, redirect_resolver,
graph_imports, graph_imports,
package_json_deps_by_resolution,
config_data: config_data.cloned(), config_data: config_data.cloned(),
} }
} }
@ -147,19 +211,44 @@ impl LspScopeResolver {
fn snapshot(&self) -> Arc<Self> { fn snapshot(&self) -> Arc<Self> {
let npm_resolver = let npm_resolver =
self.npm_resolver.as_ref().map(|r| r.clone_snapshotted()); self.npm_resolver.as_ref().map(|r| r.clone_snapshotted());
let node_resolver = create_node_resolver(npm_resolver.as_ref()); let fs = Arc::new(deno_fs::RealFs);
let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
));
let mut node_resolver = None;
let mut lsp_cjs_tracker = None;
if let Some(npm_resolver) = &npm_resolver {
let in_npm_pkg_checker = create_in_npm_pkg_checker(npm_resolver);
let cjs_tracker = create_cjs_tracker(
in_npm_pkg_checker.clone(),
pkg_json_resolver.clone(),
);
lsp_cjs_tracker = Some(Arc::new(LspCjsTracker::new(cjs_tracker.clone())));
node_resolver = Some(create_node_resolver(
cjs_tracker,
fs,
in_npm_pkg_checker,
npm_resolver,
pkg_json_resolver.clone(),
));
}
let graph_resolver = create_graph_resolver( let graph_resolver = create_graph_resolver(
self.config_data.as_deref(), self.config_data.as_deref(),
npm_resolver.as_ref(), npm_resolver.as_ref(),
node_resolver.as_ref(), node_resolver.as_ref(),
); );
Arc::new(Self { Arc::new(Self {
cjs_tracker: lsp_cjs_tracker,
graph_resolver, graph_resolver,
jsr_resolver: self.jsr_resolver.clone(), jsr_resolver: self.jsr_resolver.clone(),
npm_resolver, npm_resolver,
node_resolver, node_resolver,
redirect_resolver: self.redirect_resolver.clone(), redirect_resolver: self.redirect_resolver.clone(),
pkg_json_resolver: Some(pkg_json_resolver),
graph_imports: self.graph_imports.clone(), graph_imports: self.graph_imports.clone(),
package_json_deps_by_resolution: self
.package_json_deps_by_resolution
.clone(),
config_data: self.config_data.clone(), config_data: self.config_data.clone(),
}) })
} }
@ -261,6 +350,22 @@ impl LspResolver {
resolver.graph_resolver.create_graph_npm_resolver() resolver.graph_resolver.create_graph_npm_resolver()
} }
pub fn maybe_cjs_tracker(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<&Arc<LspCjsTracker>> {
let resolver = self.get_scope_resolver(file_referrer);
resolver.cjs_tracker.as_ref()
}
pub fn maybe_node_resolver(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<&Arc<CliNodeResolver>> {
let resolver = self.get_scope_resolver(file_referrer);
resolver.node_resolver.as_ref()
}
pub fn maybe_managed_npm_resolver( pub fn maybe_managed_npm_resolver(
&self, &self,
file_referrer: Option<&ModuleSpecifier>, file_referrer: Option<&ModuleSpecifier>,
@ -328,13 +433,25 @@ impl LspResolver {
) -> Option<(ModuleSpecifier, MediaType)> { ) -> Option<(ModuleSpecifier, MediaType)> {
let resolver = self.get_scope_resolver(file_referrer); let resolver = self.get_scope_resolver(file_referrer);
let node_resolver = resolver.node_resolver.as_ref()?; let node_resolver = resolver.node_resolver.as_ref()?;
Some(NodeResolution::into_specifier_and_media_type(Some( Some(into_specifier_and_media_type(Some(
node_resolver node_resolver
.resolve_req_reference(req_ref, referrer, NodeResolutionMode::Types) .resolve_req_reference(req_ref, referrer, NodeResolutionMode::Types)
.ok()?, .ok()?,
))) )))
} }
pub fn file_url_to_package_json_dep(
&self,
specifier: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<String> {
let resolver = self.get_scope_resolver(file_referrer);
resolver
.package_json_deps_by_resolution
.get(specifier)
.cloned()
}
pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool { pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool {
fn has_node_modules_dir(specifier: &ModuleSpecifier) -> bool { fn has_node_modules_dir(specifier: &ModuleSpecifier) -> bool {
// consider any /node_modules/ directory as being in the node_modules // consider any /node_modules/ directory as being in the node_modules
@ -346,14 +463,10 @@ impl LspResolver {
.contains("/node_modules/") .contains("/node_modules/")
} }
let global_npm_resolver = self if let Some(node_resolver) =
.get_scope_resolver(Some(specifier)) &self.get_scope_resolver(Some(specifier)).node_resolver
.npm_resolver {
.as_ref() if node_resolver.in_npm_package(specifier) {
.and_then(|npm_resolver| npm_resolver.as_managed())
.filter(|r| r.root_node_modules_path().is_none());
if let Some(npm_resolver) = &global_npm_resolver {
if npm_resolver.in_npm_package(specifier) {
return true; return true;
} }
} }
@ -361,18 +474,6 @@ impl LspResolver {
has_node_modules_dir(specifier) has_node_modules_dir(specifier)
} }
pub fn node_media_type(
&self,
specifier: &ModuleSpecifier,
) -> Option<MediaType> {
let resolver = self.get_scope_resolver(Some(specifier));
let node_resolver = resolver.node_resolver.as_ref()?;
let resolution = node_resolver
.url_to_node_resolution(specifier.clone())
.ok()?;
Some(NodeResolution::into_specifier_and_media_type(Some(resolution)).1)
}
pub fn is_bare_package_json_dep( pub fn is_bare_package_json_dep(
&self, &self,
specifier_text: &str, specifier_text: &str,
@ -398,10 +499,10 @@ impl LspResolver {
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> { ) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> {
let resolver = self.get_scope_resolver(Some(referrer)); let resolver = self.get_scope_resolver(Some(referrer));
let Some(node_resolver) = resolver.node_resolver.as_ref() else { let Some(pkg_json_resolver) = resolver.pkg_json_resolver.as_ref() else {
return Ok(None); return Ok(None);
}; };
node_resolver.get_closest_package_json(referrer) pkg_json_resolver.get_closest_package_json(referrer)
} }
pub fn resolve_redirects( pub fn resolve_redirects(
@ -457,11 +558,13 @@ async fn create_npm_resolver(
config_data: Option<&ConfigData>, config_data: Option<&ConfigData>,
cache: &LspCache, cache: &LspCache,
http_client_provider: &Arc<HttpClientProvider>, http_client_provider: &Arc<HttpClientProvider>,
pkg_json_resolver: &Arc<PackageJsonResolver>,
) -> Option<Arc<dyn CliNpmResolver>> { ) -> Option<Arc<dyn CliNpmResolver>> {
let enable_byonm = config_data.map(|d| d.byonm).unwrap_or(false); let enable_byonm = config_data.map(|d| d.byonm).unwrap_or(false);
let options = if enable_byonm { let options = if enable_byonm {
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)), fs: CliDenoResolverFs(Arc::new(deno_fs::RealFs)),
pkg_json_resolver: pkg_json_resolver.clone(),
root_node_modules_dir: config_data.and_then(|config_data| { root_node_modules_dir: config_data.and_then(|config_data| {
config_data.node_modules_dir.clone().or_else(|| { config_data.node_modules_dir.clone().or_else(|| {
url_to_file_path(&config_data.scope) url_to_file_path(&config_data.scope)
@ -471,7 +574,15 @@ async fn create_npm_resolver(
}), }),
}) })
} else { } else {
CliNpmResolverCreateOptions::Managed(CliNpmResolverManagedCreateOptions { let npmrc = config_data
.and_then(|d| d.npmrc.clone())
.unwrap_or_else(create_default_npmrc);
let npm_cache_dir = Arc::new(NpmCacheDir::new(
&DenoCacheEnvFsAdapter(&deno_fs::RealFs),
cache.deno_dir().npm_folder_path(),
npmrc.get_all_known_registries_urls(),
));
CliNpmResolverCreateOptions::Managed(CliManagedNpmResolverCreateOptions {
http_client_provider: http_client_provider.clone(), http_client_provider: http_client_provider.clone(),
snapshot: match config_data.and_then(|d| d.lockfile.as_ref()) { snapshot: match config_data.and_then(|d| d.lockfile.as_ref()) {
Some(lockfile) => { Some(lockfile) => {
@ -485,7 +596,7 @@ async fn create_npm_resolver(
// updating it. Only the cache request should update the lockfile. // updating it. Only the cache request should update the lockfile.
maybe_lockfile: None, maybe_lockfile: None,
fs: Arc::new(deno_fs::RealFs), fs: Arc::new(deno_fs::RealFs),
npm_global_cache_dir: cache.deno_dir().npm_folder_path(), npm_cache_dir,
// Use an "only" cache setting in order to make the // Use an "only" cache setting in order to make the
// user do an explicit "cache" command and prevent // user do an explicit "cache" command and prevent
// the cache from being filled with lots of packages while // the cache from being filled with lots of packages while
@ -496,9 +607,7 @@ async fn create_npm_resolver(
.and_then(|d| d.node_modules_dir.clone()), .and_then(|d| d.node_modules_dir.clone()),
// only used for top level install, so we can ignore this // only used for top level install, so we can ignore this
npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()), npm_install_deps_provider: Arc::new(NpmInstallDepsProvider::empty()),
npmrc: config_data npmrc,
.and_then(|d| d.npmrc.clone())
.unwrap_or_else(create_default_npmrc),
npm_system_info: NpmSystemInfo::default(), npm_system_info: NpmSystemInfo::default(),
lifecycle_scripts: Default::default(), lifecycle_scripts: Default::default(),
}) })
@ -506,28 +615,59 @@ async fn create_npm_resolver(
Some(create_cli_npm_resolver_for_lsp(options).await) Some(create_cli_npm_resolver_for_lsp(options).await)
} }
fn create_cjs_tracker(
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
pkg_json_resolver: Arc<PackageJsonResolver>,
) -> Arc<CjsTracker> {
Arc::new(CjsTracker::new(
in_npm_pkg_checker,
pkg_json_resolver,
CjsTrackerOptions {
// todo(dsherret): support in the lsp by stabilizing the feature
// so that we don't have to pipe the config in here
unstable_detect_cjs: false,
},
))
}
fn create_in_npm_pkg_checker(
npm_resolver: &Arc<dyn CliNpmResolver>,
) -> Arc<dyn InNpmPackageChecker> {
crate::npm::create_in_npm_pkg_checker(match npm_resolver.as_inner() {
crate::npm::InnerCliNpmResolverRef::Byonm(_) => {
CreateInNpmPkgCheckerOptions::Byonm
}
crate::npm::InnerCliNpmResolverRef::Managed(m) => {
CreateInNpmPkgCheckerOptions::Managed(
CliManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: m.global_cache_root_url(),
maybe_node_modules_path: m.maybe_node_modules_path(),
},
)
}
})
}
fn create_node_resolver( fn create_node_resolver(
npm_resolver: Option<&Arc<dyn CliNpmResolver>>, cjs_tracker: Arc<CjsTracker>,
) -> Option<Arc<CliNodeResolver>> { fs: Arc<dyn deno_fs::FileSystem>,
use once_cell::sync::Lazy; in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: &Arc<dyn CliNpmResolver>,
// it's not ideal to share this across all scopes and to pkg_json_resolver: Arc<PackageJsonResolver>,
// never clear it, but it's fine for the time being ) -> Arc<CliNodeResolver> {
static CJS_RESOLUTIONS: Lazy<Arc<CjsResolutionStore>> =
Lazy::new(Default::default);
let npm_resolver = npm_resolver?;
let fs = Arc::new(deno_fs::RealFs);
let node_resolver_inner = Arc::new(NodeResolver::new( let node_resolver_inner = Arc::new(NodeResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
in_npm_pkg_checker.clone(),
npm_resolver.clone().into_npm_resolver(), npm_resolver.clone().into_npm_resolver(),
pkg_json_resolver.clone(),
)); ));
Some(Arc::new(CliNodeResolver::new( Arc::new(CliNodeResolver::new(
CJS_RESOLUTIONS.clone(), cjs_tracker.clone(),
fs, fs,
in_npm_pkg_checker,
node_resolver_inner, node_resolver_inner,
npm_resolver.clone(), npm_resolver.clone(),
))) ))
} }
fn create_graph_resolver( fn create_graph_resolver(
@ -555,8 +695,8 @@ fn create_graph_resolver(
workspace.to_maybe_jsx_import_source_config().ok().flatten() workspace.to_maybe_jsx_import_source_config().ok().flatten()
}), }),
maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()), maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()),
bare_node_builtins_enabled: workspace bare_node_builtins_enabled: config_data
.is_some_and(|workspace| workspace.has_unstable("bare-node-builtins")), .is_some_and(|d| d.unstable.contains("bare-node-builtins")),
sloppy_imports_resolver: config_data sloppy_imports_resolver: config_data
.and_then(|d| d.sloppy_imports_resolver.clone()), .and_then(|d| d.sloppy_imports_resolver.clone()),
})) }))
@ -702,6 +842,45 @@ impl RedirectResolver {
} }
} }
#[derive(Debug)]
pub struct LspCjsTracker {
cjs_tracker: Arc<CjsTracker>,
}
impl LspCjsTracker {
pub fn new(cjs_tracker: Arc<CjsTracker>) -> Self {
Self { cjs_tracker }
}
pub fn is_cjs(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
maybe_parsed_source: Option<&ParsedSource>,
) -> bool {
if let Some(module_kind) =
self.cjs_tracker.get_known_kind(specifier, media_type)
{
module_kind.is_cjs()
} else {
let maybe_is_script = maybe_parsed_source.map(|p| p.compute_is_script());
maybe_is_script
.and_then(|is_script| {
self
.cjs_tracker
.is_cjs_with_known_is_script(specifier, media_type, is_script)
.ok()
})
.unwrap_or_else(|| {
self
.cjs_tracker
.is_maybe_cjs(specifier, media_type)
.unwrap_or(false)
})
}
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;

View file

@ -650,7 +650,7 @@ pub mod tests {
.unwrap(); .unwrap();
let text_info = parsed_module.text_info_lazy().clone(); let text_info = parsed_module.text_info_lazy().clone();
let mut collector = TestCollector::new(specifier, text_info); let mut collector = TestCollector::new(specifier, text_info);
parsed_module.module().visit_with(&mut collector); parsed_module.program().visit_with(&mut collector);
collector.take() collector.take()
} }

View file

@ -236,7 +236,7 @@ pub struct TsServer {
performance: Arc<Performance>, performance: Arc<Performance>,
sender: mpsc::UnboundedSender<Request>, sender: mpsc::UnboundedSender<Request>,
receiver: Mutex<Option<mpsc::UnboundedReceiver<Request>>>, receiver: Mutex<Option<mpsc::UnboundedReceiver<Request>>>,
specifier_map: Arc<TscSpecifierMap>, pub specifier_map: Arc<TscSpecifierMap>,
inspector_server: Mutex<Option<Arc<InspectorServer>>>, inspector_server: Mutex<Option<Arc<InspectorServer>>>,
pending_change: Mutex<Option<PendingChange>>, pending_change: Mutex<Option<PendingChange>>,
} }
@ -882,20 +882,22 @@ impl TsServer {
options: GetCompletionsAtPositionOptions, options: GetCompletionsAtPositionOptions,
format_code_settings: FormatCodeSettings, format_code_settings: FormatCodeSettings,
scope: Option<ModuleSpecifier>, scope: Option<ModuleSpecifier>,
) -> Option<CompletionInfo> { ) -> Result<Option<CompletionInfo>, AnyError> {
let req = TscRequest::GetCompletionsAtPosition(Box::new(( let req = TscRequest::GetCompletionsAtPosition(Box::new((
self.specifier_map.denormalize(&specifier), self.specifier_map.denormalize(&specifier),
position, position,
options, options,
format_code_settings, format_code_settings,
))); )));
match self.request(snapshot, req, scope).await { self
Ok(maybe_info) => maybe_info, .request::<Option<CompletionInfo>>(snapshot, req, scope)
Err(err) => { .await
log::error!("Unable to get completion info from TypeScript: {:#}", err); .map(|mut info| {
None if let Some(info) = &mut info {
} info.normalize(&self.specifier_map);
} }
info
})
} }
pub async fn get_completion_details( pub async fn get_completion_details(
@ -2182,6 +2184,50 @@ impl NavigateToItem {
} }
} }
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct InlayHintDisplayPart {
pub text: String,
pub span: Option<TextSpan>,
pub file: Option<String>,
}
impl InlayHintDisplayPart {
pub fn to_lsp(
&self,
language_server: &language_server::Inner,
) -> lsp::InlayHintLabelPart {
let location = self.file.as_ref().map(|f| {
let specifier =
resolve_url(f).unwrap_or_else(|_| INVALID_SPECIFIER.clone());
let file_referrer =
language_server.documents.get_file_referrer(&specifier);
let uri = language_server
.url_map
.specifier_to_uri(&specifier, file_referrer.as_deref())
.unwrap_or_else(|_| INVALID_URI.clone());
let range = self
.span
.as_ref()
.and_then(|s| {
let asset_or_doc =
language_server.get_asset_or_document(&specifier).ok()?;
Some(s.to_range(asset_or_doc.line_index()))
})
.unwrap_or_else(|| {
lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0))
});
lsp::Location { uri, range }
});
lsp::InlayHintLabelPart {
value: self.text.clone(),
tooltip: None,
location,
command: None,
}
}
}
#[derive(Debug, Clone, Deserialize)] #[derive(Debug, Clone, Deserialize)]
pub enum InlayHintKind { pub enum InlayHintKind {
Type, Type,
@ -2203,6 +2249,7 @@ impl InlayHintKind {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct InlayHint { pub struct InlayHint {
pub text: String, pub text: String,
pub display_parts: Option<Vec<InlayHintDisplayPart>>,
pub position: u32, pub position: u32,
pub kind: InlayHintKind, pub kind: InlayHintKind,
pub whitespace_before: Option<bool>, pub whitespace_before: Option<bool>,
@ -2210,10 +2257,23 @@ pub struct InlayHint {
} }
impl InlayHint { impl InlayHint {
pub fn to_lsp(&self, line_index: Arc<LineIndex>) -> lsp::InlayHint { pub fn to_lsp(
&self,
line_index: Arc<LineIndex>,
language_server: &language_server::Inner,
) -> lsp::InlayHint {
lsp::InlayHint { lsp::InlayHint {
position: line_index.position_tsc(self.position.into()), position: line_index.position_tsc(self.position.into()),
label: lsp::InlayHintLabel::String(self.text.clone()), label: if let Some(display_parts) = &self.display_parts {
lsp::InlayHintLabel::LabelParts(
display_parts
.iter()
.map(|p| p.to_lsp(language_server))
.collect(),
)
} else {
lsp::InlayHintLabel::String(self.text.clone())
},
kind: self.kind.to_lsp(), kind: self.kind.to_lsp(),
padding_left: self.whitespace_before, padding_left: self.whitespace_before,
padding_right: self.whitespace_after, padding_right: self.whitespace_after,
@ -3584,6 +3644,12 @@ pub struct CompletionInfo {
} }
impl CompletionInfo { impl CompletionInfo {
fn normalize(&mut self, specifier_map: &TscSpecifierMap) {
for entry in &mut self.entries {
entry.normalize(specifier_map);
}
}
pub fn as_completion_response( pub fn as_completion_response(
&self, &self,
line_index: Arc<LineIndex>, line_index: Arc<LineIndex>,
@ -3645,11 +3711,17 @@ pub struct CompletionItemData {
#[derive(Debug, Deserialize, Serialize)] #[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
struct CompletionEntryDataImport { struct CompletionEntryDataAutoImport {
module_specifier: String, module_specifier: String,
file_name: String, file_name: String,
} }
#[derive(Debug)]
pub struct CompletionNormalizedAutoImportData {
raw: CompletionEntryDataAutoImport,
normalized: ModuleSpecifier,
}
#[derive(Debug, Default, Deserialize, Serialize)] #[derive(Debug, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct CompletionEntry { pub struct CompletionEntry {
@ -3682,9 +3754,28 @@ pub struct CompletionEntry {
is_import_statement_completion: Option<bool>, is_import_statement_completion: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
data: Option<Value>, data: Option<Value>,
/// This is not from tsc, we add it for convenience during normalization.
/// Represents `self.data.file_name`, but normalized.
#[serde(skip)]
auto_import_data: Option<CompletionNormalizedAutoImportData>,
} }
impl CompletionEntry { impl CompletionEntry {
fn normalize(&mut self, specifier_map: &TscSpecifierMap) {
let Some(data) = &self.data else {
return;
};
let Ok(raw) =
serde_json::from_value::<CompletionEntryDataAutoImport>(data.clone())
else {
return;
};
if let Ok(normalized) = specifier_map.normalize(&raw.file_name) {
self.auto_import_data =
Some(CompletionNormalizedAutoImportData { raw, normalized });
}
}
fn get_commit_characters( fn get_commit_characters(
&self, &self,
info: &CompletionInfo, info: &CompletionInfo,
@ -3833,27 +3924,26 @@ impl CompletionEntry {
if let Some(source) = &self.source { if let Some(source) = &self.source {
let mut display_source = source.clone(); let mut display_source = source.clone();
if let Some(data) = &self.data { if let Some(import_data) = &self.auto_import_data {
if let Ok(import_data) =
serde_json::from_value::<CompletionEntryDataImport>(data.clone())
{
if let Ok(import_specifier) = resolve_url(&import_data.file_name) {
if let Some(new_module_specifier) = language_server if let Some(new_module_specifier) = language_server
.get_ts_response_import_mapper(specifier) .get_ts_response_import_mapper(specifier)
.check_specifier(&import_specifier, specifier) .check_specifier(&import_data.normalized, specifier)
.or_else(|| relative_specifier(specifier, &import_specifier)) .or_else(|| relative_specifier(specifier, &import_data.normalized))
{ {
if new_module_specifier.contains("/node_modules/") {
return None;
}
display_source.clone_from(&new_module_specifier); display_source.clone_from(&new_module_specifier);
if new_module_specifier != import_data.module_specifier { if new_module_specifier != import_data.raw.module_specifier {
specifier_rewrite = specifier_rewrite = Some((
Some((import_data.module_specifier, new_module_specifier)); import_data.raw.module_specifier.clone(),
new_module_specifier,
));
} }
} else if source.starts_with(jsr_url().as_str()) { } else if source.starts_with(jsr_url().as_str()) {
return None; return None;
} }
} }
}
}
// We want relative or bare (import-mapped or otherwise) specifiers to // We want relative or bare (import-mapped or otherwise) specifiers to
// appear at the top. // appear at the top.
if resolve_url(&display_source).is_err() { if resolve_url(&display_source).is_err() {
@ -4154,6 +4244,13 @@ impl TscSpecifierMap {
return specifier.to_string(); return specifier.to_string();
} }
let mut specifier = original.to_string(); let mut specifier = original.to_string();
if specifier.contains("/node_modules/.deno/")
&& !specifier.contains("/node_modules/@types/node/")
{
// The ts server doesn't give completions from files in
// `node_modules/.deno/`. We work around it like this.
specifier = specifier.replace("/node_modules/", "/$node_modules/");
}
let media_type = MediaType::from_specifier(original); let media_type = MediaType::from_specifier(original);
// If the URL-inferred media type doesn't correspond to tsc's path-inferred // If the URL-inferred media type doesn't correspond to tsc's path-inferred
// media type, force it to be the same by appending an extension. // media type, force it to be the same by appending an extension.
@ -4271,7 +4368,7 @@ fn op_is_cancelled(state: &mut OpState) -> bool {
fn op_is_node_file(state: &mut OpState, #[string] path: String) -> bool { fn op_is_node_file(state: &mut OpState, #[string] path: String) -> bool {
let state = state.borrow::<State>(); let state = state.borrow::<State>();
let mark = state.performance.mark("tsc.op.op_is_node_file"); let mark = state.performance.mark("tsc.op.op_is_node_file");
let r = match ModuleSpecifier::parse(&path) { let r = match state.specifier_map.normalize(path) {
Ok(specifier) => state.state_snapshot.resolver.in_node_modules(&specifier), Ok(specifier) => state.state_snapshot.resolver.in_node_modules(&specifier),
Err(_) => false, Err(_) => false,
}; };
@ -4304,14 +4401,25 @@ fn op_load<'s>(
None None
} else { } else {
let asset_or_document = state.get_asset_or_document(&specifier); let asset_or_document = state.get_asset_or_document(&specifier);
asset_or_document.map(|doc| LoadResponse { asset_or_document.map(|doc| {
let maybe_cjs_tracker = state
.state_snapshot
.resolver
.maybe_cjs_tracker(Some(&specifier));
LoadResponse {
data: doc.text(), data: doc.text(),
script_kind: crate::tsc::as_ts_script_kind(doc.media_type()), script_kind: crate::tsc::as_ts_script_kind(doc.media_type()),
version: state.script_version(&specifier), version: state.script_version(&specifier),
is_cjs: matches!( is_cjs: maybe_cjs_tracker
.map(|t| {
t.is_cjs(
&specifier,
doc.media_type(), doc.media_type(),
MediaType::Cjs | MediaType::Cts | MediaType::Dcts doc.maybe_parsed_source().and_then(|p| p.as_ref().ok()),
), )
})
.unwrap_or(false),
}
}) })
}; };
@ -4540,7 +4648,10 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
for doc in &docs { for doc in &docs {
let specifier = doc.specifier(); let specifier = doc.specifier();
let is_open = doc.is_open(); let is_open = doc.is_open();
if is_open || specifier.scheme() == "file" { if is_open
|| (specifier.scheme() == "file"
&& !state.state_snapshot.resolver.in_node_modules(specifier))
{
let script_names = doc let script_names = doc
.scope() .scope()
.and_then(|s| result.by_scope.get_mut(s)) .and_then(|s| result.by_scope.get_mut(s))
@ -4892,6 +5003,10 @@ pub struct UserPreferences {
pub allow_rename_of_import_path: Option<bool>, pub allow_rename_of_import_path: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub auto_import_file_exclude_patterns: Option<Vec<String>>, pub auto_import_file_exclude_patterns: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub interactive_inlay_hints: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub prefer_type_only_auto_imports: Option<bool>,
} }
impl UserPreferences { impl UserPreferences {
@ -4909,6 +5024,7 @@ impl UserPreferences {
include_completions_with_snippet_text: Some( include_completions_with_snippet_text: Some(
config.snippet_support_capable(), config.snippet_support_capable(),
), ),
interactive_inlay_hints: Some(true),
provide_refactor_not_applicable_reason: Some(true), provide_refactor_not_applicable_reason: Some(true),
quote_preference: Some(fmt_config.into()), quote_preference: Some(fmt_config.into()),
use_label_details_in_completion_entries: Some(true), use_label_details_in_completion_entries: Some(true),
@ -5013,6 +5129,9 @@ impl UserPreferences {
} else { } else {
Some(language_settings.preferences.quote_style) Some(language_settings.preferences.quote_style)
}, },
prefer_type_only_auto_imports: Some(
language_settings.preferences.prefer_type_only_auto_imports,
),
..base_preferences ..base_preferences
} }
} }
@ -5958,6 +6077,7 @@ mod tests {
Some(temp_dir.url()), Some(temp_dir.url()),
) )
.await .await
.unwrap()
.unwrap(); .unwrap();
assert_eq!(info.entries.len(), 22); assert_eq!(info.entries.len(), 22);
let details = ts_server let details = ts_server
@ -6117,6 +6237,7 @@ mod tests {
Some(temp_dir.url()), Some(temp_dir.url()),
) )
.await .await
.unwrap()
.unwrap(); .unwrap();
let entry = info let entry = info
.entries .entries
@ -6154,7 +6275,7 @@ mod tests {
let change = changes.text_changes.first().unwrap(); let change = changes.text_changes.first().unwrap();
assert_eq!( assert_eq!(
change.new_text, change.new_text,
"import type { someLongVariable } from './b.ts'\n" "import { someLongVariable } from './b.ts'\n"
); );
} }

View file

@ -15,7 +15,6 @@ mod js;
mod jsr; mod jsr;
mod lsp; mod lsp;
mod module_loader; mod module_loader;
mod napi;
mod node; mod node;
mod npm; mod npm;
mod ops; mod ops;
@ -136,7 +135,7 @@ async fn run_subcommand(flags: Arc<Flags>) -> Result<i32, AnyError> {
tools::compile::compile(flags, compile_flags).await tools::compile::compile(flags, compile_flags).await
}), }),
DenoSubcommand::Coverage(coverage_flags) => spawn_subcommand(async { DenoSubcommand::Coverage(coverage_flags) => spawn_subcommand(async {
tools::coverage::cover_files(flags, coverage_flags).await tools::coverage::cover_files(flags, coverage_flags)
}), }),
DenoSubcommand::Fmt(fmt_flags) => { DenoSubcommand::Fmt(fmt_flags) => {
spawn_subcommand( spawn_subcommand(

View file

@ -88,11 +88,10 @@ fn main() {
let standalone = standalone::extract_standalone(Cow::Owned(args)); let standalone = standalone::extract_standalone(Cow::Owned(args));
let future = async move { let future = async move {
match standalone { match standalone {
Ok(Some(future)) => { Ok(Some(data)) => {
let (metadata, eszip) = future.await?; util::logger::init(data.metadata.log_level);
util::logger::init(metadata.log_level); load_env_vars(&data.metadata.env_vars_from_env_file);
load_env_vars(&metadata.env_vars_from_env_file); let exit_code = standalone::run(data).await?;
let exit_code = standalone::run(eszip, metadata).await?;
std::process::exit(exit_code); std::process::exit(exit_code);
} }
Ok(None) => Ok(()), Ok(None) => Ok(()),

View file

@ -2,6 +2,7 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::cell::RefCell; use std::cell::RefCell;
use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::pin::Pin; use std::pin::Pin;
use std::rc::Rc; use std::rc::Rc;
@ -23,19 +24,23 @@ use crate::graph_container::ModuleGraphUpdatePermit;
use crate::graph_util::CreateGraphOptions; use crate::graph_util::CreateGraphOptions;
use crate::graph_util::ModuleGraphBuilder; use crate::graph_util::ModuleGraphBuilder;
use crate::node; use crate::node;
use crate::node::CliNodeCodeTranslator;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::resolver::CjsTracker;
use crate::resolver::CliGraphResolver; use crate::resolver::CliGraphResolver;
use crate::resolver::CliNodeResolver; use crate::resolver::CliNodeResolver;
use crate::resolver::ModuleCodeStringSource; use crate::resolver::ModuleCodeStringSource;
use crate::resolver::NotSupportedKindInNpmError;
use crate::resolver::NpmModuleLoader; use crate::resolver::NpmModuleLoader;
use crate::tools::check; use crate::tools::check;
use crate::tools::check::TypeChecker; use crate::tools::check::TypeChecker;
use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBar;
use crate::util::text_encoding::code_without_source_map; use crate::util::text_encoding::code_without_source_map;
use crate::util::text_encoding::source_map_from_code; use crate::util::text_encoding::source_map_from_code;
use crate::worker::ModuleLoaderAndSourceMapGetter; use crate::worker::CreateModuleLoaderResult;
use crate::worker::ModuleLoaderFactory; use crate::worker::ModuleLoaderFactory;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
@ -63,9 +68,12 @@ use deno_graph::Module;
use deno_graph::ModuleGraph; use deno_graph::ModuleGraph;
use deno_graph::Resolution; use deno_graph::Resolution;
use deno_runtime::code_cache; use deno_runtime::code_cache;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::create_host_defined_options; use deno_runtime::deno_node::create_host_defined_options;
use deno_runtime::deno_node::NodeRequireLoader;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use node_resolver::InNpmPackageChecker;
use node_resolver::NodeResolutionMode; use node_resolver::NodeResolutionMode;
pub struct ModuleLoadPreparer { pub struct ModuleLoadPreparer {
@ -198,11 +206,16 @@ struct SharedCliModuleLoaderState {
lib_worker: TsTypeLib, lib_worker: TsTypeLib,
initial_cwd: PathBuf, initial_cwd: PathBuf,
is_inspecting: bool, is_inspecting: bool,
is_npm_main: bool,
is_repl: bool, is_repl: bool,
cjs_tracker: Arc<CjsTracker>,
code_cache: Option<Arc<CodeCache>>, code_cache: Option<Arc<CodeCache>>,
emitter: Arc<Emitter>, emitter: Arc<Emitter>,
fs: Arc<dyn FileSystem>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
main_module_graph_container: Arc<MainModuleGraphContainer>, main_module_graph_container: Arc<MainModuleGraphContainer>,
module_load_preparer: Arc<ModuleLoadPreparer>, module_load_preparer: Arc<ModuleLoadPreparer>,
node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<CliNodeResolver>, node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>, npm_resolver: Arc<dyn CliNpmResolver>,
npm_module_loader: NpmModuleLoader, npm_module_loader: NpmModuleLoader,
@ -218,10 +231,14 @@ impl CliModuleLoaderFactory {
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn new( pub fn new(
options: &CliOptions, options: &CliOptions,
cjs_tracker: Arc<CjsTracker>,
code_cache: Option<Arc<CodeCache>>, code_cache: Option<Arc<CodeCache>>,
emitter: Arc<Emitter>, emitter: Arc<Emitter>,
fs: Arc<dyn FileSystem>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
main_module_graph_container: Arc<MainModuleGraphContainer>, main_module_graph_container: Arc<MainModuleGraphContainer>,
module_load_preparer: Arc<ModuleLoadPreparer>, module_load_preparer: Arc<ModuleLoadPreparer>,
node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<CliNodeResolver>, node_resolver: Arc<CliNodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>, npm_resolver: Arc<dyn CliNpmResolver>,
npm_module_loader: NpmModuleLoader, npm_module_loader: NpmModuleLoader,
@ -235,14 +252,19 @@ impl CliModuleLoaderFactory {
lib_worker: options.ts_type_lib_worker(), lib_worker: options.ts_type_lib_worker(),
initial_cwd: options.initial_cwd().to_path_buf(), initial_cwd: options.initial_cwd().to_path_buf(),
is_inspecting: options.is_inspecting(), is_inspecting: options.is_inspecting(),
is_npm_main: options.is_npm_main(),
is_repl: matches!( is_repl: matches!(
options.sub_command(), options.sub_command(),
DenoSubcommand::Repl(_) | DenoSubcommand::Jupyter(_) DenoSubcommand::Repl(_) | DenoSubcommand::Jupyter(_)
), ),
cjs_tracker,
code_cache, code_cache,
emitter, emitter,
fs,
in_npm_pkg_checker,
main_module_graph_container, main_module_graph_container,
module_load_preparer, module_load_preparer,
node_code_translator,
node_resolver, node_resolver,
npm_resolver, npm_resolver,
npm_module_loader, npm_module_loader,
@ -259,19 +281,30 @@ impl CliModuleLoaderFactory {
is_worker: bool, is_worker: bool,
parent_permissions: PermissionsContainer, parent_permissions: PermissionsContainer,
permissions: PermissionsContainer, permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter { ) -> CreateModuleLoaderResult {
let loader = Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner { let module_loader =
Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner {
lib, lib,
is_worker, is_worker,
is_npm_main: self.shared.is_npm_main,
parent_permissions, parent_permissions,
permissions, permissions,
graph_container, graph_container: graph_container.clone(),
node_code_translator: self.shared.node_code_translator.clone(),
emitter: self.shared.emitter.clone(), emitter: self.shared.emitter.clone(),
parsed_source_cache: self.shared.parsed_source_cache.clone(), parsed_source_cache: self.shared.parsed_source_cache.clone(),
shared: self.shared.clone(), shared: self.shared.clone(),
}))); })));
ModuleLoaderAndSourceMapGetter { let node_require_loader = Rc::new(CliNodeRequireLoader::new(
module_loader: loader, self.shared.emitter.clone(),
self.shared.fs.clone(),
graph_container,
self.shared.in_npm_pkg_checker.clone(),
self.shared.npm_resolver.clone(),
));
CreateModuleLoaderResult {
module_loader,
node_require_loader,
} }
} }
} }
@ -280,7 +313,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
fn create_for_main( fn create_for_main(
&self, &self,
root_permissions: PermissionsContainer, root_permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter { ) -> CreateModuleLoaderResult {
self.create_with_lib( self.create_with_lib(
(*self.shared.main_module_graph_container).clone(), (*self.shared.main_module_graph_container).clone(),
self.shared.lib_window, self.shared.lib_window,
@ -294,7 +327,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
&self, &self,
parent_permissions: PermissionsContainer, parent_permissions: PermissionsContainer,
permissions: PermissionsContainer, permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter { ) -> CreateModuleLoaderResult {
self.create_with_lib( self.create_with_lib(
// create a fresh module graph for the worker // create a fresh module graph for the worker
WorkerModuleGraphContainer::new(Arc::new(ModuleGraph::new( WorkerModuleGraphContainer::new(Arc::new(ModuleGraph::new(
@ -310,6 +343,7 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
struct CliModuleLoaderInner<TGraphContainer: ModuleGraphContainer> { struct CliModuleLoaderInner<TGraphContainer: ModuleGraphContainer> {
lib: TsTypeLib, lib: TsTypeLib,
is_npm_main: bool,
is_worker: bool, is_worker: bool,
/// The initial set of permissions used to resolve the static imports in the /// The initial set of permissions used to resolve the static imports in the
/// worker. These are "allow all" for main worker, and parent thread /// worker. These are "allow all" for main worker, and parent thread
@ -318,6 +352,7 @@ struct CliModuleLoaderInner<TGraphContainer: ModuleGraphContainer> {
permissions: PermissionsContainer, permissions: PermissionsContainer,
shared: Arc<SharedCliModuleLoaderState>, shared: Arc<SharedCliModuleLoaderState>,
emitter: Arc<Emitter>, emitter: Arc<Emitter>,
node_code_translator: Arc<CliNodeCodeTranslator>,
parsed_source_cache: Arc<ParsedSourceCache>, parsed_source_cache: Arc<ParsedSourceCache>,
graph_container: TGraphContainer, graph_container: TGraphContainer,
} }
@ -331,24 +366,7 @@ impl<TGraphContainer: ModuleGraphContainer>
maybe_referrer: Option<&ModuleSpecifier>, maybe_referrer: Option<&ModuleSpecifier>,
requested_module_type: RequestedModuleType, requested_module_type: RequestedModuleType,
) -> Result<ModuleSource, AnyError> { ) -> Result<ModuleSource, AnyError> {
let code_source = match self.load_prepared_module(specifier).await? { let code_source = self.load_code_source(specifier, maybe_referrer).await?;
Some(code_source) => code_source,
None => {
if self.shared.npm_module_loader.if_in_npm_package(specifier) {
self
.shared
.npm_module_loader
.load(specifier, maybe_referrer)
.await?
} else {
let mut msg = format!("Loading unprepared module: {specifier}");
if let Some(referrer) = maybe_referrer {
msg = format!("{}, imported from: {}", msg, referrer.as_str());
}
return Err(anyhow!(msg));
}
}
};
let code = if self.shared.is_inspecting { let code = if self.shared.is_inspecting {
// we need the code with the source map in order for // we need the code with the source map in order for
// it to work with --inspect or --inspect-brk // it to work with --inspect or --inspect-brk
@ -402,6 +420,29 @@ impl<TGraphContainer: ModuleGraphContainer>
)) ))
} }
async fn load_code_source(
&self,
specifier: &ModuleSpecifier,
maybe_referrer: Option<&ModuleSpecifier>,
) -> Result<ModuleCodeStringSource, AnyError> {
if let Some(code_source) = self.load_prepared_module(specifier).await? {
return Ok(code_source);
}
if self.shared.node_resolver.in_npm_package(specifier) {
return self
.shared
.npm_module_loader
.load(specifier, maybe_referrer)
.await;
}
let mut msg = format!("Loading unprepared module: {specifier}");
if let Some(referrer) = maybe_referrer {
msg = format!("{}, imported from: {}", msg, referrer.as_str());
}
Err(anyhow!(msg))
}
fn resolve_referrer( fn resolve_referrer(
&self, &self,
referrer: &str, referrer: &str,
@ -474,15 +515,11 @@ impl<TGraphContainer: ModuleGraphContainer>
if self.shared.is_repl { if self.shared.is_repl {
if let Ok(reference) = NpmPackageReqReference::from_specifier(&specifier) if let Ok(reference) = NpmPackageReqReference::from_specifier(&specifier)
{ {
return self return self.shared.node_resolver.resolve_req_reference(
.shared
.node_resolver
.resolve_req_reference(
&reference, &reference,
referrer, referrer,
NodeResolutionMode::Execution, NodeResolutionMode::Execution,
) );
.map(|res| res.into_url());
} }
} }
@ -506,13 +543,15 @@ impl<TGraphContainer: ModuleGraphContainer>
.with_context(|| { .with_context(|| {
format!("Could not resolve '{}'.", module.nv_reference) format!("Could not resolve '{}'.", module.nv_reference)
})? })?
.into_url()
} }
Some(Module::Node(module)) => module.specifier.clone(), Some(Module::Node(module)) => module.specifier.clone(),
Some(Module::Js(module)) => module.specifier.clone(), Some(Module::Js(module)) => module.specifier.clone(),
Some(Module::Json(module)) => module.specifier.clone(), Some(Module::Json(module)) => module.specifier.clone(),
Some(Module::External(module)) => { Some(Module::External(module)) => {
node::resolve_specifier_into_node_modules(&module.specifier) node::resolve_specifier_into_node_modules(
&module.specifier,
self.shared.fs.as_ref(),
)
} }
None => specifier.into_owned(), None => specifier.into_owned(),
}; };
@ -534,23 +573,32 @@ impl<TGraphContainer: ModuleGraphContainer>
}) => { }) => {
let transpile_result = self let transpile_result = self
.emitter .emitter
.emit_parsed_source(specifier, media_type, source) .emit_parsed_source(specifier, media_type, ModuleKind::Esm, source)
.await?; .await?;
// at this point, we no longer need the parsed source in memory, so free it // at this point, we no longer need the parsed source in memory, so free it
self.parsed_source_cache.free(specifier); self.parsed_source_cache.free(specifier);
Ok(Some(ModuleCodeStringSource { Ok(Some(ModuleCodeStringSource {
code: ModuleSourceCode::Bytes(transpile_result), // note: it's faster to provide a string if we know it's a string
code: ModuleSourceCode::String(transpile_result.into()),
found_url: specifier.clone(), found_url: specifier.clone(),
media_type, media_type,
})) }))
} }
Some(CodeOrDeferredEmit::Cjs {
specifier,
media_type,
source,
}) => self
.load_maybe_cjs(specifier, media_type, source)
.await
.map(Some),
None => Ok(None), None => Ok(None),
} }
} }
fn load_prepared_module_sync( fn load_prepared_module_for_source_map_sync(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
) -> Result<Option<ModuleCodeStringSource>, AnyError> { ) -> Result<Option<ModuleCodeStringSource>, AnyError> {
@ -563,19 +611,31 @@ impl<TGraphContainer: ModuleGraphContainer>
media_type, media_type,
source, source,
}) => { }) => {
let transpile_result = self let transpile_result = self.emitter.emit_parsed_source_sync(
.emitter specifier,
.emit_parsed_source_sync(specifier, media_type, source)?; media_type,
ModuleKind::Esm,
source,
)?;
// at this point, we no longer need the parsed source in memory, so free it // at this point, we no longer need the parsed source in memory, so free it
self.parsed_source_cache.free(specifier); self.parsed_source_cache.free(specifier);
Ok(Some(ModuleCodeStringSource { Ok(Some(ModuleCodeStringSource {
code: ModuleSourceCode::Bytes(transpile_result), // note: it's faster to provide a string if we know it's a string
code: ModuleSourceCode::String(transpile_result.into()),
found_url: specifier.clone(), found_url: specifier.clone(),
media_type, media_type,
})) }))
} }
Some(CodeOrDeferredEmit::Cjs { .. }) => {
self.parsed_source_cache.free(specifier);
// todo(dsherret): to make this work, we should probably just
// rely on the CJS export cache. At the moment this is hard because
// cjs export analysis is only async
Ok(None)
}
None => Ok(None), None => Ok(None),
} }
} }
@ -605,20 +665,40 @@ impl<TGraphContainer: ModuleGraphContainer>
source, source,
media_type, media_type,
specifier, specifier,
is_script,
.. ..
})) => { })) => {
// todo(dsherret): revert in https://github.com/denoland/deno/pull/26439
if self.is_npm_main && *is_script
|| self.shared.cjs_tracker.is_cjs_with_known_is_script(
specifier,
*media_type,
*is_script,
)?
{
return Ok(Some(CodeOrDeferredEmit::Cjs {
specifier,
media_type: *media_type,
source,
}));
}
let code: ModuleCodeString = match media_type { let code: ModuleCodeString = match media_type {
MediaType::JavaScript MediaType::JavaScript
| MediaType::Unknown | MediaType::Unknown
| MediaType::Cjs
| MediaType::Mjs | MediaType::Mjs
| MediaType::Json => source.clone().into(), | MediaType::Json => source.clone().into(),
MediaType::Dts | MediaType::Dcts | MediaType::Dmts => { MediaType::Dts | MediaType::Dcts | MediaType::Dmts => {
Default::default() Default::default()
} }
MediaType::Cjs | MediaType::Cts => {
return Ok(Some(CodeOrDeferredEmit::Cjs {
specifier,
media_type: *media_type,
source,
}));
}
MediaType::TypeScript MediaType::TypeScript
| MediaType::Mts | MediaType::Mts
| MediaType::Cts
| MediaType::Jsx | MediaType::Jsx
| MediaType::Tsx => { | MediaType::Tsx => {
return Ok(Some(CodeOrDeferredEmit::DeferredEmit { return Ok(Some(CodeOrDeferredEmit::DeferredEmit {
@ -627,7 +707,7 @@ impl<TGraphContainer: ModuleGraphContainer>
source, source,
})); }));
} }
MediaType::TsBuildInfo | MediaType::Wasm | MediaType::SourceMap => { MediaType::Css | MediaType::Wasm | MediaType::SourceMap => {
panic!("Unexpected media type {media_type} for {specifier}") panic!("Unexpected media type {media_type} for {specifier}")
} }
}; };
@ -649,6 +729,48 @@ impl<TGraphContainer: ModuleGraphContainer>
| None => Ok(None), | None => Ok(None),
} }
} }
async fn load_maybe_cjs(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
original_source: &Arc<str>,
) -> Result<ModuleCodeStringSource, AnyError> {
let js_source = if media_type.is_emittable() {
Cow::Owned(
self
.emitter
.emit_parsed_source(
specifier,
media_type,
ModuleKind::Cjs,
original_source,
)
.await?,
)
} else {
Cow::Borrowed(original_source.as_ref())
};
let text = self
.node_code_translator
.translate_cjs_to_esm(specifier, Some(js_source))
.await?;
// at this point, we no longer need the parsed source in memory, so free it
self.parsed_source_cache.free(specifier);
Ok(ModuleCodeStringSource {
code: match text {
// perf: if the text is borrowed, that means it didn't make any changes
// to the original source, so we can just provide that instead of cloning
// the borrowed text
Cow::Borrowed(_) => {
ModuleSourceCode::String(original_source.clone().into())
}
Cow::Owned(text) => ModuleSourceCode::String(text.into()),
},
found_url: specifier.clone(),
media_type,
})
}
} }
enum CodeOrDeferredEmit<'a> { enum CodeOrDeferredEmit<'a> {
@ -658,6 +780,11 @@ enum CodeOrDeferredEmit<'a> {
media_type: MediaType, media_type: MediaType,
source: &'a Arc<str>, source: &'a Arc<str>,
}, },
Cjs {
specifier: &'a ModuleSpecifier,
media_type: MediaType,
source: &'a Arc<str>,
},
} }
// todo(dsherret): this double Rc boxing is not ideal // todo(dsherret): this double Rc boxing is not ideal
@ -819,7 +946,10 @@ impl<TGraphContainer: ModuleGraphContainer> ModuleLoader
"wasm" | "file" | "http" | "https" | "data" | "blob" => (), "wasm" | "file" | "http" | "https" | "data" | "blob" => (),
_ => return None, _ => return None,
} }
let source = self.0.load_prepared_module_sync(&specifier).ok()??; let source = self
.0
.load_prepared_module_for_source_map_sync(&specifier)
.ok()??;
source_map_from_code(source.code.as_bytes()) source_map_from_code(source.code.as_bytes())
} }
@ -898,3 +1028,79 @@ impl ModuleGraphUpdatePermit for WorkerModuleGraphUpdatePermit {
drop(self.permit); // explicit drop for clarity drop(self.permit); // explicit drop for clarity
} }
} }
#[derive(Debug)]
struct CliNodeRequireLoader<TGraphContainer: ModuleGraphContainer> {
emitter: Arc<Emitter>,
fs: Arc<dyn FileSystem>,
graph_container: TGraphContainer,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: Arc<dyn CliNpmResolver>,
}
impl<TGraphContainer: ModuleGraphContainer>
CliNodeRequireLoader<TGraphContainer>
{
pub fn new(
emitter: Arc<Emitter>,
fs: Arc<dyn FileSystem>,
graph_container: TGraphContainer,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: Arc<dyn CliNpmResolver>,
) -> Self {
Self {
emitter,
fs,
graph_container,
in_npm_pkg_checker,
npm_resolver,
}
}
}
impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
for CliNodeRequireLoader<TGraphContainer>
{
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn deno_runtime::deno_node::NodePermissions,
path: &'a Path,
) -> Result<std::borrow::Cow<'a, Path>, AnyError> {
if let Ok(url) = deno_path_util::url_from_file_path(path) {
// allow reading if it's in the module graph
if self.graph_container.graph().get(&url).is_some() {
return Ok(std::borrow::Cow::Borrowed(path));
}
}
self.npm_resolver.ensure_read_permission(permissions, path)
}
fn load_text_file_lossy(&self, path: &Path) -> Result<String, AnyError> {
// todo(dsherret): use the preloaded module from the graph if available?
let media_type = MediaType::from_path(path);
let text = self.fs.read_text_file_lossy_sync(path, None)?;
if media_type.is_emittable() {
let specifier = deno_path_util::url_from_file_path(path)?;
if self.in_npm_pkg_checker.in_npm_package(&specifier) {
return Err(
NotSupportedKindInNpmError {
media_type,
specifier,
}
.into(),
);
}
self.emitter.emit_parsed_source_sync(
&specifier,
media_type,
// this is probably not super accurate due to require esm, but probably ok.
// If we find this causes a lot of churn in the emit cache then we should
// investigate how we can make this better
ModuleKind::Cjs,
&text.into(),
)
} else {
Ok(text)
}
}
}

View file

@ -1,114 +0,0 @@
# napi
This directory contains source for Deno's Node-API implementation. It depends on
`napi_sym` and `deno_napi`.
Files are generally organized the same as in Node.js's implementation to ease in
ensuring compatibility.
## Adding a new function
Add the symbol name to
[`cli/napi_sym/symbol_exports.json`](../napi_sym/symbol_exports.json).
```diff
{
"symbols": [
...
"napi_get_undefined",
- "napi_get_null"
+ "napi_get_null",
+ "napi_get_boolean"
]
}
```
Determine where to place the implementation. `napi_get_boolean` is related to JS
values so we will place it in `js_native_api.rs`. If something is not clear,
just create a new file module.
See [`napi_sym`](../napi_sym/) for writing the implementation:
```rust
#[napi_sym::napi_sym]
pub fn napi_get_boolean(
env: *mut Env,
value: bool,
result: *mut napi_value,
) -> Result {
// ...
Ok(())
}
```
Update the generated symbol lists using the script:
```
deno run --allow-write tools/napi/generate_symbols_lists.js
```
Add a test in [`/tests/napi`](../../tests/napi/). You can also refer to Node.js
test suite for Node-API.
```js
// tests/napi/boolean_test.js
import { assertEquals, loadTestLibrary } from "./common.js";
const lib = loadTestLibrary();
Deno.test("napi get boolean", function () {
assertEquals(lib.test_get_boolean(true), true);
assertEquals(lib.test_get_boolean(false), false);
});
```
```rust
// tests/napi/src/boolean.rs
use napi_sys::Status::napi_ok;
use napi_sys::ValueType::napi_boolean;
use napi_sys::*;
extern "C" fn test_boolean(
env: napi_env,
info: napi_callback_info,
) -> napi_value {
let (args, argc, _) = crate::get_callback_info!(env, info, 1);
assert_eq!(argc, 1);
let mut ty = -1;
assert!(unsafe { napi_typeof(env, args[0], &mut ty) } == napi_ok);
assert_eq!(ty, napi_boolean);
// Use napi_get_boolean here...
value
}
pub fn init(env: napi_env, exports: napi_value) {
let properties = &[crate::new_property!(env, "test_boolean\0", test_boolean)];
unsafe {
napi_define_properties(env, exports, properties.len(), properties.as_ptr())
};
}
```
```diff
// tests/napi/src/lib.rs
+ mod boolean;
...
#[no_mangle]
unsafe extern "C" fn napi_register_module_v1(
env: napi_env,
exports: napi_value,
) -> napi_value {
...
+ boolean::init(env, exports);
exports
}
```
Run the test using `cargo test -p tests/napi`.

View file

@ -1,21 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
#![allow(unused_mut)]
#![allow(non_camel_case_types)]
#![allow(clippy::undocumented_unsafe_blocks)]
//! Symbols to be exported are now defined in this JSON file.
//! The `#[napi_sym]` macro checks for missing entries and panics.
//!
//! `./tools/napi/generate_symbols_list.js` is used to generate the LINK `cli/exports.def` on Windows,
//! which is also checked into git.
//!
//! To add a new napi function:
//! 1. Place `#[napi_sym]` on top of your implementation.
//! 2. Add the function's identifier to this JSON list.
//! 3. Finally, run `tools/napi/generate_symbols_list.js` to update `cli/napi/generated_symbol_exports_list_*.def`.
pub mod js_native_api;
pub mod node_api;
pub mod util;
pub mod uv;

View file

@ -1,11 +1,14 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::sync::Arc; use std::sync::Arc;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_graph::ParsedSourceStore; use deno_graph::ParsedSourceStore;
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node::DenoFsNodeResolverEnv; use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis; use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis;
@ -18,8 +21,8 @@ use serde::Serialize;
use crate::cache::CacheDBHash; use crate::cache::CacheDBHash;
use crate::cache::NodeAnalysisCache; use crate::cache::NodeAnalysisCache;
use crate::cache::ParsedSourceCache; use crate::cache::ParsedSourceCache;
use crate::resolver::CliNodeResolver; use crate::resolver::CjsTracker;
use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
pub type CliNodeCodeTranslator = pub type CliNodeCodeTranslator =
NodeCodeTranslator<CliCjsCodeAnalyzer, DenoFsNodeResolverEnv>; NodeCodeTranslator<CliCjsCodeAnalyzer, DenoFsNodeResolverEnv>;
@ -32,14 +35,14 @@ pub type CliNodeCodeTranslator =
/// because the node_modules folder might not exist at that time. /// because the node_modules folder might not exist at that time.
pub fn resolve_specifier_into_node_modules( pub fn resolve_specifier_into_node_modules(
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
fs: &dyn deno_fs::FileSystem,
) -> ModuleSpecifier { ) -> ModuleSpecifier {
specifier url_to_file_path(specifier)
.to_file_path()
.ok() .ok()
// this path might not exist at the time the graph is being created // this path might not exist at the time the graph is being created
// because the node_modules folder might not yet exist // because the node_modules folder might not yet exist
.and_then(|path| canonicalize_path_maybe_not_exists(&path).ok()) .and_then(|path| canonicalize_path_maybe_not_exists_with_fs(&path, fs).ok())
.and_then(|path| ModuleSpecifier::from_file_path(path).ok()) .and_then(|path| url_from_file_path(&path).ok())
.unwrap_or_else(|| specifier.clone()) .unwrap_or_else(|| specifier.clone())
} }
@ -56,23 +59,29 @@ pub enum CliCjsAnalysis {
pub struct CliCjsCodeAnalyzer { pub struct CliCjsCodeAnalyzer {
cache: NodeAnalysisCache, cache: NodeAnalysisCache,
cjs_tracker: Arc<CjsTracker>,
fs: deno_fs::FileSystemRc, fs: deno_fs::FileSystemRc,
node_resolver: Arc<CliNodeResolver>,
parsed_source_cache: Option<Arc<ParsedSourceCache>>, parsed_source_cache: Option<Arc<ParsedSourceCache>>,
// todo(dsherret): hack, remove in https://github.com/denoland/deno/pull/26439
// For example, this does not properly handle if cjs analysis was already done
// and has been cached.
is_npm_main: bool,
} }
impl CliCjsCodeAnalyzer { impl CliCjsCodeAnalyzer {
pub fn new( pub fn new(
cache: NodeAnalysisCache, cache: NodeAnalysisCache,
cjs_tracker: Arc<CjsTracker>,
fs: deno_fs::FileSystemRc, fs: deno_fs::FileSystemRc,
node_resolver: Arc<CliNodeResolver>,
parsed_source_cache: Option<Arc<ParsedSourceCache>>, parsed_source_cache: Option<Arc<ParsedSourceCache>>,
is_npm_main: bool,
) -> Self { ) -> Self {
Self { Self {
cache, cache,
cjs_tracker,
fs, fs,
node_resolver,
parsed_source_cache, parsed_source_cache,
is_npm_main,
} }
} }
@ -88,7 +97,7 @@ impl CliCjsCodeAnalyzer {
return Ok(analysis); return Ok(analysis);
} }
let mut media_type = MediaType::from_specifier(specifier); let media_type = MediaType::from_specifier(specifier);
if media_type == MediaType::Json { if media_type == MediaType::Json {
return Ok(CliCjsAnalysis::Cjs { return Ok(CliCjsAnalysis::Cjs {
exports: vec![], exports: vec![],
@ -96,31 +105,20 @@ impl CliCjsCodeAnalyzer {
}); });
} }
if media_type == MediaType::JavaScript { let cjs_tracker = self.cjs_tracker.clone();
if let Some(package_json) = let is_npm_main = self.is_npm_main;
self.node_resolver.get_closest_package_json(specifier)? let is_maybe_cjs =
{ cjs_tracker.is_maybe_cjs(specifier, media_type)? || is_npm_main;
match package_json.typ.as_str() { let analysis = if is_maybe_cjs {
"commonjs" => {
media_type = MediaType::Cjs;
}
"module" => {
media_type = MediaType::Mjs;
}
_ => {}
}
}
}
let maybe_parsed_source = self let maybe_parsed_source = self
.parsed_source_cache .parsed_source_cache
.as_ref() .as_ref()
.and_then(|c| c.remove_parsed_source(specifier)); .and_then(|c| c.remove_parsed_source(specifier));
let analysis = deno_core::unsync::spawn_blocking({ deno_core::unsync::spawn_blocking({
let specifier = specifier.clone(); let specifier = specifier.clone();
let source: Arc<str> = source.into(); let source: Arc<str> = source.into();
move || -> Result<_, deno_ast::ParseDiagnostic> { move || -> Result<_, AnyError> {
let parsed_source = let parsed_source =
maybe_parsed_source.map(Ok).unwrap_or_else(|| { maybe_parsed_source.map(Ok).unwrap_or_else(|| {
deno_ast::parse_program(deno_ast::ParseParams { deno_ast::parse_program(deno_ast::ParseParams {
@ -132,26 +130,28 @@ impl CliCjsCodeAnalyzer {
maybe_syntax: None, maybe_syntax: None,
}) })
})?; })?;
if parsed_source.is_script() { let is_script = parsed_source.compute_is_script();
let is_cjs = cjs_tracker.is_cjs_with_known_is_script(
parsed_source.specifier(),
media_type,
is_script,
)? || is_script && is_npm_main;
if is_cjs {
let analysis = parsed_source.analyze_cjs(); let analysis = parsed_source.analyze_cjs();
Ok(CliCjsAnalysis::Cjs { Ok(CliCjsAnalysis::Cjs {
exports: analysis.exports, exports: analysis.exports,
reexports: analysis.reexports, reexports: analysis.reexports,
}) })
} else if media_type == MediaType::Cjs {
// FIXME: `deno_ast` should internally handle MediaType::Cjs implying that
// the result must never be Esm
Ok(CliCjsAnalysis::Cjs {
exports: vec![],
reexports: vec![],
})
} else { } else {
Ok(CliCjsAnalysis::Esm) Ok(CliCjsAnalysis::Esm)
} }
} }
}) })
.await .await
.unwrap()?; .unwrap()?
} else {
CliCjsAnalysis::Esm
};
self self
.cache .cache
@ -163,11 +163,11 @@ impl CliCjsCodeAnalyzer {
#[async_trait::async_trait(?Send)] #[async_trait::async_trait(?Send)]
impl CjsCodeAnalyzer for CliCjsCodeAnalyzer { impl CjsCodeAnalyzer for CliCjsCodeAnalyzer {
async fn analyze_cjs( async fn analyze_cjs<'a>(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
source: Option<String>, source: Option<Cow<'a, str>>,
) -> Result<ExtNodeCjsAnalysis, AnyError> { ) -> Result<ExtNodeCjsAnalysis<'a>, AnyError> {
let source = match source { let source = match source {
Some(source) => source, Some(source) => source,
None => { None => {
@ -175,7 +175,7 @@ impl CjsCodeAnalyzer for CliCjsCodeAnalyzer {
if let Ok(source_from_file) = if let Ok(source_from_file) =
self.fs.read_text_file_lossy_async(path, None).await self.fs.read_text_file_lossy_async(path, None).await
{ {
source_from_file Cow::Owned(source_from_file)
} else { } else {
return Ok(ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports { return Ok(ExtNodeCjsAnalysis::Cjs(CjsAnalysisExports {
exports: vec![], exports: vec![],

View file

@ -10,8 +10,8 @@ use deno_core::serde_json;
use deno_core::url::Url; use deno_core::url::Url;
use deno_resolver::npm::ByonmNpmResolver; use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ByonmNpmResolverCreateOptions; use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_runtime::deno_node::DenoFsNodeResolverEnv;
use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::ops::process::NpmProcessStateProvider; use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use node_resolver::NpmResolver; use node_resolver::NpmResolver;
@ -25,30 +25,14 @@ use super::InnerCliNpmResolverRef;
use super::ResolvePkgFolderFromDenoReqError; use super::ResolvePkgFolderFromDenoReqError;
pub type CliByonmNpmResolverCreateOptions = pub type CliByonmNpmResolverCreateOptions =
ByonmNpmResolverCreateOptions<CliDenoResolverFs>; ByonmNpmResolverCreateOptions<CliDenoResolverFs, DenoFsNodeResolverEnv>;
pub type CliByonmNpmResolver = ByonmNpmResolver<CliDenoResolverFs>; pub type CliByonmNpmResolver =
ByonmNpmResolver<CliDenoResolverFs, DenoFsNodeResolverEnv>;
// todo(dsherret): the services hanging off `CliNpmResolver` doesn't seem ideal. We should probably decouple. // todo(dsherret): the services hanging off `CliNpmResolver` doesn't seem ideal. We should probably decouple.
#[derive(Debug)] #[derive(Debug)]
struct CliByonmWrapper(Arc<CliByonmNpmResolver>); struct CliByonmWrapper(Arc<CliByonmNpmResolver>);
impl NodeRequireResolver for CliByonmWrapper {
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError> {
if !path
.components()
.any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules")
{
permissions.check_read_path(path)
} else {
Ok(Cow::Borrowed(path))
}
}
}
impl NpmProcessStateProvider for CliByonmWrapper { impl NpmProcessStateProvider for CliByonmWrapper {
fn get_npm_process_state(&self) -> String { fn get_npm_process_state(&self) -> String {
serde_json::to_string(&NpmProcessState { serde_json::to_string(&NpmProcessState {
@ -67,10 +51,6 @@ impl CliNpmResolver for CliByonmNpmResolver {
self self
} }
fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver> {
Arc::new(CliByonmWrapper(self))
}
fn into_process_state_provider( fn into_process_state_provider(
self: Arc<Self>, self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider> { ) -> Arc<dyn NpmProcessStateProvider> {
@ -100,6 +80,21 @@ impl CliNpmResolver for CliByonmNpmResolver {
.map_err(ResolvePkgFolderFromDenoReqError::Byonm) .map_err(ResolvePkgFolderFromDenoReqError::Byonm)
} }
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError> {
if !path
.components()
.any(|c| c.as_os_str().to_ascii_lowercase() == "node_modules")
{
permissions.check_read_path(path).map_err(Into::into)
} else {
Ok(Cow::Borrowed(path))
}
}
fn check_state_hash(&self) -> Option<u64> { fn check_state_hash(&self) -> Option<u64> {
// it is very difficult to determine the check state hash for byonm // it is very difficult to determine the check state hash for byonm
// so we just return None to signify check caching is not supported // so we just return None to signify check caching is not supported

View file

@ -3,6 +3,7 @@
use base64::prelude::BASE64_STANDARD; use base64::prelude::BASE64_STANDARD;
use base64::Engine; use base64::Engine;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_npm::npm_rc::RegistryConfig; use deno_npm::npm_rc::RegistryConfig;
use http::header; use http::header;
@ -36,17 +37,21 @@ pub fn maybe_auth_header_for_npm_registry(
} }
if username.is_some() && password.is_some() { if username.is_some() && password.is_some() {
return Ok(Some(( // The npm client does some double encoding when generating the
header::AUTHORIZATION, // bearer token value, see
header::HeaderValue::from_str(&format!( // https://github.com/npm/cli/blob/780afc50e3a345feb1871a28e33fa48235bc3bd5/workspaces/config/lib/index.js#L846-L851
"Basic {}", let pw_base64 = BASE64_STANDARD
BASE64_STANDARD.encode(format!( .decode(password.unwrap())
.with_context(|| "The password in npmrc is an invalid base64 string")?;
let bearer = BASE64_STANDARD.encode(format!(
"{}:{}", "{}:{}",
username.unwrap(), username.unwrap(),
password.unwrap() String::from_utf8_lossy(&pw_base64)
)) ));
))
.unwrap(), return Ok(Some((
header::AUTHORIZATION,
header::HeaderValue::from_str(&format!("Basic {}", bearer)).unwrap(),
))); )));
} }

View file

@ -26,7 +26,7 @@ use crate::cache::CACHE_PERM;
use crate::util::fs::atomic_write_file_with_retries; use crate::util::fs::atomic_write_file_with_retries;
use crate::util::fs::hard_link_dir_recursive; use crate::util::fs::hard_link_dir_recursive;
mod registry_info; pub mod registry_info;
mod tarball; mod tarball;
mod tarball_extract; mod tarball_extract;
@ -36,7 +36,7 @@ pub use tarball::TarballCache;
/// Stores a single copy of npm packages in a cache. /// Stores a single copy of npm packages in a cache.
#[derive(Debug)] #[derive(Debug)]
pub struct NpmCache { pub struct NpmCache {
cache_dir: NpmCacheDir, cache_dir: Arc<NpmCacheDir>,
cache_setting: CacheSetting, cache_setting: CacheSetting,
npmrc: Arc<ResolvedNpmRc>, npmrc: Arc<ResolvedNpmRc>,
/// ensures a package is only downloaded once per run /// ensures a package is only downloaded once per run
@ -45,7 +45,7 @@ pub struct NpmCache {
impl NpmCache { impl NpmCache {
pub fn new( pub fn new(
cache_dir: NpmCacheDir, cache_dir: Arc<NpmCacheDir>,
cache_setting: CacheSetting, cache_setting: CacheSetting,
npmrc: Arc<ResolvedNpmRc>, npmrc: Arc<ResolvedNpmRc>,
) -> Self { ) -> Self {
@ -61,6 +61,10 @@ impl NpmCache {
&self.cache_setting &self.cache_setting
} }
pub fn root_dir_path(&self) -> &Path {
self.cache_dir.root_dir()
}
pub fn root_dir_url(&self) -> &Url { pub fn root_dir_url(&self) -> &Url {
self.cache_dir.root_dir_url() self.cache_dir.root_dir_url()
} }
@ -152,10 +156,6 @@ impl NpmCache {
self.cache_dir.package_name_folder(name, registry_url) self.cache_dir.package_name_folder(name, registry_url)
} }
pub fn root_folder(&self) -> PathBuf {
self.cache_dir.root_dir().to_owned()
}
pub fn resolve_package_folder_id_from_specifier( pub fn resolve_package_folder_id_from_specifier(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,

View file

@ -84,7 +84,7 @@ impl RegistryInfoDownloader {
self.load_package_info_inner(name).await.with_context(|| { self.load_package_info_inner(name).await.with_context(|| {
format!( format!(
"Error getting response at {} for package \"{}\"", "Error getting response at {} for package \"{}\"",
self.get_package_url(name), get_package_url(&self.npmrc, name),
name name
) )
}) })
@ -190,7 +190,7 @@ impl RegistryInfoDownloader {
fn create_load_future(self: &Arc<Self>, name: &str) -> LoadFuture { fn create_load_future(self: &Arc<Self>, name: &str) -> LoadFuture {
let downloader = self.clone(); let downloader = self.clone();
let package_url = self.get_package_url(name); let package_url = get_package_url(&self.npmrc, name);
let registry_config = self.npmrc.get_registry_config(name); let registry_config = self.npmrc.get_registry_config(name);
let maybe_auth_header = let maybe_auth_header =
match maybe_auth_header_for_npm_registry(registry_config) { match maybe_auth_header_for_npm_registry(registry_config) {
@ -239,9 +239,10 @@ impl RegistryInfoDownloader {
.map(|r| r.map_err(Arc::new)) .map(|r| r.map_err(Arc::new))
.boxed_local() .boxed_local()
} }
}
fn get_package_url(&self, name: &str) -> Url { pub fn get_package_url(npmrc: &ResolvedNpmRc, name: &str) -> Url {
let registry_url = self.npmrc.get_registry_url(name); let registry_url = npmrc.get_registry_url(name);
// The '/' character in scoped package names "@scope/name" must be // The '/' character in scoped package names "@scope/name" must be
// encoded for older third party registries. Newer registries and // encoded for older third party registries. Newer registries and
// npm itself support both ways // npm itself support both ways
@ -270,5 +271,4 @@ impl RegistryInfoDownloader {
// to match npm. // to match npm.
.join(&name.to_string().replace("%2F", "%2f")) .join(&name.to_string().replace("%2F", "%2f"))
.unwrap() .unwrap()
}
} }

View file

@ -12,6 +12,7 @@ use deno_cache_dir::npm::NpmCacheDir;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::url::Url;
use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo; use deno_npm::registry::NpmPackageInfo;
use deno_npm::registry::NpmRegistryApi; use deno_npm::registry::NpmRegistryApi;
@ -24,12 +25,12 @@ use deno_npm::NpmSystemInfo;
use deno_runtime::colors; use deno_runtime::colors;
use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::NodePermissions; use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NodeRequireResolver;
use deno_runtime::ops::process::NpmProcessStateProvider; use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use node_resolver::errors::PackageFolderResolveError; use node_resolver::errors::PackageFolderResolveError;
use node_resolver::errors::PackageFolderResolveIoError; use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::InNpmPackageChecker;
use node_resolver::NpmResolver; use node_resolver::NpmResolver;
use resolution::AddPkgReqsResult; use resolution::AddPkgReqsResult;
@ -38,7 +39,7 @@ use crate::args::LifecycleScriptsConfig;
use crate::args::NpmInstallDepsProvider; use crate::args::NpmInstallDepsProvider;
use crate::args::NpmProcessState; use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind; use crate::args::NpmProcessStateKind;
use crate::cache::DenoCacheEnvFsAdapter; use crate::args::PackageJsonDepValueParseWithLocationError;
use crate::cache::FastInsecureHasher; use crate::cache::FastInsecureHasher;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs; use crate::util::fs::canonicalize_path_maybe_not_exists_with_fs;
@ -55,7 +56,7 @@ use super::CliNpmResolver;
use super::InnerCliNpmResolverRef; use super::InnerCliNpmResolverRef;
use super::ResolvePkgFolderFromDenoReqError; use super::ResolvePkgFolderFromDenoReqError;
mod cache; pub mod cache;
mod registry; mod registry;
mod resolution; mod resolution;
mod resolvers; mod resolvers;
@ -65,12 +66,12 @@ pub enum CliNpmResolverManagedSnapshotOption {
Specified(Option<ValidSerializedNpmResolutionSnapshot>), Specified(Option<ValidSerializedNpmResolutionSnapshot>),
} }
pub struct CliNpmResolverManagedCreateOptions { pub struct CliManagedNpmResolverCreateOptions {
pub snapshot: CliNpmResolverManagedSnapshotOption, pub snapshot: CliNpmResolverManagedSnapshotOption,
pub maybe_lockfile: Option<Arc<CliLockfile>>, pub maybe_lockfile: Option<Arc<CliLockfile>>,
pub fs: Arc<dyn deno_runtime::deno_fs::FileSystem>, pub fs: Arc<dyn deno_runtime::deno_fs::FileSystem>,
pub http_client_provider: Arc<crate::http_util::HttpClientProvider>, pub http_client_provider: Arc<crate::http_util::HttpClientProvider>,
pub npm_global_cache_dir: PathBuf, pub npm_cache_dir: Arc<NpmCacheDir>,
pub cache_setting: crate::args::CacheSetting, pub cache_setting: crate::args::CacheSetting,
pub text_only_progress_bar: crate::util::progress_bar::ProgressBar, pub text_only_progress_bar: crate::util::progress_bar::ProgressBar,
pub maybe_node_modules_path: Option<PathBuf>, pub maybe_node_modules_path: Option<PathBuf>,
@ -81,7 +82,7 @@ pub struct CliNpmResolverManagedCreateOptions {
} }
pub async fn create_managed_npm_resolver_for_lsp( pub async fn create_managed_npm_resolver_for_lsp(
options: CliNpmResolverManagedCreateOptions, options: CliManagedNpmResolverCreateOptions,
) -> Arc<dyn CliNpmResolver> { ) -> Arc<dyn CliNpmResolver> {
let npm_cache = create_cache(&options); let npm_cache = create_cache(&options);
let npm_api = create_api(&options, npm_cache.clone()); let npm_api = create_api(&options, npm_cache.clone());
@ -114,7 +115,7 @@ pub async fn create_managed_npm_resolver_for_lsp(
} }
pub async fn create_managed_npm_resolver( pub async fn create_managed_npm_resolver(
options: CliNpmResolverManagedCreateOptions, options: CliManagedNpmResolverCreateOptions,
) -> Result<Arc<dyn CliNpmResolver>, AnyError> { ) -> Result<Arc<dyn CliNpmResolver>, AnyError> {
let npm_cache = create_cache(&options); let npm_cache = create_cache(&options);
let npm_api = create_api(&options, npm_cache.clone()); let npm_api = create_api(&options, npm_cache.clone());
@ -188,20 +189,16 @@ fn create_inner(
)) ))
} }
fn create_cache(options: &CliNpmResolverManagedCreateOptions) -> Arc<NpmCache> { fn create_cache(options: &CliManagedNpmResolverCreateOptions) -> Arc<NpmCache> {
Arc::new(NpmCache::new( Arc::new(NpmCache::new(
NpmCacheDir::new( options.npm_cache_dir.clone(),
&DenoCacheEnvFsAdapter(options.fs.as_ref()),
options.npm_global_cache_dir.clone(),
options.npmrc.get_all_known_registries_urls(),
),
options.cache_setting.clone(), options.cache_setting.clone(),
options.npmrc.clone(), options.npmrc.clone(),
)) ))
} }
fn create_api( fn create_api(
options: &CliNpmResolverManagedCreateOptions, options: &CliManagedNpmResolverCreateOptions,
npm_cache: Arc<NpmCache>, npm_cache: Arc<NpmCache>,
) -> Arc<CliNpmRegistryApi> { ) -> Arc<CliNpmRegistryApi> {
Arc::new(CliNpmRegistryApi::new( Arc::new(CliNpmRegistryApi::new(
@ -258,6 +255,35 @@ async fn snapshot_from_lockfile(
Ok(snapshot) Ok(snapshot)
} }
#[derive(Debug)]
struct ManagedInNpmPackageChecker {
root_dir: Url,
}
impl InNpmPackageChecker for ManagedInNpmPackageChecker {
fn in_npm_package(&self, specifier: &Url) -> bool {
specifier.as_ref().starts_with(self.root_dir.as_str())
}
}
pub struct CliManagedInNpmPkgCheckerCreateOptions<'a> {
pub root_cache_dir_url: &'a Url,
pub maybe_node_modules_path: Option<&'a Path>,
}
pub fn create_managed_in_npm_pkg_checker(
options: CliManagedInNpmPkgCheckerCreateOptions,
) -> Arc<dyn InNpmPackageChecker> {
let root_dir = match options.maybe_node_modules_path {
Some(node_modules_folder) => {
deno_path_util::url_from_directory_path(node_modules_folder).unwrap()
}
None => options.root_cache_dir_url.clone(),
};
debug_assert!(root_dir.as_str().ends_with('/'));
Arc::new(ManagedInNpmPackageChecker { root_dir })
}
/// An npm resolver where the resolution is managed by Deno rather than /// An npm resolver where the resolution is managed by Deno rather than
/// the user bringing their own node_modules (BYONM) on the file system. /// the user bringing their own node_modules (BYONM) on the file system.
pub struct ManagedCliNpmResolver { pub struct ManagedCliNpmResolver {
@ -480,19 +506,24 @@ impl ManagedCliNpmResolver {
self.resolution.resolve_pkg_id_from_pkg_req(req) self.resolution.resolve_pkg_id_from_pkg_req(req)
} }
pub fn ensure_no_pkg_json_dep_errors(&self) -> Result<(), AnyError> { pub fn ensure_no_pkg_json_dep_errors(
&self,
) -> Result<(), Box<PackageJsonDepValueParseWithLocationError>> {
for err in self.npm_install_deps_provider.pkg_json_dep_errors() { for err in self.npm_install_deps_provider.pkg_json_dep_errors() {
match err { match &err.source {
deno_package_json::PackageJsonDepValueParseError::VersionReq(_) => { deno_package_json::PackageJsonDepValueParseError::VersionReq(_) => {
return Err( return Err(Box::new(err.clone()));
AnyError::from(err.clone())
.context("Failed to install from package.json"),
);
} }
deno_package_json::PackageJsonDepValueParseError::Unsupported { deno_package_json::PackageJsonDepValueParseError::Unsupported {
.. ..
} => { } => {
log::warn!("{} {} in package.json", colors::yellow("Warning"), err) // only warn for this one
log::warn!(
"{} {}\n at {}",
colors::yellow("Warning"),
err.source,
err.location,
)
} }
} }
} }
@ -549,8 +580,16 @@ impl ManagedCliNpmResolver {
.map_err(|err| err.into()) .map_err(|err| err.into())
} }
pub fn global_cache_root_folder(&self) -> PathBuf { pub fn maybe_node_modules_path(&self) -> Option<&Path> {
self.npm_cache.root_folder() self.fs_resolver.node_modules_path()
}
pub fn global_cache_root_path(&self) -> &Path {
self.npm_cache.root_dir_path()
}
pub fn global_cache_root_url(&self) -> &Url {
self.npm_cache.root_dir_url()
} }
} }
@ -585,22 +624,6 @@ impl NpmResolver for ManagedCliNpmResolver {
log::debug!("Resolved {} from {} to {}", name, referrer, path.display()); log::debug!("Resolved {} from {} to {}", name, referrer, path.display());
Ok(path) Ok(path)
} }
fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
let root_dir_url = self.fs_resolver.root_dir_url();
debug_assert!(root_dir_url.as_str().ends_with('/'));
specifier.as_ref().starts_with(root_dir_url.as_str())
}
}
impl NodeRequireResolver for ManagedCliNpmResolver {
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError> {
self.fs_resolver.ensure_read_permission(permissions, path)
}
} }
impl NpmProcessStateProvider for ManagedCliNpmResolver { impl NpmProcessStateProvider for ManagedCliNpmResolver {
@ -617,10 +640,6 @@ impl CliNpmResolver for ManagedCliNpmResolver {
self self
} }
fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver> {
self
}
fn into_process_state_provider( fn into_process_state_provider(
self: Arc<Self>, self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider> { ) -> Arc<dyn NpmProcessStateProvider> {
@ -681,6 +700,14 @@ impl CliNpmResolver for ManagedCliNpmResolver {
.map_err(ResolvePkgFolderFromDenoReqError::Managed) .map_err(ResolvePkgFolderFromDenoReqError::Managed)
} }
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError> {
self.fs_resolver.ensure_read_permission(permissions, path)
}
fn check_state_hash(&self) -> Option<u64> { fn check_state_hash(&self) -> Option<u64> {
// We could go further and check all the individual // We could go further and check all the individual
// npm packages, but that's probably overkill. // npm packages, but that's probably overkill.

View file

@ -17,7 +17,6 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures; use deno_core::futures;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
use deno_core::url::Url;
use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId; use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage; use deno_npm::NpmResolutionPackage;
@ -30,9 +29,6 @@ use crate::npm::managed::cache::TarballCache;
/// Part of the resolution that interacts with the file system. /// Part of the resolution that interacts with the file system.
#[async_trait(?Send)] #[async_trait(?Send)]
pub trait NpmPackageFsResolver: Send + Sync { pub trait NpmPackageFsResolver: Send + Sync {
/// Specifier for the root directory.
fn root_dir_url(&self) -> &Url;
/// The local node_modules folder if it is applicable to the implementation. /// The local node_modules folder if it is applicable to the implementation.
fn node_modules_path(&self) -> Option<&Path>; fn node_modules_path(&self) -> Option<&Path>;
@ -137,7 +133,7 @@ impl RegistryReadPermissionChecker {
} }
} }
permissions.check_read_path(path) permissions.check_read_path(path).map_err(Into::into)
} }
} }

View file

@ -11,7 +11,6 @@ use crate::colors;
use async_trait::async_trait; use async_trait::async_trait;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId; use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage; use deno_npm::NpmResolutionPackage;
@ -56,7 +55,7 @@ impl GlobalNpmPackageResolver {
Self { Self {
registry_read_permission_checker: RegistryReadPermissionChecker::new( registry_read_permission_checker: RegistryReadPermissionChecker::new(
fs, fs,
cache.root_folder(), cache.root_dir_path().to_path_buf(),
), ),
cache, cache,
tarball_cache, tarball_cache,
@ -69,10 +68,6 @@ impl GlobalNpmPackageResolver {
#[async_trait(?Send)] #[async_trait(?Send)]
impl NpmPackageFsResolver for GlobalNpmPackageResolver { impl NpmPackageFsResolver for GlobalNpmPackageResolver {
fn root_dir_url(&self) -> &Url {
self.cache.root_dir_url()
}
fn node_modules_path(&self) -> Option<&Path> { fn node_modules_path(&self) -> Option<&Path> {
None None
} }

View file

@ -155,10 +155,6 @@ impl LocalNpmPackageResolver {
#[async_trait(?Send)] #[async_trait(?Send)]
impl NpmPackageFsResolver for LocalNpmPackageResolver { impl NpmPackageFsResolver for LocalNpmPackageResolver {
fn root_dir_url(&self) -> &Url {
&self.root_node_modules_url
}
fn node_modules_path(&self) -> Option<&Path> { fn node_modules_path(&self) -> Option<&Path> {
Some(self.root_node_modules_path.as_ref()) Some(self.root_node_modules_path.as_ref())
} }
@ -1039,12 +1035,18 @@ fn junction_or_symlink_dir(
if symlink_err.kind() == std::io::ErrorKind::PermissionDenied => if symlink_err.kind() == std::io::ErrorKind::PermissionDenied =>
{ {
USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed); USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed);
junction::create(old_path, new_path).map_err(Into::into) junction::create(old_path, new_path)
.context("Failed creating junction in node_modules folder")
}
Err(symlink_err) => {
log::warn!(
"{} Unexpected error symlinking node_modules: {symlink_err}",
colors::yellow("Warning")
);
USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed);
junction::create(old_path, new_path)
.context("Failed creating junction in node_modules folder")
} }
Err(symlink_err) => Err(
AnyError::from(symlink_err)
.context("Failed creating symlink in node_modules folder"),
),
} }
} }

View file

@ -4,30 +4,37 @@ mod byonm;
mod common; mod common;
mod managed; mod managed;
use std::borrow::Cow;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use common::maybe_auth_header_for_npm_registry;
use dashmap::DashMap; use dashmap::DashMap;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::serde_json; use deno_core::serde_json;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo; use deno_npm::registry::NpmPackageInfo;
use deno_resolver::npm::ByonmInNpmPackageChecker;
use deno_resolver::npm::ByonmNpmResolver; use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError; use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError;
use deno_runtime::deno_node::NodeRequireResolver; use deno_runtime::deno_node::NodePermissions;
use deno_runtime::ops::process::NpmProcessStateProvider; use deno_runtime::ops::process::NpmProcessStateProvider;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use managed::cache::registry_info::get_package_url;
use managed::create_managed_in_npm_pkg_checker;
use node_resolver::InNpmPackageChecker;
use node_resolver::NpmResolver; use node_resolver::NpmResolver;
use thiserror::Error; use thiserror::Error;
use crate::args::npm_registry_url;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
pub use self::byonm::CliByonmNpmResolver; pub use self::byonm::CliByonmNpmResolver;
pub use self::byonm::CliByonmNpmResolverCreateOptions; pub use self::byonm::CliByonmNpmResolverCreateOptions;
pub use self::managed::CliNpmResolverManagedCreateOptions; pub use self::managed::CliManagedInNpmPkgCheckerCreateOptions;
pub use self::managed::CliManagedNpmResolverCreateOptions;
pub use self::managed::CliNpmResolverManagedSnapshotOption; pub use self::managed::CliNpmResolverManagedSnapshotOption;
pub use self::managed::ManagedCliNpmResolver; pub use self::managed::ManagedCliNpmResolver;
@ -40,7 +47,7 @@ pub enum ResolvePkgFolderFromDenoReqError {
} }
pub enum CliNpmResolverCreateOptions { pub enum CliNpmResolverCreateOptions {
Managed(CliNpmResolverManagedCreateOptions), Managed(CliManagedNpmResolverCreateOptions),
Byonm(CliByonmNpmResolverCreateOptions), Byonm(CliByonmNpmResolverCreateOptions),
} }
@ -66,6 +73,22 @@ pub async fn create_cli_npm_resolver(
} }
} }
pub enum CreateInNpmPkgCheckerOptions<'a> {
Managed(CliManagedInNpmPkgCheckerCreateOptions<'a>),
Byonm,
}
pub fn create_in_npm_pkg_checker(
options: CreateInNpmPkgCheckerOptions,
) -> Arc<dyn InNpmPackageChecker> {
match options {
CreateInNpmPkgCheckerOptions::Managed(options) => {
create_managed_in_npm_pkg_checker(options)
}
CreateInNpmPkgCheckerOptions::Byonm => Arc::new(ByonmInNpmPackageChecker),
}
}
pub enum InnerCliNpmResolverRef<'a> { pub enum InnerCliNpmResolverRef<'a> {
Managed(&'a ManagedCliNpmResolver), Managed(&'a ManagedCliNpmResolver),
#[allow(dead_code)] #[allow(dead_code)]
@ -74,7 +97,6 @@ pub enum InnerCliNpmResolverRef<'a> {
pub trait CliNpmResolver: NpmResolver { pub trait CliNpmResolver: NpmResolver {
fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver>; fn into_npm_resolver(self: Arc<Self>) -> Arc<dyn NpmResolver>;
fn into_require_resolver(self: Arc<Self>) -> Arc<dyn NodeRequireResolver>;
fn into_process_state_provider( fn into_process_state_provider(
self: Arc<Self>, self: Arc<Self>,
) -> Arc<dyn NpmProcessStateProvider>; ) -> Arc<dyn NpmProcessStateProvider>;
@ -105,6 +127,12 @@ pub trait CliNpmResolver: NpmResolver {
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError>; ) -> Result<PathBuf, ResolvePkgFolderFromDenoReqError>;
fn ensure_read_permission<'a>(
&self,
permissions: &mut dyn NodePermissions,
path: &'a Path,
) -> Result<Cow<'a, Path>, AnyError>;
/// Returns a hash returning the state of the npm resolver /// Returns a hash returning the state of the npm resolver
/// or `None` if the state currently can't be determined. /// or `None` if the state currently can't be determined.
fn check_state_hash(&self) -> Option<u64>; fn check_state_hash(&self) -> Option<u64>;
@ -115,14 +143,19 @@ pub struct NpmFetchResolver {
nv_by_req: DashMap<PackageReq, Option<PackageNv>>, nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
info_by_name: DashMap<String, Option<Arc<NpmPackageInfo>>>, info_by_name: DashMap<String, Option<Arc<NpmPackageInfo>>>,
file_fetcher: Arc<FileFetcher>, file_fetcher: Arc<FileFetcher>,
npmrc: Arc<ResolvedNpmRc>,
} }
impl NpmFetchResolver { impl NpmFetchResolver {
pub fn new(file_fetcher: Arc<FileFetcher>) -> Self { pub fn new(
file_fetcher: Arc<FileFetcher>,
npmrc: Arc<ResolvedNpmRc>,
) -> Self {
Self { Self {
nv_by_req: Default::default(), nv_by_req: Default::default(),
info_by_name: Default::default(), info_by_name: Default::default(),
file_fetcher, file_fetcher,
npmrc,
} }
} }
@ -157,11 +190,21 @@ impl NpmFetchResolver {
return info.value().clone(); return info.value().clone();
} }
let fetch_package_info = || async { let fetch_package_info = || async {
let info_url = npm_registry_url().join(name).ok()?; let info_url = get_package_url(&self.npmrc, name);
let file_fetcher = self.file_fetcher.clone(); let file_fetcher = self.file_fetcher.clone();
let registry_config = self.npmrc.get_registry_config(name);
// TODO(bartlomieju): this should error out, not use `.ok()`.
let maybe_auth_header =
maybe_auth_header_for_npm_registry(registry_config).ok()?;
// spawn due to the lsp's `Send` requirement // spawn due to the lsp's `Send` requirement
let file = deno_core::unsync::spawn(async move { let file = deno_core::unsync::spawn(async move {
file_fetcher.fetch_bypass_permissions(&info_url).await.ok() file_fetcher
.fetch_bypass_permissions_with_maybe_auth(
&info_url,
maybe_auth_header,
)
.await
.ok()
}) })
.await .await
.ok()??; .ok()??;
@ -172,3 +215,15 @@ impl NpmFetchResolver {
info info
} }
} }
pub const NPM_CONFIG_USER_AGENT_ENV_VAR: &str = "npm_config_user_agent";
pub fn get_npm_config_user_agent() -> String {
format!(
"deno/{} npm/? deno/{} {} {}",
env!("CARGO_PKG_VERSION"),
env!("CARGO_PKG_VERSION"),
std::env::consts::OS,
std::env::consts::ARCH
)
}

View file

@ -2,7 +2,6 @@
use std::sync::atomic::AtomicUsize; use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering; use std::sync::atomic::Ordering;
use std::time;
use deno_core::error::generic_error; use deno_core::error::generic_error;
use deno_core::error::type_error; use deno_core::error::type_error;
@ -13,6 +12,7 @@ use deno_core::ModuleSpecifier;
use deno_core::OpState; use deno_core::OpState;
use deno_runtime::deno_permissions::ChildPermissionsArg; use deno_runtime::deno_permissions::ChildPermissionsArg;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_web::StartTime;
use tokio::sync::mpsc::UnboundedSender; use tokio::sync::mpsc::UnboundedSender;
use uuid::Uuid; use uuid::Uuid;
@ -56,7 +56,7 @@ struct PermissionsHolder(Uuid, PermissionsContainer);
pub fn op_pledge_test_permissions( pub fn op_pledge_test_permissions(
state: &mut OpState, state: &mut OpState,
#[serde] args: ChildPermissionsArg, #[serde] args: ChildPermissionsArg,
) -> Result<Uuid, AnyError> { ) -> Result<Uuid, deno_runtime::deno_permissions::ChildPermissionError> {
let token = Uuid::new_v4(); let token = Uuid::new_v4();
let parent_permissions = state.borrow_mut::<PermissionsContainer>(); let parent_permissions = state.borrow_mut::<PermissionsContainer>();
let worker_permissions = parent_permissions.create_child_permissions(args)?; let worker_permissions = parent_permissions.create_child_permissions(args)?;
@ -147,8 +147,8 @@ fn op_dispatch_bench_event(state: &mut OpState, #[serde] event: BenchEvent) {
#[op2(fast)] #[op2(fast)]
#[number] #[number]
fn op_bench_now(state: &mut OpState) -> Result<u64, AnyError> { fn op_bench_now(state: &mut OpState) -> Result<u64, std::num::TryFromIntError> {
let ns = state.borrow::<time::Instant>().elapsed().as_nanos(); let ns = state.borrow::<StartTime>().elapsed().as_nanos();
let ns_u64 = u64::try_from(ns)?; let ns_u64 = u64::try_from(ns)?;
Ok(ns_u64) Ok(ns_u64)
} }

View file

@ -46,7 +46,7 @@ pub fn op_jupyter_input(
state: &mut OpState, state: &mut OpState,
#[string] prompt: String, #[string] prompt: String,
is_password: bool, is_password: bool,
) -> Result<Option<String>, AnyError> { ) -> Option<String> {
let (last_execution_request, stdin_connection_proxy) = { let (last_execution_request, stdin_connection_proxy) = {
( (
state.borrow::<Arc<Mutex<Option<JupyterMessage>>>>().clone(), state.borrow::<Arc<Mutex<Option<JupyterMessage>>>>().clone(),
@ -58,11 +58,11 @@ pub fn op_jupyter_input(
if let Some(last_request) = maybe_last_request { if let Some(last_request) = maybe_last_request {
let JupyterMessageContent::ExecuteRequest(msg) = &last_request.content let JupyterMessageContent::ExecuteRequest(msg) = &last_request.content
else { else {
return Ok(None); return None;
}; };
if !msg.allow_stdin { if !msg.allow_stdin {
return Ok(None); return None;
} }
let content = InputRequest { let content = InputRequest {
@ -73,7 +73,7 @@ pub fn op_jupyter_input(
let msg = JupyterMessage::new(content, Some(&last_request)); let msg = JupyterMessage::new(content, Some(&last_request));
let Ok(()) = stdin_connection_proxy.lock().tx.send(msg) else { let Ok(()) = stdin_connection_proxy.lock().tx.send(msg) else {
return Ok(None); return None;
}; };
// Need to spawn a separate thread here, because `blocking_recv()` can't // Need to spawn a separate thread here, because `blocking_recv()` can't
@ -82,17 +82,25 @@ pub fn op_jupyter_input(
stdin_connection_proxy.lock().rx.blocking_recv() stdin_connection_proxy.lock().rx.blocking_recv()
}); });
let Ok(Some(response)) = join_handle.join() else { let Ok(Some(response)) = join_handle.join() else {
return Ok(None); return None;
}; };
let JupyterMessageContent::InputReply(msg) = response.content else { let JupyterMessageContent::InputReply(msg) = response.content else {
return Ok(None); return None;
}; };
return Ok(Some(msg.value)); return Some(msg.value);
} }
Ok(None) None
}
#[derive(Debug, thiserror::Error)]
pub enum JupyterBroadcastError {
#[error(transparent)]
SerdeJson(serde_json::Error),
#[error(transparent)]
ZeroMq(AnyError),
} }
#[op2(async)] #[op2(async)]
@ -102,7 +110,7 @@ pub async fn op_jupyter_broadcast(
#[serde] content: serde_json::Value, #[serde] content: serde_json::Value,
#[serde] metadata: serde_json::Value, #[serde] metadata: serde_json::Value,
#[serde] buffers: Vec<deno_core::JsBuffer>, #[serde] buffers: Vec<deno_core::JsBuffer>,
) -> Result<(), AnyError> { ) -> Result<(), JupyterBroadcastError> {
let (iopub_connection, last_execution_request) = { let (iopub_connection, last_execution_request) = {
let s = state.borrow(); let s = state.borrow();
@ -125,36 +133,35 @@ pub async fn op_jupyter_broadcast(
content, content,
err err
); );
err JupyterBroadcastError::SerdeJson(err)
})?; })?;
let jupyter_message = JupyterMessage::new(content, Some(&last_request)) let jupyter_message = JupyterMessage::new(content, Some(&last_request))
.with_metadata(metadata) .with_metadata(metadata)
.with_buffers(buffers.into_iter().map(|b| b.to_vec().into()).collect()); .with_buffers(buffers.into_iter().map(|b| b.to_vec().into()).collect());
iopub_connection.lock().send(jupyter_message).await?; iopub_connection
.lock()
.send(jupyter_message)
.await
.map_err(JupyterBroadcastError::ZeroMq)?;
} }
Ok(()) Ok(())
} }
#[op2(fast)] #[op2(fast)]
pub fn op_print( pub fn op_print(state: &mut OpState, #[string] msg: &str, is_err: bool) {
state: &mut OpState,
#[string] msg: &str,
is_err: bool,
) -> Result<(), AnyError> {
let sender = state.borrow_mut::<mpsc::UnboundedSender<StreamContent>>(); let sender = state.borrow_mut::<mpsc::UnboundedSender<StreamContent>>();
if is_err { if is_err {
if let Err(err) = sender.send(StreamContent::stderr(msg)) { if let Err(err) = sender.send(StreamContent::stderr(msg)) {
log::error!("Failed to send stderr message: {}", err); log::error!("Failed to send stderr message: {}", err);
} }
return Ok(()); return;
} }
if let Err(err) = sender.send(StreamContent::stdout(msg)) { if let Err(err) = sender.send(StreamContent::stdout(msg)) {
log::error!("Failed to send stdout message: {}", err); log::error!("Failed to send stdout message: {}", err);
} }
Ok(())
} }

View file

@ -51,7 +51,7 @@ struct PermissionsHolder(Uuid, PermissionsContainer);
pub fn op_pledge_test_permissions( pub fn op_pledge_test_permissions(
state: &mut OpState, state: &mut OpState,
#[serde] args: ChildPermissionsArg, #[serde] args: ChildPermissionsArg,
) -> Result<Uuid, AnyError> { ) -> Result<Uuid, deno_runtime::deno_permissions::ChildPermissionError> {
let token = Uuid::new_v4(); let token = Uuid::new_v4();
let parent_permissions = state.borrow_mut::<PermissionsContainer>(); let parent_permissions = state.borrow_mut::<PermissionsContainer>();
let worker_permissions = parent_permissions.create_child_permissions(args)?; let worker_permissions = parent_permissions.create_child_permissions(args)?;
@ -150,7 +150,7 @@ fn op_register_test_step(
#[smi] parent_id: usize, #[smi] parent_id: usize,
#[smi] root_id: usize, #[smi] root_id: usize,
#[string] root_name: String, #[string] root_name: String,
) -> Result<usize, AnyError> { ) -> usize {
let id = NEXT_ID.fetch_add(1, Ordering::SeqCst); let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
let origin = state.borrow::<ModuleSpecifier>().to_string(); let origin = state.borrow::<ModuleSpecifier>().to_string();
let description = TestStepDescription { let description = TestStepDescription {
@ -169,7 +169,7 @@ fn op_register_test_step(
}; };
let sender = state.borrow_mut::<TestEventSender>(); let sender = state.borrow_mut::<TestEventSender>();
sender.send(TestEvent::StepRegister(description)).ok(); sender.send(TestEvent::StepRegister(description)).ok();
Ok(id) id
} }
#[op2(fast)] #[op2(fast)]

View file

@ -4,6 +4,7 @@ use async_trait::async_trait;
use dashmap::DashMap; use dashmap::DashMap;
use dashmap::DashSet; use dashmap::DashSet;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_config::workspace::MappedResolution; use deno_config::workspace::MappedResolution;
use deno_config::workspace::MappedResolutionDiagnostic; use deno_config::workspace::MappedResolutionDiagnostic;
use deno_config::workspace::MappedResolutionError; use deno_config::workspace::MappedResolutionError;
@ -11,6 +12,7 @@ use deno_config::workspace::WorkspaceResolver;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context; use deno_core::anyhow::Context;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_core::ModuleSourceCode; use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode; use deno_graph::source::ResolutionMode;
@ -29,6 +31,7 @@ use deno_runtime::deno_fs;
use deno_runtime::deno_fs::FileSystem; use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_node::is_builtin_node_module; use deno_runtime::deno_node::is_builtin_node_module;
use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJsonResolver;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use node_resolver::errors::ClosestPkgJsonError; use node_resolver::errors::ClosestPkgJsonError;
@ -38,21 +41,22 @@ use node_resolver::errors::PackageFolderResolveErrorKind;
use node_resolver::errors::PackageFolderResolveIoError; use node_resolver::errors::PackageFolderResolveIoError;
use node_resolver::errors::PackageNotFoundError; use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::PackageResolveErrorKind; use node_resolver::errors::PackageResolveErrorKind;
use node_resolver::errors::UrlToNodeResolutionError; use node_resolver::errors::PackageSubpathResolveError;
use node_resolver::InNpmPackageChecker;
use node_resolver::NodeModuleKind; use node_resolver::NodeModuleKind;
use node_resolver::NodeResolution; use node_resolver::NodeResolution;
use node_resolver::NodeResolutionMode; use node_resolver::NodeResolutionMode;
use node_resolver::PackageJson; use std::borrow::Cow;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use thiserror::Error;
use crate::args::JsxImportSourceConfig; use crate::args::JsxImportSourceConfig;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::node::CliNodeCodeTranslator; use crate::node::CliNodeCodeTranslator;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef; use crate::npm::InnerCliNpmResolverRef;
use crate::util::path::specifier_has_extension;
use crate::util::sync::AtomicFlag; use crate::util::sync::AtomicFlag;
use crate::util::text_encoding::from_utf8_lossy_owned; use crate::util::text_encoding::from_utf8_lossy_owned;
@ -104,36 +108,32 @@ impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs {
#[derive(Debug)] #[derive(Debug)]
pub struct CliNodeResolver { pub struct CliNodeResolver {
cjs_resolutions: Arc<CjsResolutionStore>, cjs_tracker: Arc<CjsTracker>,
fs: Arc<dyn deno_fs::FileSystem>, fs: Arc<dyn deno_fs::FileSystem>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
node_resolver: Arc<NodeResolver>, node_resolver: Arc<NodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>, npm_resolver: Arc<dyn CliNpmResolver>,
} }
impl CliNodeResolver { impl CliNodeResolver {
pub fn new( pub fn new(
cjs_resolutions: Arc<CjsResolutionStore>, cjs_tracker: Arc<CjsTracker>,
fs: Arc<dyn deno_fs::FileSystem>, fs: Arc<dyn deno_fs::FileSystem>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
node_resolver: Arc<NodeResolver>, node_resolver: Arc<NodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>, npm_resolver: Arc<dyn CliNpmResolver>,
) -> Self { ) -> Self {
Self { Self {
cjs_resolutions, cjs_tracker,
fs, fs,
in_npm_pkg_checker,
node_resolver, node_resolver,
npm_resolver, npm_resolver,
} }
} }
pub fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool { pub fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
self.npm_resolver.in_npm_package(specifier) self.in_npm_pkg_checker.in_npm_package(specifier)
}
pub fn get_closest_package_json(
&self,
referrer: &ModuleSpecifier,
) -> Result<Option<Arc<PackageJson>>, ClosestPkgJsonError> {
self.node_resolver.get_closest_package_json(referrer)
} }
pub fn resolve_if_for_npm_pkg( pub fn resolve_if_for_npm_pkg(
@ -153,8 +153,7 @@ impl CliNodeResolver {
| NodeResolveErrorKind::UnsupportedEsmUrlScheme(_) | NodeResolveErrorKind::UnsupportedEsmUrlScheme(_)
| NodeResolveErrorKind::DataUrlReferrer(_) | NodeResolveErrorKind::DataUrlReferrer(_)
| NodeResolveErrorKind::TypesNotFound(_) | NodeResolveErrorKind::TypesNotFound(_)
| NodeResolveErrorKind::FinalizeResolution(_) | NodeResolveErrorKind::FinalizeResolution(_) => Err(err.into()),
| NodeResolveErrorKind::UrlToNodeResolution(_) => Err(err.into()),
NodeResolveErrorKind::PackageResolve(err) => { NodeResolveErrorKind::PackageResolve(err) => {
let err = err.into_kind(); let err = err.into_kind();
match err { match err {
@ -216,7 +215,11 @@ impl CliNodeResolver {
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
mode: NodeResolutionMode, mode: NodeResolutionMode,
) -> Result<NodeResolution, NodeResolveError> { ) -> Result<NodeResolution, NodeResolveError> {
let referrer_kind = if self.cjs_resolutions.is_known_cjs(referrer) { let referrer_kind = if self
.cjs_tracker
.is_maybe_cjs(referrer, MediaType::from_specifier(referrer))
.map_err(|err| NodeResolveErrorKind::PackageResolve(err.into()))?
{
NodeModuleKind::Cjs NodeModuleKind::Cjs
} else { } else {
NodeModuleKind::Esm NodeModuleKind::Esm
@ -226,7 +229,7 @@ impl CliNodeResolver {
self self
.node_resolver .node_resolver
.resolve(specifier, referrer, referrer_kind, mode)?; .resolve(specifier, referrer, referrer_kind, mode)?;
Ok(self.handle_node_resolution(res)) Ok(res)
} }
pub fn resolve_req_reference( pub fn resolve_req_reference(
@ -234,7 +237,7 @@ impl CliNodeResolver {
req_ref: &NpmPackageReqReference, req_ref: &NpmPackageReqReference,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
mode: NodeResolutionMode, mode: NodeResolutionMode,
) -> Result<NodeResolution, AnyError> { ) -> Result<ModuleSpecifier, AnyError> {
self.resolve_req_with_sub_path( self.resolve_req_with_sub_path(
req_ref.req(), req_ref.req(),
req_ref.sub_path(), req_ref.sub_path(),
@ -249,7 +252,7 @@ impl CliNodeResolver {
sub_path: Option<&str>, sub_path: Option<&str>,
referrer: &ModuleSpecifier, referrer: &ModuleSpecifier,
mode: NodeResolutionMode, mode: NodeResolutionMode,
) -> Result<NodeResolution, AnyError> { ) -> Result<ModuleSpecifier, AnyError> {
let package_folder = self let package_folder = self
.npm_resolver .npm_resolver
.resolve_pkg_folder_from_deno_module_req(req, referrer)?; .resolve_pkg_folder_from_deno_module_req(req, referrer)?;
@ -260,7 +263,7 @@ impl CliNodeResolver {
mode, mode,
); );
match resolution_result { match resolution_result {
Ok(resolution) => Ok(resolution), Ok(url) => Ok(url),
Err(err) => { Err(err) => {
if self.npm_resolver.as_byonm().is_some() { if self.npm_resolver.as_byonm().is_some() {
let package_json_path = package_folder.join("package.json"); let package_json_path = package_folder.join("package.json");
@ -271,7 +274,7 @@ impl CliNodeResolver {
)); ));
} }
} }
Err(err) Err(err.into())
} }
} }
} }
@ -282,16 +285,13 @@ impl CliNodeResolver {
sub_path: Option<&str>, sub_path: Option<&str>,
maybe_referrer: Option<&ModuleSpecifier>, maybe_referrer: Option<&ModuleSpecifier>,
mode: NodeResolutionMode, mode: NodeResolutionMode,
) -> Result<NodeResolution, AnyError> { ) -> Result<ModuleSpecifier, PackageSubpathResolveError> {
let res = self self.node_resolver.resolve_package_subpath_from_deno_module(
.node_resolver
.resolve_package_subpath_from_deno_module(
package_folder, package_folder,
sub_path, sub_path,
maybe_referrer, maybe_referrer,
mode, mode,
)?; )
Ok(self.handle_node_resolution(res))
} }
pub fn handle_if_in_node_modules( pub fn handle_if_in_node_modules(
@ -306,71 +306,45 @@ impl CliNodeResolver {
// so canoncalize then check if it's in the node_modules directory. // so canoncalize then check if it's in the node_modules directory.
// If so, check if we need to store this specifier as being a CJS // If so, check if we need to store this specifier as being a CJS
// resolution. // resolution.
let specifier = let specifier = crate::node::resolve_specifier_into_node_modules(
crate::node::resolve_specifier_into_node_modules(specifier); specifier,
if self.in_npm_package(&specifier) { self.fs.as_ref(),
let resolution = );
self.node_resolver.url_to_node_resolution(specifier)?; return Ok(Some(specifier));
let resolution = self.handle_node_resolution(resolution);
return Ok(Some(resolution.into_url()));
}
} }
Ok(None) Ok(None)
} }
pub fn url_to_node_resolution(
&self,
specifier: ModuleSpecifier,
) -> Result<NodeResolution, UrlToNodeResolutionError> {
self.node_resolver.url_to_node_resolution(specifier)
}
fn handle_node_resolution(
&self,
resolution: NodeResolution,
) -> NodeResolution {
if let NodeResolution::CommonJs(specifier) = &resolution {
// remember that this was a common js resolution
self.mark_cjs_resolution(specifier.clone());
}
resolution
}
pub fn mark_cjs_resolution(&self, specifier: ModuleSpecifier) {
self.cjs_resolutions.insert(specifier);
}
} }
// todo(dsherret): move to module_loader.rs #[derive(Debug, Error)]
#[error("{media_type} files are not supported in npm packages: {specifier}")]
pub struct NotSupportedKindInNpmError {
pub media_type: MediaType,
pub specifier: Url,
}
// todo(dsherret): move to module_loader.rs (it seems to be here due to use in standalone)
#[derive(Clone)] #[derive(Clone)]
pub struct NpmModuleLoader { pub struct NpmModuleLoader {
cjs_resolutions: Arc<CjsResolutionStore>, cjs_tracker: Arc<CjsTracker>,
node_code_translator: Arc<CliNodeCodeTranslator>,
fs: Arc<dyn deno_fs::FileSystem>, fs: Arc<dyn deno_fs::FileSystem>,
node_resolver: Arc<CliNodeResolver>, node_code_translator: Arc<CliNodeCodeTranslator>,
} }
impl NpmModuleLoader { impl NpmModuleLoader {
pub fn new( pub fn new(
cjs_resolutions: Arc<CjsResolutionStore>, cjs_tracker: Arc<CjsTracker>,
node_code_translator: Arc<CliNodeCodeTranslator>,
fs: Arc<dyn deno_fs::FileSystem>, fs: Arc<dyn deno_fs::FileSystem>,
node_resolver: Arc<CliNodeResolver>, node_code_translator: Arc<CliNodeCodeTranslator>,
) -> Self { ) -> Self {
Self { Self {
cjs_resolutions, cjs_tracker,
node_code_translator, node_code_translator,
fs, fs,
node_resolver,
} }
} }
pub fn if_in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
self.node_resolver.in_npm_package(specifier)
|| self.cjs_resolutions.is_known_cjs(specifier)
}
pub async fn load( pub async fn load(
&self, &self,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
@ -413,20 +387,30 @@ impl NpmModuleLoader {
} }
})?; })?;
let code = if self.cjs_resolutions.is_known_cjs(specifier) { let media_type = MediaType::from_specifier(specifier);
if media_type.is_emittable() {
return Err(AnyError::from(NotSupportedKindInNpmError {
media_type,
specifier: specifier.clone(),
}));
}
let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? {
// translate cjs to esm if it's cjs and inject node globals // translate cjs to esm if it's cjs and inject node globals
let code = from_utf8_lossy_owned(code); let code = from_utf8_lossy_owned(code);
ModuleSourceCode::String( ModuleSourceCode::String(
self self
.node_code_translator .node_code_translator
.translate_cjs_to_esm(specifier, Some(code)) .translate_cjs_to_esm(specifier, Some(Cow::Owned(code)))
.await? .await?
.into_owned()
.into(), .into(),
) )
} else { } else {
// esm and json code is untouched // esm and json code is untouched
ModuleSourceCode::Bytes(code.into_boxed_slice().into()) ModuleSourceCode::Bytes(code.into_boxed_slice().into())
}; };
Ok(ModuleCodeStringSource { Ok(ModuleCodeStringSource {
code, code,
found_url: specifier.clone(), found_url: specifier.clone(),
@ -435,21 +419,165 @@ impl NpmModuleLoader {
} }
} }
pub struct CjsTrackerOptions {
pub unstable_detect_cjs: bool,
}
/// Keeps track of what module specifiers were resolved as CJS. /// Keeps track of what module specifiers were resolved as CJS.
#[derive(Debug, Default)] ///
pub struct CjsResolutionStore(DashSet<ModuleSpecifier>); /// Modules that are `.js` or `.ts` are only known to be CJS or
/// ESM after they're loaded based on their contents. So these files
/// will be "maybe CJS" until they're loaded.
#[derive(Debug)]
pub struct CjsTracker {
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
pkg_json_resolver: Arc<PackageJsonResolver>,
unstable_detect_cjs: bool,
known: DashMap<ModuleSpecifier, ModuleKind>,
}
impl CjsResolutionStore { impl CjsTracker {
pub fn is_known_cjs(&self, specifier: &ModuleSpecifier) -> bool { pub fn new(
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
pkg_json_resolver: Arc<PackageJsonResolver>,
options: CjsTrackerOptions,
) -> Self {
Self {
in_npm_pkg_checker,
pkg_json_resolver,
unstable_detect_cjs: options.unstable_detect_cjs,
known: Default::default(),
}
}
/// Checks whether the file might be treated as CJS, but it's not for sure
/// yet because the source hasn't been loaded to see whether it contains
/// imports or exports.
pub fn is_maybe_cjs(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
) -> Result<bool, ClosestPkgJsonError> {
self.treat_as_cjs_with_is_script(specifier, media_type, None)
}
/// Gets whether the file is CJS. If true, this is for sure
/// cjs because `is_script` is provided.
///
/// `is_script` should be `true` when the contents of the file at the
/// provided specifier are known to be a script and not an ES module.
pub fn is_cjs_with_known_is_script(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
is_script: bool,
) -> Result<bool, ClosestPkgJsonError> {
self.treat_as_cjs_with_is_script(specifier, media_type, Some(is_script))
}
fn treat_as_cjs_with_is_script(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
is_script: Option<bool>,
) -> Result<bool, ClosestPkgJsonError> {
let kind = match self
.get_known_kind_with_is_script(specifier, media_type, is_script)
{
Some(kind) => kind,
None => self.check_based_on_pkg_json(specifier)?,
};
Ok(kind.is_cjs())
}
pub fn get_known_kind(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
) -> Option<ModuleKind> {
self.get_known_kind_with_is_script(specifier, media_type, None)
}
fn get_known_kind_with_is_script(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
is_script: Option<bool>,
) -> Option<ModuleKind> {
if specifier.scheme() != "file" { if specifier.scheme() != "file" {
return false; return Some(ModuleKind::Esm);
} }
specifier_has_extension(specifier, "cjs") || self.0.contains(specifier) match media_type {
MediaType::Mts | MediaType::Mjs | MediaType::Dmts => Some(ModuleKind::Esm),
MediaType::Cjs | MediaType::Cts | MediaType::Dcts => Some(ModuleKind::Cjs),
MediaType::Dts => {
// dts files are always determined based on the package.json because
// they contain imports/exports even when considered CJS
if let Some(value) = self.known.get(specifier).map(|v| *v) {
Some(value)
} else {
let value = self.check_based_on_pkg_json(specifier).ok();
if let Some(value) = value {
self.known.insert(specifier.clone(), value);
}
Some(value.unwrap_or(ModuleKind::Esm))
}
}
MediaType::Wasm |
MediaType::Json => Some(ModuleKind::Esm),
MediaType::JavaScript
| MediaType::Jsx
| MediaType::TypeScript
| MediaType::Tsx
// treat these as unknown
| MediaType::Css
| MediaType::SourceMap
| MediaType::Unknown => {
if let Some(value) = self.known.get(specifier).map(|v| *v) {
if value.is_cjs() && is_script == Some(false) {
// we now know this is actually esm
self.known.insert(specifier.clone(), ModuleKind::Esm);
Some(ModuleKind::Esm)
} else {
Some(value)
}
} else if is_script == Some(false) {
// we know this is esm
self.known.insert(specifier.clone(), ModuleKind::Esm);
Some(ModuleKind::Esm)
} else {
None
}
}
}
} }
pub fn insert(&self, specifier: ModuleSpecifier) { fn check_based_on_pkg_json(
self.0.insert(specifier); &self,
specifier: &ModuleSpecifier,
) -> Result<ModuleKind, ClosestPkgJsonError> {
if self.in_npm_pkg_checker.in_npm_package(specifier) {
if let Some(pkg_json) =
self.pkg_json_resolver.get_closest_package_json(specifier)?
{
let is_file_location_cjs = pkg_json.typ != "module";
Ok(ModuleKind::from_is_cjs(is_file_location_cjs))
} else {
Ok(ModuleKind::Cjs)
}
} else if self.unstable_detect_cjs {
if let Some(pkg_json) =
self.pkg_json_resolver.get_closest_package_json(specifier)?
{
let is_cjs_type = pkg_json.typ == "commonjs";
Ok(ModuleKind::from_is_cjs(is_cjs_type))
} else {
Ok(ModuleKind::Esm)
}
} else {
Ok(ModuleKind::Esm)
}
} }
} }
@ -633,8 +761,7 @@ impl Resolver for CliGraphResolver {
Some(referrer), Some(referrer),
to_node_mode(mode), to_node_mode(mode),
) )
.map_err(ResolveError::Other) .map_err(|e| ResolveError::Other(e.into())),
.map(|res| res.into_url()),
MappedResolution::PackageJson { MappedResolution::PackageJson {
dep_result, dep_result,
alias, alias,
@ -665,7 +792,6 @@ impl Resolver for CliGraphResolver {
) )
.map_err(|e| ResolveError::Other(e.into())) .map_err(|e| ResolveError::Other(e.into()))
.and_then(|pkg_folder| { .and_then(|pkg_folder| {
Ok(
self self
.node_resolver .node_resolver
.as_ref() .as_ref()
@ -675,9 +801,8 @@ impl Resolver for CliGraphResolver {
sub_path.as_deref(), sub_path.as_deref(),
Some(referrer), Some(referrer),
to_node_mode(mode), to_node_mode(mode),
)?
.into_url(),
) )
.map_err(|e| ResolveError::Other(e.into()))
}), }),
}) })
} }
@ -717,23 +842,20 @@ impl Resolver for CliGraphResolver {
npm_req_ref.req(), npm_req_ref.req(),
) )
{ {
return Ok( return node_resolver
node_resolver
.resolve_package_sub_path_from_deno_module( .resolve_package_sub_path_from_deno_module(
pkg_folder, pkg_folder,
npm_req_ref.sub_path(), npm_req_ref.sub_path(),
Some(referrer), Some(referrer),
to_node_mode(mode), to_node_mode(mode),
)? )
.into_url(), .map_err(|e| ResolveError::Other(e.into()));
);
} }
// do npm resolution for byonm // do npm resolution for byonm
if is_byonm { if is_byonm {
return node_resolver return node_resolver
.resolve_req_reference(&npm_req_ref, referrer, to_node_mode(mode)) .resolve_req_reference(&npm_req_ref, referrer, to_node_mode(mode))
.map(|res| res.into_url())
.map_err(|err| err.into()); .map_err(|err| err.into());
} }
} }
@ -751,9 +873,7 @@ impl Resolver for CliGraphResolver {
.map_err(ResolveError::Other)?; .map_err(ResolveError::Other)?;
if let Some(res) = maybe_resolution { if let Some(res) = maybe_resolution {
match res { match res {
NodeResolution::Esm(url) | NodeResolution::CommonJs(url) => { NodeResolution::Module(url) => return Ok(url),
return Ok(url)
}
NodeResolution::BuiltIn(_) => { NodeResolution::BuiltIn(_) => {
// don't resolve bare specifiers for built-in modules via node resolution // don't resolve bare specifiers for built-in modules via node resolution
} }

View file

@ -291,7 +291,7 @@
"type": "array", "type": "array",
"description": "List of tag names that will be run. Empty list disables all tags and will only use rules from `include`.", "description": "List of tag names that will be run. Empty list disables all tags and will only use rules from `include`.",
"items": { "items": {
"type": "string" "$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/tags.v1.json"
}, },
"minItems": 0, "minItems": 0,
"uniqueItems": true "uniqueItems": true
@ -300,7 +300,7 @@
"type": "array", "type": "array",
"description": "List of rule names that will be excluded from configured tag sets. If the same rule is in `include` it will be run.", "description": "List of rule names that will be excluded from configured tag sets. If the same rule is in `include` it will be run.",
"items": { "items": {
"type": "string" "$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json"
}, },
"minItems": 0, "minItems": 0,
"uniqueItems": true "uniqueItems": true
@ -309,7 +309,7 @@
"type": "array", "type": "array",
"description": "List of rule names that will be run. Even if the same rule is in `exclude` it will be run.", "description": "List of rule names that will be run. Even if the same rule is in `exclude` it will be run.",
"items": { "items": {
"type": "string" "$ref": "https://raw.githubusercontent.com/denoland/deno_lint/main/schemas/rules.v1.json"
}, },
"minItems": 0, "minItems": 0,
"uniqueItems": true "uniqueItems": true
@ -531,6 +531,7 @@
"detect-cjs", "detect-cjs",
"ffi", "ffi",
"fs", "fs",
"fmt-component",
"http", "http",
"kv", "kv",
"net", "net",

View file

@ -9,14 +9,19 @@ use std::ffi::OsString;
use std::fs; use std::fs;
use std::fs::File; use std::fs::File;
use std::future::Future; use std::future::Future;
use std::io::ErrorKind;
use std::io::Read; use std::io::Read;
use std::io::Seek; use std::io::Seek;
use std::io::SeekFrom; use std::io::SeekFrom;
use std::io::Write; use std::io::Write;
use std::ops::Range;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::process::Command; use std::process::Command;
use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_config::workspace::PackageJsonDepResolution; use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::ResolverWorkspaceJsrPackage; use deno_config::workspace::ResolverWorkspaceJsrPackage;
@ -30,13 +35,22 @@ use deno_core::futures::AsyncReadExt;
use deno_core::futures::AsyncSeekExt; use deno_core::futures::AsyncSeekExt;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::url::Url; use deno_core::url::Url;
use deno_graph::source::RealFileSystem;
use deno_graph::ModuleGraph;
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmSystemInfo; use deno_npm::NpmSystemInfo;
use deno_runtime::deno_fs;
use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_io::fs::FsError;
use deno_runtime::deno_node::PackageJson; use deno_runtime::deno_node::PackageJson;
use deno_semver::npm::NpmVersionReqParseError; use deno_semver::npm::NpmVersionReqParseError;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
use deno_semver::Version; use deno_semver::Version;
use deno_semver::VersionReqSpecifierParseError; use deno_semver::VersionReqSpecifierParseError;
use eszip::EszipRelativeFileBaseUrl;
use indexmap::IndexMap; use indexmap::IndexMap;
use log::Level; use log::Level;
use serde::Deserialize; use serde::Deserialize;
@ -49,10 +63,12 @@ use crate::args::NpmInstallDepsProvider;
use crate::args::PermissionFlags; use crate::args::PermissionFlags;
use crate::args::UnstableConfig; use crate::args::UnstableConfig;
use crate::cache::DenoDir; use crate::cache::DenoDir;
use crate::emit::Emitter;
use crate::file_fetcher::FileFetcher; use crate::file_fetcher::FileFetcher;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::npm::InnerCliNpmResolverRef; use crate::npm::InnerCliNpmResolverRef;
use crate::resolver::CjsTracker;
use crate::shared::ReleaseChannel; use crate::shared::ReleaseChannel;
use crate::standalone::virtual_fs::VfsEntry; use crate::standalone::virtual_fs::VfsEntry;
use crate::util::archive; use crate::util::archive;
@ -60,12 +76,63 @@ use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle; use crate::util::progress_bar::ProgressBarStyle;
use super::file_system::DenoCompileFileSystem;
use super::serialization::deserialize_binary_data_section;
use super::serialization::serialize_binary_data_section;
use super::serialization::DenoCompileModuleData;
use super::serialization::DeserializedDataSection;
use super::serialization::RemoteModulesStore;
use super::serialization::RemoteModulesStoreBuilder;
use super::virtual_fs::FileBackedVfs; use super::virtual_fs::FileBackedVfs;
use super::virtual_fs::VfsBuilder; use super::virtual_fs::VfsBuilder;
use super::virtual_fs::VfsRoot; use super::virtual_fs::VfsRoot;
use super::virtual_fs::VirtualDirectory; use super::virtual_fs::VirtualDirectory;
const MAGIC_TRAILER: &[u8; 8] = b"d3n0l4nd"; /// A URL that can be designated as the base for relative URLs.
///
/// After creation, this URL may be used to get the key for a
/// module in the binary.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StandaloneRelativeFileBaseUrl<'a>(&'a Url);
impl<'a> From<&'a Url> for StandaloneRelativeFileBaseUrl<'a> {
fn from(url: &'a Url) -> Self {
Self(url)
}
}
impl<'a> StandaloneRelativeFileBaseUrl<'a> {
pub fn new(url: &'a Url) -> Self {
debug_assert_eq!(url.scheme(), "file");
Self(url)
}
/// Gets the module map key of the provided specifier.
///
/// * Descendant file specifiers will be made relative to the base.
/// * Non-descendant file specifiers will stay as-is (absolute).
/// * Non-file specifiers will stay as-is.
pub fn specifier_key<'b>(&self, target: &'b Url) -> Cow<'b, str> {
if target.scheme() != "file" {
return Cow::Borrowed(target.as_str());
}
match self.0.make_relative(target) {
Some(relative) => {
if relative.starts_with("../") {
Cow::Borrowed(target.as_str())
} else {
Cow::Owned(relative)
}
}
None => Cow::Borrowed(target.as_str()),
}
}
pub fn inner(&self) -> &Url {
self.0
}
}
#[derive(Deserialize, Serialize)] #[derive(Deserialize, Serialize)]
pub enum NodeModules { pub enum NodeModules {
@ -120,78 +187,23 @@ pub struct Metadata {
pub unstable_config: UnstableConfig, pub unstable_config: UnstableConfig,
} }
pub fn load_npm_vfs(root_dir_path: PathBuf) -> Result<FileBackedVfs, AnyError> {
let data = libsui::find_section("d3n0l4nd").unwrap();
// We do the first part sync so it can complete quickly
let trailer: [u8; TRAILER_SIZE] = data[0..TRAILER_SIZE].try_into().unwrap();
let trailer = match Trailer::parse(&trailer)? {
None => panic!("Could not find trailer"),
Some(trailer) => trailer,
};
let data = &data[TRAILER_SIZE..];
let vfs_data =
&data[trailer.npm_vfs_pos as usize..trailer.npm_files_pos as usize];
let mut dir: VirtualDirectory = serde_json::from_slice(vfs_data)?;
// align the name of the directory with the root dir
dir.name = root_dir_path
.file_name()
.unwrap()
.to_string_lossy()
.to_string();
let fs_root = VfsRoot {
dir,
root_path: root_dir_path,
start_file_offset: trailer.npm_files_pos,
};
Ok(FileBackedVfs::new(data.to_vec(), fs_root))
}
fn write_binary_bytes( fn write_binary_bytes(
mut file_writer: File, mut file_writer: File,
original_bin: Vec<u8>, original_bin: Vec<u8>,
metadata: &Metadata, metadata: &Metadata,
eszip: eszip::EszipV2, npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
npm_vfs: Option<&VirtualDirectory>, remote_modules: &RemoteModulesStoreBuilder,
npm_files: &Vec<Vec<u8>>, vfs: VfsBuilder,
compile_flags: &CompileFlags, compile_flags: &CompileFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let metadata = serde_json::to_string(metadata)?.as_bytes().to_vec(); let data_section_bytes =
let npm_vfs = serde_json::to_string(&npm_vfs)?.as_bytes().to_vec(); serialize_binary_data_section(metadata, npm_snapshot, remote_modules, vfs)?;
let eszip_archive = eszip.into_bytes();
let mut writer = Vec::new();
// write the trailer, which includes the positions
// of the data blocks in the file
writer.write_all(&{
let metadata_pos = eszip_archive.len() as u64;
let npm_vfs_pos = metadata_pos + (metadata.len() as u64);
let npm_files_pos = npm_vfs_pos + (npm_vfs.len() as u64);
Trailer {
eszip_pos: 0,
metadata_pos,
npm_vfs_pos,
npm_files_pos,
}
.as_bytes()
})?;
writer.write_all(&eszip_archive)?;
writer.write_all(&metadata)?;
writer.write_all(&npm_vfs)?;
for file in npm_files {
writer.write_all(file)?;
}
let target = compile_flags.resolve_target(); let target = compile_flags.resolve_target();
if target.contains("linux") { if target.contains("linux") {
libsui::Elf::new(&original_bin).append( libsui::Elf::new(&original_bin).append(
"d3n0l4nd", "d3n0l4nd",
&writer, &data_section_bytes,
&mut file_writer, &mut file_writer,
)?; )?;
} else if target.contains("windows") { } else if target.contains("windows") {
@ -201,11 +213,11 @@ fn write_binary_bytes(
pe = pe.set_icon(&icon)?; pe = pe.set_icon(&icon)?;
} }
pe.write_resource("d3n0l4nd", writer)? pe.write_resource("d3n0l4nd", data_section_bytes)?
.build(&mut file_writer)?; .build(&mut file_writer)?;
} else if target.contains("darwin") { } else if target.contains("darwin") {
libsui::Macho::from(original_bin)? libsui::Macho::from(original_bin)?
.write_section("d3n0l4nd", writer)? .write_section("d3n0l4nd", data_section_bytes)?
.build_and_sign(&mut file_writer)?; .build_and_sign(&mut file_writer)?;
} }
Ok(()) Ok(())
@ -221,6 +233,67 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool {
|| libsui::utils::is_macho(&data) || libsui::utils::is_macho(&data)
} }
pub struct StandaloneData {
pub fs: Arc<dyn deno_fs::FileSystem>,
pub metadata: Metadata,
pub modules: StandaloneModules,
pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
pub root_path: PathBuf,
pub vfs: Arc<FileBackedVfs>,
}
pub struct StandaloneModules {
remote_modules: RemoteModulesStore,
vfs: Arc<FileBackedVfs>,
}
impl StandaloneModules {
pub fn resolve_specifier<'a>(
&'a self,
specifier: &'a ModuleSpecifier,
) -> Result<Option<&'a ModuleSpecifier>, AnyError> {
if specifier.scheme() == "file" {
Ok(Some(specifier))
} else {
self.remote_modules.resolve_specifier(specifier)
}
}
pub fn has_file(&self, path: &Path) -> bool {
self.vfs.file_entry(path).is_ok()
}
pub fn read<'a>(
&'a self,
specifier: &'a ModuleSpecifier,
) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
if specifier.scheme() == "file" {
let path = deno_path_util::url_to_file_path(specifier)?;
let bytes = match self.vfs.file_entry(&path) {
Ok(entry) => self.vfs.read_file_all(entry)?,
Err(err) if err.kind() == ErrorKind::NotFound => {
let bytes = match RealFs.read_file_sync(&path, None) {
Ok(bytes) => bytes,
Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => {
return Ok(None)
}
Err(err) => return Err(err.into()),
};
Cow::Owned(bytes)
}
Err(err) => return Err(err.into()),
};
Ok(Some(DenoCompileModuleData {
media_type: MediaType::from_specifier(specifier),
specifier,
data: bytes,
}))
} else {
self.remote_modules.read(specifier)
}
}
}
/// This function will try to run this binary as a standalone binary /// This function will try to run this binary as a standalone binary
/// produced by `deno compile`. It determines if this is a standalone /// produced by `deno compile`. It determines if this is a standalone
/// binary by skipping over the trailer width at the end of the file, /// binary by skipping over the trailer width at the end of the file,
@ -228,110 +301,67 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool {
/// the bundle is executed. If not, this function exits with `Ok(None)`. /// the bundle is executed. If not, this function exits with `Ok(None)`.
pub fn extract_standalone( pub fn extract_standalone(
cli_args: Cow<Vec<OsString>>, cli_args: Cow<Vec<OsString>>,
) -> Result< ) -> Result<Option<StandaloneData>, AnyError> {
Option<impl Future<Output = Result<(Metadata, eszip::EszipV2), AnyError>>>,
AnyError,
> {
let Some(data) = libsui::find_section("d3n0l4nd") else { let Some(data) = libsui::find_section("d3n0l4nd") else {
return Ok(None); return Ok(None);
}; };
// We do the first part sync so it can complete quickly let DeserializedDataSection {
let trailer = match Trailer::parse(&data[0..TRAILER_SIZE])? { mut metadata,
npm_snapshot,
remote_modules,
mut vfs_dir,
vfs_files_data,
} = match deserialize_binary_data_section(data)? {
Some(data_section) => data_section,
None => return Ok(None), None => return Ok(None),
Some(trailer) => trailer,
}; };
let root_path = {
let maybe_current_exe = std::env::current_exe().ok();
let current_exe_name = maybe_current_exe
.as_ref()
.and_then(|p| p.file_name())
.map(|p| p.to_string_lossy())
// should never happen
.unwrap_or_else(|| Cow::Borrowed("binary"));
std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name))
};
let cli_args = cli_args.into_owned(); let cli_args = cli_args.into_owned();
// If we have an eszip, read it out
Ok(Some(async move {
let bufreader =
deno_core::futures::io::BufReader::new(&data[TRAILER_SIZE..]);
let (eszip, loader) = eszip::EszipV2::parse(bufreader)
.await
.context("Failed to parse eszip header")?;
let bufreader = loader.await.context("Failed to parse eszip archive")?;
let mut metadata = String::new();
bufreader
.take(trailer.metadata_len())
.read_to_string(&mut metadata)
.await
.context("Failed to read metadata from the current executable")?;
let mut metadata: Metadata = serde_json::from_str(&metadata).unwrap();
metadata.argv.reserve(cli_args.len() - 1); metadata.argv.reserve(cli_args.len() - 1);
for arg in cli_args.into_iter().skip(1) { for arg in cli_args.into_iter().skip(1) {
metadata.argv.push(arg.into_string().unwrap()); metadata.argv.push(arg.into_string().unwrap());
} }
let vfs = {
// align the name of the directory with the root dir
vfs_dir.name = root_path.file_name().unwrap().to_string_lossy().to_string();
Ok((metadata, eszip)) let fs_root = VfsRoot {
dir: vfs_dir,
root_path: root_path.clone(),
start_file_offset: 0,
};
Arc::new(FileBackedVfs::new(Cow::Borrowed(vfs_files_data), fs_root))
};
let fs: Arc<dyn deno_fs::FileSystem> =
Arc::new(DenoCompileFileSystem::new(vfs.clone()));
Ok(Some(StandaloneData {
fs,
metadata,
modules: StandaloneModules {
remote_modules,
vfs: vfs.clone(),
},
npm_snapshot,
root_path,
vfs,
})) }))
} }
const TRAILER_SIZE: usize = std::mem::size_of::<Trailer>() + 8; // 8 bytes for the magic trailer string
struct Trailer {
eszip_pos: u64,
metadata_pos: u64,
npm_vfs_pos: u64,
npm_files_pos: u64,
}
impl Trailer {
pub fn parse(trailer: &[u8]) -> Result<Option<Trailer>, AnyError> {
let (magic_trailer, rest) = trailer.split_at(8);
if magic_trailer != MAGIC_TRAILER {
return Ok(None);
}
let (eszip_archive_pos, rest) = rest.split_at(8);
let (metadata_pos, rest) = rest.split_at(8);
let (npm_vfs_pos, npm_files_pos) = rest.split_at(8);
let eszip_archive_pos = u64_from_bytes(eszip_archive_pos)?;
let metadata_pos = u64_from_bytes(metadata_pos)?;
let npm_vfs_pos = u64_from_bytes(npm_vfs_pos)?;
let npm_files_pos = u64_from_bytes(npm_files_pos)?;
Ok(Some(Trailer {
eszip_pos: eszip_archive_pos,
metadata_pos,
npm_vfs_pos,
npm_files_pos,
}))
}
pub fn metadata_len(&self) -> u64 {
self.npm_vfs_pos - self.metadata_pos
}
pub fn npm_vfs_len(&self) -> u64 {
self.npm_files_pos - self.npm_vfs_pos
}
pub fn as_bytes(&self) -> Vec<u8> {
let mut trailer = MAGIC_TRAILER.to_vec();
trailer.write_all(&self.eszip_pos.to_be_bytes()).unwrap();
trailer.write_all(&self.metadata_pos.to_be_bytes()).unwrap();
trailer.write_all(&self.npm_vfs_pos.to_be_bytes()).unwrap();
trailer
.write_all(&self.npm_files_pos.to_be_bytes())
.unwrap();
trailer
}
}
fn u64_from_bytes(arr: &[u8]) -> Result<u64, AnyError> {
let fixed_arr: &[u8; 8] = arr
.try_into()
.context("Failed to convert the buffer into a fixed-size array")?;
Ok(u64::from_be_bytes(*fixed_arr))
}
pub struct DenoCompileBinaryWriter<'a> { pub struct DenoCompileBinaryWriter<'a> {
cjs_tracker: &'a CjsTracker,
deno_dir: &'a DenoDir, deno_dir: &'a DenoDir,
emitter: &'a Emitter,
file_fetcher: &'a FileFetcher, file_fetcher: &'a FileFetcher,
http_client_provider: &'a HttpClientProvider, http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a dyn CliNpmResolver, npm_resolver: &'a dyn CliNpmResolver,
@ -342,7 +372,9 @@ pub struct DenoCompileBinaryWriter<'a> {
impl<'a> DenoCompileBinaryWriter<'a> { impl<'a> DenoCompileBinaryWriter<'a> {
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn new( pub fn new(
cjs_tracker: &'a CjsTracker,
deno_dir: &'a DenoDir, deno_dir: &'a DenoDir,
emitter: &'a Emitter,
file_fetcher: &'a FileFetcher, file_fetcher: &'a FileFetcher,
http_client_provider: &'a HttpClientProvider, http_client_provider: &'a HttpClientProvider,
npm_resolver: &'a dyn CliNpmResolver, npm_resolver: &'a dyn CliNpmResolver,
@ -350,7 +382,9 @@ impl<'a> DenoCompileBinaryWriter<'a> {
npm_system_info: NpmSystemInfo, npm_system_info: NpmSystemInfo,
) -> Self { ) -> Self {
Self { Self {
cjs_tracker,
deno_dir, deno_dir,
emitter,
file_fetcher, file_fetcher,
http_client_provider, http_client_provider,
npm_resolver, npm_resolver,
@ -362,8 +396,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
pub async fn write_bin( pub async fn write_bin(
&self, &self,
writer: File, writer: File,
eszip: eszip::EszipV2, graph: &ModuleGraph,
root_dir_url: EszipRelativeFileBaseUrl<'_>, root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
entrypoint: &ModuleSpecifier, entrypoint: &ModuleSpecifier,
compile_flags: &CompileFlags, compile_flags: &CompileFlags,
cli_options: &CliOptions, cli_options: &CliOptions,
@ -390,15 +424,17 @@ impl<'a> DenoCompileBinaryWriter<'a> {
) )
} }
} }
self.write_standalone_binary( self
.write_standalone_binary(
writer, writer,
original_binary, original_binary,
eszip, graph,
root_dir_url, root_dir_url,
entrypoint, entrypoint,
cli_options, cli_options,
compile_flags, compile_flags,
) )
.await
} }
async fn get_base_binary( async fn get_base_binary(
@ -493,12 +529,12 @@ impl<'a> DenoCompileBinaryWriter<'a> {
/// This functions creates a standalone deno binary by appending a bundle /// This functions creates a standalone deno binary by appending a bundle
/// and magic trailer to the currently executing binary. /// and magic trailer to the currently executing binary.
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
fn write_standalone_binary( async fn write_standalone_binary(
&self, &self,
writer: File, writer: File,
original_bin: Vec<u8>, original_bin: Vec<u8>,
mut eszip: eszip::EszipV2, graph: &ModuleGraph,
root_dir_url: EszipRelativeFileBaseUrl<'_>, root_dir_url: StandaloneRelativeFileBaseUrl<'_>,
entrypoint: &ModuleSpecifier, entrypoint: &ModuleSpecifier,
cli_options: &CliOptions, cli_options: &CliOptions,
compile_flags: &CompileFlags, compile_flags: &CompileFlags,
@ -512,19 +548,17 @@ impl<'a> DenoCompileBinaryWriter<'a> {
None => None, None => None,
}; };
let root_path = root_dir_url.inner().to_file_path().unwrap(); let root_path = root_dir_url.inner().to_file_path().unwrap();
let (npm_vfs, npm_files, node_modules) = match self.npm_resolver.as_inner() let (maybe_npm_vfs, node_modules, npm_snapshot) = match self
.npm_resolver
.as_inner()
{ {
InnerCliNpmResolverRef::Managed(managed) => { InnerCliNpmResolverRef::Managed(managed) => {
let snapshot = let snapshot =
managed.serialized_valid_snapshot_for_system(&self.npm_system_info); managed.serialized_valid_snapshot_for_system(&self.npm_system_info);
if !snapshot.as_serialized().packages.is_empty() { if !snapshot.as_serialized().packages.is_empty() {
let (root_dir, files) = self let npm_vfs_builder = self.build_npm_vfs(&root_path, cli_options)?;
.build_vfs(&root_path, cli_options)?
.into_dir_and_files();
eszip.add_npm_snapshot(snapshot);
( (
Some(root_dir), Some(npm_vfs_builder),
files,
Some(NodeModules::Managed { Some(NodeModules::Managed {
node_modules_dir: self.npm_resolver.root_node_modules_path().map( node_modules_dir: self.npm_resolver.root_node_modules_path().map(
|path| { |path| {
@ -536,18 +570,16 @@ impl<'a> DenoCompileBinaryWriter<'a> {
}, },
), ),
}), }),
Some(snapshot),
) )
} else { } else {
(None, Vec::new(), None) (None, None, None)
} }
} }
InnerCliNpmResolverRef::Byonm(resolver) => { InnerCliNpmResolverRef::Byonm(resolver) => {
let (root_dir, files) = self let npm_vfs_builder = self.build_npm_vfs(&root_path, cli_options)?;
.build_vfs(&root_path, cli_options)?
.into_dir_and_files();
( (
Some(root_dir), Some(npm_vfs_builder),
files,
Some(NodeModules::Byonm { Some(NodeModules::Byonm {
root_node_modules_dir: resolver.root_node_modules_path().map( root_node_modules_dir: resolver.root_node_modules_path().map(
|node_modules_dir| { |node_modules_dir| {
@ -560,9 +592,69 @@ impl<'a> DenoCompileBinaryWriter<'a> {
}, },
), ),
}), }),
None,
) )
} }
}; };
let mut vfs = if let Some(npm_vfs) = maybe_npm_vfs {
npm_vfs
} else {
VfsBuilder::new(root_path.clone())?
};
let mut remote_modules_store = RemoteModulesStoreBuilder::default();
for module in graph.modules() {
if module.specifier().scheme() == "data" {
continue; // don't store data urls as an entry as they're in the code
}
let (maybe_source, media_type) = match module {
deno_graph::Module::Js(m) => {
let source = if m.media_type.is_emittable() {
let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
&m.specifier,
m.media_type,
m.is_script,
)?;
let module_kind = ModuleKind::from_is_cjs(is_cjs);
let source = self
.emitter
.emit_parsed_source(
&m.specifier,
m.media_type,
module_kind,
&m.source,
)
.await?;
source.into_bytes()
} else {
m.source.as_bytes().to_vec()
};
(Some(source), m.media_type)
}
deno_graph::Module::Json(m) => {
(Some(m.source.as_bytes().to_vec()), m.media_type)
}
deno_graph::Module::Npm(_)
| deno_graph::Module::Node(_)
| deno_graph::Module::External(_) => (None, MediaType::Unknown),
};
if module.specifier().scheme() == "file" {
let file_path = deno_path_util::url_to_file_path(module.specifier())?;
vfs
.add_file_with_data(
&file_path,
match maybe_source {
Some(source) => source,
None => RealFs.read_file_sync(&file_path, None)?,
},
)
.with_context(|| {
format!("Failed adding '{}'", file_path.display())
})?;
} else if let Some(source) = maybe_source {
remote_modules_store.add(module.specifier(), media_type, source);
}
}
remote_modules_store.add_redirects(&graph.redirects);
let env_vars_from_env_file = match cli_options.env_file_name() { let env_vars_from_env_file = match cli_options.env_file_name() {
Some(env_filename) => { Some(env_filename) => {
@ -636,14 +728,14 @@ impl<'a> DenoCompileBinaryWriter<'a> {
writer, writer,
original_bin, original_bin,
&metadata, &metadata,
eszip, npm_snapshot.map(|s| s.into_serialized()),
npm_vfs.as_ref(), &remote_modules_store,
&npm_files, vfs,
compile_flags, compile_flags,
) )
} }
fn build_vfs( fn build_npm_vfs(
&self, &self,
root_path: &Path, root_path: &Path,
cli_options: &CliOptions, cli_options: &CliOptions,
@ -664,8 +756,9 @@ impl<'a> DenoCompileBinaryWriter<'a> {
} else { } else {
// DO NOT include the user's registry url as it may contain credentials, // DO NOT include the user's registry url as it may contain credentials,
// but also don't make this dependent on the registry url // but also don't make this dependent on the registry url
let root_path = npm_resolver.global_cache_root_folder(); let global_cache_root_path = npm_resolver.global_cache_root_path();
let mut builder = VfsBuilder::new(root_path)?; let mut builder =
VfsBuilder::new(global_cache_root_path.to_path_buf())?;
let mut packages = let mut packages =
npm_resolver.all_system_packages(&self.npm_system_info); npm_resolver.all_system_packages(&self.npm_system_info);
packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism
@ -675,12 +768,12 @@ impl<'a> DenoCompileBinaryWriter<'a> {
builder.add_dir_recursive(&folder)?; builder.add_dir_recursive(&folder)?;
} }
// Flatten all the registries folders into a single "node_modules/localhost" folder // Flatten all the registries folders into a single ".deno_compile_node_modules/localhost" folder
// that will be used by denort when loading the npm cache. This avoids us exposing // that will be used by denort when loading the npm cache. This avoids us exposing
// the user's private registry information and means we don't have to bother // the user's private registry information and means we don't have to bother
// serializing all the different registry config into the binary. // serializing all the different registry config into the binary.
builder.with_root_dir(|root_dir| { builder.with_root_dir(|root_dir| {
root_dir.name = "node_modules".to_string(); root_dir.name = ".deno_compile_node_modules".to_string();
let mut new_entries = Vec::with_capacity(root_dir.entries.len()); let mut new_entries = Vec::with_capacity(root_dir.entries.len());
let mut localhost_entries = IndexMap::new(); let mut localhost_entries = IndexMap::new();
for entry in std::mem::take(&mut root_dir.entries) { for entry in std::mem::take(&mut root_dir.entries) {
@ -715,6 +808,8 @@ impl<'a> DenoCompileBinaryWriter<'a> {
root_dir.entries = new_entries; root_dir.entries = new_entries;
}); });
builder.set_new_root_path(root_path.to_path_buf())?;
Ok(builder) Ok(builder)
} }
} }

View file

@ -22,8 +22,8 @@ use super::virtual_fs::FileBackedVfs;
pub struct DenoCompileFileSystem(Arc<FileBackedVfs>); pub struct DenoCompileFileSystem(Arc<FileBackedVfs>);
impl DenoCompileFileSystem { impl DenoCompileFileSystem {
pub fn new(vfs: FileBackedVfs) -> Self { pub fn new(vfs: Arc<FileBackedVfs>) -> Self {
Self(Arc::new(vfs)) Self(vfs)
} }
fn error_if_in_vfs(&self, path: &Path) -> FsResult<()> { fn error_if_in_vfs(&self, path: &Path) -> FsResult<()> {

View file

@ -5,6 +5,8 @@
#![allow(dead_code)] #![allow(dead_code)]
#![allow(unused_imports)] #![allow(unused_imports)]
use binary::StandaloneData;
use binary::StandaloneModules;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_cache_dir::npm::NpmCacheDir; use deno_cache_dir::npm::NpmCacheDir;
use deno_config::workspace::MappedResolution; use deno_config::workspace::MappedResolution;
@ -17,6 +19,7 @@ use deno_core::error::type_error;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::FutureExt; use deno_core::futures::FutureExt;
use deno_core::v8_set_flags; use deno_core::v8_set_flags;
use deno_core::FastString;
use deno_core::FeatureChecker; use deno_core::FeatureChecker;
use deno_core::ModuleLoader; use deno_core::ModuleLoader;
use deno_core::ModuleSourceCode; use deno_core::ModuleSourceCode;
@ -28,7 +31,9 @@ use deno_npm::npm_rc::ResolvedNpmRc;
use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonDepValue;
use deno_runtime::deno_fs; use deno_runtime::deno_fs;
use deno_runtime::deno_node::create_host_defined_options; use deno_runtime::deno_node::create_host_defined_options;
use deno_runtime::deno_node::NodeRequireLoader;
use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJsonResolver;
use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_tls::rustls::RootCertStore;
@ -38,10 +43,10 @@ use deno_runtime::permissions::RuntimePermissionDescriptorParser;
use deno_runtime::WorkerExecutionMode; use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel; use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use eszip::EszipRelativeFileBaseUrl;
use import_map::parse_from_json; use import_map::parse_from_json;
use node_resolver::analyze::NodeCodeTranslator; use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::NodeResolutionMode; use node_resolver::NodeResolutionMode;
use serialization::DenoCompileModuleSource;
use std::borrow::Cow; use std::borrow::Cow;
use std::rc::Rc; use std::rc::Rc;
use std::sync::Arc; use std::sync::Arc;
@ -54,17 +59,24 @@ use crate::args::CacheSetting;
use crate::args::NpmInstallDepsProvider; use crate::args::NpmInstallDepsProvider;
use crate::args::StorageKeyResolver; use crate::args::StorageKeyResolver;
use crate::cache::Caches; use crate::cache::Caches;
use crate::cache::DenoCacheEnvFsAdapter;
use crate::cache::DenoDirProvider; use crate::cache::DenoDirProvider;
use crate::cache::NodeAnalysisCache; use crate::cache::NodeAnalysisCache;
use crate::cache::RealDenoCacheEnv; use crate::cache::RealDenoCacheEnv;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::node::CliCjsCodeAnalyzer; use crate::node::CliCjsCodeAnalyzer;
use crate::node::CliNodeCodeTranslator;
use crate::npm::create_cli_npm_resolver; use crate::npm::create_cli_npm_resolver;
use crate::npm::create_in_npm_pkg_checker;
use crate::npm::CliByonmNpmResolverCreateOptions; use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliManagedInNpmPkgCheckerCreateOptions;
use crate::npm::CliManagedNpmResolverCreateOptions;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions; use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::resolver::CjsResolutionStore; use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::resolver::CjsTracker;
use crate::resolver::CjsTrackerOptions;
use crate::resolver::CliDenoResolverFs; use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliNodeResolver; use crate::resolver::CliNodeResolver;
use crate::resolver::NpmModuleLoader; use crate::resolver::NpmModuleLoader;
@ -73,60 +85,30 @@ use crate::util::progress_bar::ProgressBarStyle;
use crate::util::v8::construct_v8_flags; use crate::util::v8::construct_v8_flags;
use crate::worker::CliMainWorkerFactory; use crate::worker::CliMainWorkerFactory;
use crate::worker::CliMainWorkerOptions; use crate::worker::CliMainWorkerOptions;
use crate::worker::ModuleLoaderAndSourceMapGetter; use crate::worker::CreateModuleLoaderResult;
use crate::worker::ModuleLoaderFactory; use crate::worker::ModuleLoaderFactory;
pub mod binary; pub mod binary;
mod file_system; mod file_system;
mod serialization;
mod virtual_fs; mod virtual_fs;
pub use binary::extract_standalone; pub use binary::extract_standalone;
pub use binary::is_standalone_binary; pub use binary::is_standalone_binary;
pub use binary::DenoCompileBinaryWriter; pub use binary::DenoCompileBinaryWriter;
use self::binary::load_npm_vfs;
use self::binary::Metadata; use self::binary::Metadata;
use self::file_system::DenoCompileFileSystem; use self::file_system::DenoCompileFileSystem;
struct WorkspaceEszipModule {
specifier: ModuleSpecifier,
inner: eszip::Module,
}
struct WorkspaceEszip {
eszip: eszip::EszipV2,
root_dir_url: Arc<ModuleSpecifier>,
}
impl WorkspaceEszip {
pub fn get_module(
&self,
specifier: &ModuleSpecifier,
) -> Option<WorkspaceEszipModule> {
if specifier.scheme() == "file" {
let specifier_key = EszipRelativeFileBaseUrl::new(&self.root_dir_url)
.specifier_key(specifier);
let module = self.eszip.get_module(&specifier_key)?;
let specifier = self.root_dir_url.join(&module.specifier).unwrap();
Some(WorkspaceEszipModule {
specifier,
inner: module,
})
} else {
let module = self.eszip.get_module(specifier.as_str())?;
Some(WorkspaceEszipModule {
specifier: ModuleSpecifier::parse(&module.specifier).unwrap(),
inner: module,
})
}
}
}
struct SharedModuleLoaderState { struct SharedModuleLoaderState {
eszip: WorkspaceEszip, cjs_tracker: Arc<CjsTracker>,
workspace_resolver: WorkspaceResolver, fs: Arc<dyn deno_fs::FileSystem>,
modules: StandaloneModules,
node_code_translator: Arc<CliNodeCodeTranslator>,
node_resolver: Arc<CliNodeResolver>, node_resolver: Arc<CliNodeResolver>,
npm_module_loader: Arc<NpmModuleLoader>, npm_module_loader: Arc<NpmModuleLoader>,
npm_resolver: Arc<dyn CliNpmResolver>,
workspace_resolver: WorkspaceResolver,
} }
#[derive(Clone)] #[derive(Clone)]
@ -134,6 +116,12 @@ struct EmbeddedModuleLoader {
shared: Arc<SharedModuleLoaderState>, shared: Arc<SharedModuleLoaderState>,
} }
impl std::fmt::Debug for EmbeddedModuleLoader {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("EmbeddedModuleLoader").finish()
}
}
pub const MODULE_NOT_FOUND: &str = "Module not found"; pub const MODULE_NOT_FOUND: &str = "Module not found";
pub const UNSUPPORTED_SCHEME: &str = "Unsupported scheme"; pub const UNSUPPORTED_SCHEME: &str = "Unsupported scheme";
@ -191,8 +179,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
sub_path.as_deref(), sub_path.as_deref(),
Some(&referrer), Some(&referrer),
NodeResolutionMode::Execution, NodeResolutionMode::Execution,
)? )?,
.into_url(),
), ),
Ok(MappedResolution::PackageJson { Ok(MappedResolution::PackageJson {
dep_result, dep_result,
@ -200,16 +187,14 @@ impl ModuleLoader for EmbeddedModuleLoader {
alias, alias,
.. ..
}) => match dep_result.as_ref().map_err(|e| AnyError::from(e.clone()))? { }) => match dep_result.as_ref().map_err(|e| AnyError::from(e.clone()))? {
PackageJsonDepValue::Req(req) => self PackageJsonDepValue::Req(req) => {
.shared self.shared.node_resolver.resolve_req_with_sub_path(
.node_resolver
.resolve_req_with_sub_path(
req, req,
sub_path.as_deref(), sub_path.as_deref(),
&referrer, &referrer,
NodeResolutionMode::Execution, NodeResolutionMode::Execution,
) )
.map(|res| res.into_url()), }
PackageJsonDepValue::Workspace(version_req) => { PackageJsonDepValue::Workspace(version_req) => {
let pkg_folder = self let pkg_folder = self
.shared .shared
@ -227,8 +212,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
sub_path.as_deref(), sub_path.as_deref(),
Some(&referrer), Some(&referrer),
NodeResolutionMode::Execution, NodeResolutionMode::Execution,
)? )?,
.into_url(),
) )
} }
}, },
@ -237,20 +221,18 @@ impl ModuleLoader for EmbeddedModuleLoader {
if let Ok(reference) = if let Ok(reference) =
NpmPackageReqReference::from_specifier(&specifier) NpmPackageReqReference::from_specifier(&specifier)
{ {
return self return self.shared.node_resolver.resolve_req_reference(
.shared
.node_resolver
.resolve_req_reference(
&reference, &reference,
&referrer, &referrer,
NodeResolutionMode::Execution, NodeResolutionMode::Execution,
) );
.map(|res| res.into_url());
} }
if specifier.scheme() == "jsr" { if specifier.scheme() == "jsr" {
if let Some(module) = self.shared.eszip.get_module(&specifier) { if let Some(specifier) =
return Ok(module.specifier); self.shared.modules.resolve_specifier(&specifier)?
{
return Ok(specifier.clone());
} }
} }
@ -345,82 +327,140 @@ impl ModuleLoader for EmbeddedModuleLoader {
); );
} }
let Some(module) = self.shared.eszip.get_module(original_specifier) else { match self.shared.modules.read(original_specifier) {
return deno_core::ModuleLoadResponse::Sync(Err(type_error(format!( Ok(Some(module)) => {
"{MODULE_NOT_FOUND}: {}", let media_type = module.media_type;
original_specifier let (module_specifier, module_type, module_source) =
)))); module.into_parts();
let is_maybe_cjs = match self
.shared
.cjs_tracker
.is_maybe_cjs(original_specifier, media_type)
{
Ok(is_maybe_cjs) => is_maybe_cjs,
Err(err) => {
return deno_core::ModuleLoadResponse::Sync(Err(type_error(
format!("{:?}", err),
)));
}
}; };
if is_maybe_cjs {
let original_specifier = original_specifier.clone(); let original_specifier = original_specifier.clone();
let module_specifier = module_specifier.clone();
let shared = self.shared.clone();
deno_core::ModuleLoadResponse::Async( deno_core::ModuleLoadResponse::Async(
async move { async move {
let code = module.inner.source().await.ok_or_else(|| { let source = match module_source {
type_error(format!("Module not found: {}", original_specifier)) DenoCompileModuleSource::String(string) => {
})?; Cow::Borrowed(string)
let code = arc_u8_to_arc_str(code) }
.map_err(|_| type_error("Module source is not utf-8"))?; DenoCompileModuleSource::Bytes(module_code_bytes) => {
match module_code_bytes {
Cow::Owned(bytes) => Cow::Owned(
crate::util::text_encoding::from_utf8_lossy_owned(bytes),
),
Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes),
}
}
};
let source = shared
.node_code_translator
.translate_cjs_to_esm(&module_specifier, Some(source))
.await?;
let module_source = match source {
Cow::Owned(source) => ModuleSourceCode::String(source.into()),
Cow::Borrowed(source) => {
ModuleSourceCode::String(FastString::from_static(source))
}
};
Ok(deno_core::ModuleSource::new_with_redirect( Ok(deno_core::ModuleSource::new_with_redirect(
match module.inner.kind { module_type,
eszip::ModuleKind::JavaScript => ModuleType::JavaScript, module_source,
eszip::ModuleKind::Json => ModuleType::Json,
eszip::ModuleKind::Jsonc => {
return Err(type_error("jsonc modules not supported"))
}
eszip::ModuleKind::OpaqueData => {
unreachable!();
}
},
ModuleSourceCode::String(code.into()),
&original_specifier, &original_specifier,
&module.specifier, &module_specifier,
None, None,
)) ))
} }
.boxed_local(), .boxed_local(),
) )
} else {
let module_source = module_source.into_for_v8();
deno_core::ModuleLoadResponse::Sync(Ok(
deno_core::ModuleSource::new_with_redirect(
module_type,
module_source,
original_specifier,
module_specifier,
None,
),
))
}
}
Ok(None) => deno_core::ModuleLoadResponse::Sync(Err(type_error(
format!("{MODULE_NOT_FOUND}: {}", original_specifier),
))),
Err(err) => deno_core::ModuleLoadResponse::Sync(Err(type_error(
format!("{:?}", err),
))),
}
} }
} }
fn arc_u8_to_arc_str( impl NodeRequireLoader for EmbeddedModuleLoader {
arc_u8: Arc<[u8]>, fn ensure_read_permission<'a>(
) -> Result<Arc<str>, std::str::Utf8Error> { &self,
// Check that the string is valid UTF-8. permissions: &mut dyn deno_runtime::deno_node::NodePermissions,
std::str::from_utf8(&arc_u8)?; path: &'a std::path::Path,
// SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as ) -> Result<Cow<'a, std::path::Path>, AnyError> {
// Arc<str>. This is proven by the From<Arc<str>> impl for Arc<[u8]> from the if self.shared.modules.has_file(path) {
// standard library. // allow reading if the file is in the snapshot
Ok(unsafe { return Ok(Cow::Borrowed(path));
std::mem::transmute::<std::sync::Arc<[u8]>, std::sync::Arc<str>>(arc_u8) }
})
self
.shared
.npm_resolver
.ensure_read_permission(permissions, path)
}
fn load_text_file_lossy(
&self,
path: &std::path::Path,
) -> Result<String, AnyError> {
Ok(self.shared.fs.read_text_file_lossy_sync(path, None)?)
}
} }
struct StandaloneModuleLoaderFactory { struct StandaloneModuleLoaderFactory {
shared: Arc<SharedModuleLoaderState>, shared: Arc<SharedModuleLoaderState>,
} }
impl StandaloneModuleLoaderFactory {
pub fn create_result(&self) -> CreateModuleLoaderResult {
let loader = Rc::new(EmbeddedModuleLoader {
shared: self.shared.clone(),
});
CreateModuleLoaderResult {
module_loader: loader.clone(),
node_require_loader: loader,
}
}
}
impl ModuleLoaderFactory for StandaloneModuleLoaderFactory { impl ModuleLoaderFactory for StandaloneModuleLoaderFactory {
fn create_for_main( fn create_for_main(
&self, &self,
_root_permissions: PermissionsContainer, _root_permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter { ) -> CreateModuleLoaderResult {
ModuleLoaderAndSourceMapGetter { self.create_result()
module_loader: Rc::new(EmbeddedModuleLoader {
shared: self.shared.clone(),
}),
}
} }
fn create_for_worker( fn create_for_worker(
&self, &self,
_parent_permissions: PermissionsContainer, _parent_permissions: PermissionsContainer,
_permissions: PermissionsContainer, _permissions: PermissionsContainer,
) -> ModuleLoaderAndSourceMapGetter { ) -> CreateModuleLoaderResult {
ModuleLoaderAndSourceMapGetter { self.create_result()
module_loader: Rc::new(EmbeddedModuleLoader {
shared: self.shared.clone(),
}),
}
} }
} }
@ -439,13 +479,15 @@ impl RootCertStoreProvider for StandaloneRootCertStoreProvider {
} }
} }
pub async fn run( pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
mut eszip: eszip::EszipV2, let StandaloneData {
metadata: Metadata, fs,
) -> Result<i32, AnyError> { metadata,
let current_exe_path = std::env::current_exe().unwrap(); modules,
let current_exe_name = npm_snapshot,
current_exe_path.file_name().unwrap().to_string_lossy(); root_path,
vfs,
} = data;
let deno_dir_provider = Arc::new(DenoDirProvider::new(None)); let deno_dir_provider = Arc::new(DenoDirProvider::new(None));
let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider { let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider {
ca_stores: metadata.ca_stores, ca_stores: metadata.ca_stores,
@ -459,44 +501,50 @@ pub async fn run(
)); ));
// use a dummy npm registry url // use a dummy npm registry url
let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap(); let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap();
let root_path =
std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name));
let root_dir_url = let root_dir_url =
Arc::new(ModuleSpecifier::from_directory_path(&root_path).unwrap()); Arc::new(ModuleSpecifier::from_directory_path(&root_path).unwrap());
let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap(); let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap();
let root_node_modules_path = root_path.join("node_modules"); let npm_global_cache_dir = root_path.join(".deno_compile_node_modules");
let npm_cache_dir = NpmCacheDir::new(
&RealDenoCacheEnv,
root_node_modules_path.clone(),
vec![npm_registry_url.clone()],
);
let npm_global_cache_dir = npm_cache_dir.get_cache_location();
let cache_setting = CacheSetting::Only; let cache_setting = CacheSetting::Only;
let (fs, npm_resolver, maybe_vfs_root) = match metadata.node_modules { let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
));
let (in_npm_pkg_checker, npm_resolver) = match metadata.node_modules {
Some(binary::NodeModules::Managed { node_modules_dir }) => { Some(binary::NodeModules::Managed { node_modules_dir }) => {
// this will always have a snapshot // create an npmrc that uses the fake npm_registry_url to resolve packages
let snapshot = eszip.take_npm_snapshot().unwrap(); let npmrc = Arc::new(ResolvedNpmRc {
let vfs_root_dir_path = if node_modules_dir.is_some() { default_config: deno_npm::npm_rc::RegistryConfigWithUrl {
root_path.clone() registry_url: npm_registry_url.clone(),
} else { config: Default::default(),
npm_cache_dir.root_dir().to_owned() },
}; scopes: Default::default(),
let vfs = load_npm_vfs(vfs_root_dir_path.clone()) registry_configs: Default::default(),
.context("Failed to load npm vfs.")?; });
let npm_cache_dir = Arc::new(NpmCacheDir::new(
&DenoCacheEnvFsAdapter(fs.as_ref()),
npm_global_cache_dir,
npmrc.get_all_known_registries_urls(),
));
let snapshot = npm_snapshot.unwrap();
let maybe_node_modules_path = node_modules_dir let maybe_node_modules_path = node_modules_dir
.map(|node_modules_dir| vfs_root_dir_path.join(node_modules_dir)); .map(|node_modules_dir| root_path.join(node_modules_dir));
let fs = Arc::new(DenoCompileFileSystem::new(vfs)) let in_npm_pkg_checker =
as Arc<dyn deno_fs::FileSystem>; create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Managed(
CliManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: npm_cache_dir.root_dir_url(),
maybe_node_modules_path: maybe_node_modules_path.as_deref(),
},
));
let npm_resolver = let npm_resolver =
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
CliNpmResolverManagedCreateOptions { CliManagedNpmResolverCreateOptions {
snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some( snapshot: CliNpmResolverManagedSnapshotOption::Specified(Some(
snapshot, snapshot,
)), )),
maybe_lockfile: None, maybe_lockfile: None,
fs: fs.clone(), fs: fs.clone(),
http_client_provider: http_client_provider.clone(), http_client_provider: http_client_provider.clone(),
npm_global_cache_dir, npm_cache_dir,
cache_setting, cache_setting,
text_only_progress_bar: progress_bar, text_only_progress_bar: progress_bar,
maybe_node_modules_path, maybe_node_modules_path,
@ -505,50 +553,54 @@ pub async fn run(
// this is only used for installing packages, which isn't necessary with deno compile // this is only used for installing packages, which isn't necessary with deno compile
NpmInstallDepsProvider::empty(), NpmInstallDepsProvider::empty(),
), ),
// create an npmrc that uses the fake npm_registry_url to resolve packages npmrc,
npmrc: Arc::new(ResolvedNpmRc {
default_config: deno_npm::npm_rc::RegistryConfigWithUrl {
registry_url: npm_registry_url.clone(),
config: Default::default(),
},
scopes: Default::default(),
registry_configs: Default::default(),
}),
lifecycle_scripts: Default::default(), lifecycle_scripts: Default::default(),
}, },
)) ))
.await?; .await?;
(fs, npm_resolver, Some(vfs_root_dir_path)) (in_npm_pkg_checker, npm_resolver)
} }
Some(binary::NodeModules::Byonm { Some(binary::NodeModules::Byonm {
root_node_modules_dir, root_node_modules_dir,
}) => { }) => {
let vfs_root_dir_path = root_path.clone();
let vfs = load_npm_vfs(vfs_root_dir_path.clone())
.context("Failed to load vfs.")?;
let root_node_modules_dir = let root_node_modules_dir =
root_node_modules_dir.map(|p| vfs.root().join(p)); root_node_modules_dir.map(|p| vfs.root().join(p));
let fs = Arc::new(DenoCompileFileSystem::new(vfs)) let in_npm_pkg_checker =
as Arc<dyn deno_fs::FileSystem>; create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Byonm);
let npm_resolver = create_cli_npm_resolver( let npm_resolver = create_cli_npm_resolver(
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions {
fs: CliDenoResolverFs(fs.clone()), fs: CliDenoResolverFs(fs.clone()),
pkg_json_resolver: pkg_json_resolver.clone(),
root_node_modules_dir, root_node_modules_dir,
}), }),
) )
.await?; .await?;
(fs, npm_resolver, Some(vfs_root_dir_path)) (in_npm_pkg_checker, npm_resolver)
} }
None => { None => {
let fs = Arc::new(deno_fs::RealFs) as Arc<dyn deno_fs::FileSystem>; // Packages from different registries are already inlined in the binary,
// so no need to create actual `.npmrc` configuration.
let npmrc = create_default_npmrc();
let npm_cache_dir = Arc::new(NpmCacheDir::new(
&DenoCacheEnvFsAdapter(fs.as_ref()),
npm_global_cache_dir,
npmrc.get_all_known_registries_urls(),
));
let in_npm_pkg_checker =
create_in_npm_pkg_checker(CreateInNpmPkgCheckerOptions::Managed(
CliManagedInNpmPkgCheckerCreateOptions {
root_cache_dir_url: npm_cache_dir.root_dir_url(),
maybe_node_modules_path: None,
},
));
let npm_resolver = let npm_resolver =
create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed( create_cli_npm_resolver(CliNpmResolverCreateOptions::Managed(
CliNpmResolverManagedCreateOptions { CliManagedNpmResolverCreateOptions {
snapshot: CliNpmResolverManagedSnapshotOption::Specified(None), snapshot: CliNpmResolverManagedSnapshotOption::Specified(None),
maybe_lockfile: None, maybe_lockfile: None,
fs: fs.clone(), fs: fs.clone(),
http_client_provider: http_client_provider.clone(), http_client_provider: http_client_provider.clone(),
npm_global_cache_dir, npm_cache_dir,
cache_setting, cache_setting,
text_only_progress_bar: progress_bar, text_only_progress_bar: progress_bar,
maybe_node_modules_path: None, maybe_node_modules_path: None,
@ -557,42 +609,52 @@ pub async fn run(
// this is only used for installing packages, which isn't necessary with deno compile // this is only used for installing packages, which isn't necessary with deno compile
NpmInstallDepsProvider::empty(), NpmInstallDepsProvider::empty(),
), ),
// Packages from different registries are already inlined in the ESZip,
// so no need to create actual `.npmrc` configuration.
npmrc: create_default_npmrc(), npmrc: create_default_npmrc(),
lifecycle_scripts: Default::default(), lifecycle_scripts: Default::default(),
}, },
)) ))
.await?; .await?;
(fs, npm_resolver, None) (in_npm_pkg_checker, npm_resolver)
} }
}; };
let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some(); let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some();
let node_resolver = Arc::new(NodeResolver::new( let node_resolver = Arc::new(NodeResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
in_npm_pkg_checker.clone(),
npm_resolver.clone().into_npm_resolver(), npm_resolver.clone().into_npm_resolver(),
pkg_json_resolver.clone(),
));
let cjs_tracker = Arc::new(CjsTracker::new(
in_npm_pkg_checker.clone(),
pkg_json_resolver.clone(),
CjsTrackerOptions {
unstable_detect_cjs: metadata.unstable_config.detect_cjs,
},
)); ));
let cjs_resolutions = Arc::new(CjsResolutionStore::default());
let cache_db = Caches::new(deno_dir_provider.clone()); let cache_db = Caches::new(deno_dir_provider.clone());
let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db()); let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db());
let cli_node_resolver = Arc::new(CliNodeResolver::new( let cli_node_resolver = Arc::new(CliNodeResolver::new(
cjs_resolutions.clone(), cjs_tracker.clone(),
fs.clone(), fs.clone(),
in_npm_pkg_checker.clone(),
node_resolver.clone(), node_resolver.clone(),
npm_resolver.clone(), npm_resolver.clone(),
)); ));
let cjs_esm_code_analyzer = CliCjsCodeAnalyzer::new( let cjs_esm_code_analyzer = CliCjsCodeAnalyzer::new(
node_analysis_cache, node_analysis_cache,
cjs_tracker.clone(),
fs.clone(), fs.clone(),
cli_node_resolver.clone(),
None, None,
false,
); );
let node_code_translator = Arc::new(NodeCodeTranslator::new( let node_code_translator = Arc::new(NodeCodeTranslator::new(
cjs_esm_code_analyzer, cjs_esm_code_analyzer,
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()), deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
in_npm_pkg_checker,
node_resolver.clone(), node_resolver.clone(),
npm_resolver.clone().into_npm_resolver(), npm_resolver.clone().into_npm_resolver(),
pkg_json_resolver.clone(),
)); ));
let workspace_resolver = { let workspace_resolver = {
let import_map = match metadata.workspace_resolver.import_map { let import_map = match metadata.workspace_resolver.import_map {
@ -645,37 +707,35 @@ pub async fn run(
}; };
let module_loader_factory = StandaloneModuleLoaderFactory { let module_loader_factory = StandaloneModuleLoaderFactory {
shared: Arc::new(SharedModuleLoaderState { shared: Arc::new(SharedModuleLoaderState {
eszip: WorkspaceEszip { cjs_tracker: cjs_tracker.clone(),
eszip, fs: fs.clone(),
root_dir_url, modules,
}, node_code_translator: node_code_translator.clone(),
workspace_resolver,
node_resolver: cli_node_resolver.clone(), node_resolver: cli_node_resolver.clone(),
npm_module_loader: Arc::new(NpmModuleLoader::new( npm_module_loader: Arc::new(NpmModuleLoader::new(
cjs_resolutions.clone(), cjs_tracker.clone(),
node_code_translator,
fs.clone(), fs.clone(),
cli_node_resolver, node_code_translator,
)), )),
npm_resolver: npm_resolver.clone(),
workspace_resolver,
}), }),
}; };
let permissions = { let permissions = {
let mut permissions = let mut permissions =
metadata.permissions.to_options(/* cli_arg_urls */ &[]); metadata.permissions.to_options(/* cli_arg_urls */ &[]);
// if running with an npm vfs, grant read access to it // grant read access to the vfs
if let Some(vfs_root) = maybe_vfs_root {
match &mut permissions.allow_read { match &mut permissions.allow_read {
Some(vec) if vec.is_empty() => { Some(vec) if vec.is_empty() => {
// do nothing, already granted // do nothing, already granted
} }
Some(vec) => { Some(vec) => {
vec.push(vfs_root.to_string_lossy().to_string()); vec.push(root_path.to_string_lossy().to_string());
} }
None => { None => {
permissions.allow_read = permissions.allow_read =
Some(vec![vfs_root.to_string_lossy().to_string()]); Some(vec![root_path.to_string_lossy().to_string()]);
}
} }
} }
@ -697,7 +757,6 @@ pub async fn run(
}); });
let worker_factory = CliMainWorkerFactory::new( let worker_factory = CliMainWorkerFactory::new(
Arc::new(BlobStore::default()), Arc::new(BlobStore::default()),
cjs_resolutions,
// Code cache is not supported for standalone binary yet. // Code cache is not supported for standalone binary yet.
None, None,
feature_checker, feature_checker,
@ -708,6 +767,7 @@ pub async fn run(
Box::new(module_loader_factory), Box::new(module_loader_factory),
node_resolver, node_resolver,
npm_resolver, npm_resolver,
pkg_json_resolver,
root_cert_store_provider, root_cert_store_provider,
permissions, permissions,
StorageKeyResolver::empty(), StorageKeyResolver::empty(),
@ -723,7 +783,6 @@ pub async fn run(
inspect_wait: false, inspect_wait: false,
strace_ops: None, strace_ops: None,
is_inspecting: false, is_inspecting: false,
is_npm_main: main_module.scheme() == "npm",
skip_op_registration: true, skip_op_registration: true,
location: metadata.location, location: metadata.location,
argv0: NpmPackageReqReference::from_specifier(&main_module) argv0: NpmPackageReqReference::from_specifier(&main_module)
@ -740,7 +799,6 @@ pub async fn run(
node_ipc: None, node_ipc: None,
serve_port: None, serve_port: None,
serve_host: None, serve_host: None,
unstable_detect_cjs: metadata.unstable_config.detect_cjs,
}, },
); );

View file

@ -0,0 +1,661 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::io::Write;
use deno_ast::MediaType;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_core::FastString;
use deno_core::ModuleSourceCode;
use deno_core::ModuleType;
use deno_npm::resolution::SerializedNpmResolutionSnapshot;
use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_semver::package::PackageReq;
use crate::standalone::virtual_fs::VirtualDirectory;
use super::binary::Metadata;
use super::virtual_fs::VfsBuilder;
// Sentinel marker framing the embedded data section; written at both the
// start and the end so deserialization can detect a missing/truncated section.
const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd";
/// Binary format:
/// * d3n0l4nd
/// * <metadata_len><metadata>
/// * <npm_snapshot_len><npm_snapshot>
/// * <remote_modules_len><remote_modules>
/// * <vfs_headers_len><vfs_headers>
/// * <vfs_file_data_len><vfs_file_data>
/// * d3n0l4nd
pub fn serialize_binary_data_section(
  metadata: &Metadata,
  npm_snapshot: Option<SerializedNpmResolutionSnapshot>,
  remote_modules: &RemoteModulesStoreBuilder,
  vfs: VfsBuilder,
) -> Result<Vec<u8>, AnyError> {
  // Appends a u64 little-endian length prefix followed by the payload.
  fn write_len_prefixed(out: &mut Vec<u8>, payload: &[u8]) {
    out.extend_from_slice(&(payload.len() as u64).to_le_bytes());
    out.extend_from_slice(payload);
  }

  let mut out = Vec::new();
  out.extend_from_slice(MAGIC_BYTES);

  // 1. Metadata (JSON)
  let metadata_json = serde_json::to_string(metadata)?;
  write_len_prefixed(&mut out, metadata_json.as_bytes());

  // 2. Npm snapshot (an empty payload means "no snapshot")
  let snapshot_bytes =
    npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
  write_len_prefixed(&mut out, &snapshot_bytes);

  // 3. Remote modules — serialized into a scratch buffer first so the
  //    length prefix can be written up front instead of back-patched.
  let mut modules_bytes = Vec::new();
  remote_modules.write(&mut modules_bytes)?;
  write_len_prefixed(&mut out, &modules_bytes);

  // 4. VFS: the directory tree as JSON headers, then the concatenated
  //    file contents preceded by their total byte length.
  let (vfs_dir, vfs_files) = vfs.into_dir_and_files();
  let vfs_json = serde_json::to_string(&vfs_dir)?;
  write_len_prefixed(&mut out, vfs_json.as_bytes());
  let total_file_len: u64 = vfs_files.iter().map(|f| f.len() as u64).sum();
  out.extend_from_slice(&total_file_len.to_le_bytes());
  for file_bytes in &vfs_files {
    out.extend_from_slice(file_bytes);
  }

  // write the magic bytes at the end so we can use it
  // to make sure we've deserialized correctly
  out.extend_from_slice(MAGIC_BYTES);

  Ok(out)
}
/// The parsed contents of the binary's embedded data section, in the same
/// order the sections appear in the serialized format.
pub struct DeserializedDataSection {
  // Deserialized JSON metadata.
  pub metadata: Metadata,
  // `None` when the serialized npm snapshot payload was empty.
  pub npm_snapshot: Option<ValidSerializedNpmResolutionSnapshot>,
  // Specifier table for remote modules; module bytes are borrowed from
  // the `'static` input data.
  pub remote_modules: RemoteModulesStore,
  // Root of the virtual file system directory tree.
  pub vfs_dir: VirtualDirectory,
  // Raw backing bytes for the VFS files (borrowed from the input data).
  pub vfs_files_data: &'static [u8],
}
/// Parses the data section previously produced by
/// `serialize_binary_data_section`.
///
/// Returns `Ok(None)` when the leading magic bytes are absent (i.e. the
/// input does not contain a data section at all); errors when the section
/// is present but malformed or truncated.
pub fn deserialize_binary_data_section(
  data: &'static [u8],
) -> Result<Option<DeserializedDataSection>, AnyError> {
  // Reads a u64 length prefix, then that many bytes.
  fn read_len_prefixed(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> {
    let (input, len) = read_u64(input)?;
    let (input, payload) = read_bytes(input, len as usize)?;
    Ok((input, payload))
  }

  // Consumes the magic byte marker, reporting whether it matched.
  fn eat_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> {
    if input.len() < MAGIC_BYTES.len() {
      bail!("Unexpected end of data. Could not find magic bytes.");
    }
    let (prefix, rest) = input.split_at(MAGIC_BYTES.len());
    Ok((rest, prefix == MAGIC_BYTES))
  }

  let (input, has_magic) = eat_magic_bytes(data)?;
  if !has_magic {
    return Ok(None);
  }

  // 1. Metadata
  let (input, metadata_bytes) =
    read_len_prefixed(input).context("reading metadata")?;
  let metadata: Metadata =
    serde_json::from_slice(metadata_bytes).context("deserializing metadata")?;
  // 2. Npm snapshot (an empty payload means no snapshot was embedded)
  let (input, snapshot_bytes) =
    read_len_prefixed(input).context("reading npm snapshot")?;
  let npm_snapshot = if snapshot_bytes.is_empty() {
    None
  } else {
    Some(
      deserialize_npm_snapshot(snapshot_bytes)
        .context("deserializing npm snapshot")?,
    )
  };
  // 3. Remote modules
  let (input, modules_bytes) =
    read_len_prefixed(input).context("reading remote modules data")?;
  let remote_modules = RemoteModulesStore::build(modules_bytes)
    .context("deserializing remote modules")?;
  // 4. VFS directory headers, then the raw file data
  let (input, vfs_bytes) = read_len_prefixed(input).context("vfs")?;
  let vfs_dir: VirtualDirectory =
    serde_json::from_slice(vfs_bytes).context("deserializing vfs data")?;
  let (input, vfs_files_data) =
    read_len_prefixed(input).context("reading vfs files data")?;

  // The section must end with the magic marker, proving we consumed it
  // exactly.
  let (_input, has_magic) = eat_magic_bytes(input)?;
  if !has_magic {
    bail!("Could not find magic bytes at the end of the data.");
  }

  Ok(Some(DeserializedDataSection {
    metadata,
    npm_snapshot,
    remote_modules,
    vfs_dir,
    vfs_files_data,
  }))
}
/// Accumulates remote modules and specifier redirects for embedding into
/// the binary's data section.
#[derive(Default)]
pub struct RemoteModulesStoreBuilder {
  // (specifier, byte offset of its entry within the data payload)
  specifiers: Vec<(String, u64)>,
  // (media type, module bytes), in insertion order
  data: Vec<(MediaType, Vec<u8>)>,
  // running total of serialized entry bytes; assigns the next offset
  data_byte_len: u64,
  // (from, to) specifier redirect pairs
  redirects: Vec<(String, String)>,
  // running total of serialized redirect bytes
  redirects_len: u64,
}

impl RemoteModulesStoreBuilder {
  /// Registers a module's source bytes under `specifier`.
  pub fn add(&mut self, specifier: &Url, media_type: MediaType, data: Vec<u8>) {
    log::debug!("Adding '{}' ({})", specifier, media_type);
    self
      .specifiers
      .push((specifier.to_string(), self.data_byte_len));
    // entry layout: media type tag (1 byte) + data length (8 bytes) + data
    self.data_byte_len += 1 + 8 + data.len() as u64;
    self.data.push((media_type, data));
  }

  /// Registers redirects from one specifier to another.
  pub fn add_redirects(&mut self, redirects: &BTreeMap<Url, Url>) {
    self.redirects.reserve(redirects.len());
    for (from, to) in redirects {
      log::debug!("Adding redirect '{}' -> '{}'", from, to);
      let (from, to) = (from.to_string(), to.to_string());
      // each side is stored as a u32 length (4 bytes) plus its bytes
      self.redirects_len += (4 + from.len() + 4 + to.len()) as u64;
      self.redirects.push((from, to));
    }
  }

  /// Serializes the headers (counts, specifier table, redirect table)
  /// followed by every module's tagged, length-prefixed bytes.
  fn write(&self, writer: &mut dyn Write) -> Result<(), AnyError> {
    writer.write_all(&(self.specifiers.len() as u32).to_le_bytes())?;
    writer.write_all(&(self.redirects.len() as u32).to_le_bytes())?;
    for (specifier, offset) in &self.specifiers {
      writer.write_all(&(specifier.len() as u32).to_le_bytes())?;
      writer.write_all(specifier.as_bytes())?;
      writer.write_all(&offset.to_le_bytes())?;
    }
    for (from, to) in &self.redirects {
      for text in [from, to] {
        writer.write_all(&(text.len() as u32).to_le_bytes())?;
        writer.write_all(text.as_bytes())?;
      }
    }
    for (media_type, data) in &self.data {
      writer.write_all(&[serialize_media_type(*media_type)])?;
      writer.write_all(&(data.len() as u64).to_le_bytes())?;
      writer.write_all(data)?;
    }
    Ok(())
  }
}
/// Source of a module embedded in the compiled binary.
pub enum DenoCompileModuleSource {
  // Text borrowed straight from the binary (valid UTF-8 by construction).
  String(&'static str),
  // Raw bytes, borrowed or owned.
  Bytes(Cow<'static, [u8]>),
}

impl DenoCompileModuleSource {
  /// Converts into the representation v8 module loading expects.
  pub fn into_for_v8(self) -> ModuleSourceCode {
    match self {
      // todo(https://github.com/denoland/deno_core/pull/943): store whether
      // the string is ascii or not ahead of time so we can avoid the
      // is_ascii() check in FastString::from_static
      Self::String(text) => {
        ModuleSourceCode::String(FastString::from_static(text))
      }
      Self::Bytes(Cow::Borrowed(bytes)) => {
        ModuleSourceCode::Bytes(bytes.into())
      }
      Self::Bytes(Cow::Owned(bytes)) => {
        ModuleSourceCode::Bytes(bytes.into_boxed_slice().into())
      }
    }
  }
}
/// A module entry read out of the binary's embedded store.
pub struct DenoCompileModuleData<'a> {
  // Specifier the module was stored under (post-redirect).
  pub specifier: &'a Url,
  pub media_type: MediaType,
  // Module bytes; `'static` because they are borrowed from the binary's
  // embedded data when not owned.
  pub data: Cow<'static, [u8]>,
}
impl<'a> DenoCompileModuleData<'a> {
  /// Splits this entry into its specifier, the v8 module type derived
  /// from the media type, and the module source.
  pub fn into_parts(self) -> (&'a Url, ModuleType, DenoCompileModuleSource) {
    // Reinterprets the bytes as text without re-validating borrowed data.
    fn to_text(data: Cow<'static, [u8]>) -> DenoCompileModuleSource {
      match data {
        Cow::Borrowed(bytes) => DenoCompileModuleSource::String(
          // SAFETY: we know this is a valid utf8 string
          unsafe { std::str::from_utf8_unchecked(bytes) },
        ),
        Cow::Owned(bytes) => DenoCompileModuleSource::Bytes(Cow::Owned(bytes)),
      }
    }

    let Self {
      specifier,
      media_type,
      data,
    } = self;
    match media_type {
      MediaType::JavaScript
      | MediaType::Jsx
      | MediaType::Mjs
      | MediaType::Cjs
      | MediaType::TypeScript
      | MediaType::Mts
      | MediaType::Cts
      | MediaType::Dts
      | MediaType::Dmts
      | MediaType::Dcts
      | MediaType::Tsx => (specifier, ModuleType::JavaScript, to_text(data)),
      MediaType::Json => (specifier, ModuleType::Json, to_text(data)),
      MediaType::Wasm => (
        specifier,
        ModuleType::Wasm,
        DenoCompileModuleSource::Bytes(data),
      ),
      // just assume javascript if we made it here
      MediaType::Css | MediaType::SourceMap | MediaType::Unknown => (
        specifier,
        ModuleType::JavaScript,
        DenoCompileModuleSource::Bytes(data),
      ),
    }
  }
}
// How a specifier stored in the remote modules table resolves.
enum RemoteModulesStoreSpecifierValue {
  // Byte offset of the module's entry within the store's file data.
  Data(usize),
  // Redirect to another specifier (which may itself redirect).
  Redirect(Url),
}
/// Read-side store of remote modules embedded in the binary: a specifier
/// table (data offsets and redirects) over a flat `'static` byte buffer.
pub struct RemoteModulesStore {
  specifiers: HashMap<Url, RemoteModulesStoreSpecifierValue>,
  // Flat buffer holding every module's tagged, length-prefixed bytes,
  // indexed by the offsets in `specifiers`.
  files_data: &'static [u8],
}
impl RemoteModulesStore {
fn build(data: &'static [u8]) -> Result<Self, AnyError> {
fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> {
let (input, specifier) = read_string_lossy(input)?;
let specifier = Url::parse(&specifier)?;
let (input, offset) = read_u64(input)?;
Ok((input, (specifier, offset)))
}
fn read_redirect(input: &[u8]) -> Result<(&[u8], (Url, Url)), AnyError> {
let (input, from) = read_string_lossy(input)?;
let from = Url::parse(&from)?;
let (input, to) = read_string_lossy(input)?;
let to = Url::parse(&to)?;
Ok((input, (from, to)))
}
fn read_headers(
input: &[u8],
) -> Result<(&[u8], HashMap<Url, RemoteModulesStoreSpecifierValue>), AnyError>
{
let (input, specifiers_len) = read_u32_as_usize(input)?;
let (mut input, redirects_len) = read_u32_as_usize(input)?;
let mut specifiers =
HashMap::with_capacity(specifiers_len + redirects_len);
for _ in 0..specifiers_len {
let (current_input, (specifier, offset)) =
read_specifier(input).context("reading specifier")?;
input = current_input;
specifiers.insert(
specifier,
RemoteModulesStoreSpecifierValue::Data(offset as usize),
);
}
for _ in 0..redirects_len {
let (current_input, (from, to)) = read_redirect(input)?;
input = current_input;
specifiers.insert(from, RemoteModulesStoreSpecifierValue::Redirect(to));
}
Ok((input, specifiers))
}
let (files_data, specifiers) = read_headers(data)?;
Ok(Self {
specifiers,
files_data,
})
}
pub fn resolve_specifier<'a>(
&'a self,
specifier: &'a Url,
) -> Result<Option<&'a Url>, AnyError> {
let mut count = 0;
let mut current = specifier;
loop {
if count > 10 {
bail!("Too many redirects resolving '{}'", specifier);
}
match self.specifiers.get(current) {
Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
current = to;
count += 1;
}
Some(RemoteModulesStoreSpecifierValue::Data(_)) => {
return Ok(Some(current));
}
None => {
return Ok(None);
}
}
}
}
pub fn read<'a>(
&'a self,
original_specifier: &'a Url,
) -> Result<Option<DenoCompileModuleData<'a>>, AnyError> {
let mut count = 0;
let mut specifier = original_specifier;
loop {
if count > 10 {
bail!("Too many redirects resolving '{}'", original_specifier);
}
match self.specifiers.get(specifier) {
Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => {
specifier = to;
count += 1;
}
Some(RemoteModulesStoreSpecifierValue::Data(offset)) => {
let input = &self.files_data[*offset..];
let (input, media_type_byte) = read_bytes(input, 1)?;
let media_type = deserialize_media_type(media_type_byte[0])?;
let (input, len) = read_u64(input)?;
let (_input, data) = read_bytes(input, len as usize)?;
return Ok(Some(DenoCompileModuleData {
specifier,
media_type,
data: Cow::Borrowed(data),
}));
}
None => {
return Ok(None);
}
}
}
}
}
/// Serializes an npm resolution snapshot into a flat byte buffer:
/// package-id list, root package (requirement -> index) pairs, then each
/// package's dependency (name -> index) pairs. Entries are sorted so the
/// output is deterministic for identical snapshots.
fn serialize_npm_snapshot(
  mut snapshot: SerializedNpmResolutionSnapshot,
) -> Vec<u8> {
  // writes a u32 LE length prefix followed by the UTF-8 bytes
  fn write_string(out: &mut Vec<u8>, text: &str) {
    write_u32(out, text.len() as u32);
    out.extend_from_slice(text.as_bytes());
  }

  // writes a u32 in little-endian byte order
  fn write_u32(out: &mut Vec<u8>, value: u32) {
    out.extend_from_slice(&value.to_le_bytes());
  }

  // sort for determinism of the serialized output
  snapshot.packages.sort_by(|a, b| a.id.cmp(&b.id));

  // map each package id to its index within the sorted package list;
  // ids are stored as these indexes everywhere below
  let index_by_id = snapshot
    .packages
    .iter()
    .enumerate()
    .map(|(index, pkg)| (&pkg.id, index as u32))
    .collect::<HashMap<_, _>>();

  let mut sorted_roots: Vec<_> = snapshot.root_packages.iter().collect();
  sorted_roots.sort();

  let mut out = Vec::new();
  // section 1: package count followed by every serialized package id
  write_u32(&mut out, snapshot.packages.len() as u32);
  for pkg in &snapshot.packages {
    write_string(&mut out, &pkg.id.as_serialized());
  }
  // section 2: root package count, then (requirement, package index) pairs
  write_u32(&mut out, sorted_roots.len() as u32);
  for (req, id) in sorted_roots {
    write_string(&mut out, &req.to_string());
    write_u32(&mut out, *index_by_id.get(&id).unwrap());
  }
  // section 3: per package (in sorted order), its dependency count and
  // the (dependency name, package index) pairs
  for pkg in &snapshot.packages {
    write_u32(&mut out, pkg.dependencies.len() as u32);
    let mut deps: Vec<_> = pkg.dependencies.iter().collect();
    deps.sort();
    for (name, id) in deps {
      write_string(&mut out, name);
      write_u32(&mut out, *index_by_id.get(&id).unwrap());
    }
  }
  out
}
/// Deserializes an npm resolution snapshot from the flat byte format
/// produced by `serialize_npm_snapshot`, erroring if any trailing bytes
/// remain after the snapshot.
fn deserialize_npm_snapshot(
  input: &[u8],
) -> Result<ValidSerializedNpmResolutionSnapshot, AnyError> {
  // reads one length-prefixed serialized npm package id
  fn parse_id(input: &[u8]) -> Result<(&[u8], NpmPackageId), AnyError> {
    let (input, id) = read_string_lossy(input)?;
    let id = NpmPackageId::from_serialized(&id)?;
    Ok((input, id))
  }

  // returns a parser for one (package requirement, package id) root
  // entry; ids are stored as indexes resolved via `id_to_npm_id`
  #[allow(clippy::needless_lifetimes)] // clippy bug
  fn parse_root_package<'a>(
    id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
  ) -> impl Fn(&[u8]) -> Result<(&[u8], (PackageReq, NpmPackageId)), AnyError> + 'a
  {
    |input| {
      let (input, req) = read_string_lossy(input)?;
      let req = PackageReq::from_str(&req)?;
      let (input, id) = read_u32_as_usize(input)?;
      Ok((input, (req, id_to_npm_id(id)?)))
    }
  }

  // returns a parser for one (dependency name, package id) entry
  #[allow(clippy::needless_lifetimes)] // clippy bug
  fn parse_package_dep<'a>(
    id_to_npm_id: &'a impl Fn(usize) -> Result<NpmPackageId, AnyError>,
  ) -> impl Fn(&[u8]) -> Result<(&[u8], (String, NpmPackageId)), AnyError> + 'a
  {
    |input| {
      let (input, req) = read_string_lossy(input)?;
      let (input, id) = read_u32_as_usize(input)?;
      Ok((input, (req.into_owned(), id_to_npm_id(id)?)))
    }
  }

  // parses one package's dependency list; fields not present in the
  // serialized form (dist, scripts, ...) are defaulted
  fn parse_package<'a>(
    input: &'a [u8],
    id: NpmPackageId,
    id_to_npm_id: &impl Fn(usize) -> Result<NpmPackageId, AnyError>,
  ) -> Result<(&'a [u8], SerializedNpmResolutionSnapshotPackage), AnyError> {
    let (input, deps_len) = read_u32_as_usize(input)?;
    let (input, dependencies) =
      parse_hashmap_n_times(input, deps_len, parse_package_dep(id_to_npm_id))?;
    Ok((
      input,
      SerializedNpmResolutionSnapshotPackage {
        id,
        system: Default::default(),
        dist: Default::default(),
        dependencies,
        optional_dependencies: Default::default(),
        bin: None,
        scripts: Default::default(),
        deprecated: Default::default(),
      },
    ))
  }

  let (input, packages_len) = read_u32_as_usize(input)?;

  // get a hashmap of all the npm package ids to their serialized ids
  let (input, data_ids_to_npm_ids) =
    parse_vec_n_times(input, packages_len, parse_id)
      .context("deserializing id")?;
  let data_id_to_npm_id = |id: usize| {
    data_ids_to_npm_ids
      .get(id)
      .cloned()
      .ok_or_else(|| deno_core::anyhow::anyhow!("Invalid npm package id"))
  };

  let (input, root_packages_len) = read_u32_as_usize(input)?;
  let (input, root_packages) = parse_hashmap_n_times(
    input,
    root_packages_len,
    parse_root_package(&data_id_to_npm_id),
  )
  .context("deserializing root package")?;
  // package dependency lists appear in the same order as the id list,
  // so the index into this section is the package's id index
  let (input, packages) =
    parse_vec_n_times_with_index(input, packages_len, |input, index| {
      parse_package(input, data_id_to_npm_id(index)?, &data_id_to_npm_id)
    })
    .context("deserializing package")?;

  if !input.is_empty() {
    bail!("Unexpected data left over");
  }

  Ok(
    SerializedNpmResolutionSnapshot {
      packages,
      root_packages,
    }
    // this is ok because we have already verified that all the
    // identifiers found in the snapshot are valid via the
    // npm package id -> npm package id mapping
    .into_valid_unsafe(),
  )
}
fn serialize_media_type(media_type: MediaType) -> u8 {
match media_type {
MediaType::JavaScript => 0,
MediaType::Jsx => 1,
MediaType::Mjs => 2,
MediaType::Cjs => 3,
MediaType::TypeScript => 4,
MediaType::Mts => 5,
MediaType::Cts => 6,
MediaType::Dts => 7,
MediaType::Dmts => 8,
MediaType::Dcts => 9,
MediaType::Tsx => 10,
MediaType::Json => 11,
MediaType::Wasm => 12,
MediaType::Css => 13,
MediaType::SourceMap => 14,
MediaType::Unknown => 15,
}
}
fn deserialize_media_type(value: u8) -> Result<MediaType, AnyError> {
match value {
0 => Ok(MediaType::JavaScript),
1 => Ok(MediaType::Jsx),
2 => Ok(MediaType::Mjs),
3 => Ok(MediaType::Cjs),
4 => Ok(MediaType::TypeScript),
5 => Ok(MediaType::Mts),
6 => Ok(MediaType::Cts),
7 => Ok(MediaType::Dts),
8 => Ok(MediaType::Dmts),
9 => Ok(MediaType::Dcts),
10 => Ok(MediaType::Tsx),
11 => Ok(MediaType::Json),
12 => Ok(MediaType::Wasm),
13 => Ok(MediaType::Css),
14 => Ok(MediaType::SourceMap),
15 => Ok(MediaType::Unknown),
_ => bail!("Unknown media type value: {}", value),
}
}
/// Runs `parse` exactly `times` times against the advancing input,
/// collecting the resulting key/value pairs into a `HashMap` and
/// returning the unconsumed remainder of the input.
fn parse_hashmap_n_times<TKey: std::cmp::Eq + std::hash::Hash, TValue>(
  mut input: &[u8],
  times: usize,
  parse: impl Fn(&[u8]) -> Result<(&[u8], (TKey, TValue)), AnyError>,
) -> Result<(&[u8], HashMap<TKey, TValue>), AnyError> {
  let mut entries = HashMap::with_capacity(times);
  for _ in 0..times {
    let (remaining, (key, value)) = parse(input)?;
    entries.insert(key, value);
    input = remaining;
  }
  Ok((input, entries))
}
/// Runs `parse` exactly `times` times against the advancing input,
/// collecting the results into a `Vec` and returning the unconsumed
/// remainder of the input.
fn parse_vec_n_times<TResult>(
  input: &[u8],
  times: usize,
  parse: impl Fn(&[u8]) -> Result<(&[u8], TResult), AnyError>,
) -> Result<(&[u8], Vec<TResult>), AnyError> {
  // delegate to the indexed variant, ignoring the iteration index
  parse_vec_n_times_with_index(input, times, |data, _| parse(data))
}
/// Like `parse_vec_n_times`, but also hands each invocation of `parse`
/// its zero-based iteration index.
fn parse_vec_n_times_with_index<TResult>(
  mut input: &[u8],
  times: usize,
  parse: impl Fn(&[u8], usize) -> Result<(&[u8], TResult), AnyError>,
) -> Result<(&[u8], Vec<TResult>), AnyError> {
  let mut items = Vec::with_capacity(times);
  for index in 0..times {
    let (remaining, item) = parse(input, index)?;
    items.push(item);
    input = remaining;
  }
  Ok((input, items))
}
fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> {
if input.len() < len {
bail!("Unexpected end of data.",);
}
let (len_bytes, input) = input.split_at(len);
Ok((input, len_bytes))
}
/// Reads a u32-length-prefixed string, converting invalid UTF-8
/// sequences to replacement characters, and returns the remainder of
/// the input alongside the decoded text.
fn read_string_lossy(input: &[u8]) -> Result<(&[u8], Cow<str>), AnyError> {
  let (input, text_len) = read_u32_as_usize(input)?;
  let (rest, text_bytes) = read_bytes(input, text_len)?;
  Ok((rest, String::from_utf8_lossy(text_bytes)))
}
/// Reads a little-endian u32 and widens it to `usize`, returning the
/// remainder of the input alongside the value.
fn read_u32_as_usize(input: &[u8]) -> Result<(&[u8], usize), AnyError> {
  let (rest, value_bytes) = read_bytes(input, 4)?;
  let value = u32::from_le_bytes(value_bytes.try_into()?);
  Ok((rest, value as usize))
}
/// Reads a little-endian u64, returning the remainder of the input
/// alongside the value.
fn read_u64(input: &[u8]) -> Result<(&[u8], u64), AnyError> {
  let (rest, value_bytes) = read_bytes(input, 8)?;
  let value = u64::from_le_bytes(value_bytes.try_into()?);
  Ok((rest, value))
}

View file

@ -7,6 +7,7 @@ use std::fs::File;
use std::io::Read; use std::io::Read;
use std::io::Seek; use std::io::Seek;
use std::io::SeekFrom; use std::io::SeekFrom;
use std::ops::Range;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::rc::Rc; use std::rc::Rc;
@ -67,6 +68,26 @@ impl VfsBuilder {
}) })
} }
pub fn set_new_root_path(
&mut self,
root_path: PathBuf,
) -> Result<(), AnyError> {
let root_path = canonicalize_path(&root_path)?;
self.root_path = root_path;
self.root_dir = VirtualDirectory {
name: self
.root_path
.file_stem()
.map(|s| s.to_string_lossy().into_owned())
.unwrap_or("root".to_string()),
entries: vec![VfsEntry::Dir(VirtualDirectory {
name: std::mem::take(&mut self.root_dir.name),
entries: std::mem::take(&mut self.root_dir.entries),
})],
};
Ok(())
}
pub fn with_root_dir<R>( pub fn with_root_dir<R>(
&mut self, &mut self,
with_root: impl FnOnce(&mut VirtualDirectory) -> R, with_root: impl FnOnce(&mut VirtualDirectory) -> R,
@ -119,7 +140,7 @@ impl VfsBuilder {
// inline the symlink and make the target file // inline the symlink and make the target file
let file_bytes = std::fs::read(&target) let file_bytes = std::fs::read(&target)
.with_context(|| format!("Reading {}", path.display()))?; .with_context(|| format!("Reading {}", path.display()))?;
self.add_file(&path, file_bytes)?; self.add_file_with_data_inner(&path, file_bytes)?;
} else { } else {
log::warn!( log::warn!(
"{} Symlink target is outside '{}'. Excluding symlink at '{}' with target '{}'.", "{} Symlink target is outside '{}'. Excluding symlink at '{}' with target '{}'.",
@ -191,16 +212,32 @@ impl VfsBuilder {
self.add_file_at_path_not_symlink(&target_path) self.add_file_at_path_not_symlink(&target_path)
} }
pub fn add_file_at_path_not_symlink( fn add_file_at_path_not_symlink(
&mut self, &mut self,
path: &Path, path: &Path,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let file_bytes = std::fs::read(path) let file_bytes = std::fs::read(path)
.with_context(|| format!("Reading {}", path.display()))?; .with_context(|| format!("Reading {}", path.display()))?;
self.add_file(path, file_bytes) self.add_file_with_data_inner(path, file_bytes)
} }
fn add_file(&mut self, path: &Path, data: Vec<u8>) -> Result<(), AnyError> { pub fn add_file_with_data(
&mut self,
path: &Path,
data: Vec<u8>,
) -> Result<(), AnyError> {
let target_path = canonicalize_path(path)?;
if target_path != path {
self.add_symlink(path, &target_path)?;
}
self.add_file_with_data_inner(&target_path, data)
}
fn add_file_with_data_inner(
&mut self,
path: &Path,
data: Vec<u8>,
) -> Result<(), AnyError> {
log::debug!("Adding file '{}'", path.display()); log::debug!("Adding file '{}'", path.display());
let checksum = util::checksum::gen(&[&data]); let checksum = util::checksum::gen(&[&data]);
let offset = if let Some(offset) = self.file_offsets.get(&checksum) { let offset = if let Some(offset) = self.file_offsets.get(&checksum) {
@ -249,8 +286,15 @@ impl VfsBuilder {
path.display(), path.display(),
target.display() target.display()
); );
let dest = self.path_relative_root(target)?; let relative_target = self.path_relative_root(target)?;
if dest == self.path_relative_root(path)? { let relative_path = match self.path_relative_root(path) {
Ok(path) => path,
Err(StripRootError { .. }) => {
// ignore if the original path is outside the root directory
return Ok(());
}
};
if relative_target == relative_path {
// it's the same, ignore // it's the same, ignore
return Ok(()); return Ok(());
} }
@ -263,7 +307,7 @@ impl VfsBuilder {
insert_index, insert_index,
VfsEntry::Symlink(VirtualSymlink { VfsEntry::Symlink(VirtualSymlink {
name: name.to_string(), name: name.to_string(),
dest_parts: dest dest_parts: relative_target
.components() .components()
.map(|c| c.as_os_str().to_string_lossy().to_string()) .map(|c| c.as_os_str().to_string_lossy().to_string())
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
@ -751,14 +795,14 @@ impl deno_io::fs::File for FileBackedVfsFile {
#[derive(Debug)] #[derive(Debug)]
pub struct FileBackedVfs { pub struct FileBackedVfs {
file: Mutex<Vec<u8>>, vfs_data: Cow<'static, [u8]>,
fs_root: VfsRoot, fs_root: VfsRoot,
} }
impl FileBackedVfs { impl FileBackedVfs {
pub fn new(file: Vec<u8>, fs_root: VfsRoot) -> Self { pub fn new(data: Cow<'static, [u8]>, fs_root: VfsRoot) -> Self {
Self { Self {
file: Mutex::new(file), vfs_data: data,
fs_root, fs_root,
} }
} }
@ -827,10 +871,15 @@ impl FileBackedVfs {
Ok(path) Ok(path)
} }
pub fn read_file_all(&self, file: &VirtualFile) -> std::io::Result<Vec<u8>> { pub fn read_file_all(
let mut buf = vec![0; file.len as usize]; &self,
self.read_file(file, 0, &mut buf)?; file: &VirtualFile,
Ok(buf) ) -> std::io::Result<Cow<'static, [u8]>> {
let read_range = self.get_read_range(file, 0, file.len)?;
match &self.vfs_data {
Cow::Borrowed(data) => Ok(Cow::Borrowed(&data[read_range])),
Cow::Owned(data) => Ok(Cow::Owned(data[read_range].to_vec())),
}
} }
pub fn read_file( pub fn read_file(
@ -839,18 +888,27 @@ impl FileBackedVfs {
pos: u64, pos: u64,
buf: &mut [u8], buf: &mut [u8],
) -> std::io::Result<usize> { ) -> std::io::Result<usize> {
let data = self.file.lock(); let read_range = self.get_read_range(file, pos, buf.len() as u64)?;
buf.copy_from_slice(&self.vfs_data[read_range]);
Ok(buf.len())
}
fn get_read_range(
&self,
file: &VirtualFile,
pos: u64,
len: u64,
) -> std::io::Result<Range<usize>> {
let data = &self.vfs_data;
let start = self.fs_root.start_file_offset + file.offset + pos; let start = self.fs_root.start_file_offset + file.offset + pos;
let end = start + buf.len() as u64; let end = start + len;
if end > data.len() as u64 { if end > data.len() as u64 {
return Err(std::io::Error::new( return Err(std::io::Error::new(
std::io::ErrorKind::UnexpectedEof, std::io::ErrorKind::UnexpectedEof,
"unexpected EOF", "unexpected EOF",
)); ));
} }
Ok(start as usize..end as usize)
buf.copy_from_slice(&data[start as usize..end as usize]);
Ok(buf.len())
} }
pub fn dir_entry(&self, path: &Path) -> std::io::Result<&VirtualDirectory> { pub fn dir_entry(&self, path: &Path) -> std::io::Result<&VirtualDirectory> {
@ -888,7 +946,7 @@ mod test {
#[track_caller] #[track_caller]
fn read_file(vfs: &FileBackedVfs, path: &Path) -> String { fn read_file(vfs: &FileBackedVfs, path: &Path) -> String {
let file = vfs.file_entry(path).unwrap(); let file = vfs.file_entry(path).unwrap();
String::from_utf8(vfs.read_file_all(file).unwrap()).unwrap() String::from_utf8(vfs.read_file_all(file).unwrap().into_owned()).unwrap()
} }
#[test] #[test]
@ -901,20 +959,23 @@ mod test {
let src_path = src_path.to_path_buf(); let src_path = src_path.to_path_buf();
let mut builder = VfsBuilder::new(src_path.clone()).unwrap(); let mut builder = VfsBuilder::new(src_path.clone()).unwrap();
builder builder
.add_file(&src_path.join("a.txt"), "data".into()) .add_file_with_data_inner(&src_path.join("a.txt"), "data".into())
.unwrap(); .unwrap();
builder builder
.add_file(&src_path.join("b.txt"), "data".into()) .add_file_with_data_inner(&src_path.join("b.txt"), "data".into())
.unwrap(); .unwrap();
assert_eq!(builder.files.len(), 1); // because duplicate data assert_eq!(builder.files.len(), 1); // because duplicate data
builder builder
.add_file(&src_path.join("c.txt"), "c".into()) .add_file_with_data_inner(&src_path.join("c.txt"), "c".into())
.unwrap(); .unwrap();
builder builder
.add_file(&src_path.join("sub_dir").join("d.txt"), "d".into()) .add_file_with_data_inner(
&src_path.join("sub_dir").join("d.txt"),
"d".into(),
)
.unwrap(); .unwrap();
builder builder
.add_file(&src_path.join("e.txt"), "e".into()) .add_file_with_data_inner(&src_path.join("e.txt"), "e".into())
.unwrap(); .unwrap();
builder builder
.add_symlink( .add_symlink(
@ -1031,7 +1092,7 @@ mod test {
( (
dest_path.to_path_buf(), dest_path.to_path_buf(),
FileBackedVfs::new( FileBackedVfs::new(
data, Cow::Owned(data),
VfsRoot { VfsRoot {
dir: root_dir, dir: root_dir,
root_path: dest_path.to_path_buf(), root_path: dest_path.to_path_buf(),
@ -1082,7 +1143,7 @@ mod test {
let temp_path = temp_dir.path().canonicalize(); let temp_path = temp_dir.path().canonicalize();
let mut builder = VfsBuilder::new(temp_path.to_path_buf()).unwrap(); let mut builder = VfsBuilder::new(temp_path.to_path_buf()).unwrap();
builder builder
.add_file( .add_file_with_data_inner(
temp_path.join("a.txt").as_path(), temp_path.join("a.txt").as_path(),
"0123456789".to_string().into_bytes(), "0123456789".to_string().into_bytes(),
) )

View file

@ -155,6 +155,12 @@ fn prepare_env_vars(
initial_cwd.to_string_lossy().to_string(), initial_cwd.to_string_lossy().to_string(),
); );
} }
if !env_vars.contains_key(crate::npm::NPM_CONFIG_USER_AGENT_ENV_VAR) {
env_vars.insert(
crate::npm::NPM_CONFIG_USER_AGENT_ENV_VAR.into(),
crate::npm::get_npm_config_user_agent(),
);
}
if let Some(node_modules_dir) = node_modules_dir { if let Some(node_modules_dir) = node_modules_dir {
prepend_to_path( prepend_to_path(
&mut env_vars, &mut env_vars,
@ -204,7 +210,7 @@ impl ShellCommand for NpmCommand {
mut context: ShellCommandContext, mut context: ShellCommandContext,
) -> LocalBoxFuture<'static, ExecuteResult> { ) -> LocalBoxFuture<'static, ExecuteResult> {
if context.args.first().map(|s| s.as_str()) == Some("run") if context.args.first().map(|s| s.as_str()) == Some("run")
&& context.args.len() > 2 && context.args.len() >= 2
// for now, don't run any npm scripts that have a flag because // for now, don't run any npm scripts that have a flag because
// we don't handle stuff like `--workspaces` properly // we don't handle stuff like `--workspaces` properly
&& !context.args.iter().any(|s| s.starts_with('-')) && !context.args.iter().any(|s| s.starts_with('-'))
@ -267,10 +273,12 @@ impl ShellCommand for NodeCommand {
) )
.execute(context); .execute(context);
} }
args.extend(["run", "-A"].into_iter().map(|s| s.to_string())); args.extend(["run", "-A"].into_iter().map(|s| s.to_string()));
args.extend(context.args.iter().cloned()); args.extend(context.args.iter().cloned());
let mut state = context.state; let mut state = context.state;
state.apply_env_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME, "1"); state.apply_env_var(USE_PKG_JSON_HIDDEN_ENV_VAR_NAME, "1");
ExecutableCommand::new("deno".to_string(), std::env::current_exe().unwrap()) ExecutableCommand::new("deno".to_string(), std::env::current_exe().unwrap())
.execute(ShellCommandContext { .execute(ShellCommandContext {

View file

@ -193,7 +193,7 @@ async fn bench_specifier_inner(
.await?; .await?;
// We execute the main module as a side module so that import.meta.main is not set. // We execute the main module as a side module so that import.meta.main is not set.
worker.execute_side_module_possibly_with_npm().await?; worker.execute_side_module().await?;
let mut worker = worker.into_main_worker(); let mut worker = worker.into_main_worker();

View file

@ -32,6 +32,7 @@ use crate::graph_util::ModuleGraphBuilder;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::tsc; use crate::tsc;
use crate::tsc::Diagnostics; use crate::tsc::Diagnostics;
use crate::tsc::TypeCheckingCjsTracker;
use crate::util::extract; use crate::util::extract;
use crate::util::path::to_percent_decoded_str; use crate::util::path::to_percent_decoded_str;
@ -99,6 +100,7 @@ pub struct CheckOptions {
pub struct TypeChecker { pub struct TypeChecker {
caches: Arc<Caches>, caches: Arc<Caches>,
cjs_tracker: Arc<TypeCheckingCjsTracker>,
cli_options: Arc<CliOptions>, cli_options: Arc<CliOptions>,
module_graph_builder: Arc<ModuleGraphBuilder>, module_graph_builder: Arc<ModuleGraphBuilder>,
node_resolver: Arc<NodeResolver>, node_resolver: Arc<NodeResolver>,
@ -108,6 +110,7 @@ pub struct TypeChecker {
impl TypeChecker { impl TypeChecker {
pub fn new( pub fn new(
caches: Arc<Caches>, caches: Arc<Caches>,
cjs_tracker: Arc<TypeCheckingCjsTracker>,
cli_options: Arc<CliOptions>, cli_options: Arc<CliOptions>,
module_graph_builder: Arc<ModuleGraphBuilder>, module_graph_builder: Arc<ModuleGraphBuilder>,
node_resolver: Arc<NodeResolver>, node_resolver: Arc<NodeResolver>,
@ -115,6 +118,7 @@ impl TypeChecker {
) -> Self { ) -> Self {
Self { Self {
caches, caches,
cjs_tracker,
cli_options, cli_options,
module_graph_builder, module_graph_builder,
node_resolver, node_resolver,
@ -244,6 +248,7 @@ impl TypeChecker {
graph: graph.clone(), graph: graph.clone(),
hash_data, hash_data,
maybe_npm: Some(tsc::RequestNpmState { maybe_npm: Some(tsc::RequestNpmState {
cjs_tracker: self.cjs_tracker.clone(),
node_resolver: self.node_resolver.clone(), node_resolver: self.node_resolver.clone(),
npm_resolver: self.npm_resolver.clone(), npm_resolver: self.npm_resolver.clone(),
}), }),
@ -346,7 +351,7 @@ fn get_check_hash(
} }
} }
MediaType::Json MediaType::Json
| MediaType::TsBuildInfo | MediaType::Css
| MediaType::SourceMap | MediaType::SourceMap
| MediaType::Wasm | MediaType::Wasm
| MediaType::Unknown => continue, | MediaType::Unknown => continue,
@ -428,7 +433,7 @@ fn get_tsc_roots(
} }
MediaType::Json MediaType::Json
| MediaType::Wasm | MediaType::Wasm
| MediaType::TsBuildInfo | MediaType::Css
| MediaType::SourceMap | MediaType::SourceMap
| MediaType::Unknown => None, | MediaType::Unknown => None,
}, },
@ -536,7 +541,7 @@ fn has_ts_check(media_type: MediaType, file_text: &str) -> bool {
| MediaType::Tsx | MediaType::Tsx
| MediaType::Json | MediaType::Json
| MediaType::Wasm | MediaType::Wasm
| MediaType::TsBuildInfo | MediaType::Css
| MediaType::SourceMap | MediaType::SourceMap
| MediaType::Unknown => false, | MediaType::Unknown => false,
} }

View file

@ -5,6 +5,7 @@ use crate::args::CompileFlags;
use crate::args::Flags; use crate::args::Flags;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::http_util::HttpClientProvider; use crate::http_util::HttpClientProvider;
use crate::standalone::binary::StandaloneRelativeFileBaseUrl;
use crate::standalone::is_standalone_binary; use crate::standalone::is_standalone_binary;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
@ -14,7 +15,6 @@ use deno_core::error::AnyError;
use deno_core::resolve_url_or_path; use deno_core::resolve_url_or_path;
use deno_graph::GraphKind; use deno_graph::GraphKind;
use deno_terminal::colors; use deno_terminal::colors;
use eszip::EszipRelativeFileBaseUrl;
use rand::Rng; use rand::Rng;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
@ -29,7 +29,6 @@ pub async fn compile(
let factory = CliFactory::from_flags(flags); let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?; let cli_options = factory.cli_options()?;
let module_graph_creator = factory.module_graph_creator().await?; let module_graph_creator = factory.module_graph_creator().await?;
let parsed_source_cache = factory.parsed_source_cache();
let binary_writer = factory.create_compile_binary_writer().await?; let binary_writer = factory.create_compile_binary_writer().await?;
let http_client = factory.http_client_provider(); let http_client = factory.http_client_provider();
let module_specifier = cli_options.resolve_main_module()?; let module_specifier = cli_options.resolve_main_module()?;
@ -54,16 +53,6 @@ pub async fn compile(
); );
} }
if cli_options.unstable_detect_cjs() {
log::warn!(
concat!(
"{} --unstable-detect-cjs is not properly supported in deno compile. ",
"The compiled executable may encounter runtime errors.",
),
crate::colors::yellow("Warning"),
);
}
let output_path = resolve_compile_executable_output_path( let output_path = resolve_compile_executable_output_path(
http_client, http_client,
&compile_flags, &compile_flags,
@ -80,7 +69,7 @@ pub async fn compile(
let graph = if cli_options.type_check_mode().is_true() { let graph = if cli_options.type_check_mode().is_true() {
// In this case, the previous graph creation did type checking, which will // In this case, the previous graph creation did type checking, which will
// create a module graph with types information in it. We don't want to // create a module graph with types information in it. We don't want to
// store that in the eszip so create a code only module graph from scratch. // store that in the binary so create a code only module graph from scratch.
module_graph_creator module_graph_creator
.create_graph(GraphKind::CodeOnly, module_roots) .create_graph(GraphKind::CodeOnly, module_roots)
.await? .await?
@ -91,11 +80,6 @@ pub async fn compile(
let ts_config_for_emit = cli_options let ts_config_for_emit = cli_options
.resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?; .resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?;
check_warn_tsconfig(&ts_config_for_emit); check_warn_tsconfig(&ts_config_for_emit);
let (transpile_options, emit_options) =
crate::args::ts_config_to_transpile_and_emit_options(
ts_config_for_emit.ts_config,
)?;
let parser = parsed_source_cache.as_capturing_parser();
let root_dir_url = resolve_root_dir_from_specifiers( let root_dir_url = resolve_root_dir_from_specifiers(
cli_options.workspace().root_dir(), cli_options.workspace().root_dir(),
graph.specifiers().map(|(s, _)| s).chain( graph.specifiers().map(|(s, _)| s).chain(
@ -106,17 +90,6 @@ pub async fn compile(
), ),
); );
log::debug!("Binary root dir: {}", root_dir_url); log::debug!("Binary root dir: {}", root_dir_url);
let root_dir_url = EszipRelativeFileBaseUrl::new(&root_dir_url);
let eszip = eszip::EszipV2::from_graph(eszip::FromGraphOptions {
graph,
parser,
transpile_options,
emit_options,
// make all the modules relative to the root folder
relative_file_base: Some(root_dir_url),
npm_packages: None,
})?;
log::info!( log::info!(
"{} {} to {}", "{} {} to {}",
colors::green("Compile"), colors::green("Compile"),
@ -143,15 +116,18 @@ pub async fn compile(
let write_result = binary_writer let write_result = binary_writer
.write_bin( .write_bin(
file, file,
eszip, &graph,
root_dir_url, StandaloneRelativeFileBaseUrl::from(&root_dir_url),
module_specifier, module_specifier,
&compile_flags, &compile_flags,
cli_options, cli_options,
) )
.await .await
.with_context(|| { .with_context(|| {
format!("Writing temporary file '{}'", temp_path.display()) format!(
"Writing deno compile executable to temporary file '{}'",
temp_path.display()
)
}); });
// set it as executable // set it as executable

View file

@ -6,12 +6,12 @@ use crate::args::FileFlags;
use crate::args::Flags; use crate::args::Flags;
use crate::cdp; use crate::cdp;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::npm::CliNpmResolver;
use crate::tools::fmt::format_json; use crate::tools::fmt::format_json;
use crate::tools::test::is_supported_test_path; use crate::tools::test::is_supported_test_path;
use crate::util::text_encoding::source_map_from_code; use crate::util::text_encoding::source_map_from_code;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_config::glob::FileCollector; use deno_config::glob::FileCollector;
use deno_config::glob::FilePatterns; use deno_config::glob::FilePatterns;
@ -25,6 +25,7 @@ use deno_core::serde_json;
use deno_core::sourcemap::SourceMap; use deno_core::sourcemap::SourceMap;
use deno_core::url::Url; use deno_core::url::Url;
use deno_core::LocalInspectorSession; use deno_core::LocalInspectorSession;
use node_resolver::InNpmPackageChecker;
use regex::Regex; use regex::Regex;
use std::fs; use std::fs;
use std::fs::File; use std::fs::File;
@ -327,6 +328,7 @@ fn generate_coverage_report(
coverage_report.found_lines = coverage_report.found_lines =
if let Some(source_map) = maybe_source_map.as_ref() { if let Some(source_map) = maybe_source_map.as_ref() {
let script_source_lines = script_source.lines().collect::<Vec<_>>();
let mut found_lines = line_counts let mut found_lines = line_counts
.iter() .iter()
.enumerate() .enumerate()
@ -334,7 +336,23 @@ fn generate_coverage_report(
// get all the mappings from this destination line to a different src line // get all the mappings from this destination line to a different src line
let mut results = source_map let mut results = source_map
.tokens() .tokens()
.filter(move |token| token.get_dst_line() as usize == index) .filter(|token| {
let dst_line = token.get_dst_line() as usize;
dst_line == index && {
let dst_col = token.get_dst_col() as usize;
let content = script_source_lines
.get(dst_line)
.and_then(|line| {
line.get(dst_col..std::cmp::min(dst_col + 2, line.len()))
})
.unwrap_or("");
!content.is_empty()
&& content != "/*"
&& content != "*/"
&& content != "//"
}
})
.map(move |token| (token.get_src_line() as usize, *count)) .map(move |token| (token.get_src_line() as usize, *count))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
// only keep the results that point at different src lines // only keep the results that point at different src lines
@ -444,7 +462,7 @@ fn filter_coverages(
coverages: Vec<cdp::ScriptCoverage>, coverages: Vec<cdp::ScriptCoverage>,
include: Vec<String>, include: Vec<String>,
exclude: Vec<String>, exclude: Vec<String>,
npm_resolver: &dyn CliNpmResolver, in_npm_pkg_checker: &dyn InNpmPackageChecker,
) -> Vec<cdp::ScriptCoverage> { ) -> Vec<cdp::ScriptCoverage> {
let include: Vec<Regex> = let include: Vec<Regex> =
include.iter().map(|e| Regex::new(e).unwrap()).collect(); include.iter().map(|e| Regex::new(e).unwrap()).collect();
@ -468,7 +486,7 @@ fn filter_coverages(
|| doc_test_re.is_match(e.url.as_str()) || doc_test_re.is_match(e.url.as_str())
|| Url::parse(&e.url) || Url::parse(&e.url)
.ok() .ok()
.map(|url| npm_resolver.in_npm_package(&url)) .map(|url| in_npm_pkg_checker.in_npm_package(&url))
.unwrap_or(false); .unwrap_or(false);
let is_included = include.iter().any(|p| p.is_match(&e.url)); let is_included = include.iter().any(|p| p.is_match(&e.url));
@ -479,7 +497,7 @@ fn filter_coverages(
.collect::<Vec<cdp::ScriptCoverage>>() .collect::<Vec<cdp::ScriptCoverage>>()
} }
pub async fn cover_files( pub fn cover_files(
flags: Arc<Flags>, flags: Arc<Flags>,
coverage_flags: CoverageFlags, coverage_flags: CoverageFlags,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
@ -489,9 +507,10 @@ pub async fn cover_files(
let factory = CliFactory::from_flags(flags); let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?; let cli_options = factory.cli_options()?;
let npm_resolver = factory.npm_resolver().await?; let in_npm_pkg_checker = factory.in_npm_pkg_checker()?;
let file_fetcher = factory.file_fetcher()?; let file_fetcher = factory.file_fetcher()?;
let emitter = factory.emitter()?; let emitter = factory.emitter()?;
let cjs_tracker = factory.cjs_tracker()?;
assert!(!coverage_flags.files.include.is_empty()); assert!(!coverage_flags.files.include.is_empty());
@ -511,7 +530,7 @@ pub async fn cover_files(
script_coverages, script_coverages,
coverage_flags.include, coverage_flags.include,
coverage_flags.exclude, coverage_flags.exclude,
npm_resolver.as_ref(), in_npm_pkg_checker.as_ref(),
); );
if script_coverages.is_empty() { if script_coverages.is_empty() {
return Err(generic_error("No covered files included in the report")); return Err(generic_error("No covered files included in the report"));
@ -568,16 +587,21 @@ pub async fn cover_files(
let transpiled_code = match file.media_type { let transpiled_code = match file.media_type {
MediaType::JavaScript MediaType::JavaScript
| MediaType::Unknown | MediaType::Unknown
| MediaType::Css
| MediaType::Wasm
| MediaType::Cjs | MediaType::Cjs
| MediaType::Mjs | MediaType::Mjs
| MediaType::Json => None, | MediaType::Json => None,
MediaType::Dts | MediaType::Dmts | MediaType::Dcts => Some(Vec::new()), MediaType::Dts | MediaType::Dmts | MediaType::Dcts => Some(String::new()),
MediaType::TypeScript MediaType::TypeScript
| MediaType::Jsx | MediaType::Jsx
| MediaType::Mts | MediaType::Mts
| MediaType::Cts | MediaType::Cts
| MediaType::Tsx => { | MediaType::Tsx => {
Some(match emitter.maybe_cached_emit(&file.specifier, &file.source) { let module_kind = ModuleKind::from_is_cjs(
cjs_tracker.is_maybe_cjs(&file.specifier, file.media_type)?,
);
Some(match emitter.maybe_cached_emit(&file.specifier, module_kind, &file.source) {
Some(code) => code, Some(code) => code,
None => { None => {
return Err(anyhow!( return Err(anyhow!(
@ -588,13 +612,12 @@ pub async fn cover_files(
} }
}) })
} }
MediaType::Wasm | MediaType::TsBuildInfo | MediaType::SourceMap => { MediaType::SourceMap => {
unreachable!() unreachable!()
} }
}; };
let runtime_code: String = match transpiled_code { let runtime_code: String = match transpiled_code {
Some(code) => String::from_utf8(code) Some(code) => code,
.with_context(|| format!("Failed decoding {}", file.specifier))?,
None => original_source.to_string(), None => original_source.to_string(),
}; };

View file

@ -22,9 +22,9 @@ use deno_core::serde_json;
use deno_doc as doc; use deno_doc as doc;
use deno_doc::html::UrlResolveKind; use deno_doc::html::UrlResolveKind;
use deno_graph::source::NullFileSystem; use deno_graph::source::NullFileSystem;
use deno_graph::EsParser;
use deno_graph::GraphKind; use deno_graph::GraphKind;
use deno_graph::ModuleAnalyzer; use deno_graph::ModuleAnalyzer;
use deno_graph::ModuleParser;
use deno_graph::ModuleSpecifier; use deno_graph::ModuleSpecifier;
use doc::html::ShortPath; use doc::html::ShortPath;
use doc::DocDiagnostic; use doc::DocDiagnostic;
@ -37,7 +37,7 @@ const JSON_SCHEMA_VERSION: u8 = 1;
async fn generate_doc_nodes_for_builtin_types( async fn generate_doc_nodes_for_builtin_types(
doc_flags: DocFlags, doc_flags: DocFlags,
parser: &dyn ModuleParser, parser: &dyn EsParser,
analyzer: &dyn ModuleAnalyzer, analyzer: &dyn ModuleAnalyzer,
) -> Result<IndexMap<ModuleSpecifier, Vec<doc::DocNode>>, AnyError> { ) -> Result<IndexMap<ModuleSpecifier, Vec<doc::DocNode>>, AnyError> {
let source_file_specifier = let source_file_specifier =
@ -96,7 +96,7 @@ pub async fn doc(
let module_info_cache = factory.module_info_cache()?; let module_info_cache = factory.module_info_cache()?;
let parsed_source_cache = factory.parsed_source_cache(); let parsed_source_cache = factory.parsed_source_cache();
let capturing_parser = parsed_source_cache.as_capturing_parser(); let capturing_parser = parsed_source_cache.as_capturing_parser();
let analyzer = module_info_cache.as_module_analyzer(parsed_source_cache); let analyzer = module_info_cache.as_module_analyzer();
let doc_nodes_by_url = match doc_flags.source_files { let doc_nodes_by_url = match doc_flags.source_files {
DocSourceFileFlag::Builtin => { DocSourceFileFlag::Builtin => {

View file

@ -353,6 +353,21 @@ fn format_yaml(
file_text: &str, file_text: &str,
fmt_options: &FmtOptionsConfig, fmt_options: &FmtOptionsConfig,
) -> Result<Option<String>, AnyError> { ) -> Result<Option<String>, AnyError> {
let ignore_file = file_text
.lines()
.take_while(|line| line.starts_with('#'))
.any(|line| {
line
.strip_prefix('#')
.unwrap()
.trim()
.starts_with("deno-fmt-ignore-file")
});
if ignore_file {
return Ok(None);
}
let formatted_str = let formatted_str =
pretty_yaml::format_text(file_text, &get_resolved_yaml_config(fmt_options)) pretty_yaml::format_text(file_text, &get_resolved_yaml_config(fmt_options))
.map_err(AnyError::from)?; .map_err(AnyError::from)?;
@ -978,6 +993,7 @@ fn get_resolved_malva_config(
single_line_top_level_declarations: false, single_line_top_level_declarations: false,
selector_override_comment_directive: "deno-fmt-selector-override".into(), selector_override_comment_directive: "deno-fmt-selector-override".into(),
ignore_comment_directive: "deno-fmt-ignore".into(), ignore_comment_directive: "deno-fmt-ignore".into(),
ignore_file_comment_directive: "deno-fmt-ignore-file".into(),
}; };
FormatOptions { FormatOptions {
@ -1016,7 +1032,7 @@ fn get_resolved_markup_fmt_config(
max_attrs_per_line: None, max_attrs_per_line: None,
prefer_attrs_single_line: false, prefer_attrs_single_line: false,
html_normal_self_closing: None, html_normal_self_closing: None,
html_void_self_closing: Some(true), html_void_self_closing: None,
component_self_closing: None, component_self_closing: None,
svg_self_closing: None, svg_self_closing: None,
mathml_self_closing: None, mathml_self_closing: None,
@ -1036,6 +1052,7 @@ fn get_resolved_markup_fmt_config(
svelte_directive_shorthand: Some(true), svelte_directive_shorthand: Some(true),
astro_attr_shorthand: Some(true), astro_attr_shorthand: Some(true),
ignore_comment_directive: "deno-fmt-ignore".into(), ignore_comment_directive: "deno-fmt-ignore".into(),
ignore_file_comment_directive: "deno-fmt-ignore-file".into(),
}; };
FormatOptions { FormatOptions {

View file

@ -530,7 +530,7 @@ impl<'a> GraphDisplayContext<'a> {
fn build_module_info(&mut self, module: &Module, type_dep: bool) -> TreeNode { fn build_module_info(&mut self, module: &Module, type_dep: bool) -> TreeNode {
enum PackageOrSpecifier { enum PackageOrSpecifier {
Package(NpmResolutionPackage), Package(Box<NpmResolutionPackage>),
Specifier(ModuleSpecifier), Specifier(ModuleSpecifier),
} }
@ -538,7 +538,7 @@ impl<'a> GraphDisplayContext<'a> {
let package_or_specifier = match module.npm() { let package_or_specifier = match module.npm() {
Some(npm) => match self.npm_info.resolve_package(npm.nv_reference.nv()) { Some(npm) => match self.npm_info.resolve_package(npm.nv_reference.nv()) {
Some(package) => Package(package.clone()), Some(package) => Package(Box::new(package.clone())),
None => Specifier(module.specifier().clone()), // should never happen None => Specifier(module.specifier().clone()), // should never happen
}, },
None => Specifier(module.specifier().clone()), None => Specifier(module.specifier().clone()),
@ -645,10 +645,12 @@ impl<'a> GraphDisplayContext<'a> {
let message = match err { let message = match err {
HttpsChecksumIntegrity(_) => "(checksum integrity error)", HttpsChecksumIntegrity(_) => "(checksum integrity error)",
Decode(_) => "(loading decode error)", Decode(_) => "(loading decode error)",
Loader(err) => match deno_core::error::get_custom_error_class(err) { Loader(err) => {
match deno_runtime::errors::get_error_class_name(err) {
Some("NotCapable") => "(not capable, requires --allow-import)", Some("NotCapable") => "(not capable, requires --allow-import)",
_ => "(loading error)", _ => "(loading error)",
}, }
}
Jsr(_) => "(loading error)", Jsr(_) => "(loading error)",
NodeUnknownBuiltinModule(_) => "(unknown node built-in error)", NodeUnknownBuiltinModule(_) => "(unknown node built-in error)",
Npm(_) => "(npm loading error)", Npm(_) => "(npm loading error)",

View file

@ -24,32 +24,29 @@ pub fn init_project(init_flags: InitFlags) -> Result<(), AnyError> {
create_file( create_file(
&dir, &dir,
"main.ts", "main.ts",
r#"import { type Route, route, serveDir } from "@std/http"; r#"import { serveDir } from "@std/http";
const routes: Route[] = [ const userPagePattern = new URLPattern({ pathname: "/users/:id" });
{ const staticPathPattern = new URLPattern({ pathname: "/static/*" });
pattern: new URLPattern({ pathname: "/" }),
handler: () => new Response("Home page"),
},
{
pattern: new URLPattern({ pathname: "/users/:id" }),
handler: (_req, _info, params) => new Response(params?.pathname.groups.id),
},
{
pattern: new URLPattern({ pathname: "/static/*" }),
handler: (req) => serveDir(req),
},
];
function defaultHandler(_req: Request) {
return new Response("Not found", { status: 404 });
}
const handler = route(routes, defaultHandler);
export default { export default {
fetch(req) { fetch(req) {
return handler(req); const url = new URL(req.url);
if (url.pathname === "/") {
return new Response("Home page");
}
const userPageMatch = userPagePattern.exec(url);
if (userPageMatch) {
return new Response(userPageMatch.pathname.groups.id);
}
if (staticPathPattern.test(url)) {
return serveDir(req);
}
return new Response("Not found", { status: 404 });
}, },
} satisfies Deno.ServeDefaultExport; } satisfies Deno.ServeDefaultExport;
"#, "#,

File diff suppressed because it is too large Load diff

View file

@ -44,7 +44,11 @@ pub async fn cache_top_level_deps(
let mut seen_reqs = std::collections::HashSet::new(); let mut seen_reqs = std::collections::HashSet::new();
for entry in import_map.imports().entries() { for entry in import_map.imports().entries().chain(
import_map
.scopes()
.flat_map(|scope| scope.imports.entries()),
) {
let Some(specifier) = entry.value else { let Some(specifier) = entry.value else {
continue; continue;
}; };
@ -89,10 +93,6 @@ pub async fn cache_top_level_deps(
while let Some(info_future) = info_futures.next().await { while let Some(info_future) = info_futures.next().await {
if let Some((specifier, info)) = info_future { if let Some((specifier, info)) = info_future {
if info.export(".").is_some() {
roots.push(specifier.clone());
continue;
}
let exports = info.exports(); let exports = info.exports();
for (k, _) in exports { for (k, _) in exports {
if let Ok(spec) = specifier.join(k) { if let Ok(spec) = specifier.join(k) {

View file

@ -120,7 +120,7 @@ fn resolve_content_maybe_unfurling(
| MediaType::Unknown | MediaType::Unknown
| MediaType::Json | MediaType::Json
| MediaType::Wasm | MediaType::Wasm
| MediaType::TsBuildInfo => { | MediaType::Css => {
// not unfurlable data // not unfurlable data
return Ok(data); return Ok(data);
} }

View file

@ -25,6 +25,7 @@ use deno_ast::swc::visit::noop_visit_type;
use deno_ast::swc::visit::Visit; use deno_ast::swc::visit::Visit;
use deno_ast::swc::visit::VisitWith; use deno_ast::swc::visit::VisitWith;
use deno_ast::ImportsNotUsedAsValues; use deno_ast::ImportsNotUsedAsValues;
use deno_ast::ModuleKind;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_ast::ParseDiagnosticsError; use deno_ast::ParseDiagnosticsError;
use deno_ast::ParsedSource; use deno_ast::ParsedSource;
@ -641,6 +642,10 @@ impl ReplSession {
jsx_fragment_factory: self.jsx.frag_factory.clone(), jsx_fragment_factory: self.jsx.frag_factory.clone(),
jsx_import_source: self.jsx.import_source.clone(), jsx_import_source: self.jsx.import_source.clone(),
var_decl_imports: true, var_decl_imports: true,
verbatim_module_syntax: false,
},
&deno_ast::TranspileModuleOptions {
module_kind: Some(ModuleKind::Esm),
}, },
&deno_ast::EmitOptions { &deno_ast::EmitOptions {
source_map: deno_ast::SourceMapOption::None, source_map: deno_ast::SourceMapOption::None,
@ -651,7 +656,6 @@ impl ReplSession {
}, },
)? )?
.into_source() .into_source()
.into_string()?
.text; .text;
let value = self let value = self

View file

@ -1,9 +1,11 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::cdp; use std::collections::HashMap;
use crate::emit::Emitter; use std::path::PathBuf;
use crate::util::file_watcher::WatcherCommunicator; use std::sync::Arc;
use crate::util::file_watcher::WatcherRestartMode;
use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_core::error::generic_error; use deno_core::error::generic_error;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::StreamExt; use deno_core::futures::StreamExt;
@ -12,11 +14,14 @@ use deno_core::serde_json::{self};
use deno_core::url::Url; use deno_core::url::Url;
use deno_core::LocalInspectorSession; use deno_core::LocalInspectorSession;
use deno_terminal::colors; use deno_terminal::colors;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::select; use tokio::select;
use crate::cdp;
use crate::emit::Emitter;
use crate::resolver::CjsTracker;
use crate::util::file_watcher::WatcherCommunicator;
use crate::util::file_watcher::WatcherRestartMode;
fn explain(status: &cdp::Status) -> &'static str { fn explain(status: &cdp::Status) -> &'static str {
match status { match status {
cdp::Status::Ok => "OK", cdp::Status::Ok => "OK",
@ -58,6 +63,7 @@ pub struct HmrRunner {
session: LocalInspectorSession, session: LocalInspectorSession,
watcher_communicator: Arc<WatcherCommunicator>, watcher_communicator: Arc<WatcherCommunicator>,
script_ids: HashMap<String, String>, script_ids: HashMap<String, String>,
cjs_tracker: Arc<CjsTracker>,
emitter: Arc<Emitter>, emitter: Arc<Emitter>,
} }
@ -139,7 +145,8 @@ impl crate::worker::HmrRunner for HmrRunner {
}; };
let source_code = self.emitter.load_and_emit_for_hmr( let source_code = self.emitter.load_and_emit_for_hmr(
&module_url &module_url,
ModuleKind::from_is_cjs(self.cjs_tracker.is_maybe_cjs(&module_url, MediaType::from_specifier(&module_url))?),
).await?; ).await?;
let mut tries = 1; let mut tries = 1;
@ -172,12 +179,14 @@ impl crate::worker::HmrRunner for HmrRunner {
impl HmrRunner { impl HmrRunner {
pub fn new( pub fn new(
cjs_tracker: Arc<CjsTracker>,
emitter: Arc<Emitter>, emitter: Arc<Emitter>,
session: LocalInspectorSession, session: LocalInspectorSession,
watcher_communicator: Arc<WatcherCommunicator>, watcher_communicator: Arc<WatcherCommunicator>,
) -> Self { ) -> Self {
Self { Self {
session, session,
cjs_tracker,
emitter, emitter,
watcher_communicator, watcher_communicator,
script_ids: HashMap::new(), script_ids: HashMap::new(),

View file

@ -30,6 +30,16 @@ To grant permissions, set them before the script argument. For example:
} }
} }
fn set_npm_user_agent() {
static ONCE: std::sync::Once = std::sync::Once::new();
ONCE.call_once(|| {
std::env::set_var(
crate::npm::NPM_CONFIG_USER_AGENT_ENV_VAR,
crate::npm::get_npm_config_user_agent(),
);
});
}
pub async fn run_script( pub async fn run_script(
mode: WorkerExecutionMode, mode: WorkerExecutionMode,
flags: Arc<Flags>, flags: Arc<Flags>,
@ -58,6 +68,10 @@ pub async fn run_script(
let main_module = cli_options.resolve_main_module()?; let main_module = cli_options.resolve_main_module()?;
if main_module.scheme() == "npm" {
set_npm_user_agent();
}
maybe_npm_install(&factory).await?; maybe_npm_install(&factory).await?;
let worker_factory = factory.create_cli_main_worker_factory().await?; let worker_factory = factory.create_cli_main_worker_factory().await?;
@ -119,6 +133,10 @@ async fn run_with_watch(
let cli_options = factory.cli_options()?; let cli_options = factory.cli_options()?;
let main_module = cli_options.resolve_main_module()?; let main_module = cli_options.resolve_main_module()?;
if main_module.scheme() == "npm" {
set_npm_user_agent();
}
maybe_npm_install(&factory).await?; maybe_npm_install(&factory).await?;
let _ = watcher_communicator.watch_paths(cli_options.watch_paths()); let _ = watcher_communicator.watch_paths(cli_options.watch_paths());

View file

@ -44,12 +44,15 @@ pub async fn serve(
maybe_npm_install(&factory).await?; maybe_npm_install(&factory).await?;
let worker_factory = factory.create_cli_main_worker_factory().await?; let worker_factory = factory.create_cli_main_worker_factory().await?;
let hmr = serve_flags
.watch
.map(|watch_flags| watch_flags.hmr)
.unwrap_or(false);
do_serve( do_serve(
worker_factory, worker_factory,
main_module.clone(), main_module.clone(),
serve_flags.worker_count, serve_flags.worker_count,
false, hmr,
) )
.await .await
} }
@ -109,8 +112,6 @@ async fn do_serve(
} }
} }
Ok(exit_code) Ok(exit_code)
// main.await?
} }
async fn run_worker( async fn run_worker(
@ -119,7 +120,7 @@ async fn run_worker(
main_module: ModuleSpecifier, main_module: ModuleSpecifier,
hmr: bool, hmr: bool,
) -> Result<i32, AnyError> { ) -> Result<i32, AnyError> {
let mut worker = worker_factory let mut worker: crate::worker::CliMainWorker = worker_factory
.create_main_worker( .create_main_worker(
deno_runtime::WorkerExecutionMode::Serve { deno_runtime::WorkerExecutionMode::Serve {
is_main: false, is_main: false,

Some files were not shown because too many files have changed in this diff Show more