mirror of https://github.com/denoland/deno.git synced 2024-11-21 15:04:11 -05:00

Merge branch 'main' into Fix-UNC-Path-Permissions-Issue-on-Windows

Yazan AbdAl-Rahman 2024-11-14 10:36:13 +02:00 committed by GitHub
commit aa56715d1d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
192 changed files with 4472 additions and 952 deletions

Cargo.lock (generated)

@ -347,6 +347,53 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0"
[[package]]
name = "axum"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf"
dependencies = [
"async-trait",
"axum-core",
"bytes",
"futures-util",
"http 1.1.0",
"http-body 1.0.0",
"http-body-util",
"itoa",
"matchit",
"memchr",
"mime",
"percent-encoding",
"pin-project-lite",
"rustversion",
"serde",
"sync_wrapper",
"tower",
"tower-layer",
"tower-service",
]
[[package]]
name = "axum-core"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199"
dependencies = [
"async-trait",
"bytes",
"futures-util",
"http 1.1.0",
"http-body 1.0.0",
"http-body-util",
"mime",
"pin-project-lite",
"rustversion",
"sync_wrapper",
"tower-layer",
"tower-service",
]
[[package]]
name = "backtrace"
version = "0.3.73"
@ -435,29 +482,6 @@ dependencies = [
"serde",
]
[[package]]
name = "bindgen"
version = "0.69.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0"
dependencies = [
"bitflags 2.6.0",
"cexpr",
"clang-sys",
"itertools 0.10.5",
"lazy_static",
"lazycell",
"log",
"prettyplease 0.2.17",
"proc-macro2",
"quote",
"regex",
"rustc-hash 1.1.0",
"shlex",
"syn 2.0.72",
"which 4.4.2",
]
[[package]]
name = "bindgen"
version = "0.70.1"
@ -469,7 +493,7 @@ dependencies = [
"clang-sys",
"itertools 0.13.0",
"log",
"prettyplease 0.2.17",
"prettyplease",
"proc-macro2",
"quote",
"regex",
@ -1118,7 +1142,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
dependencies = [
"cfg-if",
"hashbrown",
"hashbrown 0.14.5",
"lock_api",
"once_cell",
"parking_lot_core",
@ -1207,7 +1231,7 @@ dependencies = [
"http-body-util",
"hyper-util",
"import_map",
"indexmap",
"indexmap 2.3.0",
"jsonc-parser",
"junction",
"lazy-regex",
@ -1363,7 +1387,7 @@ dependencies = [
"base32",
"deno_media_type",
"deno_path_util",
"indexmap",
"indexmap 2.3.0",
"log",
"once_cell",
"parking_lot",
@ -1398,7 +1422,7 @@ dependencies = [
"glob",
"ignore",
"import_map",
"indexmap",
"indexmap 2.3.0",
"jsonc-parser",
"log",
"percent-encoding",
@ -1418,9 +1442,9 @@ dependencies = [
[[package]]
name = "deno_core"
version = "0.318.0"
version = "0.319.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10cae2393219ff9278123f7b24799cdfab37c7d6561b69ca06ced115cac92111"
checksum = "e9dbb841f9850534320d8927dce53ca8d64bafbab5576c2a98f03f9e08534215"
dependencies = [
"anyhow",
"bincode",
@ -1450,9 +1474,9 @@ dependencies = [
[[package]]
name = "deno_core_icudata"
version = "0.0.73"
version = "0.74.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a13951ea98c0a4c372f162d669193b4c9d991512de9f2381dd161027f34b26b1"
checksum = "fe4dccb6147bb3f3ba0c7a48e993bfeb999d2c2e47a81badee80e2b370c8d695"
[[package]]
name = "deno_cron"
@ -1519,7 +1543,7 @@ dependencies = [
"handlebars",
"html-escape",
"import_map",
"indexmap",
"indexmap 2.3.0",
"itoa",
"lazy_static",
"regex",
@ -1619,7 +1643,7 @@ dependencies = [
"encoding_rs",
"futures",
"import_map",
"indexmap",
"indexmap 2.3.0",
"log",
"monch",
"once_cell",
@ -1851,7 +1875,7 @@ dependencies = [
"hyper 1.4.1",
"hyper-util",
"idna 0.3.0",
"indexmap",
"indexmap 2.3.0",
"ipnetwork",
"k256",
"lazy-regex",
@ -1921,9 +1945,9 @@ dependencies = [
[[package]]
name = "deno_ops"
version = "0.194.0"
version = "0.195.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f760b492bd638c1dc3e992d11672c259fbe9a233162099a8347591c9e22d0391"
checksum = "797f348c38c07a5398bf790b280077c698e13fb49252f61ca6f6c5c616060292"
dependencies = [
"proc-macro-rules",
"proc-macro2",
@ -1941,7 +1965,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6cbc4c4d3eb0960b58e8f43f9fc2d3f620fcac9a03cd85203e08db5b04e83c1f"
dependencies = [
"deno_semver",
"indexmap",
"indexmap 2.3.0",
"serde",
"serde_json",
"thiserror",
@ -1997,6 +2021,7 @@ dependencies = [
name = "deno_runtime"
version = "0.186.0"
dependencies = [
"async-trait",
"color-print",
"deno_ast",
"deno_broadcast_channel",
@ -2042,7 +2067,13 @@ dependencies = [
"notify",
"ntapi",
"once_cell",
"opentelemetry",
"opentelemetry-http",
"opentelemetry-otlp",
"opentelemetry-semantic-conventions",
"opentelemetry_sdk",
"percent-encoding",
"pin-project",
"regex",
"rustyline",
"same-file",
@ -2259,9 +2290,9 @@ dependencies = [
[[package]]
name = "denokv_proto"
version = "0.8.2"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f629c77d2bf59e2e2a07fd7b14bfffae352a3813fbdcb801e72205741fb7625c"
checksum = "f7ba1f99ed11a9c11e868a8521b1f71a7e1aba785d7f42ea9ecbdc01146c89ec"
dependencies = [
"anyhow",
"async-trait",
@ -2275,9 +2306,9 @@ dependencies = [
[[package]]
name = "denokv_remote"
version = "0.8.1"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d57717b5123e8d1ec5f52973a67f98e3621274d362d18b245038967b402082df"
checksum = "08ed833073189e8f6d03155fe3b05a024e75e29d8a28a4c2e9ec3b5c925e727b"
dependencies = [
"anyhow",
"async-stream",
@ -2300,9 +2331,9 @@ dependencies = [
[[package]]
name = "denokv_sqlite"
version = "0.8.2"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c4f5719e2bf698ec4f39fe29d91b62ff06a4b4a37ee481ffb8658d140fed986"
checksum = "9b790f01d1302d53a0c3cbd27de88a06b3abd64ec8ab8673924e490541c7c713"
dependencies = [
"anyhow",
"async-stream",
@ -2548,8 +2579,8 @@ checksum = "f3ab0dd2bedc109d25f0d21afb09b7d329f6c6fa83b095daf31d2d967e091548"
dependencies = [
"anyhow",
"bumpalo",
"hashbrown",
"indexmap",
"hashbrown 0.14.5",
"indexmap 2.3.0",
"rustc-hash 1.1.0",
"serde",
"unicode-width",
@ -2755,7 +2786,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48cede2bb1b07dd598d269f973792c43e0cd92686d3b452bd6e01d7a8eb01211"
dependencies = [
"debug-ignore",
"indexmap",
"indexmap 2.3.0",
"log",
"thiserror",
"zerocopy",
@ -3392,7 +3423,7 @@ checksum = "9c08c1f623a8d0b722b8b99f821eb0ba672a1618f0d3b16ddbee1cedd2dd8557"
dependencies = [
"bitflags 2.6.0",
"gpu-descriptor-types",
"hashbrown",
"hashbrown 0.14.5",
]
[[package]]
@ -3436,7 +3467,7 @@ dependencies = [
"futures-sink",
"futures-util",
"http 0.2.12",
"indexmap",
"indexmap 2.3.0",
"slab",
"tokio",
"tokio-util",
@ -3455,7 +3486,7 @@ dependencies = [
"futures-sink",
"futures-util",
"http 1.1.0",
"indexmap",
"indexmap 2.3.0",
"slab",
"tokio",
"tokio-util",
@ -3468,7 +3499,7 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8588661a8607108a5ca69cab034063441a0413a0b041c13618a7dd348021ef6f"
dependencies = [
"hashbrown",
"hashbrown 0.14.5",
"serde",
]
@ -3487,6 +3518,12 @@ dependencies = [
"thiserror",
]
[[package]]
name = "hashbrown"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hashbrown"
version = "0.14.5"
@ -3503,7 +3540,7 @@ version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af"
dependencies = [
"hashbrown",
"hashbrown 0.14.5",
]
[[package]]
@ -3666,7 +3703,7 @@ version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a9de2bdef6354361892492bab5e316b2d78a0ee9971db4d36da9b1eb0e11999"
dependencies = [
"hashbrown",
"hashbrown 0.14.5",
"new_debug_unreachable",
"once_cell",
"phf",
@ -3820,6 +3857,19 @@ dependencies = [
"webpki-roots",
]
[[package]]
name = "hyper-timeout"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793"
dependencies = [
"hyper 1.4.1",
"hyper-util",
"pin-project-lite",
"tokio",
"tower-service",
]
[[package]]
name = "hyper-util"
version = "0.1.7"
@ -3908,7 +3958,7 @@ version = "0.20.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "351a787decc56f38d65d16d32687265045d6d6a4531b4a0e1b649def3590354e"
dependencies = [
"indexmap",
"indexmap 2.3.0",
"log",
"percent-encoding",
"serde",
@ -3917,6 +3967,16 @@ dependencies = [
"url",
]
[[package]]
name = "indexmap"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
"hashbrown 0.12.3",
]
[[package]]
name = "indexmap"
version = "2.3.0"
@ -3924,7 +3984,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0"
dependencies = [
"equivalent",
"hashbrown",
"hashbrown 0.14.5",
"serde",
]
@ -4192,12 +4252,6 @@ dependencies = [
"spin",
]
[[package]]
name = "lazycell"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
[[package]]
name = "libc"
version = "0.2.153"
@ -4289,7 +4343,7 @@ version = "1.48.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca8dfd1a173826d193e3b955e07c22765829890f62c677a59c4a410cb4f47c01"
dependencies = [
"bindgen 0.70.1",
"bindgen",
"libloading 0.8.5",
]
@ -4406,6 +4460,12 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4"
[[package]]
name = "matchit"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94"
[[package]]
name = "md-5"
version = "0.10.6"
@ -4533,7 +4593,7 @@ dependencies = [
"bitflags 2.6.0",
"codespan-reporting",
"hexf-parse",
"indexmap",
"indexmap 2.3.0",
"log",
"num-traits",
"rustc-hash 1.1.0",
@ -4837,6 +4897,93 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "opentelemetry"
version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f3cebff57f7dbd1255b44d8bddc2cebeb0ea677dbaa2e25a3070a91b318f660"
dependencies = [
"futures-core",
"futures-sink",
"js-sys",
"once_cell",
"pin-project-lite",
"thiserror",
]
[[package]]
name = "opentelemetry-http"
version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10a8a7f5f6ba7c1b286c2fbca0454eaba116f63bbe69ed250b642d36fbb04d80"
dependencies = [
"async-trait",
"bytes",
"http 1.1.0",
"opentelemetry",
]
[[package]]
name = "opentelemetry-otlp"
version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91cf61a1868dacc576bf2b2a1c3e9ab150af7272909e80085c3173384fe11f76"
dependencies = [
"async-trait",
"futures-core",
"http 1.1.0",
"opentelemetry",
"opentelemetry-http",
"opentelemetry-proto",
"opentelemetry_sdk",
"prost",
"serde_json",
"thiserror",
"tokio",
"tonic",
"tracing",
]
[[package]]
name = "opentelemetry-proto"
version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6e05acbfada5ec79023c85368af14abd0b307c015e9064d249b2a950ef459a6"
dependencies = [
"hex",
"opentelemetry",
"opentelemetry_sdk",
"prost",
"serde",
"tonic",
]
[[package]]
name = "opentelemetry-semantic-conventions"
version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc1b6902ff63b32ef6c489e8048c5e253e2e4a803ea3ea7e783914536eb15c52"
[[package]]
name = "opentelemetry_sdk"
version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27b742c1cae4693792cc564e58d75a2a0ba29421a34a85b50da92efa89ecb2bc"
dependencies = [
"async-trait",
"futures-channel",
"futures-executor",
"futures-util",
"glob",
"once_cell",
"opentelemetry",
"percent-encoding",
"rand",
"serde_json",
"thiserror",
"tracing",
]
[[package]]
name = "option-ext"
version = "0.2.0"
@ -5062,7 +5209,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9"
dependencies = [
"fixedbitset",
"indexmap",
"indexmap 2.3.0",
]
[[package]]
@ -5241,16 +5388,6 @@ dependencies = [
"yaml_parser",
]
[[package]]
name = "prettyplease"
version = "0.1.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86"
dependencies = [
"proc-macro2",
"syn 1.0.109",
]
[[package]]
name = "prettyplease"
version = "0.2.17"
@ -5334,9 +5471,9 @@ checksum = "43d84d1d7a6ac92673717f9f6d1518374ef257669c24ebc5ac25d5033828be58"
[[package]]
name = "prost"
version = "0.11.9"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd"
checksum = "7b0487d90e047de87f984913713b85c601c05609aad5b0df4b4573fbf69aa13f"
dependencies = [
"bytes",
"prost-derive",
@ -5344,44 +5481,43 @@ dependencies = [
[[package]]
name = "prost-build"
version = "0.11.9"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270"
checksum = "0c1318b19085f08681016926435853bbf7858f9c082d0999b80550ff5d9abe15"
dependencies = [
"bytes",
"heck 0.4.1",
"itertools 0.10.5",
"lazy_static",
"heck 0.5.0",
"itertools 0.13.0",
"log",
"multimap",
"once_cell",
"petgraph",
"prettyplease 0.1.25",
"prettyplease",
"prost",
"prost-types",
"regex",
"syn 1.0.109",
"syn 2.0.72",
"tempfile",
"which 4.4.2",
]
[[package]]
name = "prost-derive"
version = "0.11.9"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4"
checksum = "e9552f850d5f0964a4e4d0bf306459ac29323ddfbae05e35a7c0d35cb0803cc5"
dependencies = [
"anyhow",
"itertools 0.10.5",
"itertools 0.13.0",
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.72",
]
[[package]]
name = "prost-types"
version = "0.11.9"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "213622a1460818959ac1181aaeb2dc9c7f63df720db7d788b3e24eacd1983e13"
checksum = "4759aa0d3a6232fb8dbdb97b61de2c20047c68aca932c7ed76da9d788508d670"
dependencies = [
"prost",
]
@ -5439,7 +5575,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d1a341ae463320e9f8f34adda49c8a85d81d4e8f34cce4397fb0350481552224"
dependencies = [
"chrono",
"indexmap",
"indexmap 2.3.0",
"quick-xml",
"strip-ansi-escapes",
"thiserror",
@ -5801,7 +5937,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49"
dependencies = [
"countme",
"hashbrown",
"hashbrown 0.14.5",
"memoffset",
"rustc-hash 1.1.0",
"text-size",
@ -6206,7 +6342,7 @@ version = "1.0.122"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da"
dependencies = [
"indexmap",
"indexmap 2.3.0",
"itoa",
"memchr",
"ryu",
@ -6238,9 +6374,9 @@ dependencies = [
[[package]]
name = "serde_v8"
version = "0.227.0"
version = "0.228.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a8294c2223c53bed343be8b80564ece4dc0d03b643b06fa86c4ccc0e064eda0"
checksum = "bfe23e75c9a167f4e9d67a90d9fcaa622d1eec9aecad526c270e99a92f6915ff"
dependencies = [
"num-bigint",
"serde",
@ -6603,7 +6739,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adc8bd3075d1c6964010333fae9ddcd91ad422a4f8eb8b3206a9b2b6afb4209e"
dependencies = [
"bumpalo",
"hashbrown",
"hashbrown 0.14.5",
"ptr_meta",
"rustc-hash 1.1.0",
"triomphe",
@ -6629,7 +6765,7 @@ checksum = "c77c112c218a09635d99a45802a81b4f341d6c28c81076aa2c29ba3bcd9151a9"
dependencies = [
"anyhow",
"crc",
"indexmap",
"indexmap 2.3.0",
"is-macro",
"once_cell",
"parking_lot",
@ -6699,7 +6835,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4740e53eaf68b101203c1df0937d5161a29f3c13bceed0836ddfe245b72dd000"
dependencies = [
"anyhow",
"indexmap",
"indexmap 2.3.0",
"serde",
"serde_json",
"swc_cached",
@ -6811,7 +6947,7 @@ checksum = "65f21494e75d0bd8ef42010b47cabab9caaed8f2207570e809f6f4eb51a710d1"
dependencies = [
"better_scoped_tls",
"bitflags 2.6.0",
"indexmap",
"indexmap 2.3.0",
"once_cell",
"phf",
"rustc-hash 1.1.0",
@ -6859,7 +6995,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98d8447ea20ef76958a8240feef95743702485a84331e6df5bdbe7e383c87838"
dependencies = [
"dashmap",
"indexmap",
"indexmap 2.3.0",
"once_cell",
"petgraph",
"rustc-hash 1.1.0",
@ -6904,7 +7040,7 @@ checksum = "76c76d8b9792ce51401d38da0fa62158d61f6d80d16d68fe5b03ce4bf5fba383"
dependencies = [
"base64 0.21.7",
"dashmap",
"indexmap",
"indexmap 2.3.0",
"once_cell",
"serde",
"sha1",
@ -6944,7 +7080,7 @@ version = "0.134.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "029eec7dd485923a75b5a45befd04510288870250270292fc2c1b3a9e7547408"
dependencies = [
"indexmap",
"indexmap 2.3.0",
"num_cpus",
"once_cell",
"rustc-hash 1.1.0",
@ -6989,7 +7125,7 @@ version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "357e2c97bb51431d65080f25b436bc4e2fc1a7f64a643bc21a8353e478dc799f"
dependencies = [
"indexmap",
"indexmap 2.3.0",
"petgraph",
"rustc-hash 1.1.0",
"swc_common",
@ -7402,9 +7538,9 @@ dependencies = [
[[package]]
name = "tokio-stream"
version = "0.1.15"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af"
checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1"
dependencies = [
"futures-core",
"pin-project-lite",
@ -7422,7 +7558,7 @@ dependencies = [
"futures-io",
"futures-sink",
"futures-util",
"hashbrown",
"hashbrown 0.14.5",
"pin-project-lite",
"slab",
"tokio",
@ -7438,6 +7574,36 @@ dependencies = [
"serde",
]
[[package]]
name = "tonic"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52"
dependencies = [
"async-stream",
"async-trait",
"axum",
"base64 0.22.1",
"bytes",
"h2 0.4.4",
"http 1.1.0",
"http-body 1.0.0",
"http-body-util",
"hyper 1.4.1",
"hyper-timeout",
"hyper-util",
"percent-encoding",
"pin-project",
"prost",
"socket2",
"tokio",
"tokio-stream",
"tower",
"tower-layer",
"tower-service",
"tracing",
]
[[package]]
name = "tower"
version = "0.4.13"
@ -7446,11 +7612,16 @@ checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c"
dependencies = [
"futures-core",
"futures-util",
"indexmap 1.9.3",
"pin-project",
"pin-project-lite",
"rand",
"slab",
"tokio",
"tokio-util",
"tower-layer",
"tower-service",
"tracing",
]
[[package]]
@ -7740,11 +7911,11 @@ dependencies = [
[[package]]
name = "v8"
version = "0.106.0"
version = "130.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a381badc47c6f15acb5fe0b5b40234162349ed9d4e4fd7c83a7f5547c0fc69c5"
checksum = "c23b5c2caff00209b03a716609b275acae94b02dd3b63c4648e7232a84a8402f"
dependencies = [
"bindgen 0.69.4",
"bindgen",
"bitflags 2.6.0",
"fslock",
"gzip-header",
@ -7763,7 +7934,7 @@ checksum = "97599c400fc79925922b58303e98fcb8fa88f573379a08ddb652e72cbd2e70f6"
dependencies = [
"bitflags 2.6.0",
"encoding_rs",
"indexmap",
"indexmap 2.3.0",
"num-bigint",
"serde",
"thiserror",
@ -7972,7 +8143,7 @@ dependencies = [
"cfg_aliases",
"codespan-reporting",
"document-features",
"indexmap",
"indexmap 2.3.0",
"log",
"naga",
"once_cell",
@ -8485,9 +8656,9 @@ dependencies = [
[[package]]
name = "zeromq"
version = "0.4.0"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb0560d00172817b7f7c2265060783519c475702ae290b154115ca75e976d4d0"
checksum = "6a4528179201f6eecf211961a7d3276faa61554c82651ecc66387f68fc3004bd"
dependencies = [
"async-trait",
"asynchronous-codec",
@ -8521,7 +8692,7 @@ dependencies = [
"crossbeam-utils",
"displaydoc",
"flate2",
"indexmap",
"indexmap 2.3.0",
"memchr",
"thiserror",
]


@ -46,7 +46,7 @@ repository = "https://github.com/denoland/deno"
[workspace.dependencies]
deno_ast = { version = "=0.43.3", features = ["transpiling"] }
deno_core = { version = "0.318.0" }
deno_core = { version = "0.319.0" }
deno_bench_util = { version = "0.171.0", path = "./bench_util" }
deno_lockfile = "=0.23.1"
@ -60,10 +60,10 @@ deno_terminal = "0.2.0"
napi_sym = { version = "0.107.0", path = "./ext/napi/sym" }
test_util = { package = "test_server", path = "./tests/util/server" }
denokv_proto = "0.8.1"
denokv_remote = "0.8.1"
denokv_proto = "0.8.4"
denokv_remote = "0.8.4"
# denokv_sqlite brings in bundled sqlite if we don't disable the default features
denokv_sqlite = { default-features = false, version = "0.8.2" }
denokv_sqlite = { default-features = false, version = "0.8.4" }
# exts
deno_broadcast_channel = { version = "0.171.0", path = "./ext/broadcast_channel" }
@ -157,8 +157,8 @@ percent-encoding = "2.3.0"
phf = { version = "0.11", features = ["macros"] }
pin-project = "1.0.11" # don't pin because they yank crates from cargo
pretty_assertions = "=1.4.0"
prost = "0.11"
prost-build = "0.11"
prost = "0.13"
prost-build = "0.13"
rand = "=0.8.5"
regex = "^1.7.0"
reqwest = { version = "=0.12.5", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks", "json", "http2"] } # pinned because of https://github.com/seanmonstar/reqwest/pull/1955
@ -204,9 +204,15 @@ webpki-root-certs = "0.26.5"
webpki-roots = "0.26"
which = "4.2.5"
yoke = { version = "0.7.4", features = ["derive"] }
zeromq = { version = "=0.4.0", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
zeromq = { version = "=0.4.1", default-features = false, features = ["tcp-transport", "tokio-runtime"] }
zstd = "=0.12.4"
opentelemetry = "0.27.0"
opentelemetry-http = "0.27.0"
opentelemetry-otlp = { version = "0.27.0", features = ["logs", "http-proto", "http-json"] }
opentelemetry-semantic-conventions = { version = "0.27.0", features = ["semconv_experimental"] }
opentelemetry_sdk = "0.27.0"
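# A minimal sketch, not shown in this hunk: per the Cargo.lock changes above, a member
# crate such as deno_runtime would pick up these pins through Cargo's workspace
# dependency inheritance, roughly like so in its own Cargo.toml:
#
#   [dependencies]
#   opentelemetry.workspace = true
#   opentelemetry-http.workspace = true
#   opentelemetry-otlp.workspace = true
#   opentelemetry-semantic-conventions.workspace = true
#   opentelemetry_sdk.workspace = true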
# crypto
hkdf = "0.12.3"
rsa = { version = "0.9.3", default-features = false, features = ["std", "pem", "hazmat"] } # hazmat needed for PrehashSigner in ext/node


@ -576,7 +576,6 @@ pub struct UnstableConfig {
// TODO(bartlomieju): remove in Deno 2.5
pub legacy_flag_enabled: bool, // --unstable
pub bare_node_builtins: bool,
pub detect_cjs: bool,
pub sloppy_imports: bool,
pub features: Vec<String>, // --unstable-kv --unstable-cron
}
@ -5720,7 +5719,6 @@ fn unstable_args_parse(
flags.unstable_config.bare_node_builtins =
matches.get_flag("unstable-bare-node-builtins");
flags.unstable_config.detect_cjs = matches.get_flag("unstable-detect-cjs");
flags.unstable_config.sloppy_imports =
matches.get_flag("unstable-sloppy-imports");


@ -7,6 +7,7 @@ mod import_map;
mod lockfile;
mod package_json;
use deno_ast::MediaType;
use deno_ast::SourceMapOption;
use deno_config::deno_json::NodeModulesDirMode;
use deno_config::workspace::CreateResolverOptions;
@ -27,13 +28,13 @@ use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm::NpmSystemInfo;
use deno_path_util::normalize_path;
use deno_runtime::ops::otel::OtelConfig;
use deno_semver::npm::NpmPackageReqReference;
use import_map::resolve_import_map_value_from_specifier;
pub use deno_config::deno_json::BenchConfig;
pub use deno_config::deno_json::ConfigFile;
pub use deno_config::deno_json::FmtOptionsConfig;
pub use deno_config::deno_json::JsxImportSourceConfig;
pub use deno_config::deno_json::LintRulesConfig;
pub use deno_config::deno_json::ProseWrap;
pub use deno_config::deno_json::TsConfig;
@ -1129,6 +1130,23 @@ impl CliOptions {
}
}
pub fn otel_config(&self) -> Option<OtelConfig> {
if self
.flags
.unstable_config
.features
.contains(&String::from("otel"))
{
Some(OtelConfig {
runtime_name: Cow::Borrowed("deno"),
runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno),
..Default::default()
})
} else {
None
}
}
pub fn env_file_name(&self) -> Option<&String> {
self.flags.env_file.as_ref()
}
@ -1137,21 +1155,34 @@ impl CliOptions {
self
.main_module_cell
.get_or_init(|| {
let main_module = match &self.flags.subcommand {
Ok(match &self.flags.subcommand {
DenoSubcommand::Compile(compile_flags) => {
resolve_url_or_path(&compile_flags.source_file, self.initial_cwd())?
}
DenoSubcommand::Eval(_) => {
resolve_url_or_path("./$deno$eval.ts", self.initial_cwd())?
resolve_url_or_path("./$deno$eval.mts", self.initial_cwd())?
}
DenoSubcommand::Repl(_) => {
resolve_url_or_path("./$deno$repl.ts", self.initial_cwd())?
resolve_url_or_path("./$deno$repl.mts", self.initial_cwd())?
}
DenoSubcommand::Run(run_flags) => {
if run_flags.is_stdin() {
resolve_url_or_path("./$deno$stdin.ts", self.initial_cwd())?
resolve_url_or_path("./$deno$stdin.mts", self.initial_cwd())?
} else {
resolve_url_or_path(&run_flags.script, self.initial_cwd())?
let url =
resolve_url_or_path(&run_flags.script, self.initial_cwd())?;
if self.is_node_main()
&& url.scheme() == "file"
&& MediaType::from_specifier(&url) == MediaType::Unknown
{
try_resolve_node_binary_main_entrypoint(
&run_flags.script,
self.initial_cwd(),
)?
.unwrap_or(url)
} else {
url
}
}
}
DenoSubcommand::Serve(run_flags) => {
@ -1160,9 +1191,7 @@ impl CliOptions {
_ => {
bail!("No main module.")
}
};
Ok(main_module)
})
})
.as_ref()
.map_err(|err| deno_core::anyhow::anyhow!("{}", err))
@ -1211,7 +1240,7 @@ impl CliOptions {
// This is triggered via a secret environment variable which is used
// for functionality like child_process.fork. Users should NOT depend
// on this functionality.
pub fn is_npm_main(&self) -> bool {
pub fn is_node_main(&self) -> bool {
NPM_PROCESS_STATE.is_some()
}
@ -1589,9 +1618,11 @@ impl CliOptions {
|| self.workspace().has_unstable("bare-node-builtins")
}
pub fn unstable_detect_cjs(&self) -> bool {
self.flags.unstable_config.detect_cjs
|| self.workspace().has_unstable("detect-cjs")
pub fn detect_cjs(&self) -> bool {
// only enabled when there's a package.json in order to not have a
// perf penalty for non-npm Deno projects of searching for the closest
// package.json beside each module
self.workspace().package_jsons().next().is_some() || self.is_node_main()
}
fn byonm_enabled(&self) -> bool {
@ -1655,7 +1686,6 @@ impl CliOptions {
"byonm",
"bare-node-builtins",
"fmt-component",
"detect-cjs",
])
.collect();
@ -1793,6 +1823,36 @@ fn resolve_node_modules_folder(
Ok(Some(canonicalize_path_maybe_not_exists(&path)?))
}
fn try_resolve_node_binary_main_entrypoint(
specifier: &str,
initial_cwd: &Path,
) -> Result<Option<Url>, AnyError> {
// node allows running files at paths without a `.js` extension
// or at directories with an index.js file
let path = deno_core::normalize_path(initial_cwd.join(specifier));
if path.is_dir() {
let index_file = path.join("index.js");
Ok(if index_file.is_file() {
Some(deno_path_util::url_from_file_path(&index_file)?)
} else {
None
})
} else {
let path = path.with_extension(
path
.extension()
.and_then(|s| s.to_str())
.map(|s| format!("{}.js", s))
.unwrap_or("js".to_string()),
);
if path.is_file() {
Ok(Some(deno_path_util::url_from_file_path(&path)?))
} else {
Ok(None)
}
}
}
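// A minimal, self-contained sketch (hypothetical sample paths, std only) of the
// rewriting performed above: an extensionless specifier gains `.js`, an existing
// extension is kept with `.js` appended, and a directory falls back to its `index.js`.
#[cfg(test)]
mod node_entrypoint_sketch {
  use std::path::Path;

  #[test]
  fn appends_js_like_node() {
    let with_js = |p: &Path| {
      p.with_extension(
        p.extension()
          .and_then(|s| s.to_str())
          .map(|s| format!("{}.js", s))
          .unwrap_or_else(|| "js".to_string()),
      )
    };
    assert_eq!(with_js(Path::new("tools/build")), Path::new("tools/build.js"));
    assert_eq!(with_js(Path::new("app.test")), Path::new("app.test.js"));
    // directories are handled separately above: `pkg` resolves to `pkg/index.js`
    assert_eq!(Path::new("pkg").join("index.js"), Path::new("pkg/index.js"));
  }
}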
fn resolve_import_map_specifier(
maybe_import_map_path: Option<&str>,
maybe_config_file: Option<&ConfigFile>,


@ -181,7 +181,6 @@ impl Emitter {
pub async fn load_and_emit_for_hmr(
&self,
specifier: &ModuleSpecifier,
module_kind: deno_ast::ModuleKind,
) -> Result<String, AnyError> {
let media_type = MediaType::from_specifier(specifier);
let source_code = tokio::fs::read_to_string(
@ -203,11 +202,16 @@ impl Emitter {
// this statement is probably wrong)
let mut options = self.transpile_and_emit_options.1.clone();
options.source_map = SourceMapOption::None;
let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script(
specifier,
media_type,
parsed_source.compute_is_script(),
)?;
let transpiled_source = parsed_source
.transpile(
&self.transpile_and_emit_options.0,
&deno_ast::TranspileModuleOptions {
module_kind: Some(module_kind),
module_kind: Some(ModuleKind::from_is_cjs(is_cjs)),
},
&options,
)?


@ -42,12 +42,12 @@ use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::resolver::CjsTracker;
use crate::resolver::CjsTrackerOptions;
use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliGraphResolverOptions;
use crate::resolver::CliNodeResolver;
use crate::resolver::CliResolver;
use crate::resolver::CliResolverOptions;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::IsCjsResolverOptions;
use crate::resolver::NpmModuleLoader;
use crate::resolver::SloppyImportsCachedFs;
use crate::standalone::DenoCompileBinaryWriter;
@ -201,7 +201,7 @@ struct CliFactoryServices {
parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
permission_desc_parser: Deferred<Arc<RuntimePermissionDescriptorParser>>,
pkg_json_resolver: Deferred<Arc<PackageJsonResolver>>,
resolver: Deferred<Arc<CliGraphResolver>>,
resolver: Deferred<Arc<CliResolver>>,
root_cert_store_provider: Deferred<Arc<dyn RootCertStoreProvider>>,
root_permissions_container: Deferred<PermissionsContainer>,
sloppy_imports_resolver: Deferred<Option<Arc<CliSloppyImportsResolver>>>,
@ -523,14 +523,14 @@ impl CliFactory {
.await
}
pub async fn resolver(&self) -> Result<&Arc<CliGraphResolver>, AnyError> {
pub async fn resolver(&self) -> Result<&Arc<CliResolver>, AnyError> {
self
.services
.resolver
.get_or_try_init_async(
async {
let cli_options = self.cli_options()?;
Ok(Arc::new(CliGraphResolver::new(CliGraphResolverOptions {
Ok(Arc::new(CliResolver::new(CliResolverOptions {
sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(),
node_resolver: Some(self.cli_node_resolver().await?.clone()),
npm_resolver: if cli_options.no_npm() {
@ -541,9 +541,6 @@ impl CliFactory {
workspace_resolver: self.workspace_resolver().await?.clone(),
bare_node_builtins_enabled: cli_options
.unstable_bare_node_builtins(),
maybe_jsx_import_source_config: cli_options
.workspace()
.to_maybe_jsx_import_source_config()?,
maybe_vendor_dir: cli_options.vendor_dir_path(),
})))
}
@ -652,7 +649,6 @@ impl CliFactory {
self.cjs_tracker()?.clone(),
self.fs().clone(),
Some(self.parsed_source_cache().clone()),
self.cli_options()?.is_npm_main(),
);
Ok(Arc::new(NodeCodeTranslator::new(
@ -706,6 +702,7 @@ impl CliFactory {
let cli_options = self.cli_options()?;
Ok(Arc::new(ModuleGraphBuilder::new(
self.caches()?.clone(),
self.cjs_tracker()?.clone(),
cli_options.clone(),
self.file_fetcher()?.clone(),
self.fs().clone(),
@ -794,8 +791,9 @@ impl CliFactory {
Ok(Arc::new(CjsTracker::new(
self.in_npm_pkg_checker()?.clone(),
self.pkg_json_resolver().clone(),
CjsTrackerOptions {
unstable_detect_cjs: options.unstable_detect_cjs(),
IsCjsResolverOptions {
detect_cjs: options.detect_cjs(),
is_node_main: options.is_node_main(),
},
)))
})
@ -809,7 +807,6 @@ impl CliFactory {
.cli_node_resolver
.get_or_try_init_async(async {
Ok(Arc::new(CliNodeResolver::new(
self.cjs_tracker()?.clone(),
self.fs().clone(),
self.in_npm_pkg_checker()?.clone(),
self.node_resolver().await?.clone(),
@ -939,6 +936,7 @@ impl CliFactory {
StorageKeyResolver::from_options(cli_options),
cli_options.sub_command().clone(),
self.create_cli_main_worker_options()?,
self.cli_options()?.otel_config(),
))
}
@ -949,10 +947,8 @@ impl CliFactory {
let create_hmr_runner = if cli_options.has_hmr() {
let watcher_communicator = self.watcher_communicator.clone().unwrap();
let emitter = self.emitter()?.clone();
let cjs_tracker = self.cjs_tracker()?.clone();
let fn_: crate::worker::CreateHmrRunnerCb = Box::new(move |session| {
Box::new(HmrRunner::new(
cjs_tracker.clone(),
emitter.clone(),
session,
watcher_communicator.clone(),


@ -13,16 +13,19 @@ use crate::colors;
use crate::errors::get_error_class_name;
use crate::file_fetcher::FileFetcher;
use crate::npm::CliNpmResolver;
use crate::resolver::CliGraphResolver;
use crate::resolver::CjsTracker;
use crate::resolver::CliResolver;
use crate::resolver::CliSloppyImportsResolver;
use crate::resolver::SloppyImportsCachedFs;
use crate::tools::check;
use crate::tools::check::TypeChecker;
use crate::util::file_watcher::WatcherCommunicator;
use crate::util::fs::canonicalize_path;
use deno_config::deno_json::JsxImportSourceConfig;
use deno_config::workspace::JsrPackageConfig;
use deno_core::anyhow::bail;
use deno_graph::source::LoaderChecksum;
use deno_graph::source::ResolutionMode;
use deno_graph::FillFromLockfileOptions;
use deno_graph::JsrLoadError;
use deno_graph::ModuleLoadError;
@ -379,6 +382,7 @@ pub struct BuildFastCheckGraphOptions<'a> {
pub struct ModuleGraphBuilder {
caches: Arc<cache::Caches>,
cjs_tracker: Arc<CjsTracker>,
cli_options: Arc<CliOptions>,
file_fetcher: Arc<FileFetcher>,
fs: Arc<dyn FileSystem>,
@ -389,7 +393,7 @@ pub struct ModuleGraphBuilder {
module_info_cache: Arc<ModuleInfoCache>,
npm_resolver: Arc<dyn CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliGraphResolver>,
resolver: Arc<CliResolver>,
root_permissions_container: PermissionsContainer,
}
@ -397,6 +401,7 @@ impl ModuleGraphBuilder {
#[allow(clippy::too_many_arguments)]
pub fn new(
caches: Arc<cache::Caches>,
cjs_tracker: Arc<CjsTracker>,
cli_options: Arc<CliOptions>,
file_fetcher: Arc<FileFetcher>,
fs: Arc<dyn FileSystem>,
@ -407,11 +412,12 @@ impl ModuleGraphBuilder {
module_info_cache: Arc<ModuleInfoCache>,
npm_resolver: Arc<dyn CliNpmResolver>,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliGraphResolver>,
resolver: Arc<CliResolver>,
root_permissions_container: PermissionsContainer,
) -> Self {
Self {
caches,
cjs_tracker,
cli_options,
file_fetcher,
fs,
@ -518,7 +524,7 @@ impl ModuleGraphBuilder {
None => MutLoaderRef::Owned(self.create_graph_loader()),
};
let cli_resolver = &self.resolver;
let graph_resolver = cli_resolver.as_graph_resolver();
let graph_resolver = self.create_graph_resolver()?;
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
let maybe_file_watcher_reporter = self
.maybe_file_watcher_reporter
@ -543,7 +549,7 @@ impl ModuleGraphBuilder {
npm_resolver: Some(&graph_npm_resolver),
module_analyzer: &analyzer,
reporter: maybe_file_watcher_reporter,
resolver: Some(graph_resolver),
resolver: Some(&graph_resolver),
locker: locker.as_mut().map(|l| l as _),
},
)
@ -666,7 +672,7 @@ impl ModuleGraphBuilder {
};
let parser = self.parsed_source_cache.as_capturing_parser();
let cli_resolver = &self.resolver;
let graph_resolver = cli_resolver.as_graph_resolver();
let graph_resolver = self.create_graph_resolver()?;
let graph_npm_resolver = cli_resolver.create_graph_npm_resolver();
graph.build_fast_check_type_graph(
@ -675,7 +681,7 @@ impl ModuleGraphBuilder {
fast_check_cache: fast_check_cache.as_ref().map(|c| c as _),
fast_check_dts: false,
jsr_url_provider: &CliJsrUrlProvider,
resolver: Some(graph_resolver),
resolver: Some(&graph_resolver),
npm_resolver: Some(&graph_npm_resolver),
workspace_fast_check: options.workspace_fast_check,
},
@ -739,6 +745,18 @@ impl ModuleGraphBuilder {
},
)
}
fn create_graph_resolver(&self) -> Result<CliGraphResolver, AnyError> {
let jsx_import_source_config = self
.cli_options
.workspace()
.to_maybe_jsx_import_source_config()?;
Ok(CliGraphResolver {
cjs_tracker: &self.cjs_tracker,
resolver: &self.resolver,
jsx_import_source_config,
})
}
}
/// Adds more explanatory information to a resolution error.
@ -1143,6 +1161,53 @@ fn format_deno_graph_error(err: &dyn Error) -> String {
message
}
#[derive(Debug)]
struct CliGraphResolver<'a> {
cjs_tracker: &'a CjsTracker,
resolver: &'a CliResolver,
jsx_import_source_config: Option<JsxImportSourceConfig>,
}
impl<'a> deno_graph::source::Resolver for CliGraphResolver<'a> {
fn default_jsx_import_source(&self) -> Option<String> {
self
.jsx_import_source_config
.as_ref()
.and_then(|c| c.default_specifier.clone())
}
fn default_jsx_import_source_types(&self) -> Option<String> {
self
.jsx_import_source_config
.as_ref()
.and_then(|c| c.default_types_specifier.clone())
}
fn jsx_import_source_module(&self) -> &str {
self
.jsx_import_source_config
.as_ref()
.map(|c| c.module.as_str())
.unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE)
}
fn resolve(
&self,
raw_specifier: &str,
referrer_range: &deno_graph::Range,
mode: ResolutionMode,
) -> Result<ModuleSpecifier, ResolveError> {
self.resolver.resolve(
raw_specifier,
referrer_range,
self
.cjs_tracker
.get_referrer_kind(&referrer_range.specifier),
mode,
)
}
}
#[cfg(test)]
mod test {
use std::sync::Arc;


@ -10,6 +10,7 @@ use super::tsc;
use super::urls::url_to_uri;
use crate::args::jsr_url;
use crate::lsp::logging::lsp_warn;
use crate::lsp::search::PackageSearchApi;
use crate::tools::lint::CliLinter;
use crate::util::path::relative_specifier;
@ -38,6 +39,7 @@ use deno_semver::package::PackageReq;
use deno_semver::package::PackageReqReference;
use deno_semver::Version;
use import_map::ImportMap;
use node_resolver::NodeModuleKind;
use once_cell::sync::Lazy;
use regex::Regex;
use std::borrow::Cow;
@ -466,6 +468,7 @@ impl<'a> TsResponseImportMapper<'a> {
&self,
specifier: &str,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
) -> Option<String> {
let specifier_stem = specifier.strip_suffix(".js").unwrap_or(specifier);
let specifiers = std::iter::once(Cow::Borrowed(specifier)).chain(
@ -476,7 +479,7 @@ impl<'a> TsResponseImportMapper<'a> {
for specifier in specifiers {
if let Some(specifier) = self
.resolver
.as_graph_resolver(Some(&self.file_referrer))
.as_cli_resolver(Some(&self.file_referrer))
.resolve(
&specifier,
&deno_graph::Range {
@ -484,6 +487,7 @@ impl<'a> TsResponseImportMapper<'a> {
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
referrer_kind,
ResolutionMode::Types,
)
.ok()
@ -506,10 +510,11 @@ impl<'a> TsResponseImportMapper<'a> {
&self,
specifier_text: &str,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
) -> bool {
self
.resolver
.as_graph_resolver(Some(&self.file_referrer))
.as_cli_resolver(Some(&self.file_referrer))
.resolve(
specifier_text,
&deno_graph::Range {
@ -517,6 +522,7 @@ impl<'a> TsResponseImportMapper<'a> {
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
referrer_kind,
deno_graph::source::ResolutionMode::Types,
)
.is_ok()
@ -585,6 +591,7 @@ fn try_reverse_map_package_json_exports(
/// like an import and rewrite the import specifier to include the extension
pub fn fix_ts_import_changes(
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
changes: &[tsc::FileTextChanges],
language_server: &language_server::Inner,
) -> Result<Vec<tsc::FileTextChanges>, AnyError> {
@ -601,8 +608,8 @@ pub fn fix_ts_import_changes(
if let Some(captures) = IMPORT_SPECIFIER_RE.captures(line) {
let specifier =
captures.iter().skip(1).find_map(|s| s).unwrap().as_str();
if let Some(new_specifier) =
import_mapper.check_unresolved_specifier(specifier, referrer)
if let Some(new_specifier) = import_mapper
.check_unresolved_specifier(specifier, referrer, referrer_kind)
{
line.replace(specifier, &new_specifier)
} else {
@ -632,6 +639,7 @@ pub fn fix_ts_import_changes(
/// resolution by Deno (includes the extension).
fn fix_ts_import_action<'a>(
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
action: &'a tsc::CodeFixAction,
language_server: &language_server::Inner,
) -> Option<Cow<'a, tsc::CodeFixAction>> {
@ -651,7 +659,7 @@ fn fix_ts_import_action<'a>(
};
let import_mapper = language_server.get_ts_response_import_mapper(referrer);
if let Some(new_specifier) =
import_mapper.check_unresolved_specifier(specifier, referrer)
import_mapper.check_unresolved_specifier(specifier, referrer, referrer_kind)
{
let description = action.description.replace(specifier, &new_specifier);
let changes = action
@ -682,7 +690,7 @@ fn fix_ts_import_action<'a>(
fix_id: None,
fix_all_description: None,
}))
} else if !import_mapper.is_valid_import(specifier, referrer) {
} else if !import_mapper.is_valid_import(specifier, referrer, referrer_kind) {
None
} else {
Some(Cow::Borrowed(action))
@ -747,8 +755,14 @@ pub fn ts_changes_to_edit(
) -> Result<Option<lsp::WorkspaceEdit>, AnyError> {
let mut text_document_edits = Vec::new();
for change in changes {
let text_document_edit = change.to_text_document_edit(language_server)?;
text_document_edits.push(text_document_edit);
let edit = match change.to_text_document_edit(language_server) {
Ok(e) => e,
Err(err) => {
lsp_warn!("Couldn't covert text document edit: {:#}", err);
continue;
}
};
text_document_edits.push(edit);
}
Ok(Some(lsp::WorkspaceEdit {
changes: None,
@ -1010,6 +1024,7 @@ impl CodeActionCollection {
pub fn add_ts_fix_action(
&mut self,
specifier: &ModuleSpecifier,
specifier_kind: NodeModuleKind,
action: &tsc::CodeFixAction,
diagnostic: &lsp::Diagnostic,
language_server: &language_server::Inner,
@ -1027,7 +1042,8 @@ impl CodeActionCollection {
"The action returned from TypeScript is unsupported.",
));
}
let Some(action) = fix_ts_import_action(specifier, action, language_server)
let Some(action) =
fix_ts_import_action(specifier, specifier_kind, action, language_server)
else {
return Ok(());
};
@ -1269,6 +1285,9 @@ impl CodeActionCollection {
import_start_from_specifier(document, i)
})?;
let referrer = document.specifier();
let referrer_kind = language_server
.is_cjs_resolver
.get_doc_module_kind(document);
let file_referrer = document.file_referrer();
let config_data = language_server
.config
@ -1291,10 +1310,11 @@ impl CodeActionCollection {
if !config_data.byonm {
return None;
}
if !language_server
.resolver
.is_bare_package_json_dep(&dep_key, referrer)
{
if !language_server.resolver.is_bare_package_json_dep(
&dep_key,
referrer,
referrer_kind,
) {
return None;
}
NpmPackageReqReference::from_str(&format!("npm:{}", &dep_key)).ok()?
@ -1313,7 +1333,7 @@ impl CodeActionCollection {
}
if language_server
.resolver
.npm_to_file_url(&npm_ref, document.specifier(), file_referrer)
.npm_to_file_url(&npm_ref, referrer, referrer_kind, file_referrer)
.is_some()
{
// The package import has types.


@ -9,6 +9,7 @@ use super::jsr::CliJsrSearchApi;
use super::lsp_custom;
use super::npm::CliNpmSearchApi;
use super::registries::ModuleRegistry;
use super::resolver::LspIsCjsResolver;
use super::resolver::LspResolver;
use super::search::PackageSearchApi;
use super::tsc;
@ -35,6 +36,7 @@ use deno_semver::package::PackageNv;
use import_map::ImportMap;
use indexmap::IndexSet;
use lsp_types::CompletionList;
use node_resolver::NodeModuleKind;
use once_cell::sync::Lazy;
use regex::Regex;
use tower_lsp::lsp_types as lsp;
@ -159,15 +161,17 @@ pub async fn get_import_completions(
jsr_search_api: &CliJsrSearchApi,
npm_search_api: &CliNpmSearchApi,
documents: &Documents,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &LspResolver,
maybe_import_map: Option<&ImportMap>,
) -> Option<lsp::CompletionResponse> {
let document = documents.get(specifier)?;
let specifier_kind = is_cjs_resolver.get_doc_module_kind(&document);
let file_referrer = document.file_referrer();
let (text, _, range) = document.get_maybe_dependency(position)?;
let range = to_narrow_lsp_range(document.text_info(), &range);
let resolved = resolver
.as_graph_resolver(file_referrer)
.as_cli_resolver(file_referrer)
.resolve(
&text,
&Range {
@ -175,6 +179,7 @@ pub async fn get_import_completions(
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
specifier_kind,
ResolutionMode::Execution,
)
.ok();
@ -201,7 +206,7 @@ pub async fn get_import_completions(
// completions for import map specifiers
Some(lsp::CompletionResponse::List(completion_list))
} else if let Some(completion_list) =
get_local_completions(specifier, &text, &range, resolver)
get_local_completions(specifier, specifier_kind, &text, &range, resolver)
{
// completions for local relative modules
Some(lsp::CompletionResponse::List(completion_list))
@ -355,24 +360,26 @@ fn get_import_map_completions(
/// Return local completions that are relative to the base specifier.
fn get_local_completions(
base: &ModuleSpecifier,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
text: &str,
range: &lsp::Range,
resolver: &LspResolver,
) -> Option<CompletionList> {
if base.scheme() != "file" {
if referrer.scheme() != "file" {
return None;
}
let parent = &text[..text.char_indices().rfind(|(_, c)| *c == '/')?.0 + 1];
let resolved_parent = resolver
.as_graph_resolver(Some(base))
.as_cli_resolver(Some(referrer))
.resolve(
parent,
&Range {
specifier: base.clone(),
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
referrer_kind,
ResolutionMode::Execution,
)
.ok()?;
@ -385,7 +392,7 @@ fn get_local_completions(
let de = de.ok()?;
let label = de.path().file_name()?.to_string_lossy().to_string();
let entry_specifier = resolve_path(de.path().to_str()?, &cwd).ok()?;
if entry_specifier == *base {
if entry_specifier == *referrer {
return None;
}
let full_text = format!("{parent}{label}");
@ -905,6 +912,7 @@ mod tests {
ModuleSpecifier::from_file_path(file_c).expect("could not create");
let actual = get_local_completions(
&specifier,
NodeModuleKind::Esm,
"./",
&lsp::Range {
start: lsp::Position {


@ -4,6 +4,7 @@ use deno_ast::MediaType;
use deno_config::deno_json::DenoJsonCache;
use deno_config::deno_json::FmtConfig;
use deno_config::deno_json::FmtOptionsConfig;
use deno_config::deno_json::JsxImportSourceConfig;
use deno_config::deno_json::LintConfig;
use deno_config::deno_json::NodeModulesDirMode;
use deno_config::deno_json::TestConfig;
@ -1654,6 +1655,17 @@ impl ConfigData {
self.member_dir.maybe_pkg_json()
}
pub fn maybe_jsx_import_source_config(
&self,
) -> Option<JsxImportSourceConfig> {
self
.member_dir
.workspace
.to_maybe_jsx_import_source_config()
.ok()
.flatten()
}
pub fn scope_contains_specifier(&self, specifier: &ModuleSpecifier) -> bool {
specifier.as_str().starts_with(self.scope.as_str())
|| self


@ -1707,6 +1707,7 @@ mod tests {
documents: Arc::new(documents),
assets: Default::default(),
config: Arc::new(config),
is_cjs_resolver: Default::default(),
resolver,
},
)


@ -3,7 +3,9 @@
use super::cache::calculate_fs_version;
use super::cache::LspCache;
use super::config::Config;
use super::resolver::LspIsCjsResolver;
use super::resolver::LspResolver;
use super::resolver::SingleReferrerGraphResolver;
use super::testing::TestCollector;
use super::testing::TestModule;
use super::text::LineIndex;
@ -33,6 +35,7 @@ use deno_semver::npm::NpmPackageReqReference;
use deno_semver::package::PackageReq;
use indexmap::IndexMap;
use indexmap::IndexSet;
use node_resolver::NodeModuleKind;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
@ -293,6 +296,8 @@ pub struct Document {
/// Contains the last-known-good set of dependencies from parsing the module.
config: Arc<Config>,
dependencies: Arc<IndexMap<String, deno_graph::Dependency>>,
/// If this is maybe a CJS script and maybe not an ES module.
is_script: Option<bool>,
// TODO(nayeemrmn): This is unused, use it for scope attribution for remote
// modules.
file_referrer: Option<ModuleSpecifier>,
@ -323,6 +328,7 @@ impl Document {
maybe_lsp_version: Option<i32>,
maybe_language_id: Option<LanguageId>,
maybe_headers: Option<HashMap<String, String>>,
is_cjs_resolver: &LspIsCjsResolver,
resolver: Arc<LspResolver>,
config: Arc<Config>,
cache: &Arc<LspCache>,
@ -342,6 +348,7 @@ impl Document {
maybe_headers.as_ref(),
media_type,
file_referrer.as_ref(),
is_cjs_resolver,
&resolver,
)
} else {
@ -367,6 +374,7 @@ impl Document {
file_referrer.as_ref(),
),
file_referrer,
is_script: maybe_module.as_ref().map(|m| m.is_script),
maybe_types_dependency,
line_index,
maybe_language_id,
@ -388,6 +396,7 @@ impl Document {
fn with_new_config(
&self,
is_cjs_resolver: &LspIsCjsResolver,
resolver: Arc<LspResolver>,
config: Arc<Config>,
) -> Arc<Self> {
@ -399,6 +408,7 @@ impl Document {
let dependencies;
let maybe_types_dependency;
let maybe_parsed_source;
let is_script;
let maybe_test_module_fut;
if media_type != self.media_type {
let parsed_source_result =
@ -408,6 +418,7 @@ impl Document {
&parsed_source_result,
self.maybe_headers.as_ref(),
self.file_referrer.as_ref(),
is_cjs_resolver,
&resolver,
)
.ok();
@ -415,6 +426,7 @@ impl Document {
.as_ref()
.map(|m| Arc::new(m.dependencies.clone()))
.unwrap_or_default();
is_script = maybe_module.as_ref().map(|m| m.is_script);
maybe_types_dependency = maybe_module
.as_ref()
.and_then(|m| Some(Arc::new(m.maybe_types_dependency.clone()?)));
@ -422,10 +434,19 @@ impl Document {
maybe_test_module_fut =
get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &config);
} else {
let graph_resolver =
resolver.as_graph_resolver(self.file_referrer.as_ref());
let cli_resolver = resolver.as_cli_resolver(self.file_referrer.as_ref());
let npm_resolver =
resolver.create_graph_npm_resolver(self.file_referrer.as_ref());
let config_data = resolver.as_config_data(self.file_referrer.as_ref());
let jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config());
let resolver = SingleReferrerGraphResolver {
valid_referrer: &self.specifier,
referrer_kind: is_cjs_resolver
.get_lsp_referrer_kind(&self.specifier, self.is_script),
cli_resolver,
jsx_import_source_config: jsx_import_source_config.as_ref(),
};
dependencies = Arc::new(
self
.dependencies
@ -436,7 +457,7 @@ impl Document {
d.with_new_resolver(
s,
&CliJsrUrlProvider,
Some(graph_resolver),
Some(&resolver),
Some(&npm_resolver),
),
)
@ -446,10 +467,11 @@ impl Document {
maybe_types_dependency = self.maybe_types_dependency.as_ref().map(|d| {
Arc::new(d.with_new_resolver(
&CliJsrUrlProvider,
Some(graph_resolver),
Some(&resolver),
Some(&npm_resolver),
))
});
is_script = self.is_script;
maybe_parsed_source = self.maybe_parsed_source().cloned();
maybe_test_module_fut = self
.maybe_test_module_fut
@ -461,6 +483,7 @@ impl Document {
// updated properties
dependencies,
file_referrer: self.file_referrer.clone(),
is_script,
maybe_types_dependency,
maybe_navigation_tree: Mutex::new(None),
// maintain - this should all be copies/clones
@ -485,6 +508,7 @@ impl Document {
fn with_change(
&self,
is_cjs_resolver: &LspIsCjsResolver,
version: i32,
changes: Vec<lsp::TextDocumentContentChangeEvent>,
) -> Result<Arc<Self>, AnyError> {
@ -518,6 +542,7 @@ impl Document {
self.maybe_headers.as_ref(),
media_type,
self.file_referrer.as_ref(),
is_cjs_resolver,
self.resolver.as_ref(),
)
} else {
@ -541,6 +566,7 @@ impl Document {
get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &self.config);
Ok(Arc::new(Self {
config: self.config.clone(),
is_script: maybe_module.as_ref().map(|m| m.is_script),
specifier: self.specifier.clone(),
file_referrer: self.file_referrer.clone(),
maybe_fs_version: self.maybe_fs_version.clone(),
@ -575,6 +601,7 @@ impl Document {
),
maybe_language_id: self.maybe_language_id,
dependencies: self.dependencies.clone(),
is_script: self.is_script,
maybe_types_dependency: self.maybe_types_dependency.clone(),
text: self.text.clone(),
text_info_cell: once_cell::sync::OnceCell::new(),
@ -602,6 +629,7 @@ impl Document {
),
maybe_language_id: self.maybe_language_id,
dependencies: self.dependencies.clone(),
is_script: self.is_script,
maybe_types_dependency: self.maybe_types_dependency.clone(),
text: self.text.clone(),
text_info_cell: once_cell::sync::OnceCell::new(),
@ -650,6 +678,13 @@ impl Document {
})
}
/// If this is maybe a CJS script and maybe not an ES module.
///
/// Use `LspIsCjsResolver` to determine for sure.
pub fn is_script(&self) -> Option<bool> {
self.is_script
}
pub fn line_index(&self) -> Arc<LineIndex> {
self.line_index.clone()
}
@ -797,6 +832,7 @@ impl FileSystemDocuments {
pub fn get(
&self,
specifier: &ModuleSpecifier,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &Arc<LspResolver>,
config: &Arc<Config>,
cache: &Arc<LspCache>,
@ -820,7 +856,14 @@ impl FileSystemDocuments {
};
if dirty {
// attempt to update the file on the file system
self.refresh_document(specifier, resolver, config, cache, file_referrer)
self.refresh_document(
specifier,
is_cjs_resolver,
resolver,
config,
cache,
file_referrer,
)
} else {
old_doc
}
@ -831,6 +874,7 @@ impl FileSystemDocuments {
fn refresh_document(
&self,
specifier: &ModuleSpecifier,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &Arc<LspResolver>,
config: &Arc<Config>,
cache: &Arc<LspCache>,
@ -847,6 +891,7 @@ impl FileSystemDocuments {
None,
None,
None,
is_cjs_resolver,
resolver.clone(),
config.clone(),
cache,
@ -863,6 +908,7 @@ impl FileSystemDocuments {
None,
None,
None,
is_cjs_resolver,
resolver.clone(),
config.clone(),
cache,
@ -890,6 +936,7 @@ impl FileSystemDocuments {
None,
None,
maybe_headers,
is_cjs_resolver,
resolver.clone(),
config.clone(),
cache,
@ -930,6 +977,11 @@ pub struct Documents {
/// The DENO_DIR that the documents looks for non-file based modules.
cache: Arc<LspCache>,
config: Arc<Config>,
/// Resolver for detecting if a document is CJS or ESM.
is_cjs_resolver: Arc<LspIsCjsResolver>,
/// A resolver that takes into account currently loaded import map and JSX
/// settings.
resolver: Arc<LspResolver>,
/// A flag that indicates that stated data is potentially invalid and needs to
/// be recalculated before being considered valid.
dirty: bool,
@ -937,9 +989,6 @@ pub struct Documents {
open_docs: HashMap<ModuleSpecifier, Arc<Document>>,
/// Documents stored on the file system.
file_system_docs: Arc<FileSystemDocuments>,
/// A resolver that takes into account currently loaded import map and JSX
/// settings.
resolver: Arc<LspResolver>,
/// The npm package requirements found in npm specifiers.
npm_reqs_by_scope:
Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>>,
@ -970,6 +1019,7 @@ impl Documents {
// the cache for remote modules here in order to get the
// x-typescript-types?
None,
&self.is_cjs_resolver,
self.resolver.clone(),
self.config.clone(),
&self.cache,
@ -1004,7 +1054,7 @@ impl Documents {
))
})?;
self.dirty = true;
let doc = doc.with_change(version, changes)?;
let doc = doc.with_change(&self.is_cjs_resolver, version, changes)?;
self.open_docs.insert(doc.specifier().clone(), doc.clone());
Ok(doc)
}
@ -1133,6 +1183,7 @@ impl Documents {
if let Some(old_doc) = old_doc {
self.file_system_docs.get(
specifier,
&self.is_cjs_resolver,
&self.resolver,
&self.config,
&self.cache,
@ -1157,6 +1208,7 @@ impl Documents {
} else {
self.file_system_docs.get(
&specifier,
&self.is_cjs_resolver,
&self.resolver,
&self.config,
&self.cache,
@ -1215,12 +1267,15 @@ impl Documents {
referrer: &ModuleSpecifier,
file_referrer: Option<&ModuleSpecifier>,
) -> Vec<Option<(ModuleSpecifier, MediaType)>> {
let document = self.get(referrer);
let file_referrer = document
let referrer_doc = self.get(referrer);
let file_referrer = referrer_doc
.as_ref()
.and_then(|d| d.file_referrer())
.or(file_referrer);
let dependencies = document.as_ref().map(|d| d.dependencies());
let dependencies = referrer_doc.as_ref().map(|d| d.dependencies());
let referrer_kind = self
.is_cjs_resolver
.get_maybe_doc_module_kind(referrer, referrer_doc.as_deref());
let mut results = Vec::new();
for raw_specifier in raw_specifiers {
if raw_specifier.starts_with("asset:") {
@ -1237,31 +1292,35 @@ impl Documents {
results.push(self.resolve_dependency(
specifier,
referrer,
referrer_kind,
file_referrer,
));
} else if let Some(specifier) = dep.maybe_code.maybe_specifier() {
results.push(self.resolve_dependency(
specifier,
referrer,
referrer_kind,
file_referrer,
));
} else {
results.push(None);
}
} else if let Ok(specifier) =
self.resolver.as_graph_resolver(file_referrer).resolve(
self.resolver.as_cli_resolver(file_referrer).resolve(
raw_specifier,
&deno_graph::Range {
specifier: referrer.clone(),
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
referrer_kind,
ResolutionMode::Types,
)
{
results.push(self.resolve_dependency(
&specifier,
referrer,
referrer_kind,
file_referrer,
));
} else {
@ -1280,7 +1339,11 @@ impl Documents {
) {
self.config = Arc::new(config.clone());
self.cache = Arc::new(cache.clone());
self.is_cjs_resolver = Arc::new(LspIsCjsResolver::new(cache));
self.resolver = resolver.clone();
node_resolver::PackageJsonThreadLocalCache::clear();
{
let fs_docs = &self.file_system_docs;
// Clean up non-existent documents.
@ -1300,14 +1363,21 @@ impl Documents {
if !config.specifier_enabled(doc.specifier()) {
continue;
}
*doc = doc.with_new_config(self.resolver.clone(), self.config.clone());
*doc = doc.with_new_config(
&self.is_cjs_resolver,
self.resolver.clone(),
self.config.clone(),
);
}
for mut doc in self.file_system_docs.docs.iter_mut() {
if !config.specifier_enabled(doc.specifier()) {
continue;
}
*doc.value_mut() =
doc.with_new_config(self.resolver.clone(), self.config.clone());
*doc.value_mut() = doc.with_new_config(
&self.is_cjs_resolver,
self.resolver.clone(),
self.config.clone(),
);
}
self.open_docs = open_docs;
let mut preload_count = 0;
@ -1324,6 +1394,7 @@ impl Documents {
{
fs_docs.refresh_document(
specifier,
&self.is_cjs_resolver,
&self.resolver,
&self.config,
&self.cache,
@ -1409,6 +1480,7 @@ impl Documents {
&self,
specifier: &ModuleSpecifier,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<(ModuleSpecifier, MediaType)> {
if let Some(module_name) = specifier.as_str().strip_prefix("node:") {
@ -1422,10 +1494,12 @@ impl Documents {
let mut specifier = specifier.clone();
let mut media_type = None;
if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(&specifier) {
let (s, mt) =
self
.resolver
.npm_to_file_url(&npm_ref, referrer, file_referrer)?;
let (s, mt) = self.resolver.npm_to_file_url(
&npm_ref,
referrer,
referrer_kind,
file_referrer,
)?;
specifier = s;
media_type = Some(mt);
}
@ -1435,7 +1509,8 @@ impl Documents {
return Some((specifier, media_type));
};
if let Some(types) = doc.maybe_types_dependency().maybe_specifier() {
self.resolve_dependency(types, &specifier, file_referrer)
let specifier_kind = self.is_cjs_resolver.get_doc_module_kind(&doc);
self.resolve_dependency(types, &specifier, specifier_kind, file_referrer)
} else {
Some((doc.specifier().clone(), doc.media_type()))
}
@ -1503,6 +1578,7 @@ fn parse_and_analyze_module(
maybe_headers: Option<&HashMap<String, String>>,
media_type: MediaType,
file_referrer: Option<&ModuleSpecifier>,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &LspResolver,
) -> (Option<ParsedSourceResult>, Option<ModuleResult>) {
let parsed_source_result = parse_source(specifier.clone(), text, media_type);
@ -1511,6 +1587,7 @@ fn parse_and_analyze_module(
&parsed_source_result,
maybe_headers,
file_referrer,
is_cjs_resolver,
resolver,
);
(Some(parsed_source_result), Some(module_result))
@ -1536,11 +1613,26 @@ fn analyze_module(
parsed_source_result: &ParsedSourceResult,
maybe_headers: Option<&HashMap<String, String>>,
file_referrer: Option<&ModuleSpecifier>,
is_cjs_resolver: &LspIsCjsResolver,
resolver: &LspResolver,
) -> ModuleResult {
match parsed_source_result {
Ok(parsed_source) => {
let npm_resolver = resolver.create_graph_npm_resolver(file_referrer);
let cli_resolver = resolver.as_cli_resolver(file_referrer);
let config_data = resolver.as_config_data(file_referrer);
let valid_referrer = specifier.clone();
let jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config());
let resolver = SingleReferrerGraphResolver {
valid_referrer: &valid_referrer,
referrer_kind: is_cjs_resolver.get_lsp_referrer_kind(
&specifier,
Some(parsed_source.compute_is_script()),
),
cli_resolver,
jsx_import_source_config: jsx_import_source_config.as_ref(),
};
Ok(deno_graph::parse_module_from_ast(
deno_graph::ParseModuleFromAstOptions {
graph_kind: deno_graph::GraphKind::TypesOnly,
@ -1551,7 +1643,7 @@ fn analyze_module(
// dynamic imports like import(`./dir/${something}`) in the LSP
file_system: &deno_graph::source::NullFileSystem,
jsr_url_provider: &CliJsrUrlProvider,
maybe_resolver: Some(resolver.as_graph_resolver(file_referrer)),
maybe_resolver: Some(&resolver),
maybe_npm_resolver: Some(&npm_resolver),
},
))
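
The documents changes in this file thread the referrer's module kind (CJS vs. ESM) into each dependency-resolution call instead of letting the resolver recompute it. Below is a minimal, standalone sketch of that pattern only; the names and the simplified is_script-based rule are illustrative assumptions, not the actual LspIsCjsResolver (which also looks at the media type and the nearest package.json).

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ModuleKind {
    Esm,
    Cjs,
}

struct Document {
    specifier: String,
    // None means the document has not been parsed yet.
    is_script: Option<bool>,
}

// Compute the referrer's kind once per batch of specifiers (hypothetical,
// deliberately simplified rule).
fn referrer_kind(doc: Option<&Document>) -> ModuleKind {
    match doc.and_then(|d| d.is_script) {
        Some(true) => ModuleKind::Cjs,
        _ => ModuleKind::Esm,
    }
}

// The resolver receives the kind explicitly rather than deriving it itself.
fn resolve_dependency(specifier: &str, referrer: &str, kind: ModuleKind) -> String {
    match kind {
        ModuleKind::Cjs => format!("cjs-style resolution of {specifier} from {referrer}"),
        ModuleKind::Esm => format!("esm-style resolution of {specifier} from {referrer}"),
    }
}

fn main() {
    let doc = Document {
        specifier: "file:///project/main.ts".to_string(),
        is_script: Some(false),
    };
    let kind = referrer_kind(Some(&doc));
    for raw in ["./a.ts", "./b.ts"] {
        println!("{}", resolve_dependency(raw, &doc.specifier, kind));
    }
}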

View file

@ -22,6 +22,7 @@ use deno_semver::jsr::JsrPackageReqReference;
use indexmap::Equivalent;
use indexmap::IndexSet;
use log::error;
use node_resolver::NodeModuleKind;
use serde::Deserialize;
use serde_json::from_value;
use std::collections::BTreeMap;
@ -77,6 +78,7 @@ use super::parent_process_checker;
use super::performance::Performance;
use super::refactor;
use super::registries::ModuleRegistry;
use super::resolver::LspIsCjsResolver;
use super::resolver::LspResolver;
use super::testing;
use super::text;
@ -144,6 +146,7 @@ pub struct StateSnapshot {
pub project_version: usize,
pub assets: AssetsSnapshot,
pub config: Arc<Config>,
pub is_cjs_resolver: Arc<LspIsCjsResolver>,
pub documents: Arc<Documents>,
pub resolver: Arc<LspResolver>,
}
@ -203,6 +206,7 @@ pub struct Inner {
pub documents: Documents,
http_client_provider: Arc<HttpClientProvider>,
initial_cwd: PathBuf,
pub is_cjs_resolver: Arc<LspIsCjsResolver>,
jsr_search_api: CliJsrSearchApi,
/// Handles module registries, which allow discovery of modules
module_registry: ModuleRegistry,
@ -480,6 +484,7 @@ impl Inner {
let initial_cwd = std::env::current_dir().unwrap_or_else(|_| {
panic!("Could not resolve current working directory")
});
let is_cjs_resolver = Arc::new(LspIsCjsResolver::new(&cache));
Self {
assets,
@ -491,6 +496,7 @@ impl Inner {
documents,
http_client_provider,
initial_cwd: initial_cwd.clone(),
is_cjs_resolver,
jsr_search_api,
project_version: 0,
task_queue: Default::default(),
@ -601,6 +607,7 @@ impl Inner {
project_version: self.project_version,
assets: self.assets.snapshot(),
config: Arc::new(self.config.clone()),
is_cjs_resolver: self.is_cjs_resolver.clone(),
documents: Arc::new(self.documents.clone()),
resolver: self.resolver.snapshot(),
})
@ -622,6 +629,7 @@ impl Inner {
}
});
self.cache = LspCache::new(global_cache_url);
self.is_cjs_resolver = Arc::new(LspIsCjsResolver::new(&self.cache));
let deno_dir = self.cache.deno_dir();
let workspace_settings = self.config.workspace_settings();
let maybe_root_path = self
@ -982,7 +990,7 @@ impl Inner {
spawn(async move {
let specifier = {
let inner = ls.inner.read().await;
let resolver = inner.resolver.as_graph_resolver(Some(&referrer));
let resolver = inner.resolver.as_cli_resolver(Some(&referrer));
let Ok(specifier) = resolver.resolve(
&specifier,
&deno_graph::Range {
@ -990,6 +998,7 @@ impl Inner {
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
NodeModuleKind::Esm,
deno_graph::source::ResolutionMode::Types,
) else {
return;
@ -1622,6 +1631,10 @@ impl Inner {
let file_diagnostics = self
.diagnostics_server
.get_ts_diagnostics(&specifier, asset_or_doc.document_lsp_version());
let specifier_kind = asset_or_doc
.document()
.map(|d| self.is_cjs_resolver.get_doc_module_kind(d))
.unwrap_or(NodeModuleKind::Esm);
let mut includes_no_cache = false;
for diagnostic in &fixable_diagnostics {
match diagnostic.source.as_deref() {
@ -1660,7 +1673,13 @@ impl Inner {
.await;
for action in actions {
code_actions
.add_ts_fix_action(&specifier, &action, diagnostic, self)
.add_ts_fix_action(
&specifier,
specifier_kind,
&action,
diagnostic,
self,
)
.map_err(|err| {
error!("Unable to convert fix: {:#}", err);
LspError::internal_error()
@ -1806,10 +1825,9 @@ impl Inner {
error!("Unable to decode code action data: {:#}", err);
LspError::invalid_params("The CodeAction's data is invalid.")
})?;
let scope = self
.get_asset_or_document(&code_action_data.specifier)
.ok()
.and_then(|d| d.scope().cloned());
let maybe_asset_or_doc =
self.get_asset_or_document(&code_action_data.specifier).ok();
let scope = maybe_asset_or_doc.as_ref().and_then(|d| d.scope().cloned());
let combined_code_actions = self
.ts_server
.get_combined_code_fix(
@ -1836,6 +1854,11 @@ impl Inner {
let changes = if code_action_data.fix_id == "fixMissingImport" {
fix_ts_import_changes(
&code_action_data.specifier,
maybe_asset_or_doc
.as_ref()
.and_then(|d| d.document())
.map(|d| self.is_cjs_resolver.get_doc_module_kind(d))
.unwrap_or(NodeModuleKind::Esm),
&combined_code_actions.changes,
self,
)
@ -1889,6 +1912,10 @@ impl Inner {
if kind_suffix == ".rewrite.function.returnType" {
refactor_edit_info.edits = fix_ts_import_changes(
&action_data.specifier,
asset_or_doc
.document()
.map(|d| self.is_cjs_resolver.get_doc_module_kind(d))
.unwrap_or(NodeModuleKind::Esm),
&refactor_edit_info.edits,
self,
)
@ -2238,6 +2265,7 @@ impl Inner {
&self.jsr_search_api,
&self.npm_search_api,
&self.documents,
&self.is_cjs_resolver,
self.resolver.as_ref(),
self
.config

View file

@ -263,7 +263,7 @@ impl ReplLanguageServer {
}
fn get_document_uri(&self) -> Uri {
uri_parse_unencoded(self.cwd_uri.join("$deno$repl.ts").unwrap().as_str())
uri_parse_unencoded(self.cwd_uri.join("$deno$repl.mts").unwrap().as_str())
.unwrap()
}
}

View file

@ -2,16 +2,18 @@
use dashmap::DashMap;
use deno_ast::MediaType;
use deno_ast::ParsedSource;
use deno_cache_dir::npm::NpmCacheDir;
use deno_cache_dir::HttpCache;
use deno_config::deno_json::JsxImportSourceConfig;
use deno_config::workspace::PackageJsonDepResolution;
use deno_config::workspace::WorkspaceResolver;
use deno_core::url::Url;
use deno_graph::source::Resolver;
use deno_graph::source::ResolutionMode;
use deno_graph::GraphImport;
use deno_graph::ModuleSpecifier;
use deno_graph::Range;
use deno_npm::NpmSystemInfo;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_fs;
use deno_runtime::deno_node::NodeResolver;
@ -24,6 +26,7 @@ use deno_semver::package::PackageReq;
use indexmap::IndexMap;
use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::InNpmPackageChecker;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolutionMode;
use std::borrow::Cow;
use std::collections::BTreeMap;
@ -33,6 +36,7 @@ use std::collections::HashSet;
use std::sync::Arc;
use super::cache::LspCache;
use super::documents::Document;
use super::jsr::JsrCacheResolver;
use crate::args::create_default_npmrc;
use crate::args::CacheSetting;
@ -53,21 +57,20 @@ use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::npm::ManagedCliNpmResolver;
use crate::resolver::CjsTracker;
use crate::resolver::CjsTrackerOptions;
use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliGraphResolverOptions;
use crate::resolver::CliNodeResolver;
use crate::resolver::CliResolver;
use crate::resolver::CliResolverOptions;
use crate::resolver::IsCjsResolver;
use crate::resolver::WorkerCliNpmGraphResolver;
use crate::tsc::into_specifier_and_media_type;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
#[derive(Debug, Clone)]
struct LspScopeResolver {
cjs_tracker: Option<Arc<LspCjsTracker>>,
graph_resolver: Arc<CliGraphResolver>,
resolver: Arc<CliResolver>,
jsr_resolver: Option<Arc<JsrCacheResolver>>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>,
node_resolver: Option<Arc<CliNodeResolver>>,
@ -81,8 +84,7 @@ struct LspScopeResolver {
impl Default for LspScopeResolver {
fn default() -> Self {
Self {
cjs_tracker: None,
graph_resolver: create_graph_resolver(None, None, None),
resolver: create_cli_resolver(None, None, None),
jsr_resolver: None,
npm_resolver: None,
node_resolver: None,
@ -103,7 +105,6 @@ impl LspScopeResolver {
) -> Self {
let mut npm_resolver = None;
let mut node_resolver = None;
let mut lsp_cjs_tracker = None;
let fs = Arc::new(deno_fs::RealFs);
let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
@ -118,14 +119,7 @@ impl LspScopeResolver {
.await;
if let Some(npm_resolver) = &npm_resolver {
let in_npm_pkg_checker = create_in_npm_pkg_checker(npm_resolver);
let cjs_tracker = create_cjs_tracker(
in_npm_pkg_checker.clone(),
pkg_json_resolver.clone(),
);
lsp_cjs_tracker =
Some(Arc::new(LspCjsTracker::new(cjs_tracker.clone())));
node_resolver = Some(create_node_resolver(
cjs_tracker,
fs.clone(),
in_npm_pkg_checker,
npm_resolver,
@ -133,7 +127,7 @@ impl LspScopeResolver {
));
}
}
let graph_resolver = create_graph_resolver(
let cli_resolver = create_cli_resolver(
config_data.map(|d| d.as_ref()),
npm_resolver.as_ref(),
node_resolver.as_ref(),
@ -146,7 +140,9 @@ impl LspScopeResolver {
cache.for_specifier(config_data.map(|d| d.scope.as_ref())),
config_data.and_then(|d| d.lockfile.clone()),
)));
let npm_graph_resolver = graph_resolver.create_graph_npm_resolver();
let npm_graph_resolver = cli_resolver.create_graph_npm_resolver();
let maybe_jsx_import_source_config =
config_data.and_then(|d| d.maybe_jsx_import_source_config());
let graph_imports = config_data
.and_then(|d| d.member_dir.workspace.to_compiler_option_types().ok())
.map(|imports| {
@ -154,11 +150,18 @@ impl LspScopeResolver {
imports
.into_iter()
.map(|(referrer, imports)| {
let resolver = SingleReferrerGraphResolver {
valid_referrer: &referrer,
referrer_kind: NodeModuleKind::Esm,
cli_resolver: &cli_resolver,
jsx_import_source_config: maybe_jsx_import_source_config
.as_ref(),
};
let graph_import = GraphImport::new(
&referrer,
imports,
&CliJsrUrlProvider,
Some(graph_resolver.as_ref()),
Some(&resolver),
Some(&npm_graph_resolver),
);
(referrer, graph_import)
@ -182,6 +185,8 @@ impl LspScopeResolver {
.resolve_req_reference(
&req_ref,
&referrer,
// todo(dsherret): this is wrong because it doesn't consider CJS referrers
NodeModuleKind::Esm,
NodeResolutionMode::Types,
)
.ok()?,
@ -195,8 +200,7 @@ impl LspScopeResolver {
let package_json_deps_by_resolution =
Arc::new(package_json_deps_by_resolution.unwrap_or_default());
Self {
cjs_tracker: lsp_cjs_tracker,
graph_resolver,
resolver: cli_resolver,
jsr_resolver,
npm_resolver,
node_resolver,
@ -216,30 +220,22 @@ impl LspScopeResolver {
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
));
let mut node_resolver = None;
let mut lsp_cjs_tracker = None;
if let Some(npm_resolver) = &npm_resolver {
let in_npm_pkg_checker = create_in_npm_pkg_checker(npm_resolver);
let cjs_tracker = create_cjs_tracker(
in_npm_pkg_checker.clone(),
pkg_json_resolver.clone(),
);
lsp_cjs_tracker = Some(Arc::new(LspCjsTracker::new(cjs_tracker.clone())));
node_resolver = Some(create_node_resolver(
cjs_tracker,
fs,
in_npm_pkg_checker,
npm_resolver,
pkg_json_resolver.clone(),
));
}
let graph_resolver = create_graph_resolver(
let graph_resolver = create_cli_resolver(
self.config_data.as_deref(),
npm_resolver.as_ref(),
node_resolver.as_ref(),
);
Arc::new(Self {
cjs_tracker: lsp_cjs_tracker,
graph_resolver,
resolver: graph_resolver,
jsr_resolver: self.jsr_resolver.clone(),
npm_resolver,
node_resolver,
@ -334,12 +330,12 @@ impl LspResolver {
}
}
pub fn as_graph_resolver(
pub fn as_cli_resolver(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> &dyn Resolver {
) -> &CliResolver {
let resolver = self.get_scope_resolver(file_referrer);
resolver.graph_resolver.as_ref()
resolver.resolver.as_ref()
}
pub fn create_graph_npm_resolver(
@ -347,15 +343,15 @@ impl LspResolver {
file_referrer: Option<&ModuleSpecifier>,
) -> WorkerCliNpmGraphResolver {
let resolver = self.get_scope_resolver(file_referrer);
resolver.graph_resolver.create_graph_npm_resolver()
resolver.resolver.create_graph_npm_resolver()
}
pub fn maybe_cjs_tracker(
pub fn as_config_data(
&self,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<&Arc<LspCjsTracker>> {
) -> Option<&Arc<ConfigData>> {
let resolver = self.get_scope_resolver(file_referrer);
resolver.cjs_tracker.as_ref()
resolver.config_data.as_ref()
}
pub fn maybe_node_resolver(
@ -429,13 +425,19 @@ impl LspResolver {
&self,
req_ref: &NpmPackageReqReference,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
file_referrer: Option<&ModuleSpecifier>,
) -> Option<(ModuleSpecifier, MediaType)> {
let resolver = self.get_scope_resolver(file_referrer);
let node_resolver = resolver.node_resolver.as_ref()?;
Some(into_specifier_and_media_type(Some(
node_resolver
.resolve_req_reference(req_ref, referrer, NodeResolutionMode::Types)
.resolve_req_reference(
req_ref,
referrer,
referrer_kind,
NodeResolutionMode::Types,
)
.ok()?,
)))
}
@ -478,6 +480,7 @@ impl LspResolver {
&self,
specifier_text: &str,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
) -> bool {
let resolver = self.get_scope_resolver(Some(referrer));
let Some(node_resolver) = resolver.node_resolver.as_ref() else {
@ -487,6 +490,7 @@ impl LspResolver {
.resolve_if_for_npm_pkg(
specifier_text,
referrer,
referrer_kind,
NodeResolutionMode::Types,
)
.ok()
@ -615,21 +619,6 @@ async fn create_npm_resolver(
Some(create_cli_npm_resolver_for_lsp(options).await)
}
fn create_cjs_tracker(
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
pkg_json_resolver: Arc<PackageJsonResolver>,
) -> Arc<CjsTracker> {
Arc::new(CjsTracker::new(
in_npm_pkg_checker,
pkg_json_resolver,
CjsTrackerOptions {
// todo(dsherret): support in the lsp by stabilizing the feature
// so that we don't have to pipe the config in here
unstable_detect_cjs: false,
},
))
}
fn create_in_npm_pkg_checker(
npm_resolver: &Arc<dyn CliNpmResolver>,
) -> Arc<dyn InNpmPackageChecker> {
@ -649,7 +638,6 @@ fn create_in_npm_pkg_checker(
}
fn create_node_resolver(
cjs_tracker: Arc<CjsTracker>,
fs: Arc<dyn deno_fs::FileSystem>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: &Arc<dyn CliNpmResolver>,
@ -662,7 +650,6 @@ fn create_node_resolver(
pkg_json_resolver.clone(),
));
Arc::new(CliNodeResolver::new(
cjs_tracker.clone(),
fs,
in_npm_pkg_checker,
node_resolver_inner,
@ -670,13 +657,12 @@ fn create_node_resolver(
))
}
fn create_graph_resolver(
fn create_cli_resolver(
config_data: Option<&ConfigData>,
npm_resolver: Option<&Arc<dyn CliNpmResolver>>,
node_resolver: Option<&Arc<CliNodeResolver>>,
) -> Arc<CliGraphResolver> {
let workspace = config_data.map(|d| &d.member_dir.workspace);
Arc::new(CliGraphResolver::new(CliGraphResolverOptions {
) -> Arc<CliResolver> {
Arc::new(CliResolver::new(CliResolverOptions {
node_resolver: node_resolver.cloned(),
npm_resolver: npm_resolver.cloned(),
workspace_resolver: config_data.map(|d| d.resolver.clone()).unwrap_or_else(
@ -691,9 +677,6 @@ fn create_graph_resolver(
))
},
),
maybe_jsx_import_source_config: workspace.and_then(|workspace| {
workspace.to_maybe_jsx_import_source_config().ok().flatten()
}),
maybe_vendor_dir: config_data.and_then(|d| d.vendor_dir.as_ref()),
bare_node_builtins_enabled: config_data
.is_some_and(|d| d.unstable.contains("bare-node-builtins")),
@ -726,6 +709,141 @@ impl std::fmt::Debug for RedirectResolver {
}
}
#[derive(Debug)]
pub struct LspIsCjsResolver {
inner: IsCjsResolver,
}
impl Default for LspIsCjsResolver {
fn default() -> Self {
LspIsCjsResolver::new(&Default::default())
}
}
impl LspIsCjsResolver {
pub fn new(cache: &LspCache) -> Self {
#[derive(Debug)]
struct LspInNpmPackageChecker {
global_cache_dir: ModuleSpecifier,
}
impl LspInNpmPackageChecker {
pub fn new(cache: &LspCache) -> Self {
let npm_folder_path = cache.deno_dir().npm_folder_path();
Self {
global_cache_dir: url_from_directory_path(
&canonicalize_path_maybe_not_exists(&npm_folder_path)
.unwrap_or(npm_folder_path),
)
.unwrap_or_else(|_| {
ModuleSpecifier::parse("file:///invalid/").unwrap()
}),
}
}
}
impl InNpmPackageChecker for LspInNpmPackageChecker {
fn in_npm_package(&self, specifier: &ModuleSpecifier) -> bool {
if specifier.scheme() != "file" {
return false;
}
if specifier
.as_str()
.starts_with(self.global_cache_dir.as_str())
{
return true;
}
specifier.as_str().contains("/node_modules/")
}
}
let fs = Arc::new(deno_fs::RealFs);
let pkg_json_resolver = Arc::new(PackageJsonResolver::new(
deno_runtime::deno_node::DenoFsNodeResolverEnv::new(fs.clone()),
));
LspIsCjsResolver {
inner: IsCjsResolver::new(
Arc::new(LspInNpmPackageChecker::new(cache)),
pkg_json_resolver,
crate::resolver::IsCjsResolverOptions {
detect_cjs: true,
is_node_main: false,
},
),
}
}
pub fn get_maybe_doc_module_kind(
&self,
specifier: &ModuleSpecifier,
maybe_document: Option<&Document>,
) -> NodeModuleKind {
self.get_lsp_referrer_kind(
specifier,
maybe_document.and_then(|d| d.is_script()),
)
}
pub fn get_doc_module_kind(&self, document: &Document) -> NodeModuleKind {
self.get_lsp_referrer_kind(document.specifier(), document.is_script())
}
pub fn get_lsp_referrer_kind(
&self,
specifier: &ModuleSpecifier,
is_script: Option<bool>,
) -> NodeModuleKind {
self.inner.get_lsp_referrer_kind(specifier, is_script)
}
}
#[derive(Debug)]
pub struct SingleReferrerGraphResolver<'a> {
pub valid_referrer: &'a ModuleSpecifier,
pub referrer_kind: NodeModuleKind,
pub cli_resolver: &'a CliResolver,
pub jsx_import_source_config: Option<&'a JsxImportSourceConfig>,
}
impl<'a> deno_graph::source::Resolver for SingleReferrerGraphResolver<'a> {
fn default_jsx_import_source(&self) -> Option<String> {
self
.jsx_import_source_config
.and_then(|c| c.default_specifier.clone())
}
fn default_jsx_import_source_types(&self) -> Option<String> {
self
.jsx_import_source_config
.and_then(|c| c.default_types_specifier.clone())
}
fn jsx_import_source_module(&self) -> &str {
self
.jsx_import_source_config
.map(|c| c.module.as_str())
.unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE)
}
fn resolve(
&self,
specifier_text: &str,
referrer_range: &Range,
mode: ResolutionMode,
) -> Result<ModuleSpecifier, deno_graph::source::ResolveError> {
// this resolver assumes it will only be used with a single referrer
// with the provided referrer kind
debug_assert_eq!(referrer_range.specifier, *self.valid_referrer);
self.cli_resolver.resolve(
specifier_text,
referrer_range,
self.referrer_kind,
mode,
)
}
}
impl RedirectResolver {
fn new(
cache: Arc<dyn HttpCache>,
@ -842,45 +960,6 @@ impl RedirectResolver {
}
}
#[derive(Debug)]
pub struct LspCjsTracker {
cjs_tracker: Arc<CjsTracker>,
}
impl LspCjsTracker {
pub fn new(cjs_tracker: Arc<CjsTracker>) -> Self {
Self { cjs_tracker }
}
pub fn is_cjs(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
maybe_parsed_source: Option<&ParsedSource>,
) -> bool {
if let Some(module_kind) =
self.cjs_tracker.get_known_kind(specifier, media_type)
{
module_kind.is_cjs()
} else {
let maybe_is_script = maybe_parsed_source.map(|p| p.compute_is_script());
maybe_is_script
.and_then(|is_script| {
self
.cjs_tracker
.is_cjs_with_known_is_script(specifier, media_type, is_script)
.ok()
})
.unwrap_or_else(|| {
self
.cjs_tracker
.is_maybe_cjs(specifier, media_type)
.unwrap_or(false)
})
}
}
}
#[cfg(test)]
mod tests {
use super::*;

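
The LspIsCjsResolver added above decides a file's module kind by extension first and only then, for ambiguous .js/.ts files that parse as plain scripts, by the nearest package.json. The standalone sketch below illustrates that decision order only; it is not the real implementation (which caches results, distinguishes in-npm-package files from workspace files, and handles .d.ts, JSON, and Wasm), and the naive string check for "type": "module" is an assumption made to keep the example dependency-free.

use std::fs;
use std::path::{Path, PathBuf};

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ModuleKind {
    Esm,
    Cjs,
}

// Walk up from the file looking for the nearest package.json. This loosely
// mirrors the in-npm-package branch, where anything except "type": "module"
// is treated as CommonJS; the workspace branch in the real code is stricter.
fn kind_from_closest_package_json(file: &Path) -> ModuleKind {
    let mut dir = file.parent();
    while let Some(d) = dir {
        if let Ok(text) = fs::read_to_string(d.join("package.json")) {
            // naive check; a real implementation would parse the JSON
            return if text.contains("\"type\": \"module\"") {
                ModuleKind::Esm
            } else {
                ModuleKind::Cjs
            };
        }
        dir = d.parent();
    }
    ModuleKind::Cjs
}

// Extension wins; otherwise a file that parses as a "script" (no import or
// export syntax) defers to the nearest package.json, and everything else is
// treated as ESM.
fn module_kind(file: &Path, is_script: Option<bool>) -> ModuleKind {
    match file.extension().and_then(|e| e.to_str()) {
        Some("mjs") | Some("mts") => ModuleKind::Esm,
        Some("cjs") | Some("cts") => ModuleKind::Cjs,
        _ => match is_script {
            Some(true) => kind_from_closest_package_json(file),
            _ => ModuleKind::Esm,
        },
    }
}

fn main() {
    let ambiguous = PathBuf::from("/some/project/node_modules/pkg/index.js");
    println!("{:?}", module_kind(&ambiguous, Some(true)));
    println!("{:?}", module_kind(Path::new("/some/project/mod.mts"), None));
}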
View file

@ -69,6 +69,7 @@ use indexmap::IndexMap;
use indexmap::IndexSet;
use lazy_regex::lazy_regex;
use log::error;
use node_resolver::NodeModuleKind;
use once_cell::sync::Lazy;
use regex::Captures;
use regex::Regex;
@ -4401,25 +4402,15 @@ fn op_load<'s>(
None
} else {
let asset_or_document = state.get_asset_or_document(&specifier);
asset_or_document.map(|doc| {
let maybe_cjs_tracker = state
.state_snapshot
.resolver
.maybe_cjs_tracker(Some(&specifier));
LoadResponse {
asset_or_document.map(|doc| LoadResponse {
data: doc.text(),
script_kind: crate::tsc::as_ts_script_kind(doc.media_type()),
version: state.script_version(&specifier),
is_cjs: maybe_cjs_tracker
.map(|t| {
t.is_cjs(
&specifier,
doc.media_type(),
doc.maybe_parsed_source().and_then(|p| p.as_ref().ok()),
)
})
.unwrap_or(false),
}
is_cjs: doc
.document()
.map(|d| state.state_snapshot.is_cjs_resolver.get_doc_module_kind(d))
.unwrap_or(NodeModuleKind::Esm)
== NodeModuleKind::Cjs,
})
};
@ -4662,6 +4653,10 @@ fn op_script_names(state: &mut OpState) -> ScriptNames {
let (types, _) = documents.resolve_dependency(
types,
specifier,
state
.state_snapshot
.is_cjs_resolver
.get_doc_module_kind(doc),
doc.file_referrer(),
)?;
let types_doc = documents.get_or_load(&types, doc.file_referrer())?;
@ -5544,6 +5539,7 @@ mod tests {
documents: Arc::new(documents),
assets: Default::default(),
config: Arc::new(config),
is_cjs_resolver: Default::default(),
resolver,
});
let performance = Arc::new(Performance::default());

View file

@ -27,8 +27,8 @@ use crate::node;
use crate::node::CliNodeCodeTranslator;
use crate::npm::CliNpmResolver;
use crate::resolver::CjsTracker;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliNodeResolver;
use crate::resolver::CliResolver;
use crate::resolver::ModuleCodeStringSource;
use crate::resolver::NotSupportedKindInNpmError;
use crate::resolver::NpmModuleLoader;
@ -60,7 +60,6 @@ use deno_core::RequestedModuleType;
use deno_core::ResolutionKind;
use deno_core::SourceCodeCacheInfo;
use deno_graph::source::ResolutionMode;
use deno_graph::source::Resolver;
use deno_graph::GraphKind;
use deno_graph::JsModule;
use deno_graph::JsonModule;
@ -73,6 +72,7 @@ use deno_runtime::deno_node::create_host_defined_options;
use deno_runtime::deno_node::NodeRequireLoader;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::InNpmPackageChecker;
use node_resolver::NodeResolutionMode;
@ -206,7 +206,6 @@ struct SharedCliModuleLoaderState {
lib_worker: TsTypeLib,
initial_cwd: PathBuf,
is_inspecting: bool,
is_npm_main: bool,
is_repl: bool,
cjs_tracker: Arc<CjsTracker>,
code_cache: Option<Arc<CodeCache>>,
@ -220,7 +219,7 @@ struct SharedCliModuleLoaderState {
npm_resolver: Arc<dyn CliNpmResolver>,
npm_module_loader: NpmModuleLoader,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliGraphResolver>,
resolver: Arc<CliResolver>,
}
pub struct CliModuleLoaderFactory {
@ -243,7 +242,7 @@ impl CliModuleLoaderFactory {
npm_resolver: Arc<dyn CliNpmResolver>,
npm_module_loader: NpmModuleLoader,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliGraphResolver>,
resolver: Arc<CliResolver>,
) -> Self {
Self {
shared: Arc::new(SharedCliModuleLoaderState {
@ -252,7 +251,6 @@ impl CliModuleLoaderFactory {
lib_worker: options.ts_type_lib_worker(),
initial_cwd: options.initial_cwd().to_path_buf(),
is_inspecting: options.is_inspecting(),
is_npm_main: options.is_npm_main(),
is_repl: matches!(
options.sub_command(),
DenoSubcommand::Repl(_) | DenoSubcommand::Jupyter(_)
@ -286,7 +284,6 @@ impl CliModuleLoaderFactory {
Rc::new(CliModuleLoader(Rc::new(CliModuleLoaderInner {
lib,
is_worker,
is_npm_main: self.shared.is_npm_main,
parent_permissions,
permissions,
graph_container: graph_container.clone(),
@ -295,13 +292,14 @@ impl CliModuleLoaderFactory {
parsed_source_cache: self.shared.parsed_source_cache.clone(),
shared: self.shared.clone(),
})));
let node_require_loader = Rc::new(CliNodeRequireLoader::new(
self.shared.emitter.clone(),
self.shared.fs.clone(),
let node_require_loader = Rc::new(CliNodeRequireLoader {
cjs_tracker: self.shared.cjs_tracker.clone(),
emitter: self.shared.emitter.clone(),
fs: self.shared.fs.clone(),
graph_container,
self.shared.in_npm_pkg_checker.clone(),
self.shared.npm_resolver.clone(),
));
in_npm_pkg_checker: self.shared.in_npm_pkg_checker.clone(),
npm_resolver: self.shared.npm_resolver.clone(),
});
CreateModuleLoaderResult {
module_loader,
node_require_loader,
@ -343,7 +341,6 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory {
struct CliModuleLoaderInner<TGraphContainer: ModuleGraphContainer> {
lib: TsTypeLib,
is_npm_main: bool,
is_worker: bool,
/// The initial set of permissions used to resolve the static imports in the
/// worker. These are "allow all" for main worker, and parent thread
@ -450,7 +447,7 @@ impl<TGraphContainer: ModuleGraphContainer>
let referrer = if referrer.is_empty() && self.shared.is_repl {
// FIXME(bartlomieju): this is a hacky way to provide compatibility with REPL
// and `Deno.core.evalContext` API. Ideally we should always have a referrer filled
"./$deno$repl.ts"
"./$deno$repl.mts"
} else {
referrer
};
@ -478,7 +475,12 @@ impl<TGraphContainer: ModuleGraphContainer>
self
.shared
.node_resolver
.resolve(raw_specifier, referrer, NodeResolutionMode::Execution)?
.resolve(
raw_specifier,
referrer,
self.shared.cjs_tracker.get_referrer_kind(referrer),
NodeResolutionMode::Execution,
)?
.into_url(),
);
}
@ -508,6 +510,7 @@ impl<TGraphContainer: ModuleGraphContainer>
start: deno_graph::Position::zeroed(),
end: deno_graph::Position::zeroed(),
},
self.shared.cjs_tracker.get_referrer_kind(referrer),
ResolutionMode::Execution,
)?),
};
@ -518,6 +521,7 @@ impl<TGraphContainer: ModuleGraphContainer>
return self.shared.node_resolver.resolve_req_reference(
&reference,
referrer,
self.shared.cjs_tracker.get_referrer_kind(referrer),
NodeResolutionMode::Execution,
);
}
@ -538,6 +542,7 @@ impl<TGraphContainer: ModuleGraphContainer>
&package_folder,
module.nv_reference.sub_path(),
Some(referrer),
self.shared.cjs_tracker.get_referrer_kind(referrer),
NodeResolutionMode::Execution,
)
.with_context(|| {
@ -668,14 +673,11 @@ impl<TGraphContainer: ModuleGraphContainer>
is_script,
..
})) => {
// todo(dsherret): revert in https://github.com/denoland/deno/pull/26439
if self.is_npm_main && *is_script
|| self.shared.cjs_tracker.is_cjs_with_known_is_script(
if self.shared.cjs_tracker.is_cjs_with_known_is_script(
specifier,
*media_type,
*is_script,
)?
{
)? {
return Ok(Some(CodeOrDeferredEmit::Cjs {
specifier,
media_type: *media_type,
@ -1031,6 +1033,7 @@ impl ModuleGraphUpdatePermit for WorkerModuleGraphUpdatePermit {
#[derive(Debug)]
struct CliNodeRequireLoader<TGraphContainer: ModuleGraphContainer> {
cjs_tracker: Arc<CjsTracker>,
emitter: Arc<Emitter>,
fs: Arc<dyn FileSystem>,
graph_container: TGraphContainer,
@ -1038,26 +1041,6 @@ struct CliNodeRequireLoader<TGraphContainer: ModuleGraphContainer> {
npm_resolver: Arc<dyn CliNpmResolver>,
}
impl<TGraphContainer: ModuleGraphContainer>
CliNodeRequireLoader<TGraphContainer>
{
pub fn new(
emitter: Arc<Emitter>,
fs: Arc<dyn FileSystem>,
graph_container: TGraphContainer,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
npm_resolver: Arc<dyn CliNpmResolver>,
) -> Self {
Self {
emitter,
fs,
graph_container,
in_npm_pkg_checker,
npm_resolver,
}
}
}
impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
for CliNodeRequireLoader<TGraphContainer>
{
@ -1103,4 +1086,12 @@ impl<TGraphContainer: ModuleGraphContainer> NodeRequireLoader
Ok(text)
}
}
fn is_maybe_cjs(
&self,
specifier: &ModuleSpecifier,
) -> Result<bool, ClosestPkgJsonError> {
let media_type = MediaType::from_specifier(specifier);
self.cjs_tracker.is_maybe_cjs(specifier, media_type)
}
}
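
Several call sites in this file now ask the CjsTracker for the referrer's kind before every node resolution. The sketch below shows only the general shape of such a tracker: non-file referrers default to ESM, and previously learned kinds are served from a cache. KindTracker and its methods are illustrative stand-ins, not the real tracker (which uses a concurrent DashMap, media-type rules, and package.json lookups).

use std::collections::HashMap;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ModuleKind {
    Esm,
    Cjs,
}

struct KindTracker {
    known: HashMap<String, ModuleKind>,
}

impl KindTracker {
    fn new() -> Self {
        Self { known: HashMap::new() }
    }

    // Record what was learned when a module was actually loaded/parsed.
    fn set_known(&mut self, specifier: &str, kind: ModuleKind) {
        self.known.insert(specifier.to_string(), kind);
    }

    // Non-file referrers (https:, data:, ...) are always treated as ESM;
    // otherwise fall back to the cached kind, defaulting to ESM.
    fn referrer_kind(&self, specifier: &str) -> ModuleKind {
        if !specifier.starts_with("file:") {
            return ModuleKind::Esm;
        }
        self.known.get(specifier).copied().unwrap_or(ModuleKind::Esm)
    }
}

fn main() {
    let mut tracker = KindTracker::new();
    tracker.set_known("file:///pkg/index.js", ModuleKind::Cjs);
    assert_eq!(tracker.referrer_kind("file:///pkg/index.js"), ModuleKind::Cjs);
    assert_eq!(tracker.referrer_kind("https://deno.land/x/mod.ts"), ModuleKind::Esm);
    println!("ok");
}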

View file

@ -62,10 +62,6 @@ pub struct CliCjsCodeAnalyzer {
cjs_tracker: Arc<CjsTracker>,
fs: deno_fs::FileSystemRc,
parsed_source_cache: Option<Arc<ParsedSourceCache>>,
// todo(dsherret): hack, remove in https://github.com/denoland/deno/pull/26439
// For example, this does not properly handle if cjs analysis was already done
// and has been cached.
is_npm_main: bool,
}
impl CliCjsCodeAnalyzer {
@ -74,14 +70,12 @@ impl CliCjsCodeAnalyzer {
cjs_tracker: Arc<CjsTracker>,
fs: deno_fs::FileSystemRc,
parsed_source_cache: Option<Arc<ParsedSourceCache>>,
is_npm_main: bool,
) -> Self {
Self {
cache,
cjs_tracker,
fs,
parsed_source_cache,
is_npm_main,
}
}
@ -106,9 +100,7 @@ impl CliCjsCodeAnalyzer {
}
let cjs_tracker = self.cjs_tracker.clone();
let is_npm_main = self.is_npm_main;
let is_maybe_cjs =
cjs_tracker.is_maybe_cjs(specifier, media_type)? || is_npm_main;
let is_maybe_cjs = cjs_tracker.is_maybe_cjs(specifier, media_type)?;
let analysis = if is_maybe_cjs {
let maybe_parsed_source = self
.parsed_source_cache
@ -135,7 +127,7 @@ impl CliCjsCodeAnalyzer {
parsed_source.specifier(),
media_type,
is_script,
)? || is_script && is_npm_main;
)?;
if is_cjs {
let analysis = parsed_source.analyze_cjs();
Ok(CliCjsAnalysis::Cjs {

View file

@ -18,6 +18,7 @@ pub struct BinEntries<'a> {
seen_names: HashMap<&'a str, &'a NpmPackageId>,
/// The bin entries
entries: Vec<(&'a NpmResolutionPackage, PathBuf)>,
sorted: bool,
}
/// Returns the name of the default binary for the given package.
@ -31,6 +32,20 @@ fn default_bin_name(package: &NpmResolutionPackage) -> &str {
.map_or(package.id.nv.name.as_str(), |(_, name)| name)
}
pub fn warn_missing_entrypoint(
bin_name: &str,
package_path: &Path,
entrypoint: &Path,
) {
log::warn!(
"{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.",
deno_terminal::colors::yellow("Warning"),
bin_name,
package_path.display(),
entrypoint.display()
);
}
impl<'a> BinEntries<'a> {
pub fn new() -> Self {
Self::default()
@ -42,6 +57,7 @@ impl<'a> BinEntries<'a> {
package: &'a NpmResolutionPackage,
package_path: PathBuf,
) {
self.sorted = false;
// check for a new collision, if we haven't already
// found one
match package.bin.as_ref().unwrap() {
@ -79,16 +95,21 @@ impl<'a> BinEntries<'a> {
&str, // bin name
&str, // bin script
) -> Result<(), AnyError>,
mut filter: impl FnMut(&NpmResolutionPackage) -> bool,
) -> Result<(), AnyError> {
if !self.collisions.is_empty() {
if !self.collisions.is_empty() && !self.sorted {
// walking the dependency tree to find out the depth of each package
// is sort of expensive, so we only do it if there's a collision
sort_by_depth(snapshot, &mut self.entries, &mut self.collisions);
self.sorted = true;
}
let mut seen = HashSet::new();
for (package, package_path) in &self.entries {
if !filter(package) {
continue;
}
if let Some(bin_entries) = &package.bin {
match bin_entries {
deno_npm::registry::NpmPackageVersionBinEntry::String(script) => {
@ -118,8 +139,8 @@ impl<'a> BinEntries<'a> {
}
/// Collect the bin entries into a vec of (name, script path)
pub fn into_bin_files(
mut self,
pub fn collect_bin_files(
&mut self,
snapshot: &NpmResolutionSnapshot,
) -> Vec<(String, PathBuf)> {
let mut bins = Vec::new();
@ -131,17 +152,18 @@ impl<'a> BinEntries<'a> {
bins.push((name.to_string(), package_path.join(script)));
Ok(())
},
|_| true,
)
.unwrap();
bins
}
/// Finish setting up the bin entries, writing the necessary files
/// to disk.
pub fn finish(
fn set_up_entries_filtered(
mut self,
snapshot: &NpmResolutionSnapshot,
bin_node_modules_dir_path: &Path,
filter: impl FnMut(&NpmResolutionPackage) -> bool,
mut handler: impl FnMut(&EntrySetupOutcome<'_>),
) -> Result<(), AnyError> {
if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() {
std::fs::create_dir_all(bin_node_modules_dir_path).with_context(
@ -160,18 +182,54 @@ impl<'a> BinEntries<'a> {
Ok(())
},
|package, package_path, name, script| {
set_up_bin_entry(
let outcome = set_up_bin_entry(
package,
name,
script,
package_path,
bin_node_modules_dir_path,
)
)?;
handler(&outcome);
Ok(())
},
filter,
)?;
Ok(())
}
/// Finish setting up the bin entries, writing the necessary files
/// to disk.
pub fn finish(
self,
snapshot: &NpmResolutionSnapshot,
bin_node_modules_dir_path: &Path,
handler: impl FnMut(&EntrySetupOutcome<'_>),
) -> Result<(), AnyError> {
self.set_up_entries_filtered(
snapshot,
bin_node_modules_dir_path,
|_| true,
handler,
)
}
/// Finish setting up the bin entries, writing the necessary files
/// to disk.
pub fn finish_only(
self,
snapshot: &NpmResolutionSnapshot,
bin_node_modules_dir_path: &Path,
handler: impl FnMut(&EntrySetupOutcome<'_>),
only: &HashSet<&NpmPackageId>,
) -> Result<(), AnyError> {
self.set_up_entries_filtered(
snapshot,
bin_node_modules_dir_path,
|package| only.contains(&package.id),
handler,
)
}
}
// walk the dependency tree to find out the depth of each package
@ -233,16 +291,17 @@ fn sort_by_depth(
});
}
pub fn set_up_bin_entry(
package: &NpmResolutionPackage,
bin_name: &str,
pub fn set_up_bin_entry<'a>(
package: &'a NpmResolutionPackage,
bin_name: &'a str,
#[allow(unused_variables)] bin_script: &str,
#[allow(unused_variables)] package_path: &Path,
#[allow(unused_variables)] package_path: &'a Path,
bin_node_modules_dir_path: &Path,
) -> Result<(), AnyError> {
) -> Result<EntrySetupOutcome<'a>, AnyError> {
#[cfg(windows)]
{
set_up_bin_shim(package, bin_name, bin_node_modules_dir_path)?;
Ok(EntrySetupOutcome::Success)
}
#[cfg(unix)]
{
@ -252,9 +311,8 @@ pub fn set_up_bin_entry(
bin_script,
package_path,
bin_node_modules_dir_path,
)?;
)
}
Ok(())
}
#[cfg(windows)]
@ -301,14 +359,39 @@ fn make_executable_if_exists(path: &Path) -> Result<bool, AnyError> {
Ok(true)
}
pub enum EntrySetupOutcome<'a> {
#[cfg_attr(windows, allow(dead_code))]
MissingEntrypoint {
bin_name: &'a str,
package_path: &'a Path,
entrypoint: PathBuf,
package: &'a NpmResolutionPackage,
},
Success,
}
impl<'a> EntrySetupOutcome<'a> {
pub fn warn_if_failed(&self) {
match self {
EntrySetupOutcome::MissingEntrypoint {
bin_name,
package_path,
entrypoint,
..
} => warn_missing_entrypoint(bin_name, package_path, entrypoint),
EntrySetupOutcome::Success => {}
}
}
}
#[cfg(unix)]
fn symlink_bin_entry(
_package: &NpmResolutionPackage,
bin_name: &str,
fn symlink_bin_entry<'a>(
package: &'a NpmResolutionPackage,
bin_name: &'a str,
bin_script: &str,
package_path: &Path,
package_path: &'a Path,
bin_node_modules_dir_path: &Path,
) -> Result<(), AnyError> {
) -> Result<EntrySetupOutcome<'a>, AnyError> {
use std::io;
use std::os::unix::fs::symlink;
let link = bin_node_modules_dir_path.join(bin_name);
@ -318,14 +401,12 @@ fn symlink_bin_entry(
format!("Can't set up '{}' bin at {}", bin_name, original.display())
})?;
if !found {
log::warn!(
"{} Trying to set up '{}' bin for \"{}\", but the entry point \"{}\" doesn't exist.",
deno_terminal::colors::yellow("Warning"),
return Ok(EntrySetupOutcome::MissingEntrypoint {
bin_name,
package_path.display(),
original.display()
);
return Ok(());
package_path,
entrypoint: original,
package,
});
}
let original_relative =
@ -348,7 +429,7 @@ fn symlink_bin_entry(
original_relative.display()
)
})?;
return Ok(());
return Ok(EntrySetupOutcome::Success);
}
return Err(err).with_context(|| {
format!(
@ -359,5 +440,5 @@ fn symlink_bin_entry(
});
}
Ok(())
Ok(EntrySetupOutcome::Success)
}
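
The bin-entry setup above stops warning inline and instead returns an EntrySetupOutcome, so callers can decide whether to warn now or defer (for example when a lifecycle script might still create the missing entrypoint). Below is a self-contained sketch of that shape with illustrative names and none of the real symlink/shim logic.

use std::path::{Path, PathBuf};

enum EntrySetupOutcome<'a> {
    MissingEntrypoint {
        bin_name: &'a str,
        package_path: &'a Path,
        entrypoint: PathBuf,
    },
    Success,
}

impl<'a> EntrySetupOutcome<'a> {
    fn warn_if_failed(&self) {
        if let EntrySetupOutcome::MissingEntrypoint { bin_name, package_path, entrypoint } = self {
            eprintln!(
                "Warning: bin '{}' for {} points at missing entrypoint {}",
                bin_name,
                package_path.display(),
                entrypoint.display(),
            );
        }
    }
}

fn set_up_bin_entry<'a>(
    bin_name: &'a str,
    package_path: &'a Path,
    bin_script: &str,
) -> EntrySetupOutcome<'a> {
    let entrypoint = package_path.join(bin_script);
    if entrypoint.exists() {
        // the real code creates a symlink (unix) or shim (windows) here
        EntrySetupOutcome::Success
    } else {
        EntrySetupOutcome::MissingEntrypoint { bin_name, package_path, entrypoint }
    }
}

fn main() {
    let pkg = Path::new("./node_modules/some-pkg");
    let outcome = set_up_bin_entry("some-bin", pkg, "cli.js");
    // a caller that knows lifecycle scripts may still create the file could
    // defer this; here we warn immediately
    outcome.warn_if_failed();
}

Returning data instead of logging keeps the policy (warn now versus defer) at the call site, which is how the local node_modules resolver uses it in the hunk further down.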

View file

@ -10,6 +10,7 @@ use deno_runtime::deno_io::FromRawIoHandle;
use deno_semver::package::PackageNv;
use deno_semver::Version;
use std::borrow::Cow;
use std::collections::HashSet;
use std::rc::Rc;
use std::path::Path;
@ -61,7 +62,7 @@ impl<'a> LifecycleScripts<'a> {
}
}
fn has_lifecycle_scripts(
pub fn has_lifecycle_scripts(
package: &NpmResolutionPackage,
package_path: &Path,
) -> bool {
@ -83,7 +84,7 @@ fn is_broken_default_install_script(script: &str, package_path: &Path) -> bool {
}
impl<'a> LifecycleScripts<'a> {
fn can_run_scripts(&self, package_nv: &PackageNv) -> bool {
pub fn can_run_scripts(&self, package_nv: &PackageNv) -> bool {
if !self.strategy.can_run_scripts() {
return false;
}
@ -98,6 +99,9 @@ impl<'a> LifecycleScripts<'a> {
PackagesAllowedScripts::None => false,
}
}
pub fn has_run_scripts(&self, package: &NpmResolutionPackage) -> bool {
self.strategy.has_run(package)
}
/// Register a package for running lifecycle scripts, if applicable.
///
/// `package_path` is the path containing the package's code (its root dir).
@ -110,12 +114,12 @@ impl<'a> LifecycleScripts<'a> {
) {
if has_lifecycle_scripts(package, &package_path) {
if self.can_run_scripts(&package.id.nv) {
if !self.strategy.has_run(package) {
if !self.has_run_scripts(package) {
self
.packages_with_scripts
.push((package, package_path.into_owned()));
}
} else if !self.strategy.has_run(package)
} else if !self.has_run_scripts(package)
&& (self.config.explicit_install || !self.strategy.has_warned(package))
{
// Skip adding `esbuild` as it is known to work properly without its lifecycle script
@ -149,22 +153,32 @@ impl<'a> LifecycleScripts<'a> {
self,
snapshot: &NpmResolutionSnapshot,
packages: &[NpmResolutionPackage],
root_node_modules_dir_path: Option<&Path>,
root_node_modules_dir_path: &Path,
progress_bar: &ProgressBar,
) -> Result<(), AnyError> {
self.warn_not_run_scripts()?;
let get_package_path =
|p: &NpmResolutionPackage| self.strategy.package_path(p);
let mut failed_packages = Vec::new();
let mut bin_entries = BinEntries::new();
if !self.packages_with_scripts.is_empty() {
let package_ids = self
.packages_with_scripts
.iter()
.map(|(p, _)| &p.id)
.collect::<HashSet<_>>();
// get custom commands for each bin available in the node_modules dir (essentially
// the scripts that are in `node_modules/.bin`)
let base =
resolve_baseline_custom_commands(snapshot, packages, get_package_path)?;
let base = resolve_baseline_custom_commands(
&mut bin_entries,
snapshot,
packages,
get_package_path,
)?;
let init_cwd = &self.config.initial_cwd;
let process_state = crate::npm::managed::npm_process_state(
snapshot.as_valid_serialized(),
root_node_modules_dir_path,
Some(root_node_modules_dir_path),
);
let mut env_vars = crate::task_runner::real_env_vars();
@ -221,7 +235,7 @@ impl<'a> LifecycleScripts<'a> {
custom_commands: custom_commands.clone(),
init_cwd,
argv: &[],
root_node_modules_dir: root_node_modules_dir_path,
root_node_modules_dir: Some(root_node_modules_dir_path),
stdio: Some(crate::task_runner::TaskIo {
stderr: TaskStdio::piped(),
stdout: TaskStdio::piped(),
@ -262,6 +276,17 @@ impl<'a> LifecycleScripts<'a> {
}
self.strategy.did_run_scripts(package)?;
}
// re-set up bin entries for the packages which we've run scripts for.
// lifecycle scripts can create files that are linked to by bin entries,
// and the only reliable way to handle this is to re-link bin entries
// (this is what PNPM does as well)
bin_entries.finish_only(
snapshot,
&root_node_modules_dir_path.join(".bin"),
|outcome| outcome.warn_if_failed(),
&package_ids,
)?;
}
if failed_packages.is_empty() {
Ok(())
@ -281,9 +306,10 @@ impl<'a> LifecycleScripts<'a> {
// take in all (non copy) packages from snapshot,
// and resolve the set of available binaries to create
// custom commands available to the task runner
fn resolve_baseline_custom_commands(
snapshot: &NpmResolutionSnapshot,
packages: &[NpmResolutionPackage],
fn resolve_baseline_custom_commands<'a>(
bin_entries: &mut BinEntries<'a>,
snapshot: &'a NpmResolutionSnapshot,
packages: &'a [NpmResolutionPackage],
get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut custom_commands = crate::task_runner::TaskCustomCommands::new();
@ -306,6 +332,7 @@ fn resolve_baseline_custom_commands(
// doing it for packages that are set up already.
// realistically, scripts won't be run very often so it probably isn't too big of an issue.
resolve_custom_commands_from_packages(
bin_entries,
custom_commands,
snapshot,
packages,
@ -320,12 +347,12 @@ fn resolve_custom_commands_from_packages<
'a,
P: IntoIterator<Item = &'a NpmResolutionPackage>,
>(
bin_entries: &mut BinEntries<'a>,
mut commands: crate::task_runner::TaskCustomCommands,
snapshot: &'a NpmResolutionSnapshot,
packages: P,
get_package_path: impl Fn(&'a NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut bin_entries = BinEntries::new();
for package in packages {
let package_path = get_package_path(package);
@ -333,7 +360,7 @@ fn resolve_custom_commands_from_packages<
bin_entries.add(package, package_path);
}
}
let bins = bin_entries.into_bin_files(snapshot);
let bins: Vec<(String, PathBuf)> = bin_entries.collect_bin_files(snapshot);
for (bin_name, script_path) in bins {
commands.insert(
bin_name.clone(),
@ -356,7 +383,9 @@ fn resolve_custom_commands_from_deps(
snapshot: &NpmResolutionSnapshot,
get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf,
) -> Result<crate::task_runner::TaskCustomCommands, AnyError> {
let mut bin_entries = BinEntries::new();
resolve_custom_commands_from_packages(
&mut bin_entries,
baseline,
snapshot,
package

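
After lifecycle scripts run, the installer above re-links .bin entries, but only for the packages whose scripts actually ran (finish_only with a HashSet of package ids). The following standalone sketch shows just that filtering step; PackageId, BinEntry, and relink_bins_for are illustrative stand-ins, not the real npm resolution types.

use std::collections::HashSet;

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct PackageId(String);

struct BinEntry {
    package: PackageId,
    bin_name: String,
}

fn relink_bins_for(entries: &[BinEntry], ran_scripts: &HashSet<PackageId>) {
    for entry in entries {
        if !ran_scripts.contains(&entry.package) {
            continue; // the filter passed to finish_only
        }
        // the real code re-creates the node_modules/.bin link here, because a
        // postinstall script may have created the entrypoint it points at
        println!("re-linking {} for {:?}", entry.bin_name, entry.package);
    }
}

fn main() {
    let entries = vec![
        BinEntry { package: PackageId("esbuild@0.20.0".into()), bin_name: "esbuild".into() },
        BinEntry { package: PackageId("left-pad@1.3.0".into()), bin_name: "left-pad".into() },
    ];
    let ran: HashSet<PackageId> = [PackageId("esbuild@0.20.0".into())].into_iter().collect();
    relink_bins_for(&entries, &ran);
}

As the comment in the diff notes, this mirrors what pnpm does: lifecycle scripts can create the files that bin entries point at, so the linking has to be redone afterwards for exactly those packages.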
View file

@ -55,6 +55,7 @@ use crate::util::progress_bar::ProgressMessagePrompt;
use super::super::cache::NpmCache;
use super::super::cache::TarballCache;
use super::super::resolution::NpmResolution;
use super::common::bin_entries;
use super::common::NpmPackageFsResolver;
use super::common::RegistryReadPermissionChecker;
@ -329,8 +330,7 @@ async fn sync_resolution_with_fs(
let mut cache_futures = FuturesUnordered::new();
let mut newest_packages_by_name: HashMap<&String, &NpmResolutionPackage> =
HashMap::with_capacity(package_partitions.packages.len());
let bin_entries =
Rc::new(RefCell::new(super::common::bin_entries::BinEntries::new()));
let bin_entries = Rc::new(RefCell::new(bin_entries::BinEntries::new()));
let mut lifecycle_scripts =
super::common::lifecycle_scripts::LifecycleScripts::new(
lifecycle_scripts,
@ -658,7 +658,28 @@ async fn sync_resolution_with_fs(
// 7. Set up `node_modules/.bin` entries for packages that need it.
{
let bin_entries = std::mem::take(&mut *bin_entries.borrow_mut());
bin_entries.finish(snapshot, &bin_node_modules_dir_path)?;
bin_entries.finish(
snapshot,
&bin_node_modules_dir_path,
|setup_outcome| {
match setup_outcome {
bin_entries::EntrySetupOutcome::MissingEntrypoint {
package,
package_path,
..
} if super::common::lifecycle_scripts::has_lifecycle_scripts(
package,
package_path,
) && lifecycle_scripts.can_run_scripts(&package.id.nv)
&& !lifecycle_scripts.has_run_scripts(package) =>
{
// ignore, it might get fixed when the lifecycle scripts run.
// if not, we'll warn then
}
outcome => outcome.warn_if_failed(),
}
},
)?;
}
// 8. Create symlinks for the workspace packages
@ -708,7 +729,7 @@ async fn sync_resolution_with_fs(
.finish(
snapshot,
&package_partitions.packages,
Some(root_node_modules_dir_path),
root_node_modules_dir_path,
progress_bar,
)
.await?;

View file

@ -4,7 +4,6 @@ use async_trait::async_trait;
use dashmap::DashMap;
use dashmap::DashSet;
use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_config::workspace::MappedResolution;
use deno_config::workspace::MappedResolutionDiagnostic;
use deno_config::workspace::MappedResolutionError;
@ -17,9 +16,7 @@ use deno_core::ModuleSourceCode;
use deno_core::ModuleSpecifier;
use deno_graph::source::ResolutionMode;
use deno_graph::source::ResolveError;
use deno_graph::source::Resolver;
use deno_graph::source::UnknownBuiltInNodeModuleError;
use deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE;
use deno_graph::NpmLoadError;
use deno_graph::NpmResolvePkgReqsResult;
use deno_npm::resolution::NpmResolutionError;
@ -52,7 +49,6 @@ use std::path::PathBuf;
use std::sync::Arc;
use thiserror::Error;
use crate::args::JsxImportSourceConfig;
use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS;
use crate::node::CliNodeCodeTranslator;
use crate::npm::CliNpmResolver;
@ -108,7 +104,6 @@ impl deno_resolver::fs::DenoResolverFs for CliDenoResolverFs {
#[derive(Debug)]
pub struct CliNodeResolver {
cjs_tracker: Arc<CjsTracker>,
fs: Arc<dyn deno_fs::FileSystem>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
node_resolver: Arc<NodeResolver>,
@ -117,14 +112,12 @@ pub struct CliNodeResolver {
impl CliNodeResolver {
pub fn new(
cjs_tracker: Arc<CjsTracker>,
fs: Arc<dyn deno_fs::FileSystem>,
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
node_resolver: Arc<NodeResolver>,
npm_resolver: Arc<dyn CliNpmResolver>,
) -> Self {
Self {
cjs_tracker,
fs,
in_npm_pkg_checker,
node_resolver,
@ -140,9 +133,11 @@ impl CliNodeResolver {
&self,
specifier: &str,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
mode: NodeResolutionMode,
) -> Result<Option<NodeResolution>, AnyError> {
let resolution_result = self.resolve(specifier, referrer, mode);
let resolution_result =
self.resolve(specifier, referrer, referrer_kind, mode);
match resolution_result {
Ok(res) => Ok(Some(res)),
Err(err) => {
@ -213,35 +208,26 @@ impl CliNodeResolver {
&self,
specifier: &str,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
mode: NodeResolutionMode,
) -> Result<NodeResolution, NodeResolveError> {
let referrer_kind = if self
.cjs_tracker
.is_maybe_cjs(referrer, MediaType::from_specifier(referrer))
.map_err(|err| NodeResolveErrorKind::PackageResolve(err.into()))?
{
NodeModuleKind::Cjs
} else {
NodeModuleKind::Esm
};
let res =
self
.node_resolver
.resolve(specifier, referrer, referrer_kind, mode)?;
Ok(res)
.resolve(specifier, referrer, referrer_kind, mode)
}
pub fn resolve_req_reference(
&self,
req_ref: &NpmPackageReqReference,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
mode: NodeResolutionMode,
) -> Result<ModuleSpecifier, AnyError> {
self.resolve_req_with_sub_path(
req_ref.req(),
req_ref.sub_path(),
referrer,
referrer_kind,
mode,
)
}
@ -251,6 +237,7 @@ impl CliNodeResolver {
req: &PackageReq,
sub_path: Option<&str>,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
mode: NodeResolutionMode,
) -> Result<ModuleSpecifier, AnyError> {
let package_folder = self
@ -260,6 +247,7 @@ impl CliNodeResolver {
&package_folder,
sub_path,
Some(referrer),
referrer_kind,
mode,
);
match resolution_result {
@ -284,12 +272,14 @@ impl CliNodeResolver {
package_folder: &Path,
sub_path: Option<&str>,
maybe_referrer: Option<&ModuleSpecifier>,
referrer_kind: NodeModuleKind,
mode: NodeResolutionMode,
) -> Result<ModuleSpecifier, PackageSubpathResolveError> {
self.node_resolver.resolve_package_subpath_from_deno_module(
package_folder,
sub_path,
maybe_referrer,
referrer_kind,
mode,
)
}
@ -419,10 +409,6 @@ impl NpmModuleLoader {
}
}
pub struct CjsTrackerOptions {
pub unstable_detect_cjs: bool,
}
/// Keeps track of what module specifiers were resolved as CJS.
///
/// Modules that are `.js` or `.ts` are only known to be CJS or
/// ESM after they're loaded, so these files
/// will be "maybe CJS" until they're loaded.
/// will be "maybe CJS" until they're loaded.
#[derive(Debug)]
pub struct CjsTracker {
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
pkg_json_resolver: Arc<PackageJsonResolver>,
unstable_detect_cjs: bool,
known: DashMap<ModuleSpecifier, ModuleKind>,
is_cjs_resolver: IsCjsResolver,
known: DashMap<ModuleSpecifier, NodeModuleKind>,
}
impl CjsTracker {
pub fn new(
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
pkg_json_resolver: Arc<PackageJsonResolver>,
options: CjsTrackerOptions,
options: IsCjsResolverOptions,
) -> Self {
Self {
is_cjs_resolver: IsCjsResolver::new(
in_npm_pkg_checker,
pkg_json_resolver,
unstable_detect_cjs: options.unstable_detect_cjs,
options,
),
known: Default::default(),
}
}
@ -485,47 +471,90 @@ impl CjsTracker {
.get_known_kind_with_is_script(specifier, media_type, is_script)
{
Some(kind) => kind,
None => self.check_based_on_pkg_json(specifier)?,
None => self.is_cjs_resolver.check_based_on_pkg_json(specifier)?,
};
Ok(kind.is_cjs())
Ok(kind == NodeModuleKind::Cjs)
}
pub fn get_known_kind(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
) -> Option<ModuleKind> {
) -> Option<NodeModuleKind> {
self.get_known_kind_with_is_script(specifier, media_type, None)
}
pub fn get_referrer_kind(
&self,
specifier: &ModuleSpecifier,
) -> NodeModuleKind {
if specifier.scheme() != "file" {
return NodeModuleKind::Esm;
}
self
.get_known_kind(specifier, MediaType::from_specifier(specifier))
.unwrap_or(NodeModuleKind::Esm)
}
fn get_known_kind_with_is_script(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
is_script: Option<bool>,
) -> Option<ModuleKind> {
if specifier.scheme() != "file" {
return Some(ModuleKind::Esm);
) -> Option<NodeModuleKind> {
self.is_cjs_resolver.get_known_kind_with_is_script(
specifier,
media_type,
is_script,
&self.known,
)
}
}
#[derive(Debug)]
pub struct IsCjsResolverOptions {
pub detect_cjs: bool,
pub is_node_main: bool,
}
#[derive(Debug)]
pub struct IsCjsResolver {
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
pkg_json_resolver: Arc<PackageJsonResolver>,
options: IsCjsResolverOptions,
}
impl IsCjsResolver {
pub fn new(
in_npm_pkg_checker: Arc<dyn InNpmPackageChecker>,
pkg_json_resolver: Arc<PackageJsonResolver>,
options: IsCjsResolverOptions,
) -> Self {
Self {
in_npm_pkg_checker,
pkg_json_resolver,
options,
}
}
match media_type {
MediaType::Mts | MediaType::Mjs | MediaType::Dmts => Some(ModuleKind::Esm),
MediaType::Cjs | MediaType::Cts | MediaType::Dcts => Some(ModuleKind::Cjs),
pub fn get_lsp_referrer_kind(
&self,
specifier: &ModuleSpecifier,
is_script: Option<bool>,
) -> NodeModuleKind {
if specifier.scheme() != "file" {
return NodeModuleKind::Esm;
}
match MediaType::from_specifier(specifier) {
MediaType::Mts | MediaType::Mjs | MediaType::Dmts => NodeModuleKind::Esm,
MediaType::Cjs | MediaType::Cts | MediaType::Dcts => NodeModuleKind::Cjs,
MediaType::Dts => {
// dts files are always determined based on the package.json because
// they contain imports/exports even when considered CJS
if let Some(value) = self.known.get(specifier).map(|v| *v) {
Some(value)
} else {
let value = self.check_based_on_pkg_json(specifier).ok();
if let Some(value) = value {
self.known.insert(specifier.clone(), value);
}
Some(value.unwrap_or(ModuleKind::Esm))
}
self.check_based_on_pkg_json(specifier).unwrap_or(NodeModuleKind::Esm)
}
MediaType::Wasm |
MediaType::Json => Some(ModuleKind::Esm),
MediaType::Json => NodeModuleKind::Esm,
MediaType::JavaScript
| MediaType::Jsx
| MediaType::TypeScript
@ -534,18 +563,63 @@ impl CjsTracker {
| MediaType::Css
| MediaType::SourceMap
| MediaType::Unknown => {
if let Some(value) = self.known.get(specifier).map(|v| *v) {
if value.is_cjs() && is_script == Some(false) {
match is_script {
Some(true) => self.check_based_on_pkg_json(specifier).unwrap_or(NodeModuleKind::Esm),
Some(false) | None => NodeModuleKind::Esm,
}
}
}
}
fn get_known_kind_with_is_script(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
is_script: Option<bool>,
known_cache: &DashMap<ModuleSpecifier, NodeModuleKind>,
) -> Option<NodeModuleKind> {
if specifier.scheme() != "file" {
return Some(NodeModuleKind::Esm);
}
match media_type {
MediaType::Mts | MediaType::Mjs | MediaType::Dmts => Some(NodeModuleKind::Esm),
MediaType::Cjs | MediaType::Cts | MediaType::Dcts => Some(NodeModuleKind::Cjs),
MediaType::Dts => {
// dts files are always determined based on the package.json because
// they contain imports/exports even when considered CJS
if let Some(value) = known_cache.get(specifier).map(|v| *v) {
Some(value)
} else {
let value = self.check_based_on_pkg_json(specifier).ok();
if let Some(value) = value {
known_cache.insert(specifier.clone(), value);
}
Some(value.unwrap_or(NodeModuleKind::Esm))
}
}
MediaType::Wasm |
MediaType::Json => Some(NodeModuleKind::Esm),
MediaType::JavaScript
| MediaType::Jsx
| MediaType::TypeScript
| MediaType::Tsx
// treat these as unknown
| MediaType::Css
| MediaType::SourceMap
| MediaType::Unknown => {
if let Some(value) = known_cache.get(specifier).map(|v| *v) {
if value == NodeModuleKind::Cjs && is_script == Some(false) {
// we now know this is actually esm
self.known.insert(specifier.clone(), ModuleKind::Esm);
Some(ModuleKind::Esm)
known_cache.insert(specifier.clone(), NodeModuleKind::Esm);
Some(NodeModuleKind::Esm)
} else {
Some(value)
}
} else if is_script == Some(false) {
// we know this is esm
self.known.insert(specifier.clone(), ModuleKind::Esm);
Some(ModuleKind::Esm)
known_cache.insert(specifier.clone(), NodeModuleKind::Esm);
Some(NodeModuleKind::Esm)
} else {
None
}
@ -556,27 +630,38 @@ impl CjsTracker {
fn check_based_on_pkg_json(
&self,
specifier: &ModuleSpecifier,
) -> Result<ModuleKind, ClosestPkgJsonError> {
) -> Result<NodeModuleKind, ClosestPkgJsonError> {
if self.in_npm_pkg_checker.in_npm_package(specifier) {
if let Some(pkg_json) =
self.pkg_json_resolver.get_closest_package_json(specifier)?
{
let is_file_location_cjs = pkg_json.typ != "module";
Ok(ModuleKind::from_is_cjs(is_file_location_cjs))
Ok(if is_file_location_cjs {
NodeModuleKind::Cjs
} else {
Ok(ModuleKind::Cjs)
NodeModuleKind::Esm
})
} else {
Ok(NodeModuleKind::Cjs)
}
} else if self.unstable_detect_cjs {
} else if self.options.detect_cjs || self.options.is_node_main {
if let Some(pkg_json) =
self.pkg_json_resolver.get_closest_package_json(specifier)?
{
let is_cjs_type = pkg_json.typ == "commonjs";
Ok(ModuleKind::from_is_cjs(is_cjs_type))
let is_cjs_type = pkg_json.typ == "commonjs"
|| self.options.is_node_main && pkg_json.typ == "none";
Ok(if is_cjs_type {
NodeModuleKind::Cjs
} else {
Ok(ModuleKind::Esm)
NodeModuleKind::Esm
})
} else if self.options.is_node_main {
Ok(NodeModuleKind::Cjs)
} else {
Ok(NodeModuleKind::Esm)
}
} else {
Ok(ModuleKind::Esm)
Ok(NodeModuleKind::Esm)
}
}
}
@ -587,48 +672,33 @@ pub type CliSloppyImportsResolver =
/// A resolver that takes care of resolution, taking into account loaded
/// import map, JSX settings.
#[derive(Debug)]
pub struct CliGraphResolver {
pub struct CliResolver {
node_resolver: Option<Arc<CliNodeResolver>>,
npm_resolver: Option<Arc<dyn CliNpmResolver>>,
sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
workspace_resolver: Arc<WorkspaceResolver>,
maybe_default_jsx_import_source: Option<String>,
maybe_default_jsx_import_source_types: Option<String>,
maybe_jsx_import_source_module: Option<String>,
maybe_vendor_specifier: Option<ModuleSpecifier>,
found_package_json_dep_flag: AtomicFlag,
bare_node_builtins_enabled: bool,
warned_pkgs: DashSet<PackageReq>,
}
pub struct CliGraphResolverOptions<'a> {
pub struct CliResolverOptions<'a> {
pub node_resolver: Option<Arc<CliNodeResolver>>,
pub npm_resolver: Option<Arc<dyn CliNpmResolver>>,
pub sloppy_imports_resolver: Option<Arc<CliSloppyImportsResolver>>,
pub workspace_resolver: Arc<WorkspaceResolver>,
pub bare_node_builtins_enabled: bool,
pub maybe_jsx_import_source_config: Option<JsxImportSourceConfig>,
pub maybe_vendor_dir: Option<&'a PathBuf>,
}
impl CliGraphResolver {
pub fn new(options: CliGraphResolverOptions) -> Self {
impl CliResolver {
pub fn new(options: CliResolverOptions) -> Self {
Self {
node_resolver: options.node_resolver,
npm_resolver: options.npm_resolver,
sloppy_imports_resolver: options.sloppy_imports_resolver,
workspace_resolver: options.workspace_resolver,
maybe_default_jsx_import_source: options
.maybe_jsx_import_source_config
.as_ref()
.and_then(|c| c.default_specifier.clone()),
maybe_default_jsx_import_source_types: options
.maybe_jsx_import_source_config
.as_ref()
.and_then(|c| c.default_types_specifier.clone()),
maybe_jsx_import_source_module: options
.maybe_jsx_import_source_config
.map(|c| c.module),
maybe_vendor_specifier: options
.maybe_vendor_dir
.and_then(|v| ModuleSpecifier::from_directory_path(v).ok()),
@ -638,10 +708,6 @@ impl CliGraphResolver {
}
}
pub fn as_graph_resolver(&self) -> &dyn Resolver {
self
}
pub fn create_graph_npm_resolver(&self) -> WorkerCliNpmGraphResolver {
WorkerCliNpmGraphResolver {
npm_resolver: self.npm_resolver.as_ref(),
@ -649,28 +715,12 @@ impl CliGraphResolver {
bare_node_builtins_enabled: self.bare_node_builtins_enabled,
}
}
}
impl Resolver for CliGraphResolver {
fn default_jsx_import_source(&self) -> Option<String> {
self.maybe_default_jsx_import_source.clone()
}
fn default_jsx_import_source_types(&self) -> Option<String> {
self.maybe_default_jsx_import_source_types.clone()
}
fn jsx_import_source_module(&self) -> &str {
self
.maybe_jsx_import_source_module
.as_deref()
.unwrap_or(DEFAULT_JSX_IMPORT_SOURCE_MODULE)
}
fn resolve(
pub fn resolve(
&self,
raw_specifier: &str,
referrer_range: &deno_graph::Range,
referrer_kind: NodeModuleKind,
mode: ResolutionMode,
) -> Result<ModuleSpecifier, ResolveError> {
fn to_node_mode(mode: ResolutionMode) -> NodeResolutionMode {
@ -686,7 +736,7 @@ impl Resolver for CliGraphResolver {
if let Some(node_resolver) = self.node_resolver.as_ref() {
if referrer.scheme() == "file" && node_resolver.in_npm_package(referrer) {
return node_resolver
.resolve(raw_specifier, referrer, to_node_mode(mode))
.resolve(raw_specifier, referrer, referrer_kind, to_node_mode(mode))
.map(|res| res.into_url())
.map_err(|e| ResolveError::Other(e.into()));
}
@ -759,6 +809,7 @@ impl Resolver for CliGraphResolver {
pkg_json.dir_path(),
sub_path.as_deref(),
Some(referrer),
referrer_kind,
to_node_mode(mode),
)
.map_err(|e| ResolveError::Other(e.into())),
@ -800,6 +851,7 @@ impl Resolver for CliGraphResolver {
pkg_folder,
sub_path.as_deref(),
Some(referrer),
referrer_kind,
to_node_mode(mode),
)
.map_err(|e| ResolveError::Other(e.into()))
@ -847,6 +899,7 @@ impl Resolver for CliGraphResolver {
pkg_folder,
npm_req_ref.sub_path(),
Some(referrer),
referrer_kind,
to_node_mode(mode),
)
.map_err(|e| ResolveError::Other(e.into()));
@ -855,7 +908,12 @@ impl Resolver for CliGraphResolver {
// do npm resolution for byonm
if is_byonm {
return node_resolver
.resolve_req_reference(&npm_req_ref, referrer, to_node_mode(mode))
.resolve_req_reference(
&npm_req_ref,
referrer,
referrer_kind,
to_node_mode(mode),
)
.map_err(|err| err.into());
}
}
@ -869,7 +927,12 @@ impl Resolver for CliGraphResolver {
// If byonm, check if the bare specifier resolves to an npm package
if is_byonm && referrer.scheme() == "file" {
let maybe_resolution = node_resolver
.resolve_if_for_npm_pkg(raw_specifier, referrer, to_node_mode(mode))
.resolve_if_for_npm_pkg(
raw_specifier,
referrer,
referrer_kind,
to_node_mode(mode),
)
.map_err(ResolveError::Other)?;
if let Some(res) = maybe_resolution {
match res {

View file

@ -528,7 +528,6 @@
"bare-node-builtins",
"byonm",
"cron",
"detect-cjs",
"ffi",
"fs",
"fmt-component",

View file

@ -47,6 +47,7 @@ use deno_runtime::deno_fs::FileSystem;
use deno_runtime::deno_fs::RealFs;
use deno_runtime::deno_io::fs::FsError;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::ops::otel::OtelConfig;
use deno_semver::npm::NpmVersionReqParseError;
use deno_semver::package::PackageReq;
use deno_semver::Version;
@ -185,6 +186,7 @@ pub struct Metadata {
pub entrypoint_key: String,
pub node_modules: Option<NodeModules>,
pub unstable_config: UnstableConfig,
pub otel_config: Option<OtelConfig>, // None means disabled.
}
fn write_binary_bytes(
@ -718,10 +720,10 @@ impl<'a> DenoCompileBinaryWriter<'a> {
unstable_config: UnstableConfig {
legacy_flag_enabled: false,
bare_node_builtins: cli_options.unstable_bare_node_builtins(),
detect_cjs: cli_options.unstable_detect_cjs(),
sloppy_imports: cli_options.unstable_sloppy_imports(),
features: cli_options.unstable_features(),
},
otel_config: cli_options.otel_config(),
};
write_binary_bytes(

View file

@ -45,6 +45,8 @@ use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference;
use import_map::parse_from_json;
use node_resolver::analyze::NodeCodeTranslator;
use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolutionMode;
use serialization::DenoCompileModuleSource;
use std::borrow::Cow;
@ -76,9 +78,9 @@ use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CreateInNpmPkgCheckerOptions;
use crate::resolver::CjsTracker;
use crate::resolver::CjsTrackerOptions;
use crate::resolver::CliDenoResolverFs;
use crate::resolver::CliNodeResolver;
use crate::resolver::IsCjsResolverOptions;
use crate::resolver::NpmModuleLoader;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
@ -146,13 +148,27 @@ impl ModuleLoader for EmbeddedModuleLoader {
type_error(format!("Referrer uses invalid specifier: {}", err))
})?
};
let referrer_kind = if self
.shared
.cjs_tracker
.is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer))?
{
NodeModuleKind::Cjs
} else {
NodeModuleKind::Esm
};
if self.shared.node_resolver.in_npm_package(&referrer) {
return Ok(
self
.shared
.node_resolver
.resolve(raw_specifier, &referrer, NodeResolutionMode::Execution)?
.resolve(
raw_specifier,
&referrer,
referrer_kind,
NodeResolutionMode::Execution,
)?
.into_url(),
);
}
@ -178,6 +194,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
pkg_json.dir_path(),
sub_path.as_deref(),
Some(&referrer),
referrer_kind,
NodeResolutionMode::Execution,
)?,
),
@ -192,6 +209,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
req,
sub_path.as_deref(),
&referrer,
referrer_kind,
NodeResolutionMode::Execution,
)
}
@ -211,6 +229,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
pkg_folder,
sub_path.as_deref(),
Some(&referrer),
referrer_kind,
NodeResolutionMode::Execution,
)?,
)
@ -224,6 +243,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
return self.shared.node_resolver.resolve_req_reference(
&reference,
&referrer,
referrer_kind,
NodeResolutionMode::Execution,
);
}
@ -250,6 +270,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
let maybe_res = self.shared.node_resolver.resolve_if_for_npm_pkg(
raw_specifier,
&referrer,
referrer_kind,
NodeResolutionMode::Execution,
)?;
if let Some(res) = maybe_res {
@ -429,6 +450,14 @@ impl NodeRequireLoader for EmbeddedModuleLoader {
) -> Result<String, AnyError> {
Ok(self.shared.fs.read_text_file_lossy_sync(path, None)?)
}
fn is_maybe_cjs(
&self,
specifier: &ModuleSpecifier,
) -> Result<bool, ClosestPkgJsonError> {
let media_type = MediaType::from_specifier(specifier);
self.shared.cjs_tracker.is_maybe_cjs(specifier, media_type)
}
}
struct StandaloneModuleLoaderFactory {
@ -628,14 +657,14 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
let cjs_tracker = Arc::new(CjsTracker::new(
in_npm_pkg_checker.clone(),
pkg_json_resolver.clone(),
CjsTrackerOptions {
unstable_detect_cjs: metadata.unstable_config.detect_cjs,
IsCjsResolverOptions {
detect_cjs: !metadata.workspace_resolver.package_jsons.is_empty(),
is_node_main: false,
},
));
let cache_db = Caches::new(deno_dir_provider.clone());
let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db());
let cli_node_resolver = Arc::new(CliNodeResolver::new(
cjs_tracker.clone(),
fs.clone(),
in_npm_pkg_checker.clone(),
node_resolver.clone(),
@ -646,7 +675,6 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
cjs_tracker.clone(),
fs.clone(),
None,
false,
);
let node_code_translator = Arc::new(NodeCodeTranslator::new(
cjs_esm_code_analyzer,
@ -800,6 +828,7 @@ pub async fn run(data: StandaloneData) -> Result<i32, AnyError> {
serve_port: None,
serve_host: None,
},
metadata.otel_config,
);
// Initialize v8 once from the main thread.

View file

@ -350,6 +350,7 @@ impl<'a> VfsEntryRef<'a> {
atime: None,
birthtime: None,
mtime: None,
ctime: None,
blksize: 0,
size: 0,
dev: 0,
@ -372,6 +373,7 @@ impl<'a> VfsEntryRef<'a> {
atime: None,
birthtime: None,
mtime: None,
ctime: None,
blksize: 0,
size: file.len,
dev: 0,
@ -394,6 +396,7 @@ impl<'a> VfsEntryRef<'a> {
atime: None,
birthtime: None,
mtime: None,
ctime: None,
blksize: 0,
size: 0,
dev: 0,

View file

@ -480,7 +480,7 @@ fn filter_coverages(
.filter(|e| {
let is_internal = e.url.starts_with("ext:")
|| e.url.ends_with("__anonymous__")
|| e.url.ends_with("$deno$test.js")
|| e.url.ends_with("$deno$test.mjs")
|| e.url.ends_with(".snap")
|| is_supported_test_path(Path::new(e.url.as_str()))
|| doc_test_re.is_match(e.url.as_str())

View file

@ -790,28 +790,26 @@ fn format_ensure_stable(
return Ok(Some(current_text));
}
Err(err) => {
panic!(
bail!(
concat!(
"Formatting succeeded initially, but failed when ensuring a ",
"stable format. This indicates a bug in the formatter where ",
"the text it produces is not syntactically correct. As a temporary ",
"workaround you can ignore this file ({}).\n\n{:#}"
"workaround you can ignore this file.\n\n{:#}"
),
file_path.display(),
err,
)
}
}
count += 1;
if count == 5 {
panic!(
bail!(
concat!(
"Formatting not stable. Bailed after {} tries. This indicates a bug ",
"in the formatter where it formats the file ({}) differently each time. As a ",
"in the formatter where it formats the file differently each time. As a ",
"temporary workaround you can ignore this file."
),
count,
file_path.display(),
)
}
}
@ -1215,6 +1213,8 @@ fn is_supported_ext_fmt(path: &Path) -> bool {
#[cfg(test)]
mod test {
use test_util::assert_starts_with;
use super::*;
#[test]
@ -1270,12 +1270,16 @@ mod test {
}
#[test]
#[should_panic(expected = "Formatting not stable. Bailed after 5 tries.")]
fn test_format_ensure_stable_unstable_format() {
let err =
format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
Ok(Some(format!("1{file_text}")))
})
.unwrap();
.unwrap_err();
assert_starts_with!(
err.to_string(),
"Formatting not stable. Bailed after 5 tries."
);
}
#[test]
@ -1289,8 +1293,8 @@ mod test {
}
#[test]
#[should_panic(expected = "Formatting succeeded initially, but failed when")]
fn test_format_ensure_stable_error_second() {
let err =
format_ensure_stable(&PathBuf::from("mod.ts"), "1", |_, file_text| {
if file_text == "1" {
Ok(Some("11".to_string()))
@ -1298,7 +1302,11 @@ mod test {
bail!("Error formatting.")
}
})
.unwrap();
.unwrap_err();
assert_starts_with!(
err.to_string(),
"Formatting succeeded initially, but failed when"
);
}
#[test]

View file

@ -1396,6 +1396,7 @@ mod tests {
.env_clear()
// use the deno binary in the target directory
.env("PATH", test_util::target_dir())
.env("RUST_BACKTRACE", "1")
.spawn()
.unwrap()
.wait()

View file

@ -61,7 +61,7 @@ pub async fn kernel(
let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?;
let main_module =
resolve_url_or_path("./$deno$jupyter.ts", cli_options.initial_cwd())
resolve_url_or_path("./$deno$jupyter.mts", cli_options.initial_cwd())
.unwrap();
// TODO(bartlomieju): should we run with all permissions?
let permissions =

View file

@ -63,7 +63,7 @@ pub use rules::LintRuleProvider;
const JSON_SCHEMA_VERSION: u8 = 1;
static STDIN_FILE_NAME: &str = "$deno$stdin.ts";
static STDIN_FILE_NAME: &str = "$deno$stdin.mts";
pub async fn lint(
flags: Arc<Flags>,

View file

@ -87,6 +87,7 @@ impl LintRule for NoSloppyImportsRule {
captures: Default::default(),
};
// fill this and capture the sloppy imports in the resolver
deno_graph::parse_module_from_ast(deno_graph::ParseModuleFromAstOptions {
graph_kind: deno_graph::GraphKind::All,
specifier: context.specifier().clone(),

View file

@ -7,7 +7,7 @@ use crate::cdp;
use crate::colors;
use crate::lsp::ReplLanguageServer;
use crate::npm::CliNpmResolver;
use crate::resolver::CliGraphResolver;
use crate::resolver::CliResolver;
use crate::tools::test::report_tests;
use crate::tools::test::reporters::PrettyTestReporter;
use crate::tools::test::reporters::TestReporter;
@ -44,12 +44,12 @@ use deno_core::url::Url;
use deno_core::LocalInspectorSession;
use deno_core::PollEventLoopOptions;
use deno_graph::source::ResolutionMode;
use deno_graph::source::Resolver;
use deno_graph::Position;
use deno_graph::PositionRange;
use deno_graph::SpecifierWithRange;
use deno_runtime::worker::MainWorker;
use deno_semver::npm::NpmPackageReqReference;
use node_resolver::NodeModuleKind;
use once_cell::sync::Lazy;
use regex::Match;
use regex::Regex;
@ -180,7 +180,7 @@ struct ReplJsxState {
pub struct ReplSession {
npm_resolver: Arc<dyn CliNpmResolver>,
resolver: Arc<CliGraphResolver>,
resolver: Arc<CliResolver>,
pub worker: MainWorker,
session: LocalInspectorSession,
pub context_id: u64,
@ -199,7 +199,7 @@ impl ReplSession {
pub async fn initialize(
cli_options: &CliOptions,
npm_resolver: Arc<dyn CliNpmResolver>,
resolver: Arc<CliGraphResolver>,
resolver: Arc<CliResolver>,
mut worker: MainWorker,
main_module: ModuleSpecifier,
test_event_receiver: TestEventReceiver,
@ -245,7 +245,7 @@ impl ReplSession {
assert_ne!(context_id, 0);
let referrer =
deno_core::resolve_path("./$deno$repl.ts", cli_options.initial_cwd())
deno_core::resolve_path("./$deno$repl.mts", cli_options.initial_cwd())
.unwrap();
let cwd_url =
@ -712,7 +712,12 @@ impl ReplSession {
.flat_map(|i| {
self
.resolver
.resolve(i, &referrer_range, ResolutionMode::Execution)
.resolve(
i,
&referrer_range,
NodeModuleKind::Esm,
ResolutionMode::Execution,
)
.ok()
.or_else(|| ModuleSpecifier::parse(i).ok())
})

View file

@ -4,8 +4,6 @@ use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::futures::StreamExt;
@ -18,7 +16,6 @@ use tokio::select;
use crate::cdp;
use crate::emit::Emitter;
use crate::resolver::CjsTracker;
use crate::util::file_watcher::WatcherCommunicator;
use crate::util::file_watcher::WatcherRestartMode;
@ -63,7 +60,6 @@ pub struct HmrRunner {
session: LocalInspectorSession,
watcher_communicator: Arc<WatcherCommunicator>,
script_ids: HashMap<String, String>,
cjs_tracker: Arc<CjsTracker>,
emitter: Arc<Emitter>,
}
@ -146,7 +142,6 @@ impl crate::worker::HmrRunner for HmrRunner {
let source_code = self.emitter.load_and_emit_for_hmr(
&module_url,
ModuleKind::from_is_cjs(self.cjs_tracker.is_maybe_cjs(&module_url, MediaType::from_specifier(&module_url))?),
).await?;
let mut tries = 1;
@ -179,14 +174,12 @@ impl crate::worker::HmrRunner for HmrRunner {
impl HmrRunner {
pub fn new(
cjs_tracker: Arc<CjsTracker>,
emitter: Arc<Emitter>,
session: LocalInspectorSession,
watcher_communicator: Arc<WatcherCommunicator>,
) -> Self {
Self {
session,
cjs_tracker,
emitter,
watcher_communicator,
script_ids: HashMap::new(),

View file

@ -121,8 +121,8 @@ delete Object.prototype.__proto__;
/** @type {Map<string, ts.SourceFile>} */
const sourceFileCache = new Map();
/** @type {Map<string, string>} */
const sourceTextCache = new Map();
/** @type {Map<string, ts.IScriptSnapshot & { isCjs?: boolean; }>} */
const scriptSnapshotCache = new Map();
/** @type {Map<string, number>} */
const sourceRefCounts = new Map();
@ -133,9 +133,6 @@ delete Object.prototype.__proto__;
/** @type {Map<string, boolean>} */
const isNodeSourceFileCache = new Map();
/** @type {Map<string, boolean>} */
const isCjsCache = new Map();
// Maps asset specifiers to the first scope that the asset was loaded into.
/** @type {Map<string, string | null>} */
const assetScopes = new Map();
@ -210,12 +207,13 @@ delete Object.prototype.__proto__;
const mapKey = path + key;
let sourceFile = documentRegistrySourceFileCache.get(mapKey);
if (!sourceFile || sourceFile.version !== version) {
const isCjs = /** @type {any} */ (scriptSnapshot).isCjs;
sourceFile = ts.createLanguageServiceSourceFile(
fileName,
scriptSnapshot,
{
...getCreateSourceFileOptions(sourceFileOptions),
impliedNodeFormat: (isCjsCache.get(fileName) ?? false)
impliedNodeFormat: isCjs
? ts.ModuleKind.CommonJS
: ts.ModuleKind.ESNext,
// in the lsp we want to be able to show documentation
@ -320,7 +318,7 @@ delete Object.prototype.__proto__;
if (lastRequestMethod != "cleanupSemanticCache") {
const mapKey = path + key;
documentRegistrySourceFileCache.delete(mapKey);
sourceTextCache.delete(path);
scriptSnapshotCache.delete(path);
ops.op_release(path);
}
} else {
@ -624,8 +622,6 @@ delete Object.prototype.__proto__;
`"data" is unexpectedly null for "${specifier}".`,
);
isCjsCache.set(specifier, isCjs);
sourceFile = ts.createSourceFile(
specifier,
data,
@ -699,7 +695,7 @@ delete Object.prototype.__proto__;
/** @type {[string, ts.Extension] | undefined} */
const resolved = ops.op_resolve(
containingFilePath,
isCjsCache.get(containingFilePath) ?? false,
containingFileMode === ts.ModuleKind.CommonJS,
[fileReference.fileName],
)?.[0];
if (resolved) {
@ -723,7 +719,14 @@ delete Object.prototype.__proto__;
}
});
},
resolveModuleNames(specifiers, base) {
resolveModuleNames(
specifiers,
base,
_reusedNames,
_redirectedReference,
_options,
containingSourceFile,
) {
if (logDebug) {
debug(`host.resolveModuleNames()`);
debug(` base: ${base}`);
@ -732,7 +735,7 @@ delete Object.prototype.__proto__;
/** @type {Array<[string, ts.Extension] | undefined>} */
const resolved = ops.op_resolve(
base,
isCjsCache.get(base) ?? false,
containingSourceFile?.impliedNodeFormat === ts.ModuleKind.CommonJS,
specifiers,
);
if (resolved) {
@ -814,19 +817,19 @@ delete Object.prototype.__proto__;
return ts.ScriptSnapshot.fromString(sourceFile.text);
}
}
let sourceText = sourceTextCache.get(specifier);
if (sourceText == undefined) {
let scriptSnapshot = scriptSnapshotCache.get(specifier);
if (scriptSnapshot == undefined) {
/** @type {{ data: string, version: string, isCjs: boolean }} */
const fileInfo = ops.op_load(specifier);
if (!fileInfo) {
return undefined;
}
isCjsCache.set(specifier, fileInfo.isCjs);
sourceTextCache.set(specifier, fileInfo.data);
scriptSnapshot = ts.ScriptSnapshot.fromString(fileInfo.data);
scriptSnapshot.isCjs = fileInfo.isCjs;
scriptSnapshotCache.set(specifier, scriptSnapshot);
scriptVersionCache.set(specifier, fileInfo.version);
sourceText = fileInfo.data;
}
return ts.ScriptSnapshot.fromString(sourceText);
return scriptSnapshot;
},
};
@ -1238,7 +1241,7 @@ delete Object.prototype.__proto__;
closed = true;
}
scriptVersionCache.delete(script);
sourceTextCache.delete(script);
scriptSnapshotCache.delete(script);
}
if (newConfigsByScope || opened || closed) {

View file

@ -2971,6 +2971,10 @@ declare namespace Deno {
* field from `stat` on Mac/BSD and `ftCreationTime` on Windows. This may
* not be available on all platforms. */
birthtime: Date | null;
/** The last change time of the file. This corresponds to the `ctime`
* field from `stat` on Mac/BSD and `ChangeTime` on Windows. This may
* not be available on all platforms. */
ctime: Date | null;
/** ID of the device containing the file. */
dev: number;
/** Inode number.
@ -2979,8 +2983,7 @@ declare namespace Deno {
ino: number | null;
/** The underlying raw `st_mode` bits that contain the standard Unix
* permissions for this file/directory.
*
* _Linux/Mac OS only._ */
*/
mode: number | null;
/** Number of hard links pointing to this file.
*

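The new ctime field on Deno.FileInfo is read straight off Deno.stat; a minimal sketch (the path is a placeholder, and ctime may be null on platforms that do not report a change time):

const info = await Deno.stat("./deno.json");
console.log("size:", info.size);
console.log("last metadata change:", info.ctime?.toISOString() ?? "unavailable");
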
View file

@ -1225,6 +1225,108 @@ declare namespace Deno {
export {}; // only export exports
}
/**
* @category Telemetry
* @experimental
*/
export namespace tracing {
/**
* Whether tracing is enabled.
* @category Telemetry
* @experimental
*/
export const enabled: boolean;
/**
* Allowed attribute type.
* @category Telemetry
* @experimental
*/
export type AttributeValue = string | number | boolean | bigint;
/**
* A tracing span.
* @category Telemetry
* @experimental
*/
export class Span implements Disposable {
readonly traceId: string;
readonly spanId: string;
readonly parentSpanId: string;
readonly kind: string;
readonly name: string;
readonly startTime: number;
readonly endTime: number;
readonly status: null | { code: 1 } | { code: 2; message: string };
readonly attributes: Record<string, AttributeValue>;
readonly traceFlags: number;
/**
* Construct a new Span and enter it as the "current" span.
*/
constructor(
name: string,
kind?: "internal" | "server" | "client" | "producer" | "consumer",
);
/**
* Set an attribute on this span.
*/
setAttribute(
name: string,
value: AttributeValue,
): void;
/**
* Enter this span as the "current" span.
*/
enter(): void;
/**
* Exit this span as the "current" span and restore the previous one.
*/
exit(): void;
/**
* End this span, and exit it as the "current" span.
*/
end(): void;
[Symbol.dispose](): void;
/**
* Get the "current" span, if one exists.
*/
static current(): Span | undefined | null;
}
/**
* A SpanExporter compatible with OpenTelemetry.js
* https://open-telemetry.github.io/opentelemetry-js/interfaces/_opentelemetry_sdk_trace_base.SpanExporter.html
* @category Telemetry
* @experimental
*/
export class SpanExporter {}
/**
* A ContextManager compatible with OpenTelemetry.js
* https://open-telemetry.github.io/opentelemetry-js/interfaces/_opentelemetry_api.ContextManager.html
* @category Telemetry
* @experimental
*/
export class ContextManager {}
export {}; // only export exports
}
/**
* @category Telemetry
* @experimental
*/
export namespace metrics {
export {}; // only export exports
}
export {}; // only export exports
}
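
A short usage sketch for the experimental tracing surface declared above, assuming the runtime was started with tracing enabled (the flag or environment wiring is not shown in this diff); it only uses members present in the declaration: the constructor, setAttribute, Symbol.dispose, and Span.current():

function handleRequest(id: number) {
  // `using` ends the span and exits it as the "current" span when the block exits.
  using span = new Deno.tracing.Span("handle-request", "server");
  span.setAttribute("request.id", id);
  // ...work done here sees this span as the current span...
  console.log("current span:", Deno.tracing.Span.current()?.name);
}

if (Deno.tracing.enabled) {
  handleRequest(42);
}
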

View file

@ -343,11 +343,6 @@ impl TypeCheckingCjsTracker {
media_type: MediaType,
code: &Arc<str>,
) -> bool {
if let Some(module_kind) =
self.cjs_tracker.get_known_kind(specifier, media_type)
{
module_kind.is_cjs()
} else {
let maybe_is_script = self
.module_info_cache
.as_module_analyzer()
@ -368,6 +363,16 @@ impl TypeCheckingCjsTracker {
.unwrap_or(false)
})
}
pub fn is_cjs_with_known_is_script(
&self,
specifier: &ModuleSpecifier,
media_type: MediaType,
is_script: bool,
) -> Result<bool, node_resolver::errors::ClosestPkgJsonError> {
self
.cjs_tracker
.is_cjs_with_known_is_script(specifier, media_type, is_script)
}
}
@ -627,8 +632,12 @@ fn op_load_inner(
match module {
Module::Js(module) => {
media_type = module.media_type;
if matches!(media_type, MediaType::Cjs | MediaType::Cts) {
is_cjs = true;
if let Some(npm_state) = &state.maybe_npm {
is_cjs = npm_state.cjs_tracker.is_cjs_with_known_is_script(
specifier,
module.media_type,
module.is_script,
)?;
}
let source = module
.fast_check_module()
@ -737,6 +746,7 @@ fn op_resolve_inner(
"Error converting a string module specifier for \"op_resolve\".",
)?
};
let referrer_module = state.graph.get(&referrer);
for specifier in args.specifiers {
if specifier.starts_with("node:") {
resolved.push((
@ -752,16 +762,19 @@ fn op_resolve_inner(
continue;
}
let graph = &state.graph;
let resolved_dep = graph
.get(&referrer)
let resolved_dep = referrer_module
.and_then(|m| m.js())
.and_then(|m| m.dependencies_prefer_fast_check().get(&specifier))
.and_then(|d| d.maybe_type.ok().or_else(|| d.maybe_code.ok()));
let maybe_result = match resolved_dep {
Some(ResolutionResolved { specifier, .. }) => {
resolve_graph_specifier_types(specifier, &referrer, state)?
resolve_graph_specifier_types(
specifier,
&referrer,
referrer_kind,
state,
)?
}
_ => {
match resolve_non_graph_specifier_types(
@ -834,6 +847,7 @@ fn op_resolve_inner(
fn resolve_graph_specifier_types(
specifier: &ModuleSpecifier,
referrer: &ModuleSpecifier,
referrer_kind: NodeModuleKind,
state: &State,
) -> Result<Option<(ModuleSpecifier, MediaType)>, AnyError> {
let graph = &state.graph;
@ -886,6 +900,7 @@ fn resolve_graph_specifier_types(
&package_folder,
module.nv_reference.sub_path(),
Some(referrer),
referrer_kind,
NodeResolutionMode::Types,
);
let maybe_url = match res_result {
@ -965,6 +980,7 @@ fn resolve_non_graph_specifier_types(
&package_folder,
npm_req_ref.sub_path(),
Some(referrer),
referrer_kind,
NodeResolutionMode::Types,
);
let maybe_url = match res_result {

View file

@ -586,7 +586,10 @@ fn generate_pseudo_file(
wrap_kind,
}));
let source = deno_ast::swc::codegen::to_code(&transformed);
let source = deno_ast::swc::codegen::to_code_with_comments(
Some(&parsed.comments().as_single_threaded()),
&transformed,
);
log::debug!("{}:\n{}", file.specifier, source);
@ -1165,6 +1168,33 @@ Deno.test("file:///main.ts$3-6.ts", async ()=>{
media_type: MediaType::TypeScript,
}],
},
// https://github.com/denoland/deno/issues/26728
Test {
input: Input {
source: r#"
/**
* ```ts
* // @ts-expect-error: can only add numbers
* add('1', '2');
* ```
*/
export function add(first: number, second: number) {
return first + second;
}
"#,
specifier: "file:///main.ts",
},
expected: vec![Expected {
source: r#"import { add } from "file:///main.ts";
Deno.test("file:///main.ts$3-7.ts", async ()=>{
// @ts-expect-error: can only add numbers
add('1', '2');
});
"#,
specifier: "file:///main.ts$3-7.ts",
media_type: MediaType::TypeScript,
}],
},
];
for test in tests {
@ -1376,6 +1406,31 @@ console.log(Foo);
media_type: MediaType::TypeScript,
}],
},
// https://github.com/denoland/deno/issues/26728
Test {
input: Input {
source: r#"
/**
* ```ts
* // @ts-expect-error: can only add numbers
* add('1', '2');
* ```
*/
export function add(first: number, second: number) {
return first + second;
}
"#,
specifier: "file:///main.ts",
},
expected: vec![Expected {
source: r#"import { add } from "file:///main.ts";
// @ts-expect-error: can only add numbers
add('1', '2');
"#,
specifier: "file:///main.ts$3-7.ts",
media_type: MediaType::TypeScript,
}],
},
];
for test in tests {

View file

@ -30,6 +30,7 @@ use deno_runtime::deno_tls::RootCertStoreProvider;
use deno_runtime::deno_web::BlobStore;
use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::ops::otel::OtelConfig;
use deno_runtime::ops::process::NpmProcessStateProviderRc;
use deno_runtime::ops::worker_host::CreateWebWorkerCb;
use deno_runtime::web_worker::WebWorker;
@ -43,6 +44,7 @@ use deno_runtime::WorkerExecutionMode;
use deno_runtime::WorkerLogLevel;
use deno_semver::npm::NpmPackageReqReference;
use deno_terminal::colors;
use node_resolver::NodeModuleKind;
use node_resolver::NodeResolutionMode;
use tokio::select;
@ -142,6 +144,7 @@ struct SharedWorkerState {
storage_key_resolver: StorageKeyResolver,
options: CliMainWorkerOptions,
subcommand: DenoSubcommand,
otel_config: Option<OtelConfig>, // `None` means OpenTelemetry is disabled.
}
impl SharedWorkerState {
@ -405,6 +408,7 @@ impl CliMainWorkerFactory {
storage_key_resolver: StorageKeyResolver,
subcommand: DenoSubcommand,
options: CliMainWorkerOptions,
otel_config: Option<OtelConfig>,
) -> Self {
Self {
shared: Arc::new(SharedWorkerState {
@ -427,6 +431,7 @@ impl CliMainWorkerFactory {
storage_key_resolver,
options,
subcommand,
otel_config,
}),
}
}
@ -576,6 +581,7 @@ impl CliMainWorkerFactory {
mode,
serve_port: shared.options.serve_port,
serve_host: shared.options.serve_host.clone(),
otel_config: shared.otel_config.clone(),
},
extensions: custom_extensions,
startup_snapshot: crate::js::deno_isolate_init(),
@ -675,6 +681,7 @@ impl CliMainWorkerFactory {
package_folder,
sub_path,
/* referrer */ None,
NodeModuleKind::Esm,
NodeResolutionMode::Execution,
)?;
if specifier
@ -775,6 +782,7 @@ fn create_web_worker_callback(
mode: WorkerExecutionMode::Worker,
serve_port: shared.options.serve_port,
serve_host: shared.options.serve_host.clone(),
otel_config: shared.otel_config.clone(),
},
extensions: vec![],
startup_snapshot: crate::js::deno_isolate_init(),

View file

@ -346,9 +346,10 @@ const { 0: statStruct, 1: statBuf } = createByteStruct({
mtime: "date",
atime: "date",
birthtime: "date",
ctime: "date",
dev: "u64",
ino: "?u64",
mode: "?u64",
mode: "u64",
nlink: "?u64",
uid: "?u64",
gid: "?u64",
@ -377,9 +378,10 @@ function parseFileInfo(response) {
birthtime: response.birthtimeSet === true
? new Date(response.birthtime)
: null,
ctime: response.ctimeSet === true ? new Date(response.ctime) : null,
dev: response.dev,
mode: response.mode,
ino: unix ? response.ino : null,
mode: unix ? response.mode : null,
nlink: unix ? response.nlink : null,
uid: unix ? response.uid : null,
gid: unix ? response.gid : null,

View file

@ -229,6 +229,7 @@ impl FileSystem for InMemoryFs {
mtime: None,
atime: None,
birthtime: None,
ctime: None,
dev: 0,
ino: 0,
mode: 0,
@ -251,6 +252,7 @@ impl FileSystem for InMemoryFs {
mtime: None,
atime: None,
birthtime: None,
ctime: None,
dev: 0,
ino: 0,
mode: 0,

View file

@ -1795,6 +1795,8 @@ create_struct_writer! {
atime: u64,
birthtime_set: bool,
birthtime: u64,
ctime_set: bool,
ctime: u64,
// Following are only valid under Unix.
dev: u64,
ino: u64,
@ -1826,6 +1828,8 @@ impl From<FsStat> for SerializableStat {
atime: stat.atime.unwrap_or(0),
birthtime_set: stat.birthtime.is_some(),
birthtime: stat.birthtime.unwrap_or(0),
ctime_set: stat.ctime.is_some(),
ctime: stat.ctime.unwrap_or(0),
dev: stat.dev,
ino: stat.ino,

View file

@ -821,24 +821,46 @@ fn stat_extra(
Ok(info.dwVolumeSerialNumber as u64)
}
const WINDOWS_TICK: i64 = 10_000; // 100-nanosecond intervals in a millisecond
const SEC_TO_UNIX_EPOCH: i64 = 11_644_473_600; // Seconds between Windows epoch and Unix epoch
fn windows_time_to_unix_time_msec(windows_time: &i64) -> i64 {
let milliseconds_since_windows_epoch = windows_time / WINDOWS_TICK;
milliseconds_since_windows_epoch - SEC_TO_UNIX_EPOCH * 1000
}
use windows_sys::Wdk::Storage::FileSystem::FILE_ALL_INFORMATION;
use windows_sys::Win32::Foundation::NTSTATUS;
unsafe fn query_file_information(
handle: winapi::shared::ntdef::HANDLE,
) -> std::io::Result<FILE_ALL_INFORMATION> {
) -> Result<FILE_ALL_INFORMATION, NTSTATUS> {
use windows_sys::Wdk::Storage::FileSystem::NtQueryInformationFile;
use windows_sys::Win32::Foundation::RtlNtStatusToDosError;
use windows_sys::Win32::Foundation::ERROR_MORE_DATA;
use windows_sys::Win32::System::IO::IO_STATUS_BLOCK;
let mut info = std::mem::MaybeUninit::<FILE_ALL_INFORMATION>::zeroed();
let mut io_status_block =
std::mem::MaybeUninit::<IO_STATUS_BLOCK>::zeroed();
let status = NtQueryInformationFile(
handle as _,
std::ptr::null_mut(),
io_status_block.as_mut_ptr(),
info.as_mut_ptr() as *mut _,
std::mem::size_of::<FILE_ALL_INFORMATION>() as _,
18, /* FileAllInformation */
);
if status < 0 {
return Err(std::io::Error::last_os_error());
let converted_status = RtlNtStatusToDosError(status);
      // If ERROR_MORE_DATA is returned, the buffer was too small to hold the full
      // file name information; retrieving that would require retrying with a larger
      // buffer. Since we only use BasicInformation and StandardInformation, it is
      // fine to ignore this error, as those parts of the struct are populated anyway.
// https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/nf-ntifs-ntqueryinformationfile#remarksdd
if converted_status != ERROR_MORE_DATA {
return Err(converted_status as NTSTATUS);
}
}
Ok(info.assume_init())
@ -862,10 +884,13 @@ fn stat_extra(
}
let result = get_dev(file_handle);
CloseHandle(file_handle);
fsstat.dev = result?;
if let Ok(file_info) = query_file_information(file_handle) {
fsstat.ctime = Some(windows_time_to_unix_time_msec(
&file_info.BasicInformation.ChangeTime,
) as u64);
if file_info.BasicInformation.FileAttributes
& winapi::um::winnt::FILE_ATTRIBUTE_REPARSE_POINT
!= 0
@ -898,6 +923,7 @@ fn stat_extra(
}
}
CloseHandle(file_handle);
Ok(())
}
}

View file

@ -42,6 +42,10 @@ const {
Uint8Array,
Promise,
} = primordials;
const {
getAsyncContext,
setAsyncContext,
} = core;
import { InnerBody } from "ext:deno_fetch/22_body.js";
import { Event } from "ext:deno_web/02_event.js";
@ -397,8 +401,10 @@ class CallbackContext {
/** @type {Promise<void> | undefined} */
closing;
listener;
asyncContext;
constructor(signal, args, listener) {
this.asyncContext = getAsyncContext();
// The abort signal triggers a non-graceful shutdown
signal?.addEventListener(
"abort",
@ -508,6 +514,10 @@ function fastSyncResponseOrStream(
*/
function mapToCallback(context, callback, onError) {
return async function (req) {
const asyncContext = getAsyncContext();
setAsyncContext(context.asyncContext);
try {
// Get the response from the user-provided callback. If that fails, use onError. If that fails, return a fallback
// 500 error.
let innerRequest;
@ -584,6 +594,9 @@ function mapToCallback(context, callback, onError) {
}
fastSyncResponseOrStream(req, inner.body, status, innerRequest);
} finally {
setAsyncContext(asyncContext);
}
};
}
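
The practical effect of capturing the async context in CallbackContext and restoring it around each request callback is that context established before Deno.serve() is visible inside handlers. A hedged sketch with node:async_hooks (the port is arbitrary):

import { AsyncLocalStorage } from "node:async_hooks";

const als = new AsyncLocalStorage<string>();

als.run("outer-context", () => {
  Deno.serve({ port: 8000 }, () => {
    // With the change above, the store set around Deno.serve() is restored here.
    return new Response(als.getStore() ?? "no context");
  });
});
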

View file

@ -94,6 +94,7 @@ pub struct FsStat {
pub mtime: Option<u64>,
pub atime: Option<u64>,
pub birthtime: Option<u64>,
pub ctime: Option<u64>,
pub dev: u64,
pub ino: u64,
@ -153,6 +154,16 @@ impl FsStat {
}
}
#[inline(always)]
fn get_ctime(ctime_or_0: i64) -> Option<u64> {
if ctime_or_0 > 0 {
      // ctime is reported in seconds since the epoch, but we need milliseconds
return Some(ctime_or_0 as u64 * 1000);
}
None
}
Self {
is_file: metadata.is_file(),
is_directory: metadata.is_dir(),
@ -162,6 +173,7 @@ impl FsStat {
mtime: to_msec(metadata.modified()),
atime: to_msec(metadata.accessed()),
birthtime: to_msec(metadata.created()),
ctime: get_ctime(unix_or_zero!(ctime)),
dev: unix_or_zero!(dev),
ino: unix_or_zero!(ino),

View file

@ -1,16 +1,17 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
#[derive(Clone, Copy, Debug)]
pub struct KvConfig {
pub(crate) max_write_key_size_bytes: usize,
pub(crate) max_read_key_size_bytes: usize,
pub(crate) max_value_size_bytes: usize,
pub(crate) max_read_ranges: usize,
pub(crate) max_read_entries: usize,
pub(crate) max_checks: usize,
pub(crate) max_mutations: usize,
pub(crate) max_watched_keys: usize,
pub(crate) max_total_mutation_size_bytes: usize,
pub(crate) max_total_key_size_bytes: usize,
pub max_write_key_size_bytes: usize,
pub max_read_key_size_bytes: usize,
pub max_value_size_bytes: usize,
pub max_read_ranges: usize,
pub max_read_entries: usize,
pub max_checks: usize,
pub max_mutations: usize,
pub max_watched_keys: usize,
pub max_total_mutation_size_bytes: usize,
pub max_total_key_size_bytes: usize,
}
impl KvConfig {

View file

@ -14,6 +14,7 @@ use deno_core::url::Url;
#[allow(unused_imports)]
use deno_core::v8;
use deno_core::v8::ExternalReference;
use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::NpmResolverRc;
use once_cell::sync::Lazy;
@ -157,6 +158,10 @@ pub trait NodeRequireLoader {
) -> Result<Cow<'a, Path>, AnyError>;
fn load_text_file_lossy(&self, path: &Path) -> Result<String, AnyError>;
  /// Returns whether the module kind might be CJS, in which case loading
  /// should determine whether it is actually CJS or ESM.
fn is_maybe_cjs(&self, specifier: &Url) -> Result<bool, ClosestPkgJsonError>;
}
pub static NODE_ENV_VAR_ALLOWLIST: Lazy<HashSet<String>> = Lazy::new(|| {
@ -345,6 +350,7 @@ deno_core::extension!(deno_node,
ops::zlib::op_zlib_write,
ops::zlib::op_zlib_init,
ops::zlib::op_zlib_reset,
ops::zlib::op_zlib_crc32,
ops::zlib::brotli::op_brotli_compress,
ops::zlib::brotli::op_brotli_compress_async,
ops::zlib::brotli::op_create_brotli_compress,
@ -384,6 +390,7 @@ deno_core::extension!(deno_node,
ops::require::op_require_proxy_path,
ops::require::op_require_is_deno_dir_package,
ops::require::op_require_resolve_deno_dir,
ops::require::op_require_is_maybe_cjs,
ops::require::op_require_is_request_relative,
ops::require::op_require_resolve_lookup_paths,
ops::require::op_require_try_self_parent_path<P>,
@ -397,7 +404,6 @@ deno_core::extension!(deno_node,
ops::require::op_require_read_file<P>,
ops::require::op_require_as_file_path,
ops::require::op_require_resolve_exports<P>,
ops::require::op_require_read_closest_package_json<P>,
ops::require::op_require_read_package_scope<P>,
ops::require::op_require_package_imports_resolve<P>,
ops::require::op_require_break_on_next_statement,

View file

@ -1,16 +1,18 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_core::error::AnyError;
use deno_core::op2;
use deno_core::url::Url;
use deno_core::v8;
use deno_core::JsRuntimeInspector;
use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_fs::FileSystemRc;
use deno_package_json::NodeModuleKind;
use deno_package_json::PackageJsonRc;
use deno_path_util::normalize_path;
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use node_resolver::NodeModuleKind;
use node_resolver::errors::ClosestPkgJsonError;
use node_resolver::NodeResolutionMode;
use node_resolver::REQUIRE_CONDITIONS;
use std::borrow::Cow;
@ -217,17 +219,17 @@ pub fn op_require_resolve_deno_dir(
state: &mut OpState,
#[string] request: String,
#[string] parent_filename: String,
) -> Option<String> {
) -> Result<Option<String>, AnyError> {
let resolver = state.borrow::<NpmResolverRc>();
Ok(
resolver
.resolve_package_folder_from_package(
&request,
&ModuleSpecifier::from_file_path(&parent_filename).unwrap_or_else(|_| {
panic!("Url::from_file_path: [{:?}]", parent_filename)
}),
&url_from_file_path(&PathBuf::from(parent_filename))?,
)
.ok()
.map(|p| p.to_string_lossy().into_owned())
.map(|p| p.to_string_lossy().into_owned()),
)
}
#[op2(fast)]
@ -564,19 +566,17 @@ where
}))
}
#[op2]
#[serde]
pub fn op_require_read_closest_package_json<P>(
#[op2(fast)]
pub fn op_require_is_maybe_cjs(
state: &mut OpState,
#[string] filename: String,
) -> Result<Option<PackageJsonRc>, node_resolver::errors::ClosestPkgJsonError>
where
P: NodePermissions + 'static,
{
) -> Result<bool, ClosestPkgJsonError> {
let filename = PathBuf::from(filename);
// permissions: allow reading the closest package.json files
let pkg_json_resolver = state.borrow::<PackageJsonResolverRc>();
pkg_json_resolver.get_closest_package_json_from_path(&filename)
let Ok(url) = url_from_file_path(&filename) else {
return Ok(false);
};
let loader = state.borrow::<NodeRequireLoaderRc>();
loader.is_maybe_cjs(&url)
}
#[op2]

View file

@ -1,6 +1,7 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_core::op2;
use libc::c_ulong;
use std::borrow::Cow;
use std::cell::RefCell;
use zlib::*;
@ -381,6 +382,15 @@ pub fn op_zlib_close_if_pending(
Ok(())
}
#[op2(fast)]
#[smi]
pub fn op_zlib_crc32(#[buffer] data: &[u8], #[smi] value: u32) -> u32 {
// SAFETY: `data` is a valid buffer.
unsafe {
zlib::crc32(value as c_ulong, data.as_ptr(), data.len() as u32) as u32
}
}
#[cfg(test)]
mod tests {
use super::*;

View file

@ -11,6 +11,7 @@ import {
op_require_can_parse_as_esm,
op_require_init_paths,
op_require_is_deno_dir_package,
op_require_is_maybe_cjs,
op_require_is_request_relative,
op_require_node_module_paths,
op_require_package_imports_resolve,
@ -19,7 +20,6 @@ import {
op_require_path_is_absolute,
op_require_path_resolve,
op_require_proxy_path,
op_require_read_closest_package_json,
op_require_read_file,
op_require_read_package_scope,
op_require_real_path,
@ -1060,36 +1060,13 @@ Module.prototype._compile = function (content, filename, format) {
return result;
};
Module._extensions[".js"] = function (module, filename) {
const content = op_require_read_file(filename);
let format;
if (StringPrototypeEndsWith(filename, ".js")) {
const pkg = op_require_read_closest_package_json(filename);
if (pkg?.type === "module") {
format = "module";
} else if (pkg?.type === "commonjs") {
format = "commonjs";
}
}
module._compile(content, filename, format);
};
Module._extensions[".ts"] =
Module._extensions[".js"] =
Module._extensions[".ts"] =
Module._extensions[".jsx"] =
Module._extensions[".tsx"] =
function (module, filename) {
const content = op_require_read_file(filename);
let format;
const pkg = op_require_read_closest_package_json(filename);
if (pkg?.type === "module") {
format = "module";
} else if (pkg?.type === "commonjs") {
format = "commonjs";
}
const format = op_require_is_maybe_cjs(filename) ? undefined : "module";
module._compile(content, filename, format);
};
@ -1233,6 +1210,24 @@ function isBuiltin(moduleName) {
!StringPrototypeStartsWith(moduleName, "internal/");
}
function getBuiltinModule(id) {
if (!isBuiltin(id)) {
return undefined;
}
if (StringPrototypeStartsWith(id, "node:")) {
// Slice 'node:' prefix
id = StringPrototypeSlice(id, 5);
}
const mod = loadNativeModule(id, id);
if (mod) {
return mod.exports;
}
return undefined;
}
Module.isBuiltin = isBuiltin;
Module.createRequire = createRequire;
@ -1327,7 +1322,7 @@ export function register(_specifier, _parentUrl, _options) {
return undefined;
}
export { builtinModules, createRequire, isBuiltin, Module };
export { builtinModules, createRequire, getBuiltinModule, isBuiltin, Module };
export const _cache = Module._cache;
export const _extensions = Module._extensions;
export const _findPath = Module._findPath;
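
Based on the body added above, getBuiltinModule resolves a built-in id (with or without the node: prefix) to that module's exports and returns undefined otherwise. A speculative usage sketch; whether the export is reachable from node:module or surfaces as process.getBuiltinModule is not shown in this diff:

// Speculative import path, per the hedging above.
import { getBuiltinModule, isBuiltin } from "node:module";

console.log(isBuiltin("node:fs")); // true
const fs = getBuiltinModule("node:fs") as typeof import("node:fs") | undefined;
console.log(typeof fs?.readFileSync); // "function" when the lookup succeeds
console.log(getBuiltinModule("left-pad")); // undefined (not a built-in)
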

View file

@ -290,8 +290,8 @@ export function convertFileInfoToStats(origin: Deno.FileInfo): Stats {
isFIFO: () => false,
isCharacterDevice: () => false,
isSocket: () => false,
ctime: origin.mtime,
ctimeMs: origin.mtime?.getTime() || null,
ctime: origin.ctime,
ctimeMs: origin.ctime?.getTime() || null,
});
return stats;
@ -336,9 +336,9 @@ export function convertFileInfoToBigIntStats(
isFIFO: () => false,
isCharacterDevice: () => false,
isSocket: () => false,
ctime: origin.mtime,
ctimeMs: origin.mtime ? BigInt(origin.mtime.getTime()) : null,
ctimeNs: origin.mtime ? BigInt(origin.mtime.getTime()) * 1000000n : null,
ctime: origin.ctime,
ctimeMs: origin.ctime ? BigInt(origin.ctime.getTime()) : null,
ctimeNs: origin.ctime ? BigInt(origin.ctime.getTime()) * 1000000n : null,
});
return stats;
}
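
With ctime now taken from Deno.FileInfo.ctime instead of reusing mtime, the node:fs compatibility layer reports the real change time where the platform provides one and null otherwise. A quick check (the path is a placeholder):

import { statSync } from "node:fs";

const stats = statSync("./deno.json");
console.log("ctime:", stats.ctime, "ctimeMs:", stats.ctimeMs);
console.log("mtime:", stats.mtime); // no longer doubles as the ctime value
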

View file

@ -17,6 +17,7 @@ const {
import { TextDecoder, TextEncoder } from "ext:deno_web/08_text_encoding.js";
import { errorMap } from "ext:deno_node/internal_binding/uv.ts";
import { codes } from "ext:deno_node/internal/error_codes.ts";
import { ERR_NOT_IMPLEMENTED } from "ext:deno_node/internal/errors.ts";
export type BinaryEncodings = "binary";
@ -34,8 +35,7 @@ export type TextEncodings =
export type Encodings = BinaryEncodings | TextEncodings;
export function notImplemented(msg: string): never {
const message = msg ? `Not implemented: ${msg}` : "Not implemented";
throw new Error(message);
throw new ERR_NOT_IMPLEMENTED(msg);
}
export function warnNotImplemented(msg?: string) {

View file

@ -18,7 +18,7 @@
*/
import { primordials } from "ext:core/mod.js";
const { JSONStringify, SymbolFor } = primordials;
const { JSONStringify, SafeArrayIterator, SymbolFor } = primordials;
import { format, inspect } from "ext:deno_node/internal/util/inspect.mjs";
import { codes } from "ext:deno_node/internal/error_codes.ts";
import {
@ -1874,6 +1874,11 @@ export class ERR_SOCKET_CLOSED extends NodeError {
super("ERR_SOCKET_CLOSED", `Socket is closed`);
}
}
export class ERR_SOCKET_CONNECTION_TIMEOUT extends NodeError {
constructor() {
super("ERR_SOCKET_CONNECTION_TIMEOUT", `Socket connection timeout`);
}
}
export class ERR_SOCKET_DGRAM_IS_CONNECTED extends NodeError {
constructor() {
super("ERR_SOCKET_DGRAM_IS_CONNECTED", `Already connected`);
@ -2385,6 +2390,15 @@ export class ERR_INVALID_RETURN_VALUE extends NodeTypeError {
}
}
export class ERR_NOT_IMPLEMENTED extends NodeError {
constructor(message?: string) {
super(
"ERR_NOT_IMPLEMENTED",
message ? `Not implemented: ${message}` : "Not implemented",
);
}
}
export class ERR_INVALID_URL extends NodeTypeError {
input: string;
constructor(input: string) {
@ -2633,11 +2647,30 @@ export function aggregateTwoErrors(
}
return innerError || outerError;
}
export class NodeAggregateError extends AggregateError {
code: string;
constructor(errors, message) {
super(new SafeArrayIterator(errors), message);
this.code = errors[0]?.code;
}
get [kIsNodeError]() {
return true;
}
// deno-lint-ignore adjacent-overload-signatures
get ["constructor"]() {
return AggregateError;
}
}
codes.ERR_IPC_CHANNEL_CLOSED = ERR_IPC_CHANNEL_CLOSED;
codes.ERR_INVALID_ARG_TYPE = ERR_INVALID_ARG_TYPE;
codes.ERR_INVALID_ARG_VALUE = ERR_INVALID_ARG_VALUE;
codes.ERR_OUT_OF_RANGE = ERR_OUT_OF_RANGE;
codes.ERR_SOCKET_BAD_PORT = ERR_SOCKET_BAD_PORT;
codes.ERR_SOCKET_CONNECTION_TIMEOUT = ERR_SOCKET_CONNECTION_TIMEOUT;
codes.ERR_BUFFER_OUT_OF_BOUNDS = ERR_BUFFER_OUT_OF_BOUNDS;
codes.ERR_UNKNOWN_ENCODING = ERR_UNKNOWN_ENCODING;
codes.ERR_PARSE_ARGS_INVALID_OPTION_VALUE = ERR_PARSE_ARGS_INVALID_OPTION_VALUE;
@ -2838,6 +2871,7 @@ export default {
ERR_INVALID_SYNC_FORK_INPUT,
ERR_INVALID_THIS,
ERR_INVALID_TUPLE,
ERR_NOT_IMPLEMENTED,
ERR_INVALID_URI,
ERR_INVALID_URL,
ERR_INVALID_URL_SCHEME,

View file

@ -95,4 +95,5 @@ export function makeSyncWrite(fd: number) {
};
}
export const kReinitializeHandle = Symbol("kReinitializeHandle");
export const normalizedArgsSymbol = Symbol("normalizedArgs");

View file

@ -530,10 +530,12 @@ export function mapSysErrnoToUvErrno(sysErrno: number): number {
export const UV_EAI_MEMORY = codeMap.get("EAI_MEMORY")!;
export const UV_EBADF = codeMap.get("EBADF")!;
export const UV_ECANCELED = codeMap.get("ECANCELED")!;
export const UV_EEXIST = codeMap.get("EEXIST");
export const UV_EINVAL = codeMap.get("EINVAL")!;
export const UV_ENOENT = codeMap.get("ENOENT");
export const UV_ENOTSOCK = codeMap.get("ENOTSOCK")!;
export const UV_ETIMEDOUT = codeMap.get("ETIMEDOUT")!;
export const UV_UNKNOWN = codeMap.get("UNKNOWN")!;
export function errname(errno: number): string {

View file

@ -31,6 +31,7 @@ import {
isIP,
isIPv4,
isIPv6,
kReinitializeHandle,
normalizedArgsSymbol,
} from "ext:deno_node/internal/net.ts";
import { Duplex } from "node:stream";
@ -50,9 +51,11 @@ import {
ERR_SERVER_ALREADY_LISTEN,
ERR_SERVER_NOT_RUNNING,
ERR_SOCKET_CLOSED,
ERR_SOCKET_CONNECTION_TIMEOUT,
errnoException,
exceptionWithHostPort,
genericNodeError,
NodeAggregateError,
uvExceptionWithHostPort,
} from "ext:deno_node/internal/errors.ts";
import type { ErrnoException } from "ext:deno_node/internal/errors.ts";
@ -80,6 +83,7 @@ import { Buffer } from "node:buffer";
import type { LookupOneOptions } from "ext:deno_node/internal/dns/utils.ts";
import {
validateAbortSignal,
validateBoolean,
validateFunction,
validateInt32,
validateNumber,
@ -100,13 +104,25 @@ import { ShutdownWrap } from "ext:deno_node/internal_binding/stream_wrap.ts";
import { assert } from "ext:deno_node/_util/asserts.ts";
import { isWindows } from "ext:deno_node/_util/os.ts";
import { ADDRCONFIG, lookup as dnsLookup } from "node:dns";
import { codeMap } from "ext:deno_node/internal_binding/uv.ts";
import {
codeMap,
UV_ECANCELED,
UV_ETIMEDOUT,
} from "ext:deno_node/internal_binding/uv.ts";
import { guessHandleType } from "ext:deno_node/internal_binding/util.ts";
import { debuglog } from "ext:deno_node/internal/util/debuglog.ts";
import type { DuplexOptions } from "ext:deno_node/_stream.d.ts";
import type { BufferEncoding } from "ext:deno_node/_global.d.ts";
import type { Abortable } from "ext:deno_node/_events.d.ts";
import { channel } from "node:diagnostics_channel";
import { primordials } from "ext:core/mod.js";
const {
ArrayPrototypeIncludes,
ArrayPrototypePush,
FunctionPrototypeBind,
MathMax,
} = primordials;
let debug = debuglog("net", (fn) => {
debug = fn;
@ -120,6 +136,9 @@ const kBytesWritten = Symbol("kBytesWritten");
const DEFAULT_IPV4_ADDR = "0.0.0.0";
const DEFAULT_IPV6_ADDR = "::";
let autoSelectFamilyDefault = true;
let autoSelectFamilyAttemptTimeoutDefault = 250;
type Handle = TCP | Pipe;
interface HandleOptions {
@ -214,6 +233,8 @@ interface TcpSocketConnectOptions extends ConnectOptions {
hints?: number;
family?: number;
lookup?: LookupFunction;
autoSelectFamily?: boolean | undefined;
autoSelectFamilyAttemptTimeout?: number | undefined;
}
interface IpcSocketConnectOptions extends ConnectOptions {
@ -316,12 +337,6 @@ export function _normalizeArgs(args: unknown[]): NormalizedArgs {
return arr;
}
function _isTCPConnectWrap(
req: TCPConnectWrap | PipeConnectWrap,
): req is TCPConnectWrap {
return "localAddress" in req && "localPort" in req;
}
function _afterConnect(
status: number,
// deno-lint-ignore no-explicit-any
@ -372,7 +387,7 @@ function _afterConnect(
socket.connecting = false;
let details;
if (_isTCPConnectWrap(req)) {
if (req.localAddress && req.localPort) {
details = req.localAddress + ":" + req.localPort;
}
@ -384,7 +399,7 @@ function _afterConnect(
details,
);
if (_isTCPConnectWrap(req)) {
if (details) {
ex.localAddress = req.localAddress;
ex.localPort = req.localPort;
}
@ -393,6 +408,107 @@ function _afterConnect(
}
}
function _createConnectionError(req, status) {
let details;
if (req.localAddress && req.localPort) {
details = req.localAddress + ":" + req.localPort;
}
const ex = exceptionWithHostPort(
status,
"connect",
req.address,
req.port,
details,
);
if (details) {
ex.localAddress = req.localAddress;
ex.localPort = req.localPort;
}
return ex;
}
function _afterConnectMultiple(
context,
current,
status,
handle,
req,
readable,
writable,
) {
debug(
"connect/multiple: connection attempt to %s:%s completed with status %s",
req.address,
req.port,
status,
);
// Make sure another connection is not spawned
clearTimeout(context[kTimeout]);
  // One of the connections already completed and was dispatched correctly, but only after the timeout; ignore this one
if (status === 0 && current !== context.current - 1) {
debug(
"connect/multiple: ignoring successful but timedout connection to %s:%s",
req.address,
req.port,
);
handle.close();
return;
}
const self = context.socket;
// Some error occurred, add to the list of exceptions
if (status !== 0) {
const ex = _createConnectionError(req, status);
ArrayPrototypePush(context.errors, ex);
self.emit(
"connectionAttemptFailed",
req.address,
req.port,
req.addressType,
ex,
);
// Try the next address, unless we were aborted
if (context.socket.connecting) {
_internalConnectMultiple(context, status === UV_ECANCELED);
}
return;
}
_afterConnect(status, self._handle, req, readable, writable);
}
function _internalConnectMultipleTimeout(context, req, handle) {
debug(
"connect/multiple: connection to %s:%s timed out",
req.address,
req.port,
);
context.socket.emit(
"connectionAttemptTimeout",
req.address,
req.port,
req.addressType,
);
req.oncomplete = undefined;
ArrayPrototypePush(context.errors, _createConnectionError(req, UV_ETIMEDOUT));
handle.close();
// Try the next address, unless we were aborted
if (context.socket.connecting) {
_internalConnectMultiple(context);
}
}
function _checkBindError(err: number, port: number, handle: TCP) {
// EADDRINUSE may not be reported until we call `listen()` or `connect()`.
// To complicate matters, a failed `bind()` followed by `listen()` or `connect()`
@ -495,6 +611,131 @@ function _internalConnect(
}
}
function _internalConnectMultiple(context, canceled?: boolean) {
clearTimeout(context[kTimeout]);
const self = context.socket;
// We were requested to abort. Stop all operations
if (self._aborted) {
return;
}
// All connections have been tried without success, destroy with error
if (canceled || context.current === context.addresses.length) {
if (context.errors.length === 0) {
self.destroy(new ERR_SOCKET_CONNECTION_TIMEOUT());
return;
}
self.destroy(new NodeAggregateError(context.errors));
return;
}
assert(self.connecting);
const current = context.current++;
if (current > 0) {
self[kReinitializeHandle](new TCP(TCPConstants.SOCKET));
}
const { localPort, port, flags } = context;
const { address, family: addressType } = context.addresses[current];
let localAddress;
let err;
if (localPort) {
if (addressType === 4) {
localAddress = DEFAULT_IPV4_ADDR;
err = self._handle.bind(localAddress, localPort);
} else { // addressType === 6
localAddress = DEFAULT_IPV6_ADDR;
err = self._handle.bind6(localAddress, localPort, flags);
}
debug(
"connect/multiple: binding to localAddress: %s and localPort: %d (addressType: %d)",
localAddress,
localPort,
addressType,
);
err = _checkBindError(err, localPort, self._handle);
if (err) {
ArrayPrototypePush(
context.errors,
exceptionWithHostPort(err, "bind", localAddress, localPort),
);
_internalConnectMultiple(context);
return;
}
}
debug(
"connect/multiple: attempting to connect to %s:%d (addressType: %d)",
address,
port,
addressType,
);
self.emit("connectionAttempt", address, port, addressType);
const req = new TCPConnectWrap();
req.oncomplete = FunctionPrototypeBind(
_afterConnectMultiple,
undefined,
context,
current,
);
req.address = address;
req.port = port;
req.localAddress = localAddress;
req.localPort = localPort;
req.addressType = addressType;
ArrayPrototypePush(
self.autoSelectFamilyAttemptedAddresses,
`${address}:${port}`,
);
if (addressType === 4) {
err = self._handle.connect(req, address, port);
} else {
err = self._handle.connect6(req, address, port);
}
if (err) {
const sockname = self._getsockname();
let details;
if (sockname) {
details = sockname.address + ":" + sockname.port;
}
const ex = exceptionWithHostPort(err, "connect", address, port, details);
ArrayPrototypePush(context.errors, ex);
self.emit("connectionAttemptFailed", address, port, addressType, ex);
_internalConnectMultiple(context);
return;
}
if (current < context.addresses.length - 1) {
debug(
"connect/multiple: setting the attempt timeout to %d ms",
context.timeout,
);
// If the attempt has not returned an error, start the connection timer
context[kTimeout] = setTimeout(
_internalConnectMultipleTimeout,
context.timeout,
context,
req,
self._handle,
);
}
}
// Provide a better error message when we call end() as a result
// of the other side sending a FIN. The standard "write after end"
// is overly vague, and makes it seem like the user's code is to blame.
@ -597,7 +838,7 @@ function _lookupAndConnect(
) {
const { localAddress, localPort } = options;
const host = options.host || "localhost";
let { port } = options;
let { port, autoSelectFamilyAttemptTimeout, autoSelectFamily } = options;
if (localAddress && !isIP(localAddress)) {
throw new ERR_INVALID_IP_ADDRESS(localAddress);
@ -621,6 +862,22 @@ function _lookupAndConnect(
port |= 0;
if (autoSelectFamily != null) {
validateBoolean(autoSelectFamily, "options.autoSelectFamily");
} else {
autoSelectFamily = autoSelectFamilyDefault;
}
if (autoSelectFamilyAttemptTimeout !== undefined) {
validateInt32(autoSelectFamilyAttemptTimeout);
if (autoSelectFamilyAttemptTimeout < 10) {
autoSelectFamilyAttemptTimeout = 10;
}
} else {
autoSelectFamilyAttemptTimeout = autoSelectFamilyAttemptTimeoutDefault;
}
// If host is an IP, skip performing a lookup
const addressType = isIP(host);
if (addressType) {
@ -649,6 +906,7 @@ function _lookupAndConnect(
const dnsOpts = {
family: options.family,
hints: options.hints || 0,
all: false,
};
if (
@ -665,6 +923,31 @@ function _lookupAndConnect(
self._host = host;
const lookup = options.lookup || dnsLookup;
if (
dnsOpts.family !== 4 && dnsOpts.family !== 6 && !localAddress &&
autoSelectFamily
) {
debug("connect: autodetecting");
dnsOpts.all = true;
defaultTriggerAsyncIdScope(self[asyncIdSymbol], function () {
_lookupAndConnectMultiple(
self,
asyncIdSymbol,
lookup,
host,
options,
dnsOpts,
port,
localAddress,
localPort,
autoSelectFamilyAttemptTimeout,
);
});
return;
}
defaultTriggerAsyncIdScope(self[asyncIdSymbol], function () {
lookup(
host,
@ -719,6 +1002,143 @@ function _lookupAndConnect(
});
}
function _lookupAndConnectMultiple(
self: Socket,
asyncIdSymbol: number,
// deno-lint-ignore no-explicit-any
lookup: any,
host: string,
options: TcpSocketConnectOptions,
dnsopts,
port: number,
localAddress: string,
localPort: number,
timeout: number | undefined,
) {
defaultTriggerAsyncIdScope(self[asyncIdSymbol], function emitLookup() {
lookup(host, dnsopts, function emitLookup(err, addresses) {
// It's possible we were destroyed while looking this up.
// XXX it would be great if we could cancel the promise returned by
// the look up.
if (!self.connecting) {
return;
} else if (err) {
self.emit("lookup", err, undefined, undefined, host);
// net.createConnection() creates a net.Socket object and immediately
// calls net.Socket.connect() on it (that's us). There are no event
// listeners registered yet so defer the error event to the next tick.
nextTick(_connectErrorNT, self, err);
return;
}
      // Filter the addresses, keeping only the ones that are either IPv4 or IPv6.
      // The first valid address determines which family group takes precedence in
      // the alternating-family sort that happens later.
const validAddresses = [[], []];
const validIps = [[], []];
let destinations;
for (let i = 0, l = addresses.length; i < l; i++) {
const address = addresses[i];
const { address: ip, family: addressType } = address;
self.emit("lookup", err, ip, addressType, host);
// It's possible we were destroyed while looking this up.
if (!self.connecting) {
return;
}
if (isIP(ip) && (addressType === 4 || addressType === 6)) {
destinations ||= addressType === 6 ? { 6: 0, 4: 1 } : { 4: 0, 6: 1 };
const destination = destinations[addressType];
// Only try an address once
if (!ArrayPrototypeIncludes(validIps[destination], ip)) {
ArrayPrototypePush(validAddresses[destination], address);
ArrayPrototypePush(validIps[destination], ip);
}
}
}
// When no AAAA or A records are available, fail on the first one
if (!validAddresses[0].length && !validAddresses[1].length) {
const { address: firstIp, family: firstAddressType } = addresses[0];
if (!isIP(firstIp)) {
err = new ERR_INVALID_IP_ADDRESS(firstIp);
nextTick(_connectErrorNT, self, err);
} else if (firstAddressType !== 4 && firstAddressType !== 6) {
err = new ERR_INVALID_ADDRESS_FAMILY(
firstAddressType,
options.host,
options.port,
);
nextTick(_connectErrorNT, self, err);
}
return;
}
// Sort addresses alternating families
const toAttempt = [];
for (
let i = 0,
l = MathMax(validAddresses[0].length, validAddresses[1].length);
i < l;
i++
) {
if (i in validAddresses[0]) {
ArrayPrototypePush(toAttempt, validAddresses[0][i]);
}
if (i in validAddresses[1]) {
ArrayPrototypePush(toAttempt, validAddresses[1][i]);
}
}
if (toAttempt.length === 1) {
debug(
"connect/multiple: only one address found, switching back to single connection",
);
const { address: ip, family: addressType } = toAttempt[0];
self._unrefTimer();
defaultTriggerAsyncIdScope(
self[asyncIdSymbol],
_internalConnect,
self,
ip,
port,
addressType,
localAddress,
localPort,
);
return;
}
self.autoSelectFamilyAttemptedAddresses = [];
debug("connect/multiple: will try the following addresses", toAttempt);
const context = {
socket: self,
addresses: toAttempt,
current: 0,
port,
localPort,
timeout,
[kTimeout]: null,
errors: [],
};
self._unrefTimer();
defaultTriggerAsyncIdScope(
self[asyncIdSymbol],
_internalConnectMultiple,
context,
);
});
});
}
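A minimal usage sketch of the multi-address path implemented above, assuming a placeholder dual-stack host that publishes both AAAA and A records:

import net from "node:net";

// With autoSelectFamily enabled, the resolved AAAA/A addresses are tried in
// alternating order until one connection succeeds (happy eyeballs).
const socket = net.createConnection({
  host: "example.org", // placeholder dual-stack host
  port: 80,
  autoSelectFamily: true,
  autoSelectFamilyAttemptTimeout: 250, // ms per attempt; values below 10 are clamped to 10
});
socket.on("ready", () => {
  // Filled in by the multi-address connection path above.
  console.log(socket.autoSelectFamilyAttemptedAddresses);
  socket.end();
});
socket.on("error", (err) => console.error(err.message));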
function _afterShutdown(this: ShutdownWrap<TCP>) {
// deno-lint-ignore no-explicit-any
const self: any = this.handle[ownerSymbol];
@ -777,6 +1197,7 @@ export class Socket extends Duplex {
_host: string | null = null;
// deno-lint-ignore no-explicit-any
_parent: any = null;
autoSelectFamilyAttemptedAddresses: AddressInfo[] | undefined = undefined;
constructor(options: SocketOptions | number) {
if (typeof options === "number") {
@ -1546,6 +1967,16 @@ export class Socket extends Duplex {
set _handle(v: Handle | null) {
this[kHandle] = v;
}
// deno-lint-ignore no-explicit-any
[kReinitializeHandle](handle: any) {
this._handle?.close();
this._handle = handle;
this._handle[ownerSymbol] = this;
_initSocketHandle(this);
}
}
export const Stream = Socket;
@ -1593,6 +2024,33 @@ export function connect(...args: unknown[]) {
export const createConnection = connect;
/** https://docs.deno.com/api/node/net/#namespace_getdefaultautoselectfamily */
export function getDefaultAutoSelectFamily() {
return autoSelectFamilyDefault;
}
/** https://docs.deno.com/api/node/net/#namespace_setdefaultautoselectfamily */
export function setDefaultAutoSelectFamily(value: boolean) {
validateBoolean(value, "value");
autoSelectFamilyDefault = value;
}
/** https://docs.deno.com/api/node/net/#namespace_getdefaultautoselectfamilyattempttimeout */
export function getDefaultAutoSelectFamilyAttemptTimeout() {
return autoSelectFamilyAttemptTimeoutDefault;
}
/** https://docs.deno.com/api/node/net/#namespace_setdefaultautoselectfamilyattempttimeout */
export function setDefaultAutoSelectFamilyAttemptTimeout(value: number) {
validateInt32(value, "value", 1);
if (value < 10) {
value = 10;
}
autoSelectFamilyAttemptTimeoutDefault = value;
}
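The new module-level getters and setters adjust the process-wide defaults used when a socket does not pass these options itself; a short sketch:

import {
  getDefaultAutoSelectFamily,
  getDefaultAutoSelectFamilyAttemptTimeout,
  setDefaultAutoSelectFamily,
  setDefaultAutoSelectFamilyAttemptTimeout,
} from "node:net";

setDefaultAutoSelectFamily(true);
setDefaultAutoSelectFamilyAttemptTimeout(5); // clamped to the 10 ms floor above
console.log(getDefaultAutoSelectFamily()); // true
console.log(getDefaultAutoSelectFamilyAttemptTimeout()); // 10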
export interface ListenOptions extends Abortable {
fd?: number;
port?: number | undefined;
@ -2478,15 +2936,19 @@ export { BlockList, isIP, isIPv4, isIPv6, SocketAddress };
export default {
_createServerHandle,
_normalizeArgs,
isIP,
isIPv4,
isIPv6,
BlockList,
SocketAddress,
connect,
createConnection,
createServer,
getDefaultAutoSelectFamily,
getDefaultAutoSelectFamilyAttemptTimeout,
isIP,
isIPv4,
isIPv6,
Server,
setDefaultAutoSelectFamily,
setDefaultAutoSelectFamilyAttemptTimeout,
Socket,
SocketAddress,
Stream,
};

View file

@ -15,7 +15,7 @@ import {
import { warnNotImplemented } from "ext:deno_node/_utils.ts";
import { EventEmitter } from "node:events";
import Module from "node:module";
import Module, { getBuiltinModule } from "node:module";
import { report } from "ext:deno_node/internal/process/report.ts";
import { validateString } from "ext:deno_node/internal/validators.mjs";
import {
@ -38,7 +38,15 @@ import {
versions,
} from "ext:deno_node/_process/process.ts";
import { _exiting } from "ext:deno_node/_process/exiting.ts";
export { _nextTick as nextTick, chdir, cwd, env, version, versions };
export {
_nextTick as nextTick,
chdir,
cwd,
env,
getBuiltinModule,
version,
versions,
};
import {
createWritableStdioStream,
initStdin,
@ -728,6 +736,8 @@ Process.prototype.getegid = getegid;
/** This method is removed on Windows */
Process.prototype.geteuid = geteuid;
Process.prototype.getBuiltinModule = getBuiltinModule;
// TODO(kt3k): Implement this when we add the -e option to node compat mode
Process.prototype._eval = undefined;
@ -909,7 +919,7 @@ Object.defineProperty(argv, "1", {
if (Deno.mainModule?.startsWith("file:")) {
return pathFromURL(new URL(Deno.mainModule));
} else {
return join(Deno.cwd(), "$deno$node.js");
return join(Deno.cwd(), "$deno$node.mjs");
}
},
});
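A short sketch of the newly wired `process.getBuiltinModule`, which returns a built-in module without going through the module resolver:

import process from "node:process";

const fs = process.getBuiltinModule("node:fs");
if (fs) {
  console.log(typeof fs.readFileSync); // "function"
}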

View file

@ -40,6 +40,58 @@ import {
createBrotliCompress,
createBrotliDecompress,
} from "ext:deno_node/_brotli.js";
import { ERR_INVALID_ARG_TYPE } from "ext:deno_node/internal/errors.ts";
import { validateUint32 } from "ext:deno_node/internal/validators.mjs";
import { op_zlib_crc32 } from "ext:core/ops";
import { core, primordials } from "ext:core/mod.js";
import { TextEncoder } from "ext:deno_web/08_text_encoding.js";
const {
Uint8Array,
TypedArrayPrototypeGetBuffer,
TypedArrayPrototypeGetByteLength,
TypedArrayPrototypeGetByteOffset,
DataViewPrototypeGetBuffer,
DataViewPrototypeGetByteLength,
DataViewPrototypeGetByteOffset,
} = primordials;
const { isTypedArray, isDataView } = core;
const enc = new TextEncoder();
const toU8 = (input) => {
if (typeof input === "string") {
return enc.encode(input);
}
if (isTypedArray(input)) {
return new Uint8Array(
TypedArrayPrototypeGetBuffer(input),
TypedArrayPrototypeGetByteOffset(input),
TypedArrayPrototypeGetByteLength(input),
);
} else if (isDataView(input)) {
return new Uint8Array(
DataViewPrototypeGetBuffer(input),
DataViewPrototypeGetByteOffset(input),
DataViewPrototypeGetByteLength(input),
);
}
return input;
};
export function crc32(data, value = 0) {
if (typeof data !== "string" && !isArrayBufferView(data)) {
throw new ERR_INVALID_ARG_TYPE("data", [
"Buffer",
"TypedArray",
"DataView",
"string",
], data);
}
validateUint32(value, "value");
return op_zlib_crc32(toU8(data), value);
}
export class Options {
constructor() {
@ -87,6 +139,7 @@ export default {
BrotliOptions,
codes,
constants,
crc32,
createBrotliCompress,
createBrotliDecompress,
createDeflate,
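A usage sketch of the new `crc32` export; a running checksum can be continued across chunks via the second argument:

import { crc32 } from "node:zlib";

const partial = crc32("hello ");
const full = crc32("world", partial); // continue the running checksum
console.log(full.toString(16)); // CRC-32 of "hello world"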

View file

@ -28,6 +28,7 @@ const {
ArrayPrototypePush,
ArrayPrototypeShift,
ArrayPrototypeSome,
Error,
ErrorPrototypeToString,
ObjectDefineProperties,
ObjectPrototypeIsPrototypeOf,
@ -488,8 +489,11 @@ class WebSocket extends EventTarget {
/* error */
this[_readyState] = CLOSED;
const message = op_ws_get_error(rid);
const error = new Error(message);
const errorEv = new ErrorEvent("error", {
message: op_ws_get_error(rid),
error,
message,
});
this.dispatchEvent(errorEv);
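With the change above, the `error` event carries an `Error` object alongside the message string; a minimal sketch against a placeholder endpoint:

const ws = new WebSocket("ws://localhost:8080"); // placeholder URL

ws.onerror = (ev) => {
  const { message, error } = ev as ErrorEvent;
  console.error(message, error instanceof Error); // the message, then `true`
};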

View file

@ -15,8 +15,8 @@ use crate::errors::CanonicalizingPkgJsonDirError;
use crate::errors::ClosestPkgJsonError;
use crate::errors::PackageJsonLoadError;
// todo(dsherret): this isn't exactly correct and we should change it to instead
// be created per worker and passed down as a ctor arg to the pkg json resolver
// it would be nice if this was passed down as a ctor arg to the package.json resolver,
// but it's a little bit complicated to do that, so we just maintain a thread local cache
thread_local! {
static CACHE: RefCell<HashMap<PathBuf, PackageJsonRc>> = RefCell::new(HashMap::new());
}

View file

@ -50,6 +50,15 @@ pub static DEFAULT_CONDITIONS: &[&str] = &["deno", "node", "import"];
pub static REQUIRE_CONDITIONS: &[&str] = &["require", "node"];
static TYPES_ONLY_CONDITIONS: &[&str] = &["types"];
fn conditions_from_module_kind(
kind: NodeModuleKind,
) -> &'static [&'static str] {
match kind {
NodeModuleKind::Esm => DEFAULT_CONDITIONS,
NodeModuleKind::Cjs => REQUIRE_CONDITIONS,
}
}
pub type NodeModuleKind = deno_package_json::NodeModuleKind;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@ -166,8 +175,7 @@ impl<TEnv: NodeResolverEnv> NodeResolver<TEnv> {
specifier,
referrer,
referrer_kind,
// even though the referrer may be CJS, if we're here that means we're doing ESM resolution
DEFAULT_CONDITIONS,
conditions_from_module_kind(referrer_kind),
mode,
)?;
@ -299,9 +307,9 @@ impl<TEnv: NodeResolverEnv> NodeResolver<TEnv> {
package_dir: &Path,
package_subpath: Option<&str>,
maybe_referrer: Option<&Url>,
referrer_kind: NodeModuleKind,
mode: NodeResolutionMode,
) -> Result<Url, PackageSubpathResolveError> {
let node_module_kind = NodeModuleKind::Esm;
let package_subpath = package_subpath
.map(|s| format!("./{s}"))
.unwrap_or_else(|| ".".to_string());
@ -309,8 +317,8 @@ impl<TEnv: NodeResolverEnv> NodeResolver<TEnv> {
package_dir,
&package_subpath,
maybe_referrer,
node_module_kind,
DEFAULT_CONDITIONS,
referrer_kind,
conditions_from_module_kind(referrer_kind),
mode,
)?;
// TODO(bartlomieju): skipped checking errors for commonJS resolution and
@ -441,10 +449,7 @@ impl<TEnv: NodeResolverEnv> NodeResolver<TEnv> {
/* sub path */ ".",
maybe_referrer,
referrer_kind,
match referrer_kind {
NodeModuleKind::Esm => DEFAULT_CONDITIONS,
NodeModuleKind::Cjs => REQUIRE_CONDITIONS,
},
conditions_from_module_kind(referrer_kind),
NodeResolutionMode::Types,
);
if let Ok(resolution) = resolution_result {

View file

@ -100,6 +100,7 @@ deno_websocket.workspace = true
deno_webstorage.workspace = true
node_resolver = { workspace = true, features = ["sync"] }
async-trait.workspace = true
color-print.workspace = true
dlopen2.workspace = true
encoding_rs.workspace = true
@ -114,7 +115,13 @@ log.workspace = true
netif = "0.1.6"
notify.workspace = true
once_cell.workspace = true
opentelemetry.workspace = true
opentelemetry-http.workspace = true
opentelemetry-otlp.workspace = true
opentelemetry-semantic-conventions.workspace = true
opentelemetry_sdk.workspace = true
percent-encoding.workspace = true
pin-project.workspace = true
regex.workspace = true
rustyline = { workspace = true, features = ["custom-bindings"] }
same-file = "1.0.6"

View file

@ -310,14 +310,13 @@ fn get_suggestions_for_terminal_errors(e: &JsError) -> Vec<FixSuggestion> {
{
return vec![
FixSuggestion::info_multiline(&[
cstr!("Deno supports CommonJS modules in <u>.cjs</> files, or when there's a <u>package.json</>"),
cstr!("with <i>\"type\": \"commonjs\"</> option and <i>--unstable-detect-cjs</> flag is used.")
cstr!("Deno supports CommonJS modules in <u>.cjs</> files, or when the closest"),
cstr!("<u>package.json</> has a <i>\"type\": \"commonjs\"</> option.")
]),
FixSuggestion::hint_multiline(&[
"Rewrite this module to ESM,",
cstr!("or change the file extension to <u>.cjs</u>,"),
cstr!("or add <u>package.json</> next to the file with <i>\"type\": \"commonjs\"</> option"),
cstr!("and pass <i>--unstable-detect-cjs</> flag."),
cstr!("or add <u>package.json</> next to the file with <i>\"type\": \"commonjs\"</> option."),
]),
FixSuggestion::docs("https://docs.deno.com/go/commonjs"),
];
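The reworded suggestion points at two fixes; the simpler one is the `.cjs` extension, sketched here with hypothetical file names:

// math.cjs — the .cjs extension alone marks this file as CommonJS
module.exports.add = (a, b) => a + b;

// main.cjs
const { add } = require("./math.cjs");
console.log(add(1, 2)); // 3

The alternative, as the suggestion says, is keeping plain .js files and placing a package.json with "type": "commonjs" next to them.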

View file

@ -29,6 +29,7 @@ import * as tty from "ext:runtime/40_tty.js";
import * as kv from "ext:deno_kv/01_db.ts";
import * as cron from "ext:deno_cron/01_cron.ts";
import * as webgpuSurface from "ext:deno_webgpu/02_surface.js";
import * as telemetry from "ext:runtime/telemetry.js";
const denoNs = {
Process: process.Process,
@ -134,7 +135,7 @@ const denoNs = {
createHttpClient: httpClient.createHttpClient,
};
// NOTE(bartlomieju): keep IDs in sync with `cli/main.rs`
// NOTE(bartlomieju): keep IDs in sync with `runtime/lib.rs`
const unstableIds = {
broadcastChannel: 1,
cron: 2,
@ -143,11 +144,12 @@ const unstableIds = {
http: 5,
kv: 6,
net: 7,
process: 8,
temporal: 9,
unsafeProto: 10,
webgpu: 11,
workerOptions: 12,
otel: 8,
process: 9,
temporal: 10,
unsafeProto: 11,
webgpu: 12,
workerOptions: 13,
};
const denoNsUnstableById = { __proto__: null };
@ -181,4 +183,9 @@ denoNsUnstableById[unstableIds.webgpu] = {
// denoNsUnstableById[unstableIds.workerOptions] = { __proto__: null }
denoNsUnstableById[unstableIds.otel] = {
tracing: telemetry.tracing,
metrics: telemetry.metrics,
};
export { denoNs, denoNsUnstableById, unstableIds };

View file

@ -86,6 +86,8 @@ import {
workerRuntimeGlobalProperties,
} from "ext:runtime/98_global_scope_worker.js";
import { SymbolDispose, SymbolMetadata } from "ext:deno_web/00_infra.js";
import { bootstrap as bootstrapOtel } from "ext:runtime/telemetry.js";
// deno-lint-ignore prefer-primordials
if (Symbol.metadata) {
throw "V8 supports Symbol.metadata now, no need to shim it";
@ -573,6 +575,7 @@ function bootstrapMainRuntime(runtimeOptions, warmup = false) {
10: serveHost,
11: serveIsMain,
12: serveWorkerCount,
13: otelConfig,
} = runtimeOptions;
if (mode === executionModes.serve) {
@ -673,9 +676,10 @@ function bootstrapMainRuntime(runtimeOptions, warmup = false) {
});
ObjectSetPrototypeOf(globalThis, Window.prototype);
bootstrapOtel(otelConfig);
if (inspectFlag) {
const consoleFromDeno = globalThis.console;
core.wrapConsole(consoleFromDeno, core.v8Console);
core.wrapConsole(globalThis.console, core.v8Console);
}
event.defineEventHandler(globalThis, "error");
@ -855,6 +859,7 @@ function bootstrapWorkerRuntime(
5: hasNodeModulesDir,
6: argv0,
7: nodeDebug,
13: otelConfig,
} = runtimeOptions;
performance.setTimeOrigin();
@ -882,8 +887,9 @@ function bootstrapWorkerRuntime(
}
ObjectSetPrototypeOf(globalThis, DedicatedWorkerGlobalScope.prototype);
const consoleFromDeno = globalThis.console;
core.wrapConsole(consoleFromDeno, core.v8Console);
bootstrapOtel(otelConfig);
core.wrapConsole(globalThis.console, core.v8Console);
event.defineEventHandler(self, "message");
event.defineEventHandler(self, "error", undefined, true);

395
runtime/js/telemetry.js Normal file
View file

@ -0,0 +1,395 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
import { core, primordials } from "ext:core/mod.js";
import {
op_otel_log,
op_otel_span_attribute,
op_otel_span_attribute2,
op_otel_span_attribute3,
op_otel_span_continue,
op_otel_span_flush,
op_otel_span_start,
} from "ext:core/ops";
import { Console } from "ext:deno_console/01_console.js";
import { performance } from "ext:deno_web/15_performance.js";
const {
  SymbolDispose,
  MathRandom,
  Array,
  ObjectDefineProperty,
  ObjectEntries,
  SafeMap,
  ReflectApply,
  SymbolFor,
  Error,
} = primordials;
const { AsyncVariable, setAsyncContext } = core;
const CURRENT = new AsyncVariable();
let TRACING_ENABLED = false;
const SPAN_ID_BYTES = 8;
const TRACE_ID_BYTES = 16;
const TRACE_FLAG_SAMPLED = 1 << 0;
const hexSliceLookupTable = (function () {
const alphabet = "0123456789abcdef";
const table = new Array(256);
for (let i = 0; i < 16; ++i) {
const i16 = i * 16;
for (let j = 0; j < 16; ++j) {
table[i16 + j] = alphabet[i] + alphabet[j];
}
}
return table;
})();
function generateId(bytes) {
let out = "";
for (let i = 0; i < bytes / 4; i += 1) {
const r32 = (MathRandom() * 2 ** 32) >>> 0;
out += hexSliceLookupTable[(r32 >> 24) & 0xff];
out += hexSliceLookupTable[(r32 >> 16) & 0xff];
out += hexSliceLookupTable[(r32 >> 8) & 0xff];
out += hexSliceLookupTable[r32 & 0xff];
}
return out;
}
function submit(span) {
if (!(span.traceFlags & TRACE_FLAG_SAMPLED)) return;
op_otel_span_start(
span.traceId,
span.spanId,
span.parentSpanId ?? "",
span.kind,
span.name,
span.startTime,
span.endTime,
);
if (span.status !== null && span.status.code !== 0) {
op_otel_span_continue(span.code, span.message ?? "");
}
const attributes = ObjectEntries(span.attributes);
let i = 0;
while (i < attributes.length) {
if (i + 2 < attributes.length) {
op_otel_span_attribute3(
attributes.length,
attributes[i][0],
attributes[i][1],
attributes[i + 1][0],
attributes[i + 1][1],
attributes[i + 2][0],
attributes[i + 2][1],
);
i += 3;
} else if (i + 1 < attributes.length) {
op_otel_span_attribute2(
attributes.length,
attributes[i][0],
attributes[i][1],
attributes[i + 1][0],
attributes[i + 1][1],
);
i += 2;
} else {
op_otel_span_attribute(
attributes.length,
attributes[i][0],
attributes[i][1],
);
i += 1;
}
}
op_otel_span_flush();
}
const now = () => (performance.timeOrigin + performance.now()) / 1000;
const INVALID_SPAN_ID = "0000000000000000";
const INVALID_TRACE_ID = "00000000000000000000000000000000";
const NO_ASYNC_CONTEXT = {};
class Span {
traceId;
spanId;
parentSpanId;
kind;
name;
startTime;
endTime;
status = null;
attributes = { __proto__: null };
traceFlags = TRACE_FLAG_SAMPLED;
enabled = TRACING_ENABLED;
#asyncContext = NO_ASYNC_CONTEXT;
constructor(name, kind = "internal") {
if (!this.enabled) {
this.traceId = INVALID_TRACE_ID;
this.spanId = INVALID_SPAN_ID;
this.parentSpanId = INVALID_SPAN_ID;
return;
}
this.startTime = now();
this.spanId = generateId(SPAN_ID_BYTES);
let traceId;
let parentSpanId;
const parent = Span.current();
if (parent) {
if (parent.spanId !== undefined) {
parentSpanId = parent.spanId;
traceId = parent.traceId;
} else {
const context = parent.spanContext();
parentSpanId = context.spanId;
traceId = context.traceId;
}
}
if (
traceId && traceId !== INVALID_TRACE_ID && parentSpanId &&
parentSpanId !== INVALID_SPAN_ID
) {
this.traceId = traceId;
this.parentSpanId = parentSpanId;
} else {
this.traceId = generateId(TRACE_ID_BYTES);
this.parentSpanId = INVALID_SPAN_ID;
}
this.name = name;
switch (kind) {
case "internal":
this.kind = 0;
break;
case "server":
this.kind = 1;
break;
case "client":
this.kind = 2;
break;
case "producer":
this.kind = 3;
break;
case "consumer":
this.kind = 4;
break;
default:
throw new Error(`Invalid span kind: ${kind}`);
}
this.enter();
}
// helper function to match otel js api
spanContext() {
return {
traceId: this.traceId,
spanId: this.spanId,
traceFlags: this.traceFlags,
};
}
setAttribute(name, value) {
if (!this.enabled) return;
this.attributes[name] = value;
}
enter() {
if (!this.enabled) return;
const context = (CURRENT.get() || ROOT_CONTEXT).setValue(SPAN_KEY, this);
this.#asyncContext = CURRENT.enter(context);
}
exit() {
if (!this.enabled || this.#asyncContext === NO_ASYNC_CONTEXT) return;
setAsyncContext(this.#asyncContext);
this.#asyncContext = NO_ASYNC_CONTEXT;
}
end() {
if (!this.enabled || this.endTime !== undefined) return;
this.exit();
this.endTime = now();
submit(this);
}
[SymbolDispose]() {
this.end();
}
static current() {
return CURRENT.get()?.getValue(SPAN_KEY);
}
}
function hrToSecs(hr) {
return ((hr[0] * 1e3 + hr[1] / 1e6) / 1000);
}
// Exporter compatible with opentelemetry js library
class SpanExporter {
export(spans, resultCallback) {
try {
for (let i = 0; i < spans.length; i += 1) {
const span = spans[i];
const context = span.spanContext();
submit({
spanId: context.spanId,
traceId: context.traceId,
traceFlags: context.traceFlags,
name: span.name,
kind: span.kind,
parentSpanId: span.parentSpanId,
startTime: hrToSecs(span.startTime),
endTime: hrToSecs(span.endTime),
status: span.status,
attributes: span.attributes,
});
}
resultCallback({ code: 0 });
} catch (error) {
resultCallback({ code: 1, error });
}
}
async shutdown() {}
async forceFlush() {}
}
// SPAN_KEY matches symbol in otel-js library
const SPAN_KEY = SymbolFor("OpenTelemetry Context Key SPAN");
// Context tracker compatible with otel-js api
class Context {
#data = new SafeMap();
constructor(data) {
this.#data = data ? new SafeMap(data) : new SafeMap();
}
getValue(key) {
return this.#data.get(key);
}
setValue(key, value) {
const c = new Context(this.#data);
c.#data.set(key, value);
return c;
}
deleteValue(key) {
const c = new Context(this.#data);
c.#data.delete(key);
return c;
}
}
const ROOT_CONTEXT = new Context();
// Context manager for opentelemetry js library
class ContextManager {
active() {
return CURRENT.get() ?? ROOT_CONTEXT;
}
with(context, fn, thisArg, ...args) {
const ctx = CURRENT.enter(context);
try {
return ReflectApply(fn, thisArg, args);
} finally {
setAsyncContext(ctx);
}
}
bind(context, f) {
  return function (...args) {
    const ctx = CURRENT.enter(context);
    try {
      return ReflectApply(f, this, args);
    } finally {
      setAsyncContext(ctx);
    }
  };
}
enable() {
return this;
}
disable() {
return this;
}
}
function otelLog(message, level) {
let traceId = "";
let spanId = "";
let traceFlags = 0;
const span = Span.current();
if (span) {
if (span.spanId !== undefined) {
spanId = span.spanId;
traceId = span.traceId;
traceFlags = span.traceFlags;
} else {
const context = span.spanContext();
spanId = context.spanId;
traceId = context.traceId;
traceFlags = context.traceFlags;
}
}
return op_otel_log(message, level, traceId, spanId, traceFlags);
}
const otelConsoleConfig = {
ignore: 0,
capture: 1,
replace: 2,
};
export function bootstrap(config) {
if (config.length === 0) return;
const { 0: consoleConfig } = config;
TRACING_ENABLED = true;
switch (consoleConfig) {
case otelConsoleConfig.capture:
core.wrapConsole(globalThis.console, new Console(otelLog));
break;
case otelConsoleConfig.replace:
ObjectDefineProperty(
globalThis,
"console",
core.propNonEnumerable(new Console(otelLog)),
);
break;
default:
break;
}
}
export const tracing = {
get enabled() {
return TRACING_ENABLED;
},
Span,
SpanExporter,
ContextManager,
};
// TODO(devsnek): implement metrics
export const metrics = {};
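The `Span` class above follows the pattern of construct, annotate, then end (or dispose). A hypothetical sketch, assuming the unstable namespace is surfaced as `Deno.tracing` per the `denoNsUnstableById` entry earlier in this diff:

// deno-lint-ignore no-explicit-any
const { Span } = (Deno as any).tracing; // assumed surface; not a stable API

{
  using span = new Span("handle-request", "server"); // ended automatically via Symbol.dispose
  span.setAttribute("http.route", "/hello");
  // Work done here runs with this span as the current async context,
  // so nested spans pick it up as their parent.
}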

View file

@ -99,18 +99,24 @@ pub static UNSTABLE_GRANULAR_FLAGS: &[UnstableGranularFlag] = &[
show_in_help: true,
id: 7,
},
UnstableGranularFlag {
name: "otel",
help_text: "Enable unstable OpenTelemetry features",
show_in_help: false,
id: 8,
},
// TODO(bartlomieju): consider removing it
UnstableGranularFlag {
name: ops::process::UNSTABLE_FEATURE_NAME,
help_text: "Enable unstable process APIs",
show_in_help: false,
id: 8,
id: 9,
},
UnstableGranularFlag {
name: "temporal",
help_text: "Enable unstable Temporal API",
show_in_help: true,
id: 9,
id: 10,
},
UnstableGranularFlag {
name: "unsafe-proto",
@ -118,19 +124,19 @@ pub static UNSTABLE_GRANULAR_FLAGS: &[UnstableGranularFlag] = &[
show_in_help: true,
// This number is used directly in the JS code. Search
// for "unstableIds" to see where it's used.
id: 10,
id: 11,
},
UnstableGranularFlag {
name: deno_webgpu::UNSTABLE_FEATURE_NAME,
help_text: "Enable unstable `WebGPU` APIs",
show_in_help: true,
id: 11,
id: 12,
},
UnstableGranularFlag {
name: ops::worker_host::UNSTABLE_FEATURE_NAME,
help_text: "Enable unstable Web Worker APIs",
show_in_help: true,
id: 12,
id: 13,
},
];

View file

@ -4,6 +4,7 @@ pub mod bootstrap;
pub mod fs_events;
pub mod http;
pub mod os;
pub mod otel;
pub mod permissions;
pub mod process;
pub mod runtime;

View file

@ -186,6 +186,8 @@ fn op_get_exit_code(state: &mut OpState) -> i32 {
#[op2(fast)]
fn op_exit(state: &mut OpState) {
crate::ops::otel::otel_drop_state(state);
let code = state.borrow::<ExitCode>().get();
std::process::exit(code)
}

686
runtime/ops/otel.rs Normal file
View file

@ -0,0 +1,686 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::tokio_util::create_basic_runtime;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::{self};
use deno_core::futures::channel::mpsc;
use deno_core::futures::channel::mpsc::UnboundedSender;
use deno_core::futures::future::BoxFuture;
use deno_core::futures::stream;
use deno_core::futures::Stream;
use deno_core::futures::StreamExt;
use deno_core::op2;
use deno_core::v8;
use deno_core::OpState;
use once_cell::sync::Lazy;
use opentelemetry::logs::Severity;
use opentelemetry::trace::SpanContext;
use opentelemetry::trace::SpanId;
use opentelemetry::trace::SpanKind;
use opentelemetry::trace::Status as SpanStatus;
use opentelemetry::trace::TraceFlags;
use opentelemetry::trace::TraceId;
use opentelemetry::InstrumentationScope;
use opentelemetry::Key;
use opentelemetry::KeyValue;
use opentelemetry::StringValue;
use opentelemetry::Value;
use opentelemetry_otlp::HttpExporterBuilder;
use opentelemetry_otlp::Protocol;
use opentelemetry_otlp::WithExportConfig;
use opentelemetry_otlp::WithHttpConfig;
use opentelemetry_sdk::export::trace::SpanData;
use opentelemetry_sdk::logs::BatchLogProcessor;
use opentelemetry_sdk::logs::LogProcessor as LogProcessorTrait;
use opentelemetry_sdk::logs::LogRecord;
use opentelemetry_sdk::trace::BatchSpanProcessor;
use opentelemetry_sdk::trace::SpanProcessor as SpanProcessorTrait;
use opentelemetry_sdk::Resource;
use opentelemetry_semantic_conventions::resource::PROCESS_RUNTIME_NAME;
use opentelemetry_semantic_conventions::resource::PROCESS_RUNTIME_VERSION;
use opentelemetry_semantic_conventions::resource::TELEMETRY_SDK_LANGUAGE;
use opentelemetry_semantic_conventions::resource::TELEMETRY_SDK_NAME;
use opentelemetry_semantic_conventions::resource::TELEMETRY_SDK_VERSION;
use serde::Deserialize;
use serde::Serialize;
use std::borrow::Cow;
use std::env;
use std::fmt::Debug;
use std::pin::Pin;
use std::task::Context;
use std::task::Poll;
use std::thread;
use std::time::Duration;
use std::time::SystemTime;
type SpanProcessor = BatchSpanProcessor<OtelSharedRuntime>;
type LogProcessor = BatchLogProcessor<OtelSharedRuntime>;
deno_core::extension!(
deno_otel,
ops = [op_otel_log, op_otel_span_start, op_otel_span_continue, op_otel_span_attribute, op_otel_span_attribute2, op_otel_span_attribute3, op_otel_span_flush],
options = {
otel_config: Option<OtelConfig>, // `None` means OpenTelemetry is disabled.
},
state = |state, options| {
if let Some(otel_config) = options.otel_config {
otel_create_globals(otel_config, state).unwrap();
}
}
);
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OtelConfig {
pub runtime_name: Cow<'static, str>,
pub runtime_version: Cow<'static, str>,
pub console: OtelConsoleConfig,
}
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[repr(u8)]
pub enum OtelConsoleConfig {
Ignore = 0,
Capture = 1,
Replace = 2,
}
impl Default for OtelConfig {
fn default() -> Self {
Self {
runtime_name: Cow::Borrowed(env!("CARGO_PKG_NAME")),
runtime_version: Cow::Borrowed(env!("CARGO_PKG_VERSION")),
console: OtelConsoleConfig::Capture,
}
}
}
static OTEL_SHARED_RUNTIME_SPAWN_TASK_TX: Lazy<
UnboundedSender<BoxFuture<'static, ()>>,
> = Lazy::new(otel_create_shared_runtime);
fn otel_create_shared_runtime() -> UnboundedSender<BoxFuture<'static, ()>> {
let (spawn_task_tx, mut spawn_task_rx) =
mpsc::unbounded::<BoxFuture<'static, ()>>();
thread::spawn(move || {
let rt = create_basic_runtime();
rt.block_on(async move {
while let Some(task) = spawn_task_rx.next().await {
tokio::spawn(task);
}
});
});
spawn_task_tx
}
#[derive(Clone, Copy)]
struct OtelSharedRuntime;
impl hyper::rt::Executor<BoxFuture<'static, ()>> for OtelSharedRuntime {
fn execute(&self, fut: BoxFuture<'static, ()>) {
(*OTEL_SHARED_RUNTIME_SPAWN_TASK_TX)
.unbounded_send(fut)
.expect("failed to send task to shared OpenTelemetry runtime");
}
}
impl opentelemetry_sdk::runtime::Runtime for OtelSharedRuntime {
type Interval = Pin<Box<dyn Stream<Item = ()> + Send + 'static>>;
type Delay = Pin<Box<tokio::time::Sleep>>;
fn interval(&self, period: Duration) -> Self::Interval {
stream::repeat(())
.then(move |_| tokio::time::sleep(period))
.boxed()
}
fn spawn(&self, future: BoxFuture<'static, ()>) {
(*OTEL_SHARED_RUNTIME_SPAWN_TASK_TX)
.unbounded_send(future)
.expect("failed to send task to shared OpenTelemetry runtime");
}
fn delay(&self, duration: Duration) -> Self::Delay {
Box::pin(tokio::time::sleep(duration))
}
}
impl opentelemetry_sdk::runtime::RuntimeChannel for OtelSharedRuntime {
type Receiver<T: Debug + Send> = BatchMessageChannelReceiver<T>;
type Sender<T: Debug + Send> = BatchMessageChannelSender<T>;
fn batch_message_channel<T: Debug + Send>(
&self,
capacity: usize,
) -> (Self::Sender<T>, Self::Receiver<T>) {
let (batch_tx, batch_rx) = tokio::sync::mpsc::channel::<T>(capacity);
(batch_tx.into(), batch_rx.into())
}
}
#[derive(Debug)]
pub struct BatchMessageChannelSender<T: Send> {
sender: tokio::sync::mpsc::Sender<T>,
}
impl<T: Send> From<tokio::sync::mpsc::Sender<T>>
for BatchMessageChannelSender<T>
{
fn from(sender: tokio::sync::mpsc::Sender<T>) -> Self {
Self { sender }
}
}
impl<T: Send> opentelemetry_sdk::runtime::TrySend
for BatchMessageChannelSender<T>
{
type Message = T;
fn try_send(
&self,
item: Self::Message,
) -> Result<(), opentelemetry_sdk::runtime::TrySendError> {
self.sender.try_send(item).map_err(|err| match err {
tokio::sync::mpsc::error::TrySendError::Full(_) => {
opentelemetry_sdk::runtime::TrySendError::ChannelFull
}
tokio::sync::mpsc::error::TrySendError::Closed(_) => {
opentelemetry_sdk::runtime::TrySendError::ChannelClosed
}
})
}
}
pub struct BatchMessageChannelReceiver<T> {
receiver: tokio::sync::mpsc::Receiver<T>,
}
impl<T> From<tokio::sync::mpsc::Receiver<T>>
for BatchMessageChannelReceiver<T>
{
fn from(receiver: tokio::sync::mpsc::Receiver<T>) -> Self {
Self { receiver }
}
}
impl<T> Stream for BatchMessageChannelReceiver<T> {
type Item = T;
fn poll_next(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<Self::Item>> {
self.receiver.poll_recv(cx)
}
}
mod hyper_client {
use http_body_util::BodyExt;
use http_body_util::Full;
use hyper::body::Body as HttpBody;
use hyper::body::Frame;
use hyper_util::client::legacy::connect::HttpConnector;
use hyper_util::client::legacy::Client;
use opentelemetry_http::Bytes;
use opentelemetry_http::HttpError;
use opentelemetry_http::Request;
use opentelemetry_http::Response;
use opentelemetry_http::ResponseExt;
use std::fmt::Debug;
use std::pin::Pin;
use std::task::Poll;
use std::task::{self};
use super::OtelSharedRuntime;
// same as opentelemetry_http::HyperClient except it uses OtelSharedRuntime
#[derive(Debug, Clone)]
pub struct HyperClient {
inner: Client<HttpConnector, Body>,
}
impl HyperClient {
pub fn new() -> Self {
Self {
inner: Client::builder(OtelSharedRuntime).build(HttpConnector::new()),
}
}
}
#[async_trait::async_trait]
impl opentelemetry_http::HttpClient for HyperClient {
async fn send(
&self,
request: Request<Vec<u8>>,
) -> Result<Response<Bytes>, HttpError> {
let (parts, body) = request.into_parts();
let request = Request::from_parts(parts, Body(Full::from(body)));
let mut response = self.inner.request(request).await?;
let headers = std::mem::take(response.headers_mut());
let mut http_response = Response::builder()
.status(response.status())
.body(response.into_body().collect().await?.to_bytes())?;
*http_response.headers_mut() = headers;
Ok(http_response.error_for_status()?)
}
}
#[pin_project::pin_project]
pub struct Body(#[pin] Full<Bytes>);
impl HttpBody for Body {
type Data = Bytes;
type Error = Box<dyn std::error::Error + Send + Sync + 'static>;
#[inline]
fn poll_frame(
self: Pin<&mut Self>,
cx: &mut task::Context<'_>,
) -> Poll<Option<Result<Frame<Self::Data>, Self::Error>>> {
self.project().0.poll_frame(cx).map_err(Into::into)
}
#[inline]
fn is_end_stream(&self) -> bool {
self.0.is_end_stream()
}
#[inline]
fn size_hint(&self) -> hyper::body::SizeHint {
self.0.size_hint()
}
}
}
fn otel_create_globals(
config: OtelConfig,
op_state: &mut OpState,
) -> anyhow::Result<()> {
// Parse the `OTEL_EXPORTER_OTLP_PROTOCOL` variable. The opentelemetry_*
// crates don't do this automatically.
// TODO(piscisaureus): enable GRPC support.
let protocol = match env::var("OTEL_EXPORTER_OTLP_PROTOCOL").as_deref() {
Ok("http/protobuf") => Protocol::HttpBinary,
Ok("http/json") => Protocol::HttpJson,
Ok("") | Err(env::VarError::NotPresent) => {
return Ok(());
}
Ok(protocol) => {
return Err(anyhow!(
"Env var OTEL_EXPORTER_OTLP_PROTOCOL specifies an unsupported protocol: {}",
protocol
));
}
Err(err) => {
return Err(anyhow!(
"Failed to read env var OTEL_EXPORTER_OTLP_PROTOCOL: {}",
err
))
}
};
// Define the resource attributes that will be attached to all log records.
// These attributes are sourced as follows (in order of precedence):
// * The `service.name` attribute from the `OTEL_SERVICE_NAME` env var.
// * Additional attributes from the `OTEL_RESOURCE_ATTRIBUTES` env var.
// * Default attribute values defined here.
// TODO(piscisaureus): add more default attributes (e.g. script path).
let mut resource = Resource::default();
// Add the runtime name and version to the resource attributes. Also override
// the `telemetry.sdk` attributes to include the Deno runtime.
resource = resource.merge(&Resource::new(vec![
KeyValue::new(PROCESS_RUNTIME_NAME, config.runtime_name),
KeyValue::new(PROCESS_RUNTIME_VERSION, config.runtime_version.clone()),
KeyValue::new(
TELEMETRY_SDK_LANGUAGE,
format!(
"deno-{}",
resource.get(Key::new(TELEMETRY_SDK_LANGUAGE)).unwrap()
),
),
KeyValue::new(
TELEMETRY_SDK_NAME,
format!(
"deno-{}",
resource.get(Key::new(TELEMETRY_SDK_NAME)).unwrap()
),
),
KeyValue::new(
TELEMETRY_SDK_VERSION,
format!(
"{}-{}",
config.runtime_version,
resource.get(Key::new(TELEMETRY_SDK_VERSION)).unwrap()
),
),
]));
// The OTLP endpoint is automatically picked up from the
// `OTEL_EXPORTER_OTLP_ENDPOINT` environment variable. Additional headers can
// be specified using `OTEL_EXPORTER_OTLP_HEADERS`.
let client = hyper_client::HyperClient::new();
let span_exporter = HttpExporterBuilder::default()
.with_http_client(client.clone())
.with_protocol(protocol)
.build_span_exporter()?;
let mut span_processor =
BatchSpanProcessor::builder(span_exporter, OtelSharedRuntime).build();
span_processor.set_resource(&resource);
op_state.put::<SpanProcessor>(span_processor);
let log_exporter = HttpExporterBuilder::default()
.with_http_client(client)
.with_protocol(protocol)
.build_log_exporter()?;
let log_processor =
BatchLogProcessor::builder(log_exporter, OtelSharedRuntime).build();
log_processor.set_resource(&resource);
op_state.put::<LogProcessor>(log_processor);
Ok(())
}
/// This function is called by the runtime whenever it is about to call
/// `os::process::exit()`, to ensure that all OpenTelemetry logs are properly
/// flushed before the process terminates.
pub fn otel_drop_state(state: &mut OpState) {
if let Some(processor) = state.try_take::<SpanProcessor>() {
let _ = processor.force_flush();
drop(processor);
}
if let Some(processor) = state.try_take::<LogProcessor>() {
let _ = processor.force_flush();
drop(processor);
}
}
#[op2(fast)]
fn op_otel_log(
state: &mut OpState,
#[string] message: String,
#[smi] level: i32,
#[string] trace_id: &str,
#[string] span_id: &str,
#[smi] trace_flags: u8,
) {
let Some(logger) = state.try_borrow::<LogProcessor>() else {
log::error!("op_otel_log: OpenTelemetry Logger not available");
return;
};
// Convert the integer log level that ext/console uses to the corresponding
// OpenTelemetry log severity.
let severity = match level {
..=0 => Severity::Debug,
1 => Severity::Info,
2 => Severity::Warn,
3.. => Severity::Error,
};
let mut log_record = LogRecord::default();
log_record.observed_timestamp = Some(SystemTime::now());
log_record.body = Some(message.into());
log_record.severity_number = Some(severity);
log_record.severity_text = Some(severity.name());
if let (Ok(trace_id), Ok(span_id)) =
(TraceId::from_hex(trace_id), SpanId::from_hex(span_id))
{
let span_context = SpanContext::new(
trace_id,
span_id,
TraceFlags::new(trace_flags),
false,
Default::default(),
);
log_record.trace_context = Some((&span_context).into());
}
logger.emit(
&mut log_record,
&InstrumentationScope::builder("deno").build(),
);
}
struct TemporarySpan(SpanData);
#[allow(clippy::too_many_arguments)]
#[op2(fast)]
fn op_otel_span_start<'s>(
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
trace_id: v8::Local<'s, v8::Value>,
span_id: v8::Local<'s, v8::Value>,
parent_span_id: v8::Local<'s, v8::Value>,
#[smi] span_kind: u8,
name: v8::Local<'s, v8::Value>,
start_time: f64,
end_time: f64,
) -> Result<(), anyhow::Error> {
if let Some(temporary_span) = state.try_take::<TemporarySpan>() {
let Some(span_processor) = state.try_borrow::<SpanProcessor>() else {
return Ok(());
};
span_processor.on_end(temporary_span.0);
};
let trace_id = {
let x = v8::ValueView::new(scope, trace_id.try_cast()?);
match x.data() {
v8::ValueViewData::OneByte(bytes) => {
TraceId::from_hex(&String::from_utf8_lossy(bytes))?
}
_ => return Err(anyhow!("invalid trace_id")),
}
};
let span_id = {
let x = v8::ValueView::new(scope, span_id.try_cast()?);
match x.data() {
v8::ValueViewData::OneByte(bytes) => {
SpanId::from_hex(&String::from_utf8_lossy(bytes))?
}
_ => return Err(anyhow!("invalid span_id")),
}
};
let parent_span_id = {
let x = v8::ValueView::new(scope, parent_span_id.try_cast()?);
match x.data() {
v8::ValueViewData::OneByte(bytes) => {
let s = String::from_utf8_lossy(bytes);
if s.is_empty() {
SpanId::INVALID
} else {
SpanId::from_hex(&s)?
}
}
_ => return Err(anyhow!("invalid parent_span_id")),
}
};
let name = {
let x = v8::ValueView::new(scope, name.try_cast()?);
match x.data() {
v8::ValueViewData::OneByte(bytes) => {
String::from_utf8_lossy(bytes).into_owned()
}
v8::ValueViewData::TwoByte(bytes) => String::from_utf16_lossy(bytes),
}
};
let temporary_span = TemporarySpan(SpanData {
span_context: SpanContext::new(
trace_id,
span_id,
TraceFlags::SAMPLED,
false,
Default::default(),
),
parent_span_id,
span_kind: match span_kind {
0 => SpanKind::Internal,
1 => SpanKind::Server,
2 => SpanKind::Client,
3 => SpanKind::Producer,
4 => SpanKind::Consumer,
_ => return Err(anyhow!("invalid span kind")),
},
name: Cow::Owned(name),
start_time: SystemTime::UNIX_EPOCH
.checked_add(std::time::Duration::from_secs_f64(start_time))
.ok_or_else(|| anyhow!("invalid start time"))?,
end_time: SystemTime::UNIX_EPOCH
.checked_add(std::time::Duration::from_secs_f64(end_time))
.ok_or_else(|| anyhow!("invalid start time"))?,
attributes: Vec::new(),
dropped_attributes_count: 0,
events: Default::default(),
links: Default::default(),
status: SpanStatus::Unset,
instrumentation_scope: InstrumentationScope::builder("deno").build(),
});
state.put(temporary_span);
Ok(())
}
#[op2(fast)]
fn op_otel_span_continue(
state: &mut OpState,
#[smi] status: u8,
#[string] error_description: Cow<'_, str>,
) {
if let Some(temporary_span) = state.try_borrow_mut::<TemporarySpan>() {
temporary_span.0.status = match status {
0 => SpanStatus::Unset,
1 => SpanStatus::Ok,
2 => SpanStatus::Error {
description: Cow::Owned(error_description.into_owned()),
},
_ => return,
};
}
}
macro_rules! attr {
($scope:ident, $temporary_span:ident, $name:ident, $value:ident) => {
let name = if let Ok(name) = $name.try_cast() {
let view = v8::ValueView::new($scope, name);
match view.data() {
v8::ValueViewData::OneByte(bytes) => {
Some(String::from_utf8_lossy(bytes).into_owned())
}
v8::ValueViewData::TwoByte(bytes) => {
Some(String::from_utf16_lossy(bytes))
}
}
} else {
None
};
let value = if let Ok(string) = $value.try_cast::<v8::String>() {
Some(Value::String(StringValue::from({
let x = v8::ValueView::new($scope, string);
match x.data() {
v8::ValueViewData::OneByte(bytes) => {
String::from_utf8_lossy(bytes).into_owned()
}
v8::ValueViewData::TwoByte(bytes) => String::from_utf16_lossy(bytes),
}
})))
} else if let Ok(number) = $value.try_cast::<v8::Number>() {
Some(Value::F64(number.value()))
} else if let Ok(boolean) = $value.try_cast::<v8::Boolean>() {
Some(Value::Bool(boolean.is_true()))
} else if let Ok(bigint) = $value.try_cast::<v8::BigInt>() {
let (i64_value, _lossless) = bigint.i64_value();
Some(Value::I64(i64_value))
} else {
None
};
if let (Some(name), Some(value)) = (name, value) {
$temporary_span
.0
.attributes
.push(KeyValue::new(name, value));
} else {
$temporary_span.0.dropped_attributes_count += 1;
}
};
}
#[op2(fast)]
fn op_otel_span_attribute<'s>(
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
#[smi] capacity: u32,
key: v8::Local<'s, v8::Value>,
value: v8::Local<'s, v8::Value>,
) {
if let Some(temporary_span) = state.try_borrow_mut::<TemporarySpan>() {
temporary_span.0.attributes.reserve_exact(
(capacity as usize) - temporary_span.0.attributes.capacity(),
);
attr!(scope, temporary_span, key, value);
}
}
#[op2(fast)]
fn op_otel_span_attribute2<'s>(
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
#[smi] capacity: u32,
key1: v8::Local<'s, v8::Value>,
value1: v8::Local<'s, v8::Value>,
key2: v8::Local<'s, v8::Value>,
value2: v8::Local<'s, v8::Value>,
) {
if let Some(temporary_span) = state.try_borrow_mut::<TemporarySpan>() {
temporary_span.0.attributes.reserve_exact(
(capacity as usize) - temporary_span.0.attributes.capacity(),
);
attr!(scope, temporary_span, key1, value1);
attr!(scope, temporary_span, key2, value2);
}
}
#[allow(clippy::too_many_arguments)]
#[op2(fast)]
fn op_otel_span_attribute3<'s>(
scope: &mut v8::HandleScope<'s>,
state: &mut OpState,
#[smi] capacity: u32,
key1: v8::Local<'s, v8::Value>,
value1: v8::Local<'s, v8::Value>,
key2: v8::Local<'s, v8::Value>,
value2: v8::Local<'s, v8::Value>,
key3: v8::Local<'s, v8::Value>,
value3: v8::Local<'s, v8::Value>,
) {
if let Some(temporary_span) = state.try_borrow_mut::<TemporarySpan>() {
temporary_span.0.attributes.reserve_exact(
(capacity as usize) - temporary_span.0.attributes.capacity(),
);
attr!(scope, temporary_span, key1, value1);
attr!(scope, temporary_span, key2, value2);
attr!(scope, temporary_span, key3, value3);
}
}
#[op2(fast)]
fn op_otel_span_flush(state: &mut OpState) {
let Some(temporary_span) = state.try_take::<TemporarySpan>() else {
return;
};
let Some(span_processor) = state.try_borrow::<SpanProcessor>() else {
return;
};
span_processor.on_end(temporary_span.0);
}
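Export is driven entirely by environment variables read in `otel_create_globals`; a sketch of a run against a local OTLP collector (the endpoint, service name, and the flag name are assumptions, not taken from this diff):

// main.ts
// OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf \
// OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318 \
// OTEL_SERVICE_NAME=my-service \
// deno run --unstable-otel --allow-net main.ts
console.log("hello otel"); // with console capture enabled, also emitted as an OTLP log record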

View file

@ -756,14 +756,17 @@ fn check_run_permission(
if !env_var_names.is_empty() {
// we don't allow users to launch subprocesses with any LD_ or DYLD_*
// env vars set because this allows executing code (ex. LD_PRELOAD)
return Err(CheckRunPermissionError::Other(deno_core::error::custom_error(
return Err(CheckRunPermissionError::Other(
deno_core::error::custom_error(
"NotCapable",
format!(
"Requires --allow-all permissions to spawn subprocess with {} environment variable{}.",
"Requires --allow-run permissions to spawn subprocess with {0} environment variable{1}. Alternatively, spawn with {2} environment variable{1} unset.",
env_var_names.join(", "),
if env_var_names.len() != 1 { "s" } else { "" }
)
)));
if env_var_names.len() != 1 { "s" } else { "" },
if env_var_names.len() != 1 { "these" } else { "the" }
),
),
));
}
permissions.check_run(cmd, api_name)?;
}
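The reworded error suggests spawning with the dynamic-linker variables unset rather than granting broad permissions; a sketch of doing that with `Deno.Command`:

const env = { ...Deno.env.toObject() };
delete env.LD_PRELOAD; // drop any LD_*/DYLD_* variables before spawning

const out = await new Deno.Command("echo", {
  args: ["hello"],
  clearEnv: true, // start the child from a clean environment...
  env, // ...and pass back only the variables we kept
}).output();
console.log(new TextDecoder().decode(out.stdout));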

View file

@ -40,8 +40,8 @@ pub use prompter::PromptResponse;
#[derive(Debug, thiserror::Error)]
#[error("Requires {access}, {}", format_permission_error(.name))]
pub struct PermissionDeniedError {
access: String,
name: &'static str,
pub access: String,
pub name: &'static str,
}
fn format_permission_error(name: &'static str) -> String {
@ -1461,7 +1461,7 @@ pub struct SysDescriptor(String);
impl SysDescriptor {
pub fn parse(kind: String) -> Result<Self, SysDescriptorParseError> {
match kind.as_str() {
"hostname" | "osRelease" | "osUptime" | "loadavg"
"hostname" | "inspector" | "osRelease" | "osUptime" | "loadavg"
| "networkInterfaces" | "systemMemoryInfo" | "uid" | "gid" | "cpus"
| "homedir" | "getegid" | "statfs" | "getPriority" | "setPriority"
| "userInfo" => Ok(Self(kind)),

View file

@ -47,6 +47,7 @@ extension!(runtime,
"40_signals.js",
"40_tty.js",
"41_prompt.js",
"telemetry.js",
"90_deno_ns.js",
"98_global_scope_shared.js",
"98_global_scope_window.js",

View file

@ -312,6 +312,7 @@ pub fn create_runtime_snapshot(
),
ops::fs_events::deno_fs_events::init_ops(),
ops::os::deno_os::init_ops(Default::default()),
ops::otel::deno_otel::init_ops(None),
ops::permissions::deno_permissions::init_ops(),
ops::process::deno_process::init_ops(None),
ops::signal::deno_signal::init_ops(),

View file

@ -393,6 +393,13 @@ pub struct WebWorker {
maybe_worker_metadata: Option<WorkerMetadata>,
}
impl Drop for WebWorker {
fn drop(&mut self) {
// clean up the package.json thread local cache
node_resolver::PackageJsonThreadLocalCache::clear();
}
}
impl WebWorker {
pub fn bootstrap_from_options(
services: WebWorkerServiceOptions,
@ -505,6 +512,9 @@ impl WebWorker {
),
ops::fs_events::deno_fs_events::init_ops_and_esm(),
ops::os::deno_os_worker::init_ops_and_esm(),
ops::otel::deno_otel::init_ops_and_esm(
options.bootstrap.otel_config.clone(),
),
ops::permissions::deno_permissions::init_ops_and_esm(),
ops::process::deno_process::init_ops_and_esm(
services.npm_process_state_provider,

View file

@ -422,6 +422,9 @@ impl MainWorker {
),
ops::fs_events::deno_fs_events::init_ops_and_esm(),
ops::os::deno_os::init_ops_and_esm(exit_code.clone()),
ops::otel::deno_otel::init_ops_and_esm(
options.bootstrap.otel_config.clone(),
),
ops::permissions::deno_permissions::init_ops_and_esm(),
ops::process::deno_process::init_ops_and_esm(
services.npm_process_state_provider,

View file

@ -1,5 +1,6 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use crate::ops::otel::OtelConfig;
use deno_core::v8;
use deno_core::ModuleSpecifier;
use serde::Serialize;
@ -118,6 +119,8 @@ pub struct BootstrapOptions {
// Used by `deno serve`
pub serve_port: Option<u16>,
pub serve_host: Option<String>,
// OpenTelemetry output options. If `None`, OpenTelemetry is disabled.
pub otel_config: Option<OtelConfig>,
}
impl Default for BootstrapOptions {
@ -152,6 +155,7 @@ impl Default for BootstrapOptions {
mode: WorkerExecutionMode::None,
serve_port: Default::default(),
serve_host: Default::default(),
otel_config: None,
}
}
}
@ -193,6 +197,8 @@ struct BootstrapV8<'a>(
Option<bool>,
// serve worker count
Option<usize>,
// OTEL config
Box<[u8]>,
);
impl BootstrapOptions {
@ -219,6 +225,11 @@ impl BootstrapOptions {
self.serve_host.as_deref(),
serve_is_main,
serve_worker_count,
if let Some(otel_config) = self.otel_config.as_ref() {
Box::new([otel_config.console as u8])
} else {
Box::new([])
},
);
bootstrap.serialize(ser).unwrap()

View file

@ -16139,6 +16139,55 @@ fn lsp_cjs_import_dual() {
);
}
#[test]
fn lsp_type_commonjs() {
let context = TestContextBuilder::new()
.use_http_server()
.use_temp_cwd()
.add_npm_env_vars()
.build();
let temp_dir = context.temp_dir();
temp_dir.write("deno.json", r#"{}"#);
temp_dir.write(
"package.json",
r#"{
"type": "commonjs",
"dependencies": {
"@denotest/dual-cjs-esm": "1"
}
}"#,
);
context.run_npm("install");
let mut client = context.new_lsp_command().build();
client.initialize_default();
let main_url = temp_dir.path().join("main.ts").url_file();
let diagnostics = client.did_open(
json!({
"textDocument": {
"uri": main_url,
"languageId": "typescript",
"version": 1,
// getKind() should resolve as "cjs" and cause a type checker error
"text": "import mod = require('@denotest/dual-cjs-esm');\nconst kind: 'other' = mod.getKind(); console.log(kind);",
}
}),
);
assert_eq!(
json!(diagnostics.all()),
json!([{
"range": {
"start": { "line": 1, "character": 6, },
"end": { "line": 1, "character": 10, },
},
"severity": 1,
"code": 2322,
"source": "deno-ts",
"message": "Type '\"cjs\"' is not assignable to type '\"other\"'.",
}])
);
}
#[test]
fn lsp_ts_code_fix_any_param() {
let context = TestContextBuilder::new().use_temp_cwd().build();

View file

@ -77,6 +77,7 @@
"test-fs-rmdir-recursive.js",
"test-fs-write-file.js",
"test-http-url.parse-https.request.js",
"test-net-autoselectfamily.js",
"test-net-better-error-messages-path.js",
"test-net-connect-buffer.js",
"test-net-connect-buffer2.js",
@ -404,6 +405,7 @@
"test-http-url.parse-only-support-http-https-protocol.js",
"test-icu-transcode.js",
"test-net-access-byteswritten.js",
"test-net-autoselectfamily.js",
"test-net-better-error-messages-listen-path.js",
"test-net-better-error-messages-path.js",
"test-net-better-error-messages-port-hostname.js",

View file

@ -0,0 +1,3 @@
{
"type": "commonjs"
}

View file

@ -1767,7 +1767,6 @@ NOTE: This file should not be manually edited. Please edit `tests/node_compat/co
- [parallel/test-net-autoselectfamily-commandline-option.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-autoselectfamily-commandline-option.js)
- [parallel/test-net-autoselectfamily-default.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-autoselectfamily-default.js)
- [parallel/test-net-autoselectfamily-ipv4first.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-autoselectfamily-ipv4first.js)
- [parallel/test-net-autoselectfamily.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-autoselectfamily.js)
- [parallel/test-net-better-error-messages-listen.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-better-error-messages-listen.js)
- [parallel/test-net-binary.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-binary.js)
- [parallel/test-net-bind-twice.js](https://github.com/nodejs/node/tree/v18.12.1/test/parallel/test-net-bind-twice.js)

View file

@ -473,6 +473,7 @@ const pwdCommand = isWindows ?
module.exports = {
allowGlobals,
defaultAutoSelectFamilyAttemptTimeout: 2500,
expectsError,
expectWarning,
getArrayBufferViews,

View file

@ -1 +0,0 @@
{}

View file

@ -1 +0,0 @@
{}

View file

@ -1 +0,0 @@
{}

View file

@ -1 +0,0 @@
{}

View file

@ -0,0 +1,312 @@
// deno-fmt-ignore-file
// deno-lint-ignore-file
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// Taken from Node 18.12.1
// This file is automatically generated by `tests/node_compat/runner/setup.ts`. Do not modify this file manually.
'use strict';
const common = require('../common');
const { parseDNSPacket, writeDNSPacket } = require('../common/dns');
const assert = require('assert');
const dgram = require('dgram');
const { Resolver } = require('dns');
const { createConnection, createServer } = require('net');
// Test that happy eyeballs algorithm is properly implemented.
// Purposely not using setDefaultAutoSelectFamilyAttemptTimeout here to test the
// parameter is correctly used in options.
//
// Some of the machines in the CI need more time to establish connection
const autoSelectFamilyAttemptTimeout = common.defaultAutoSelectFamilyAttemptTimeout;
function _lookup(resolver, hostname, options, cb) {
resolver.resolve(hostname, 'ANY', (err, replies) => {
assert.notStrictEqual(options.family, 4);
if (err) {
return cb(err);
}
const hosts = replies
.map((r) => ({ address: r.address, family: r.type === 'AAAA' ? 6 : 4 }))
.sort((a, b) => b.family - a.family);
if (options.all === true) {
return cb(null, hosts);
}
return cb(null, hosts[0].address, hosts[0].family);
});
}
function createDnsServer(ipv6Addrs, ipv4Addrs, cb) {
if (!Array.isArray(ipv6Addrs)) {
ipv6Addrs = [ipv6Addrs];
}
if (!Array.isArray(ipv4Addrs)) {
ipv4Addrs = [ipv4Addrs];
}
// Create a DNS server which replies with a AAAA and a A record for the same host
const socket = dgram.createSocket('udp4');
// TODO(kt3k): We use common.mustCallAtLeast instead of common.mustCall
// because Deno sends multiple requests to the DNS server.
// This can be addressed if Deno.resolveDns supports ANY record type.
// See https://github.com/denoland/deno/issues/14492
socket.on('message', common.mustCallAtLeast((msg, { address, port }) => {
const parsed = parseDNSPacket(msg);
const domain = parsed.questions[0].domain;
assert.strictEqual(domain, 'example.org');
socket.send(writeDNSPacket({
id: parsed.id,
questions: parsed.questions,
answers: [
...ipv6Addrs.map((address) => ({ type: 'AAAA', address, ttl: 123, domain: 'example.org' })),
...ipv4Addrs.map((address) => ({ type: 'A', address, ttl: 123, domain: 'example.org' })),
]
}), port, address);
}));
socket.bind(0, () => {
const resolver = new Resolver();
resolver.setServers([`127.0.0.1:${socket.address().port}`]);
cb({ dnsServer: socket, lookup: _lookup.bind(null, resolver) });
});
}
// Test that IPV4 is reached if IPV6 is not reachable
{
createDnsServer('::1', '127.0.0.1', common.mustCall(function({ dnsServer, lookup }) {
const ipv4Server = createServer((socket) => {
socket.on('data', common.mustCall(() => {
socket.write('response-ipv4');
socket.end();
}));
});
ipv4Server.listen(0, '127.0.0.1', common.mustCall(() => {
const port = ipv4Server.address().port;
const connection = createConnection({
host: 'example.org',
port: port,
lookup,
autoSelectFamily: true,
autoSelectFamilyAttemptTimeout,
});
let response = '';
connection.setEncoding('utf-8');
connection.on('ready', common.mustCall(() => {
assert.deepStrictEqual(connection.autoSelectFamilyAttemptedAddresses, [`::1:${port}`, `127.0.0.1:${port}`]);
}));
connection.on('data', (chunk) => {
response += chunk;
});
connection.on('end', common.mustCall(() => {
assert.strictEqual(response, 'response-ipv4');
ipv4Server.close();
dnsServer.close();
}));
connection.write('request');
}));
}));
}
// Test that only the last successful connection is established.
{
createDnsServer(
['2606:4700::6810:85e5', '2606:4700::6810:84e5', "::1"],
// TODO(kt3k): Comment out ipv4 addresses to make the test pass faster.
// Enable this when Deno.connect() call becomes cancellable.
// See https://github.com/denoland/deno/issues/26819
// ['104.20.22.46', '104.20.23.46', '127.0.0.1'],
['127.0.0.1'],
common.mustCall(function({ dnsServer, lookup }) {
const ipv4Server = createServer((socket) => {
socket.on('data', common.mustCall(() => {
socket.write('response-ipv4');
socket.end();
}));
});
ipv4Server.listen(0, '127.0.0.1', common.mustCall(() => {
const port = ipv4Server.address().port;
const connection = createConnection({
host: 'example.org',
port: port,
lookup,
autoSelectFamily: true,
autoSelectFamilyAttemptTimeout,
});
let response = '';
connection.setEncoding('utf-8');
connection.on('ready', common.mustCall(() => {
assert.deepStrictEqual(
connection.autoSelectFamilyAttemptedAddresses,
[
`2606:4700::6810:85e5:${port}`,
`104.20.22.46:${port}`,
`2606:4700::6810:84e5:${port}`,
`104.20.23.46:${port}`,
`::1:${port}`,
`127.0.0.1:${port}`,
]
);
}));
connection.on('data', (chunk) => {
response += chunk;
});
connection.on('end', common.mustCall(() => {
assert.strictEqual(response, 'response-ipv4');
ipv4Server.close();
dnsServer.close();
}));
connection.write('request');
}));
})
);
}
// Test that IPV4 is NOT reached if IPV6 is reachable
if (common.hasIPv6) {
createDnsServer('::1', '127.0.0.1', common.mustCall(function({ dnsServer, lookup }) {
const ipv4Server = createServer((socket) => {
socket.on('data', common.mustNotCall(() => {
socket.write('response-ipv4');
socket.end();
}));
});
const ipv6Server = createServer((socket) => {
socket.on('data', common.mustCall(() => {
socket.write('response-ipv6');
socket.end();
}));
});
ipv4Server.listen(0, '127.0.0.1', common.mustCall(() => {
const port = ipv4Server.address().port;
ipv6Server.listen(port, '::1', common.mustCall(() => {
const connection = createConnection({
host: 'example.org',
port,
lookup,
autoSelectFamily: true,
autoSelectFamilyAttemptTimeout,
});
let response = '';
connection.setEncoding('utf-8');
connection.on('ready', common.mustCall(() => {
assert.deepStrictEqual(connection.autoSelectFamilyAttemptedAddresses, [`::1:${port}`]);
}));
connection.on('data', (chunk) => {
response += chunk;
});
connection.on('end', common.mustCall(() => {
assert.strictEqual(response, 'response-ipv6');
ipv4Server.close();
ipv6Server.close();
dnsServer.close();
}));
connection.write('request');
}));
}));
}));
}
// Test that all errors are returned when no connection succeeded
{
createDnsServer('::1', '127.0.0.1', common.mustCall(function({ dnsServer, lookup }) {
const connection = createConnection({
host: 'example.org',
port: 10,
lookup,
autoSelectFamily: true,
autoSelectFamilyAttemptTimeout,
});
connection.on('ready', common.mustNotCall());
connection.on('error', common.mustCall((error) => {
assert.deepStrictEqual(connection.autoSelectFamilyAttemptedAddresses, ['::1:10', '127.0.0.1:10']);
assert.strictEqual(error.constructor.name, 'AggregateError');
assert.strictEqual(error.errors.length, 2);
const errors = error.errors.map((e) => e.message);
assert.ok(errors.includes('connect ECONNREFUSED 127.0.0.1:10'));
if (common.hasIPv6) {
assert.ok(errors.includes('connect ECONNREFUSED ::1:10'));
}
dnsServer.close();
}));
}));
}
// Test that the option can be disabled
{
createDnsServer('::1', '127.0.0.1', common.mustCall(function({ dnsServer, lookup }) {
const ipv4Server = createServer((socket) => {
socket.on('data', common.mustCall(() => {
socket.write('response-ipv4');
socket.end();
}));
});
ipv4Server.listen(0, '127.0.0.1', common.mustCall(() => {
const port = ipv4Server.address().port;
const connection = createConnection({
host: 'example.org',
port,
lookup,
autoSelectFamily: false,
});
connection.on('ready', common.mustNotCall());
connection.on('error', common.mustCall((error) => {
assert.strictEqual(connection.autoSelectFamilyAttemptedAddresses, undefined);
if (common.hasIPv6) {
assert.strictEqual(error.code, 'ECONNREFUSED');
assert.strictEqual(error.message, `connect ECONNREFUSED ::1:${port}`);
} else if (error.code === 'EAFNOSUPPORT') {
assert.strictEqual(error.message, `connect EAFNOSUPPORT ::1:${port} - Local (undefined:undefined)`);
} else if (error.code === 'EUNATCH') {
assert.strictEqual(error.message, `connect EUNATCH ::1:${port} - Local (:::0)`);
} else {
assert.strictEqual(error.code, 'EADDRNOTAVAIL');
assert.strictEqual(error.message, `connect EADDRNOTAVAIL ::1:${port} - Local (:::0)`);
}
ipv4Server.close();
dnsServer.close();
}));
}));
}));
}
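The test drives address selection through a custom `lookup`; the contract it relies on is small enough to sketch with hardcoded answers standing in for a real resolver:

import { createConnection } from "node:net";

// deno-lint-ignore no-explicit-any
function lookup(_hostname: string, options: any, cb: (...args: any[]) => void) {
  const hosts = [
    { address: "::1", family: 6 },
    { address: "127.0.0.1", family: 4 },
  ];
  if (options.all === true) return cb(null, hosts); // autoSelectFamily sets all: true
  return cb(null, hosts[0].address, hosts[0].family);
}

const conn = createConnection({
  host: "example.org", // placeholder; resolved only through the lookup above
  port: 8080,
  lookup,
  autoSelectFamily: true,
});
conn.on("error", (err) => console.error(err.message));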

View file

@ -1 +0,0 @@
{}

View file

@ -1 +0,0 @@
{}

View file

@ -1 +0,0 @@
{}

View file

@ -0,0 +1,3 @@
import * as fs from "node:fs";
fs.writeFileSync("./testbin.js", "#!/usr/bin/env node\nconsole.log('run testbin');");

View file

@ -0,0 +1,10 @@
{
"name": "@denotest/bin-created-by-lifecycle",
"version": "1.0.0",
"scripts": {
"install": "node install.mjs"
},
"bin": {
"testbin": "testbin.js"
}
}

View file

@ -0,0 +1,12 @@
const tempDir = Deno.makeTempDirSync();
try {
// should work requiring these because this was launched via a node binary entrypoint
Deno.writeTextFileSync(`${tempDir}/index.js`, "module.exports = require('./other');");
Deno.writeTextFileSync(`${tempDir}/other.js`, "module.exports = (a, b) => a + b;");
const add = require(`${tempDir}/index.js`);
if (add(1, 2) !== 3) {
throw new Error("FAILED");
}
} finally {
Deno.removeSync(tempDir, { recursive: true });
}

View file

@ -0,0 +1,7 @@
{
"name": "@denotest/install-launch-cjs-temp-dir",
"version": "1.0.0",
"scripts": {
"install": "node install.js"
}
}

View file

@ -0,0 +1 @@
require("./output");

View file

@ -0,0 +1 @@
require("./output");

View file

@ -0,0 +1 @@
console.log("SUCCESS");

View file

@ -0,0 +1,7 @@
{
"name": "@denotest/install-no-ext",
"version": "1.0.0",
"scripts": {
"install": "node install/check && node install"
}
}

View file

@ -0,0 +1,4 @@
{
"args": "run -A main.ts",
"output": "processed\n"
}

Some files were not shown because too many files have changed in this diff.